v8 3.11.10 (node 0.8.26)
V8 is Google's open source JavaScript engine
hydrogen.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 #include "hydrogen.h"
30 
31 #include "codegen.h"
32 #include "full-codegen.h"
33 #include "hashmap.h"
34 #include "lithium-allocator.h"
35 #include "parser.h"
36 #include "scopeinfo.h"
37 #include "scopes.h"
38 #include "stub-cache.h"
39 
40 #if V8_TARGET_ARCH_IA32
41 #include "ia32/lithium-codegen-ia32.h"
42 #elif V8_TARGET_ARCH_X64
43 #include "x64/lithium-codegen-x64.h"
44 #elif V8_TARGET_ARCH_ARM
45 #include "arm/lithium-codegen-arm.h"
46 #elif V8_TARGET_ARCH_MIPS
47 #include "mips/lithium-codegen-mips.h"
48 #else
49 #error Unsupported target architecture.
50 #endif
51 
52 namespace v8 {
53 namespace internal {
54 
55 HBasicBlock::HBasicBlock(HGraph* graph)
56  : block_id_(graph->GetNextBlockID()),
57  graph_(graph),
58  phis_(4, graph->zone()),
59  first_(NULL),
60  last_(NULL),
61  end_(NULL),
62  loop_information_(NULL),
63  predecessors_(2, graph->zone()),
64  dominator_(NULL),
65  dominated_blocks_(4, graph->zone()),
66  last_environment_(NULL),
67  argument_count_(-1),
68  first_instruction_index_(-1),
69  last_instruction_index_(-1),
70  deleted_phis_(4, graph->zone()),
71  parent_loop_header_(NULL),
72  is_inline_return_target_(false),
73  is_deoptimizing_(false),
74  dominates_loop_successors_(false) { }
75 
76 
77 void HBasicBlock::AttachLoopInformation() {
78  ASSERT(!IsLoopHeader());
79  loop_information_ = new(zone()) HLoopInformation(this, zone());
80 }
81 
82 
83 void HBasicBlock::DetachLoopInformation() {
84  ASSERT(IsLoopHeader());
85  loop_information_ = NULL;
86 }
87 
88 
89 void HBasicBlock::AddPhi(HPhi* phi) {
90  ASSERT(!IsStartBlock());
91  phis_.Add(phi, zone());
92  phi->SetBlock(this);
93 }
94 
95 
96 void HBasicBlock::RemovePhi(HPhi* phi) {
97  ASSERT(phi->block() == this);
98  ASSERT(phis_.Contains(phi));
99  ASSERT(phi->HasNoUses() || !phi->is_live());
100  phi->Kill();
101  phis_.RemoveElement(phi);
102  phi->SetBlock(NULL);
103 }
104 
105 
106 void HBasicBlock::AddInstruction(HInstruction* instr) {
107  ASSERT(!IsStartBlock() || !IsFinished());
108  ASSERT(!instr->IsLinked());
109  ASSERT(!IsFinished());
110  if (first_ == NULL) {
111  HBlockEntry* entry = new(zone()) HBlockEntry();
112  entry->InitializeAsFirst(this);
113  first_ = last_ = entry;
114  }
115  instr->InsertAfter(last_);
116 }
117 
118 
119 HDeoptimize* HBasicBlock::CreateDeoptimize(
120  HDeoptimize::UseEnvironment has_uses) {
122  if (has_uses == HDeoptimize::kNoUses)
123  return new(zone()) HDeoptimize(0, zone());
124 
125  HEnvironment* environment = last_environment();
126  HDeoptimize* instr = new(zone()) HDeoptimize(environment->length(), zone());
127  for (int i = 0; i < environment->length(); i++) {
128  HValue* val = environment->values()->at(i);
129  instr->AddEnvironmentValue(val, zone());
130  }
131 
132  return instr;
133 }
134 
135 
136 HSimulate* HBasicBlock::CreateSimulate(int ast_id) {
138  HEnvironment* environment = last_environment();
139  ASSERT(ast_id == AstNode::kNoNumber ||
140  environment->closure()->shared()->VerifyBailoutId(ast_id));
141 
142  int push_count = environment->push_count();
143  int pop_count = environment->pop_count();
144 
145  HSimulate* instr = new(zone()) HSimulate(ast_id, pop_count, zone());
146  for (int i = push_count - 1; i >= 0; --i) {
147  instr->AddPushedValue(environment->ExpressionStackAt(i));
148  }
149  for (int i = 0; i < environment->assigned_variables()->length(); ++i) {
150  int index = environment->assigned_variables()->at(i);
151  instr->AddAssignedValue(index, environment->Lookup(index));
152  }
153  environment->ClearHistory();
154  return instr;
155 }
156 
157 
158 void HBasicBlock::Finish(HControlInstruction* end) {
159  ASSERT(!IsFinished());
160  AddInstruction(end);
161  end_ = end;
162  for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
163  it.Current()->RegisterPredecessor(this);
164  }
165 }
166 
167 
168 void HBasicBlock::Goto(HBasicBlock* block, FunctionState* state) {
169  bool drop_extra = state != NULL && state->drop_extra();
170  bool arguments_pushed = state != NULL && state->arguments_pushed();
171 
172  if (block->IsInlineReturnTarget()) {
173  AddInstruction(new(zone()) HLeaveInlined(arguments_pushed));
174  last_environment_ = last_environment()->DiscardInlined(drop_extra);
175  }
176 
178  HGoto* instr = new(zone()) HGoto(block);
179  Finish(instr);
180 }
181 
182 
183 void HBasicBlock::AddLeaveInlined(HValue* return_value,
184  HBasicBlock* target,
185  FunctionState* state) {
186  bool drop_extra = state != NULL && state->drop_extra();
187  bool arguments_pushed = state != NULL && state->arguments_pushed();
188 
189  ASSERT(target->IsInlineReturnTarget());
190  ASSERT(return_value != NULL);
191  AddInstruction(new(zone()) HLeaveInlined(arguments_pushed));
192  last_environment_ = last_environment()->DiscardInlined(drop_extra);
193  last_environment()->Push(return_value);
195  HGoto* instr = new(zone()) HGoto(target);
196  Finish(instr);
197 }
198 
199 
201 void HBasicBlock::SetInitialEnvironment(HEnvironment* env) {
202  ASSERT(first() == NULL);
203  UpdateEnvironment(env);
204 }
205 
206 
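// Note: stamps the HSimulate preceding each predecessor's HGoto with the
// join's AST id, so every edge entering this join records the same bailout point.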
207 void HBasicBlock::SetJoinId(int ast_id) {
208  int length = predecessors_.length();
209  ASSERT(length > 0);
210  for (int i = 0; i < length; i++) {
211  HBasicBlock* predecessor = predecessors_[i];
212  ASSERT(predecessor->end()->IsGoto());
213  HSimulate* simulate = HSimulate::cast(predecessor->end()->previous());
214  // We only need to verify the ID once.
215  ASSERT(i != 0 ||
216  predecessor->last_environment()->closure()->shared()
217  ->VerifyBailoutId(ast_id));
218  simulate->set_ast_id(ast_id);
219  }
220 }
221 
222 
223 bool HBasicBlock::Dominates(HBasicBlock* other) const {
224  HBasicBlock* current = other->dominator();
225  while (current != NULL) {
226  if (current == this) return true;
227  current = current->dominator();
228  }
229  return false;
230 }
231 
232 
233 int HBasicBlock::LoopNestingDepth() const {
234  const HBasicBlock* current = this;
235  int result = (current->IsLoopHeader()) ? 1 : 0;
236  while (current->parent_loop_header() != NULL) {
237  current = current->parent_loop_header();
238  result++;
239  }
240  return result;
241 }
242 
243 
244 void HBasicBlock::PostProcessLoopHeader(IterationStatement* stmt) {
245  ASSERT(IsLoopHeader());
246 
247  SetJoinId(stmt->EntryId());
248  if (predecessors()->length() == 1) {
249  // This is a degenerate loop.
250  DetachLoopInformation();
251  return;
252  }
253 
254  // Only the first entry into the loop is from outside the loop. All other
255  // entries must be back edges.
256  for (int i = 1; i < predecessors()->length(); ++i) {
257  loop_information()->RegisterBackEdge(predecessors()->at(i));
258  }
259 }
260 
261 
262 void HBasicBlock::RegisterPredecessor(HBasicBlock* pred) {
263  if (HasPredecessor()) {
264  // Only loop header blocks can have a predecessor added after
265  // instructions have been added to the block (they have phis for all
266  // values in the environment, these phis may be eliminated later).
267  ASSERT(IsLoopHeader() || first_ == NULL);
268  HEnvironment* incoming_env = pred->last_environment();
269  if (IsLoopHeader()) {
270  ASSERT(phis()->length() == incoming_env->length());
271  for (int i = 0; i < phis_.length(); ++i) {
272  phis_[i]->AddInput(incoming_env->values()->at(i));
273  }
274  } else {
275  last_environment()->AddIncomingEdge(this, pred->last_environment());
276  }
277  } else if (!HasEnvironment() && !IsFinished()) {
278  ASSERT(!IsLoopHeader());
279  SetInitialEnvironment(pred->last_environment()->Copy());
280  }
281 
282  predecessors_.Add(pred, zone());
283 }
284 
285 
286 void HBasicBlock::AddDominatedBlock(HBasicBlock* block) {
287  ASSERT(!dominated_blocks_.Contains(block));
288  // Keep the list of dominated blocks sorted so that if there are two
289  // consecutive blocks in this list, the predecessor comes before the successor.
290  int index = 0;
291  while (index < dominated_blocks_.length() &&
292  dominated_blocks_[index]->block_id() < block->block_id()) {
293  ++index;
294  }
295  dominated_blocks_.InsertAt(index, block, zone());
296 }
297 
298 
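// Note: intersects this block's dominator chain with |other|'s by walking both
// chains toward the entry block, always stepping from the block with the larger
// id; the block where the chains meet becomes the new dominator.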
299 void HBasicBlock::AssignCommonDominator(HBasicBlock* other) {
300  if (dominator_ == NULL) {
301  dominator_ = other;
302  other->AddDominatedBlock(this);
303  } else if (other->dominator() != NULL) {
304  HBasicBlock* first = dominator_;
305  HBasicBlock* second = other;
306 
307  while (first != second) {
308  if (first->block_id() > second->block_id()) {
309  first = first->dominator();
310  } else {
311  second = second->dominator();
312  }
313  ASSERT(first != NULL && second != NULL);
314  }
315 
316  if (dominator_ != first) {
317  ASSERT(dominator_->dominated_blocks_.Contains(this));
318  dominator_->dominated_blocks_.RemoveElement(this);
319  dominator_ = first;
320  first->AddDominatedBlock(this);
321  }
322  }
323 }
324 
325 
326 void HBasicBlock::AssignLoopSuccessorDominators() {
327  // Mark blocks that dominate all subsequent reachable blocks inside their
328  // loop. Exploit the fact that blocks are sorted in reverse post order. When
329  // the loop is visited in increasing block id order, if the number of
330  // non-loop-exiting successor edges at the dominator_candidate block doesn't
331  // exceed the number of previously encountered predecessor edges, there is no
332  // path from the loop header to any block with higher id that doesn't go
333  // through the dominator_candidate block. In this case, the
334  // dominator_candidate block is guaranteed to dominate all blocks reachable
335  // from it with higher ids.
336  HBasicBlock* last = loop_information()->GetLastBackEdge();
337  int outstanding_successors = 1; // one edge from the pre-header
338  // Header always dominates everything.
339  MarkAsLoopSuccessorDominator();
340  for (int j = block_id(); j <= last->block_id(); ++j) {
341  HBasicBlock* dominator_candidate = graph_->blocks()->at(j);
342  for (HPredecessorIterator it(dominator_candidate); !it.Done();
343  it.Advance()) {
344  HBasicBlock* predecessor = it.Current();
345  // Don't count back edges.
346  if (predecessor->block_id() < dominator_candidate->block_id()) {
347  outstanding_successors--;
348  }
349  }
350 
351  // If more successors than predecessors have been seen in the loop up to
352  // now, it's not possible to guarantee that the current block dominates
353  // all of the blocks with higher IDs. In this case, assume conservatively
354  // that those paths through the loop that don't go through the current block
355  // contain all of the loop's dependencies. Also be careful to record
356  // dominator information about the current loop that's being processed,
357  // and not nested loops, which will be processed when
358  // AssignLoopSuccessorDominators gets called on their header.
359  ASSERT(outstanding_successors >= 0);
360  HBasicBlock* parent_loop_header = dominator_candidate->parent_loop_header();
361  if (outstanding_successors == 0 &&
362  (parent_loop_header == this && !dominator_candidate->IsLoopHeader())) {
363  dominator_candidate->MarkAsLoopSuccessorDominator();
364  }
365  HControlInstruction* end = dominator_candidate->end();
366  for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
367  HBasicBlock* successor = it.Current();
368  // Only count successors that remain inside the loop and don't loop back
369  // to a loop header.
370  if (successor->block_id() > dominator_candidate->block_id() &&
371  successor->block_id() <= last->block_id()) {
372  // Backwards edges must land on loop headers.
373  ASSERT(successor->block_id() > dominator_candidate->block_id() ||
374  successor->IsLoopHeader());
375  outstanding_successors++;
376  }
377  }
378  }
379 }
380 
381 
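// Note: returns the position of |predecessor| in this block's predecessor
// list; the argument must actually be one of the predecessors.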
382 int HBasicBlock::PredecessorIndexOf(HBasicBlock* predecessor) const {
383  for (int i = 0; i < predecessors_.length(); ++i) {
384  if (predecessors_[i] == predecessor) return i;
385  }
386  UNREACHABLE();
387  return -1;
388 }
389 
390 
391 #ifdef DEBUG
392 void HBasicBlock::Verify() {
393  // Check that every block is finished.
394  ASSERT(IsFinished());
395  ASSERT(block_id() >= 0);
396 
397  // Check that the incoming edges are in edge split form.
398  if (predecessors_.length() > 1) {
399  for (int i = 0; i < predecessors_.length(); ++i) {
400  ASSERT(predecessors_[i]->end()->SecondSuccessor() == NULL);
401  }
402  }
403 }
404 #endif
405 
406 
407 void HLoopInformation::RegisterBackEdge(HBasicBlock* block) {
408  this->back_edges_.Add(block, block->zone());
409  AddBlock(block);
410 }
411 
412 
413 HBasicBlock* HLoopInformation::GetLastBackEdge() const {
414  int max_id = -1;
415  HBasicBlock* result = NULL;
416  for (int i = 0; i < back_edges_.length(); ++i) {
417  HBasicBlock* cur = back_edges_[i];
418  if (cur->block_id() > max_id) {
419  max_id = cur->block_id();
420  result = cur;
421  }
422  }
423  return result;
424 }
425 
426 
427 void HLoopInformation::AddBlock(HBasicBlock* block) {
428  if (block == loop_header()) return;
429  if (block->parent_loop_header() == loop_header()) return;
430  if (block->parent_loop_header() != NULL) {
431  AddBlock(block->parent_loop_header());
432  } else {
433  block->set_parent_loop_header(loop_header());
434  blocks_.Add(block, block->zone());
435  for (int i = 0; i < block->predecessors()->length(); ++i) {
436  AddBlock(block->predecessors()->at(i));
437  }
438  }
439 }
440 
441 
442 #ifdef DEBUG
443 
444 // Checks reachability of the blocks in this graph and stores a bit in
445 // the BitVector "reachable()" for every block that can be reached
446 // from the start block of the graph. If "dont_visit" is non-null, the given
447 // block is treated as if it would not be part of the graph. "visited_count()"
448 // returns the number of reachable blocks.
449 class ReachabilityAnalyzer BASE_EMBEDDED {
450  public:
451  ReachabilityAnalyzer(HBasicBlock* entry_block,
452  int block_count,
453  HBasicBlock* dont_visit)
454  : visited_count_(0),
455  stack_(16, entry_block->zone()),
456  reachable_(block_count, entry_block->zone()),
457  dont_visit_(dont_visit) {
458  PushBlock(entry_block);
459  Analyze();
460  }
461 
462  int visited_count() const { return visited_count_; }
463  const BitVector* reachable() const { return &reachable_; }
464 
465  private:
466  void PushBlock(HBasicBlock* block) {
467  if (block != NULL && block != dont_visit_ &&
468  !reachable_.Contains(block->block_id())) {
469  reachable_.Add(block->block_id());
470  stack_.Add(block, block->zone());
471  visited_count_++;
472  }
473  }
474 
475  void Analyze() {
476  while (!stack_.is_empty()) {
477  HControlInstruction* end = stack_.RemoveLast()->end();
478  for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
479  PushBlock(it.Current());
480  }
481  }
482  }
483 
484  int visited_count_;
485  ZoneList<HBasicBlock*> stack_;
486  BitVector reachable_;
487  HBasicBlock* dont_visit_;
488 };
489 
490 
491 void HGraph::Verify(bool do_full_verify) const {
492  for (int i = 0; i < blocks_.length(); i++) {
493  HBasicBlock* block = blocks_.at(i);
494 
495  block->Verify();
496 
497  // Check that every block contains at least one node and that only the last
498  // node is a control instruction.
499  HInstruction* current = block->first();
500  ASSERT(current != NULL && current->IsBlockEntry());
501  while (current != NULL) {
502  ASSERT((current->next() == NULL) == current->IsControlInstruction());
503  ASSERT(current->block() == block);
504  current->Verify();
505  current = current->next();
506  }
507 
508  // Check that successors are correctly set.
509  HBasicBlock* first = block->end()->FirstSuccessor();
510  HBasicBlock* second = block->end()->SecondSuccessor();
511  ASSERT(second == NULL || first != NULL);
512 
513  // Check that the predecessor array is correct.
514  if (first != NULL) {
515  ASSERT(first->predecessors()->Contains(block));
516  if (second != NULL) {
517  ASSERT(second->predecessors()->Contains(block));
518  }
519  }
520 
521  // Check that phis have correct arguments.
522  for (int j = 0; j < block->phis()->length(); j++) {
523  HPhi* phi = block->phis()->at(j);
524  phi->Verify();
525  }
526 
527  // Check that all join blocks have predecessors that end with an
528  // unconditional goto and agree on their environment node id.
529  if (block->predecessors()->length() >= 2) {
530  int id = block->predecessors()->first()->last_environment()->ast_id();
531  for (int k = 0; k < block->predecessors()->length(); k++) {
532  HBasicBlock* predecessor = block->predecessors()->at(k);
533  ASSERT(predecessor->end()->IsGoto());
534  ASSERT(predecessor->last_environment()->ast_id() == id);
535  }
536  }
537  }
538 
539  // Check special property of first block to have no predecessors.
540  ASSERT(blocks_.at(0)->predecessors()->is_empty());
541 
542  if (do_full_verify) {
543  // Check that the graph is fully connected.
544  ReachabilityAnalyzer analyzer(entry_block_, blocks_.length(), NULL);
545  ASSERT(analyzer.visited_count() == blocks_.length());
546 
547  // Check that entry block dominator is NULL.
548  ASSERT(entry_block_->dominator() == NULL);
549 
550  // Check dominators.
551  for (int i = 0; i < blocks_.length(); ++i) {
552  HBasicBlock* block = blocks_.at(i);
553  if (block->dominator() == NULL) {
554  // Only the start block may have no dominator assigned to it.
555  ASSERT(i == 0);
556  } else {
557  // The block must be unreachable if its dominator is excluded from the traversal.
558  ReachabilityAnalyzer dominator_analyzer(entry_block_,
559  blocks_.length(),
560  block->dominator());
561  ASSERT(!dominator_analyzer.reachable()->Contains(block->block_id()));
562  }
563  }
564  }
565 }
566 
567 #endif
568 
569 
570 HConstant* HGraph::GetConstant(SetOncePointer<HConstant>* pointer,
571  Object* value) {
572  if (!pointer->is_set()) {
573  HConstant* constant = new(zone()) HConstant(Handle<Object>(value),
574  Representation::Tagged());
575  constant->InsertAfter(GetConstantUndefined());
576  pointer->set(constant);
577  }
578  return pointer->get();
579 }
580 
581 
582 HConstant* HGraph::GetConstant1() {
583  return GetConstant(&constant_1_, Smi::FromInt(1));
584 }
585 
586 
587 HConstant* HGraph::GetConstantMinus1() {
588  return GetConstant(&constant_minus1_, Smi::FromInt(-1));
589 }
590 
591 
592 HConstant* HGraph::GetConstantTrue() {
593  return GetConstant(&constant_true_, isolate()->heap()->true_value());
594 }
595 
596 
597 HConstant* HGraph::GetConstantFalse() {
598  return GetConstant(&constant_false_, isolate()->heap()->false_value());
599 }
600 
601 
602 HConstant* HGraph::GetConstantHole() {
603  return GetConstant(&constant_hole_, isolate()->heap()->the_hole_value());
604 }
605 
606 
607 HGraphBuilder::HGraphBuilder(CompilationInfo* info,
608  TypeFeedbackOracle* oracle,
609  Zone* zone)
610  : function_state_(NULL),
611  initial_function_state_(this, info, oracle, NORMAL_RETURN),
612  ast_context_(NULL),
613  break_scope_(NULL),
614  graph_(NULL),
615  current_block_(NULL),
616  inlined_count_(0),
617  globals_(10, zone),
618  zone_(zone),
619  inline_bailout_(false) {
620  // This is not initialized in the initializer list because the
621  // constructor for the initial state relies on function_state_ == NULL
622  // to know it's the initial state.
623  function_state_= &initial_function_state_;
624 }
625 
626 HBasicBlock* HGraphBuilder::CreateJoin(HBasicBlock* first,
627  HBasicBlock* second,
628  int join_id) {
629  if (first == NULL) {
630  return second;
631  } else if (second == NULL) {
632  return first;
633  } else {
634  HBasicBlock* join_block = graph_->CreateBasicBlock();
635  first->Goto(join_block);
636  second->Goto(join_block);
637  join_block->SetJoinId(join_id);
638  return join_block;
639  }
640 }
641 
642 
643 HBasicBlock* HGraphBuilder::JoinContinue(IterationStatement* statement,
644  HBasicBlock* exit_block,
645  HBasicBlock* continue_block) {
646  if (continue_block != NULL) {
647  if (exit_block != NULL) exit_block->Goto(continue_block);
648  continue_block->SetJoinId(statement->ContinueId());
649  return continue_block;
650  }
651  return exit_block;
652 }
653 
654 
655 HBasicBlock* HGraphBuilder::CreateLoop(IterationStatement* statement,
656  HBasicBlock* loop_entry,
657  HBasicBlock* body_exit,
658  HBasicBlock* loop_successor,
659  HBasicBlock* break_block) {
660  if (body_exit != NULL) body_exit->Goto(loop_entry);
661  loop_entry->PostProcessLoopHeader(statement);
662  if (break_block != NULL) {
663  if (loop_successor != NULL) loop_successor->Goto(break_block);
664  break_block->SetJoinId(statement->ExitId());
665  return break_block;
666  }
667  return loop_successor;
668 }
669 
670 
671 void HBasicBlock::FinishExit(HControlInstruction* instruction) {
672  Finish(instruction);
673  ClearEnvironment();
674 }
675 
676 
677 HGraph::HGraph(CompilationInfo* info, Zone* zone)
678  : isolate_(info->isolate()),
679  next_block_id_(0),
680  entry_block_(NULL),
681  blocks_(8, zone),
682  values_(16, zone),
683  phi_list_(NULL),
684  zone_(zone),
685  is_recursive_(false) {
686  start_environment_ =
687  new(zone) HEnvironment(NULL, info->scope(), info->closure(), zone);
688  start_environment_->set_ast_id(AstNode::kFunctionEntryId);
689  entry_block_ = CreateBasicBlock();
690  entry_block_->SetInitialEnvironment(start_environment_);
691 }
692 
693 
694 Handle<Code> HGraph::Compile(CompilationInfo* info, Zone* zone) {
695  int values = GetMaximumValueID();
696  if (values > LUnallocated::kMaxVirtualRegisters) {
697  if (FLAG_trace_bailout) {
698  PrintF("Not enough virtual registers for (values).\n");
699  }
700  return Handle<Code>::null();
701  }
702  LAllocator allocator(values, this);
703  LChunkBuilder builder(info, this, &allocator);
704  LChunk* chunk = builder.Build();
705  if (chunk == NULL) return Handle<Code>::null();
706 
707  if (!allocator.Allocate(chunk)) {
708  if (FLAG_trace_bailout) {
709  PrintF("Not enough virtual registers (regalloc).\n");
710  }
711  return Handle<Code>::null();
712  }
713 
714  MacroAssembler assembler(info->isolate(), NULL, 0);
715  LCodeGen generator(chunk, &assembler, info, zone);
716 
717  chunk->MarkEmptyBlocks();
718 
719  if (generator.GenerateCode()) {
720  if (FLAG_trace_codegen) {
721  PrintF("Crankshaft Compiler - ");
722  }
725  Handle<Code> code =
726  CodeGenerator::MakeCodeEpilogue(&assembler, flags, info);
727  generator.FinishCode(code);
728  CodeGenerator::PrintCode(code, info);
729  return code;
730  }
731  return Handle<Code>::null();
732 }
733 
734 
735 HBasicBlock* HGraph::CreateBasicBlock() {
736  HBasicBlock* result = new(zone()) HBasicBlock(this);
737  blocks_.Add(result, zone());
738  return result;
739 }
740 
741 
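// Note: asks every instruction for a canonical replacement and substitutes
// the replacement whenever one is found.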
742 void HGraph::Canonicalize() {
743  if (!FLAG_use_canonicalizing) return;
744  HPhase phase("H_Canonicalize", this);
745  for (int i = 0; i < blocks()->length(); ++i) {
746  HInstruction* instr = blocks()->at(i)->first();
747  while (instr != NULL) {
748  HValue* value = instr->Canonicalize();
749  if (value != instr) instr->DeleteAndReplaceWith(value);
750  instr = instr->next();
751  }
752  }
753 }
754 
755 
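// Note: numbers the blocks in reverse post-order; Postorder() keeps the
// members of each loop together, so a loop body forms a contiguous id range.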
756 void HGraph::OrderBlocks() {
757  HPhase phase("H_Block ordering");
758  BitVector visited(blocks_.length(), zone());
759 
760  ZoneList<HBasicBlock*> reverse_result(8, zone());
761  HBasicBlock* start = blocks_[0];
762  Postorder(start, &visited, &reverse_result, NULL);
763 
764  blocks_.Rewind(0);
765  int index = 0;
766  for (int i = reverse_result.length() - 1; i >= 0; --i) {
767  HBasicBlock* b = reverse_result[i];
768  blocks_.Add(b, zone());
769  b->set_block_id(index++);
770  }
771 }
772 
773 
774 void HGraph::PostorderLoopBlocks(HLoopInformation* loop,
775  BitVector* visited,
776  ZoneList<HBasicBlock*>* order,
777  HBasicBlock* loop_header) {
778  for (int i = 0; i < loop->blocks()->length(); ++i) {
779  HBasicBlock* b = loop->blocks()->at(i);
780  for (HSuccessorIterator it(b->end()); !it.Done(); it.Advance()) {
781  Postorder(it.Current(), visited, order, loop_header);
782  }
783  if (b->IsLoopHeader() && b != loop->loop_header()) {
784  PostorderLoopBlocks(b->loop_information(), visited, order, loop_header);
785  }
786  }
787 }
788 
789 
790 void HGraph::Postorder(HBasicBlock* block,
791  BitVector* visited,
792  ZoneList<HBasicBlock*>* order,
793  HBasicBlock* loop_header) {
794  if (block == NULL || visited->Contains(block->block_id())) return;
795  if (block->parent_loop_header() != loop_header) return;
796  visited->Add(block->block_id());
797  if (block->IsLoopHeader()) {
798  PostorderLoopBlocks(block->loop_information(), visited, order, loop_header);
799  for (HSuccessorIterator it(block->end()); !it.Done(); it.Advance()) {
800  Postorder(it.Current(), visited, order, block);
801  }
802  } else {
803  ASSERT(block->IsFinished());
804  for (HSuccessorIterator it(block->end()); !it.Done(); it.Advance()) {
805  Postorder(it.Current(), visited, order, loop_header);
806  }
807  }
808  ASSERT(block->end()->FirstSuccessor() == NULL ||
809  order->Contains(block->end()->FirstSuccessor()) ||
810  block->end()->FirstSuccessor()->IsLoopHeader());
811  ASSERT(block->end()->SecondSuccessor() == NULL ||
812  order->Contains(block->end()->SecondSuccessor()) ||
813  block->end()->SecondSuccessor()->IsLoopHeader());
814  order->Add(block, zone());
815 }
816 
817 
818 void HGraph::AssignDominators() {
819  HPhase phase("H_Assign dominators", this);
820  for (int i = 0; i < blocks_.length(); ++i) {
821  HBasicBlock* block = blocks_[i];
822  if (block->IsLoopHeader()) {
823  // Only the first predecessor of a loop header is from outside the loop.
824  // All others are back edges, and thus cannot dominate the loop header.
825  block->AssignCommonDominator(block->predecessors()->first());
826  block->AssignLoopSuccessorDominators();
827  } else {
828  for (int j = blocks_[i]->predecessors()->length() - 1; j >= 0; --j) {
829  blocks_[i]->AssignCommonDominator(blocks_[i]->predecessors()->at(j));
830  }
831  }
832  }
833 }
834 
835 // Mark all blocks that are dominated by an unconditional soft deoptimize to
836 // prevent code motion across those blocks.
837 void HGraph::PropagateDeoptimizingMark() {
838  HPhase phase("H_Propagate deoptimizing mark", this);
839  MarkAsDeoptimizingRecursively(entry_block());
840 }
841 
842 void HGraph::MarkAsDeoptimizingRecursively(HBasicBlock* block) {
843  for (int i = 0; i < block->dominated_blocks()->length(); ++i) {
844  HBasicBlock* dominated = block->dominated_blocks()->at(i);
845  if (block->IsDeoptimizing()) dominated->MarkAsDeoptimizing();
846  MarkAsDeoptimizingRecursively(dominated);
847  }
848 }
849 
850 void HGraph::EliminateRedundantPhis() {
851  HPhase phase("H_Redundant phi elimination", this);
852 
853  // Worklist of phis that can potentially be eliminated. Initialized with
854  // all phi nodes. When elimination of a phi node modifies another phi node
855  // the modified phi node is added to the worklist.
856  ZoneList<HPhi*> worklist(blocks_.length(), zone());
857  for (int i = 0; i < blocks_.length(); ++i) {
858  worklist.AddAll(*blocks_[i]->phis(), zone());
859  }
860 
861  while (!worklist.is_empty()) {
862  HPhi* phi = worklist.RemoveLast();
863  HBasicBlock* block = phi->block();
864 
865  // Skip phi node if it was already replaced.
866  if (block == NULL) continue;
867 
868  // Get replacement value if phi is redundant.
869  HValue* replacement = phi->GetRedundantReplacement();
870 
871  if (replacement != NULL) {
872  // Iterate through the uses and replace them all.
873  for (HUseIterator it(phi->uses()); !it.Done(); it.Advance()) {
874  HValue* value = it.value();
875  value->SetOperandAt(it.index(), replacement);
876  if (value->IsPhi()) worklist.Add(HPhi::cast(value), zone());
877  }
878  block->RemovePhi(phi);
879  }
880  }
881 }
882 
883 
884 void HGraph::EliminateUnreachablePhis() {
885  HPhase phase("H_Unreachable phi elimination", this);
886 
887  // Initialize worklist.
888  ZoneList<HPhi*> phi_list(blocks_.length(), zone());
889  ZoneList<HPhi*> worklist(blocks_.length(), zone());
890  for (int i = 0; i < blocks_.length(); ++i) {
891  for (int j = 0; j < blocks_[i]->phis()->length(); j++) {
892  HPhi* phi = blocks_[i]->phis()->at(j);
893  phi_list.Add(phi, zone());
894  // We can't eliminate phis in the receiver position in the environment
895  // because in case of throwing an error we need this value to
896  // construct a stack trace.
897  if (phi->HasRealUses() || phi->IsReceiver()) {
898  phi->set_is_live(true);
899  worklist.Add(phi, zone());
900  }
901  }
902  }
903 
904  // Iteratively mark live phis.
905  while (!worklist.is_empty()) {
906  HPhi* phi = worklist.RemoveLast();
907  for (int i = 0; i < phi->OperandCount(); i++) {
908  HValue* operand = phi->OperandAt(i);
909  if (operand->IsPhi() && !HPhi::cast(operand)->is_live()) {
910  HPhi::cast(operand)->set_is_live(true);
911  worklist.Add(HPhi::cast(operand), zone());
912  }
913  }
914  }
915 
916  // Remove unreachable phis.
917  for (int i = 0; i < phi_list.length(); i++) {
918  HPhi* phi = phi_list[i];
919  if (!phi->is_live()) {
920  HBasicBlock* block = phi->block();
921  block->RemovePhi(phi);
922  block->RecordDeletedPhi(phi->merged_index());
923  }
924  }
925 }
926 
927 
928 bool HGraph::CheckArgumentsPhiUses() {
929  int block_count = blocks_.length();
930  for (int i = 0; i < block_count; ++i) {
931  for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
932  HPhi* phi = blocks_[i]->phis()->at(j);
933  // We don't support phi uses of arguments for now.
934  if (phi->CheckFlag(HValue::kIsArguments)) return false;
935  }
936  }
937  return true;
938 }
939 
940 
941 bool HGraph::CheckConstPhiUses() {
942  int block_count = blocks_.length();
943  for (int i = 0; i < block_count; ++i) {
944  for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
945  HPhi* phi = blocks_[i]->phis()->at(j);
946  // Check for the hole value (from an uninitialized const).
947  for (int k = 0; k < phi->OperandCount(); k++) {
948  if (phi->OperandAt(k) == GetConstantHole()) return false;
949  }
950  }
951  }
952  return true;
953 }
954 
955 
956 void HGraph::CollectPhis() {
957  int block_count = blocks_.length();
958  phi_list_ = new(zone()) ZoneList<HPhi*>(block_count, zone());
959  for (int i = 0; i < block_count; ++i) {
960  for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
961  HPhi* phi = blocks_[i]->phis()->at(j);
962  phi_list_->Add(phi, zone());
963  }
964  }
965 }
966 
967 
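// Note: fixed-point iteration; whenever a value's inferred type changes, its
// uses are put back on the worklist, and the bit vector keeps the worklist
// free of duplicates.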
968 void HGraph::InferTypes(ZoneList<HValue*>* worklist) {
969  BitVector in_worklist(GetMaximumValueID(), zone());
970  for (int i = 0; i < worklist->length(); ++i) {
971  ASSERT(!in_worklist.Contains(worklist->at(i)->id()));
972  in_worklist.Add(worklist->at(i)->id());
973  }
974 
975  while (!worklist->is_empty()) {
976  HValue* current = worklist->RemoveLast();
977  in_worklist.Remove(current->id());
978  if (current->UpdateInferredType()) {
979  for (HUseIterator it(current->uses()); !it.Done(); it.Advance()) {
980  HValue* use = it.value();
981  if (!in_worklist.Contains(use->id())) {
982  in_worklist.Add(use->id());
983  worklist->Add(use, zone());
984  }
985  }
986  }
987  }
988 }
989 
990 
991 class HRangeAnalysis BASE_EMBEDDED {
992  public:
993  explicit HRangeAnalysis(HGraph* graph) :
994  graph_(graph), zone_(graph->zone()), changed_ranges_(16, zone_) { }
995 
996  void Analyze();
997 
998  private:
999  void TraceRange(const char* msg, ...);
1000  void Analyze(HBasicBlock* block);
1001  void InferControlFlowRange(HCompareIDAndBranch* test, HBasicBlock* dest);
1002  void UpdateControlFlowRange(Token::Value op, HValue* value, HValue* other);
1003  void InferRange(HValue* value);
1004  void RollBackTo(int index);
1005  void AddRange(HValue* value, Range* range);
1006 
1007  HGraph* graph_;
1008  Zone* zone_;
1009  ZoneList<HValue*> changed_ranges_;
1010 };
1011 
1012 
1013 void HRangeAnalysis::TraceRange(const char* msg, ...) {
1014  if (FLAG_trace_range) {
1015  va_list arguments;
1016  va_start(arguments, msg);
1017  OS::VPrint(msg, arguments);
1018  va_end(arguments);
1019  }
1020 }
1021 
1022 
1023 void HRangeAnalysis::Analyze() {
1024  HPhase phase("H_Range analysis", graph_);
1025  Analyze(graph_->entry_block());
1026 }
1027 
1028 
1029 void HRangeAnalysis::Analyze(HBasicBlock* block) {
1030  TraceRange("Analyzing block B%d\n", block->block_id());
1031 
1032  int last_changed_range = changed_ranges_.length() - 1;
1033 
1034  // Infer range based on control flow.
1035  if (block->predecessors()->length() == 1) {
1036  HBasicBlock* pred = block->predecessors()->first();
1037  if (pred->end()->IsCompareIDAndBranch()) {
1038  InferControlFlowRange(HCompareIDAndBranch::cast(pred->end()), block);
1039  }
1040  }
1041 
1042  // Process phi instructions.
1043  for (int i = 0; i < block->phis()->length(); ++i) {
1044  HPhi* phi = block->phis()->at(i);
1045  InferRange(phi);
1046  }
1047 
1048  // Go through all instructions of the current block.
1049  HInstruction* instr = block->first();
1050  while (instr != block->end()) {
1051  InferRange(instr);
1052  instr = instr->next();
1053  }
1054 
1055  // Continue analysis in all dominated blocks.
1056  for (int i = 0; i < block->dominated_blocks()->length(); ++i) {
1057  Analyze(block->dominated_blocks()->at(i));
1058  }
1059 
1060  RollBackTo(last_changed_range);
1061 }
1062 
1063 
1064 void HRangeAnalysis::InferControlFlowRange(HCompareIDAndBranch* test,
1065  HBasicBlock* dest) {
1066  ASSERT((test->FirstSuccessor() == dest) == (test->SecondSuccessor() != dest));
1067  if (test->GetInputRepresentation().IsInteger32()) {
1068  Token::Value op = test->token();
1069  if (test->SecondSuccessor() == dest) {
1070  op = Token::NegateCompareOp(op);
1071  }
1072  Token::Value inverted_op = Token::InvertCompareOp(op);
1073  UpdateControlFlowRange(op, test->left(), test->right());
1074  UpdateControlFlowRange(inverted_op, test->right(), test->left());
1075  }
1076 }
1077 
1078 
1079 // We know that value [op] other. Use this information to update the range on
1080 // value.
1081 void HRangeAnalysis::UpdateControlFlowRange(Token::Value op,
1082  HValue* value,
1083  HValue* other) {
1084  Range temp_range;
1085  Range* range = other->range() != NULL ? other->range() : &temp_range;
1086  Range* new_range = NULL;
1087 
1088  TraceRange("Control flow range infer %d %s %d\n",
1089  value->id(),
1090  Token::Name(op),
1091  other->id());
1092 
1093  if (op == Token::EQ || op == Token::EQ_STRICT) {
1094  // The same range has to apply for value.
1095  new_range = range->Copy(zone_);
1096  } else if (op == Token::LT || op == Token::LTE) {
1097  new_range = range->CopyClearLower(zone_);
1098  if (op == Token::LT) {
1099  new_range->AddConstant(-1);
1100  }
1101  } else if (op == Token::GT || op == Token::GTE) {
1102  new_range = range->CopyClearUpper(zone_);
1103  if (op == Token::GT) {
1104  new_range->AddConstant(1);
1105  }
1106  }
1107 
1108  if (new_range != NULL && !new_range->IsMostGeneric()) {
1109  AddRange(value, new_range);
1110  }
1111 }
1112 
1113 
1114 void HRangeAnalysis::InferRange(HValue* value) {
1115  ASSERT(!value->HasRange());
1116  if (!value->representation().IsNone()) {
1117  value->ComputeInitialRange(zone_);
1118  Range* range = value->range();
1119  TraceRange("Initial inferred range of %d (%s) set to [%d,%d]\n",
1120  value->id(),
1121  value->Mnemonic(),
1122  range->lower(),
1123  range->upper());
1124  }
1125 }
1126 
1127 
1128 void HRangeAnalysis::RollBackTo(int index) {
1129  for (int i = index + 1; i < changed_ranges_.length(); ++i) {
1130  changed_ranges_[i]->RemoveLastAddedRange();
1131  }
1132  changed_ranges_.Rewind(index + 1);
1133 }
1134 
1135 
1136 void HRangeAnalysis::AddRange(HValue* value, Range* range) {
1137  Range* original_range = value->range();
1138  value->AddNewRange(range, zone_);
1139  changed_ranges_.Add(value, zone_);
1140  Range* new_range = value->range();
1141  TraceRange("Updated range of %d set to [%d,%d]\n",
1142  value->id(),
1143  new_range->lower(),
1144  new_range->upper());
1145  if (original_range != NULL) {
1146  TraceRange("Original range was [%d,%d]\n",
1147  original_range->lower(),
1148  original_range->upper());
1149  }
1150  TraceRange("New information was [%d,%d]\n",
1151  range->lower(),
1152  range->upper());
1153 }
1154 
1155 
1156 void TraceGVN(const char* msg, ...) {
1157  va_list arguments;
1158  va_start(arguments, msg);
1159  OS::VPrint(msg, arguments);
1160  va_end(arguments);
1161 }
1162 
1163 // Wrap TraceGVN in macros to avoid the expense of evaluating its arguments when
1164 // --trace-gvn is off.
1165 #define TRACE_GVN_1(msg, a1) \
1166  if (FLAG_trace_gvn) { \
1167  TraceGVN(msg, a1); \
1168  }
1169 
1170 #define TRACE_GVN_2(msg, a1, a2) \
1171  if (FLAG_trace_gvn) { \
1172  TraceGVN(msg, a1, a2); \
1173  }
1174 
1175 #define TRACE_GVN_3(msg, a1, a2, a3) \
1176  if (FLAG_trace_gvn) { \
1177  TraceGVN(msg, a1, a2, a3); \
1178  }
1179 
1180 #define TRACE_GVN_4(msg, a1, a2, a3, a4) \
1181  if (FLAG_trace_gvn) { \
1182  TraceGVN(msg, a1, a2, a3, a4); \
1183  }
1184 
1185 #define TRACE_GVN_5(msg, a1, a2, a3, a4, a5) \
1186  if (FLAG_trace_gvn) { \
1187  TraceGVN(msg, a1, a2, a3, a4, a5); \
1188  }
1189 
1190 
1191 HValueMap::HValueMap(Zone* zone, const HValueMap* other)
1192  : array_size_(other->array_size_),
1193  lists_size_(other->lists_size_),
1194  count_(other->count_),
1195  present_flags_(other->present_flags_),
1196  array_(zone->NewArray<HValueMapListElement>(other->array_size_)),
1197  lists_(zone->NewArray<HValueMapListElement>(other->lists_size_)),
1198  free_list_head_(other->free_list_head_) {
1199  memcpy(array_, other->array_, array_size_ * sizeof(HValueMapListElement));
1200  memcpy(lists_, other->lists_, lists_size_ * sizeof(HValueMapListElement));
1201 }
1202 
1203 
1204 void HValueMap::Kill(GVNFlagSet flags) {
1205  GVNFlagSet depends_flags = HValue::ConvertChangesToDependsFlags(flags);
1206  if (!present_flags_.ContainsAnyOf(depends_flags)) return;
1207  present_flags_.RemoveAll();
1208  for (int i = 0; i < array_size_; ++i) {
1209  HValue* value = array_[i].value;
1210  if (value != NULL) {
1211  // Clear list of collisions first, so we know if it becomes empty.
1212  int kept = kNil; // List of kept elements.
1213  int next;
1214  for (int current = array_[i].next; current != kNil; current = next) {
1215  next = lists_[current].next;
1216  HValue* value = lists_[current].value;
1217  if (value->gvn_flags().ContainsAnyOf(depends_flags)) {
1218  // Drop it.
1219  count_--;
1220  lists_[current].next = free_list_head_;
1221  free_list_head_ = current;
1222  } else {
1223  // Keep it.
1224  lists_[current].next = kept;
1225  kept = current;
1226  present_flags_.Add(value->gvn_flags());
1227  }
1228  }
1229  array_[i].next = kept;
1230 
1231  // Now possibly drop directly indexed element.
1232  value = array_[i].value;
1233  if (value->gvn_flags().ContainsAnyOf(depends_flags)) { // Drop it.
1234  count_--;
1235  int head = array_[i].next;
1236  if (head == kNil) {
1237  array_[i].value = NULL;
1238  } else {
1239  array_[i].value = lists_[head].value;
1240  array_[i].next = lists_[head].next;
1241  lists_[head].next = free_list_head_;
1242  free_list_head_ = head;
1243  }
1244  } else {
1245  present_flags_.Add(value->gvn_flags()); // Keep it.
1246  }
1247  }
1248  }
1249 }
1250 
1251 
1252 HValue* HValueMap::Lookup(HValue* value) const {
1253  uint32_t hash = static_cast<uint32_t>(value->Hashcode());
1254  uint32_t pos = Bound(hash);
1255  if (array_[pos].value != NULL) {
1256  if (array_[pos].value->Equals(value)) return array_[pos].value;
1257  int next = array_[pos].next;
1258  while (next != kNil) {
1259  if (lists_[next].value->Equals(value)) return lists_[next].value;
1260  next = lists_[next].next;
1261  }
1262  }
1263  return NULL;
1264 }
1265 
1266 
1267 void HValueMap::Resize(int new_size, Zone* zone) {
1268  ASSERT(new_size > count_);
1269  // Hashing the values into the new array has no more collisions than in the
1270  // old hash map, so we can use the existing lists_ array, if we are careful.
1271 
1272  // Make sure we have at least one free element.
1273  if (free_list_head_ == kNil) {
1274  ResizeLists(lists_size_ << 1, zone);
1275  }
1276 
1277  HValueMapListElement* new_array =
1278  zone->NewArray<HValueMapListElement>(new_size);
1279  memset(new_array, 0, sizeof(HValueMapListElement) * new_size);
1280 
1281  HValueMapListElement* old_array = array_;
1282  int old_size = array_size_;
1283 
1284  int old_count = count_;
1285  count_ = 0;
1286  // Do not modify present_flags_. It is currently correct.
1287  array_size_ = new_size;
1288  array_ = new_array;
1289 
1290  if (old_array != NULL) {
1291  // Iterate over all the elements in lists, rehashing them.
1292  for (int i = 0; i < old_size; ++i) {
1293  if (old_array[i].value != NULL) {
1294  int current = old_array[i].next;
1295  while (current != kNil) {
1296  Insert(lists_[current].value, zone);
1297  int next = lists_[current].next;
1298  lists_[current].next = free_list_head_;
1299  free_list_head_ = current;
1300  current = next;
1301  }
1302  // Rehash the directly stored value.
1303  Insert(old_array[i].value, zone);
1304  }
1305  }
1306  }
1307  USE(old_count);
1308  ASSERT(count_ == old_count);
1309 }
1310 
1311 
1312 void HValueMap::ResizeLists(int new_size, Zone* zone) {
1313  ASSERT(new_size > lists_size_);
1314 
1315  HValueMapListElement* new_lists =
1316  zone->NewArray<HValueMapListElement>(new_size);
1317  memset(new_lists, 0, sizeof(HValueMapListElement) * new_size);
1318 
1319  HValueMapListElement* old_lists = lists_;
1320  int old_size = lists_size_;
1321 
1322  lists_size_ = new_size;
1323  lists_ = new_lists;
1324 
1325  if (old_lists != NULL) {
1326  memcpy(lists_, old_lists, old_size * sizeof(HValueMapListElement));
1327  }
1328  for (int i = old_size; i < lists_size_; ++i) {
1329  lists_[i].next = free_list_head_;
1330  free_list_head_ = i;
1331  }
1332 }
1333 
1334 
1335 void HValueMap::Insert(HValue* value, Zone* zone) {
1336  ASSERT(value != NULL);
1337  // Resizing when half of the hashtable is filled up.
1338  if (count_ >= array_size_ >> 1) Resize(array_size_ << 1, zone);
1339  ASSERT(count_ < array_size_);
1340  count_++;
1341  uint32_t pos = Bound(static_cast<uint32_t>(value->Hashcode()));
1342  if (array_[pos].value == NULL) {
1343  array_[pos].value = value;
1344  array_[pos].next = kNil;
1345  } else {
1346  if (free_list_head_ == kNil) {
1347  ResizeLists(lists_size_ << 1, zone);
1348  }
1349  int new_element_pos = free_list_head_;
1350  ASSERT(new_element_pos != kNil);
1351  free_list_head_ = lists_[free_list_head_].next;
1352  lists_[new_element_pos].value = value;
1353  lists_[new_element_pos].next = array_[pos].next;
1354  ASSERT(array_[pos].next == kNil || lists_[array_[pos].next].value != NULL);
1355  array_[pos].next = new_element_pos;
1356  }
1357 }
1358 
1359 
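// Note: HSideEffectMap remembers, for each tracked side effect, the most
// recent instruction that changes it; Kill() clears the entries for the
// given set of flags.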
1360 HSideEffectMap::HSideEffectMap() : count_(0) {
1361  memset(data_, 0, kNumberOfTrackedSideEffects * kPointerSize);
1362 }
1363 
1364 
1365 HSideEffectMap::HSideEffectMap(HSideEffectMap* other) : count_(other->count_) {
1366  *this = *other; // Calls operator=.
1367 }
1368 
1369 
1370 HSideEffectMap& HSideEffectMap::operator= (const HSideEffectMap& other) {
1371  if (this != &other) {
1372  memcpy(data_, other.data_, kNumberOfTrackedSideEffects * kPointerSize);
1373  }
1374  return *this;
1375 }
1376 
1377 void HSideEffectMap::Kill(GVNFlagSet flags) {
1378  for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
1379  GVNFlag changes_flag = HValue::ChangesFlagFromInt(i);
1380  if (flags.Contains(changes_flag)) {
1381  if (data_[i] != NULL) count_--;
1382  data_[i] = NULL;
1383  }
1384  }
1385 }
1386 
1387 
1388 void HSideEffectMap::Store(GVNFlagSet flags, HInstruction* instr) {
1389  for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
1390  GVNFlag changes_flag = HValue::ChangesFlagFromInt(i);
1391  if (flags.Contains(changes_flag)) {
1392  if (data_[i] == NULL) count_++;
1393  data_[i] = instr;
1394  }
1395  }
1396 }
1397 
1398 
1399 class HStackCheckEliminator BASE_EMBEDDED {
1400  public:
1401  explicit HStackCheckEliminator(HGraph* graph) : graph_(graph) { }
1402 
1403  void Process();
1404 
1405  private:
1406  HGraph* graph_;
1407 };
1408 
1409 
1410 void HStackCheckEliminator::Process() {
1411  // For each loop block walk the dominator tree from the backwards branch to
1412  // the loop header. If a call instruction is encountered the backwards branch
1413  // is dominated by a call and the stack check in the backwards branch can be
1414  // removed.
1415  for (int i = 0; i < graph_->blocks()->length(); i++) {
1416  HBasicBlock* block = graph_->blocks()->at(i);
1417  if (block->IsLoopHeader()) {
1418  HBasicBlock* back_edge = block->loop_information()->GetLastBackEdge();
1419  HBasicBlock* dominator = back_edge;
1420  while (true) {
1421  HInstruction* instr = dominator->first();
1422  while (instr != NULL) {
1423  if (instr->IsCall()) {
1424  block->loop_information()->stack_check()->Eliminate();
1425  break;
1426  }
1427  instr = instr->next();
1428  }
1429 
1430  // Done when the loop header is processed.
1431  if (dominator == block) break;
1432 
1433  // Move up the dominator tree.
1434  dominator = dominator->dominator();
1435  }
1436  }
1437  }
1438 }
1439 
1440 
1441 // Simple sparse set with O(1) add, contains, and clear.
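// Membership is encoded by the two arrays: n is in the set iff sparse_[n]
// points at a live slot of dense_ that stores n, so Clear() only resets
// length_ and never has to touch the arrays.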
1442 class SparseSet {
1443  public:
1444  SparseSet(Zone* zone, int capacity)
1445  : capacity_(capacity),
1446  length_(0),
1447  dense_(zone->NewArray<int>(capacity)),
1448  sparse_(zone->NewArray<int>(capacity)) {
1449 #ifndef NVALGRIND
1450  // Initialize the sparse array to make valgrind happy.
1451  memset(sparse_, 0, sizeof(sparse_[0]) * capacity);
1452 #endif
1453  }
1454 
1455  bool Contains(int n) const {
1456  ASSERT(0 <= n && n < capacity_);
1457  int d = sparse_[n];
1458  return 0 <= d && d < length_ && dense_[d] == n;
1459  }
1460 
1461  bool Add(int n) {
1462  if (Contains(n)) return false;
1463  dense_[length_] = n;
1464  sparse_[n] = length_;
1465  ++length_;
1466  return true;
1467  }
1468 
1469  void Clear() { length_ = 0; }
1470 
1471  private:
1472  int capacity_;
1473  int length_;
1474  int* dense_;
1475  int* sparse_;
1476 
1477  DISALLOW_COPY_AND_ASSIGN(SparseSet);
1478 };
1479 
1480 
1481 class HGlobalValueNumberer BASE_EMBEDDED {
1482  public:
1483  explicit HGlobalValueNumberer(HGraph* graph, CompilationInfo* info)
1484  : graph_(graph),
1485  info_(info),
1486  removed_side_effects_(false),
1487  block_side_effects_(graph->blocks()->length(), graph->zone()),
1488  loop_side_effects_(graph->blocks()->length(), graph->zone()),
1489  visited_on_paths_(graph->zone(), graph->blocks()->length()) {
1490  ASSERT(info->isolate()->heap()->allow_allocation(false));
1491  block_side_effects_.AddBlock(GVNFlagSet(), graph_->blocks()->length(),
1492  graph_->zone());
1493  loop_side_effects_.AddBlock(GVNFlagSet(), graph_->blocks()->length(),
1494  graph_->zone());
1495  }
1496  ~HGlobalValueNumberer() {
1497  ASSERT(!info_->isolate()->heap()->allow_allocation(true));
1498  }
1499 
1500  // Returns true if values with side effects are removed.
1501  bool Analyze();
1502 
1503  private:
1504  GVNFlagSet CollectSideEffectsOnPathsToDominatedBlock(
1505  HBasicBlock* dominator,
1506  HBasicBlock* dominated);
1507  void AnalyzeGraph();
1508  void ComputeBlockSideEffects();
1509  void LoopInvariantCodeMotion();
1510  void ProcessLoopBlock(HBasicBlock* block,
1511  HBasicBlock* before_loop,
1512  GVNFlagSet loop_kills,
1513  GVNFlagSet* accumulated_first_time_depends,
1514  GVNFlagSet* accumulated_first_time_changes);
1515  bool AllowCodeMotion();
1516  bool ShouldMove(HInstruction* instr, HBasicBlock* loop_header);
1517 
1518  HGraph* graph() { return graph_; }
1519  CompilationInfo* info() { return info_; }
1520  Zone* zone() const { return graph_->zone(); }
1521 
1522  HGraph* graph_;
1523  CompilationInfo* info_;
1524  bool removed_side_effects_;
1525 
1526  // A map of block IDs to their side effects.
1527  ZoneList<GVNFlagSet> block_side_effects_;
1528 
1529  // A map of loop header block IDs to their loop's side effects.
1530  ZoneList<GVNFlagSet> loop_side_effects_;
1531 
1532  // Used when collecting side effects on paths from dominator to
1533  // dominated.
1534  SparseSet visited_on_paths_;
1535 };
1536 
1537 
1538 bool HGlobalValueNumberer::Analyze() {
1539  removed_side_effects_ = false;
1540  ComputeBlockSideEffects();
1541  if (FLAG_loop_invariant_code_motion) {
1542  LoopInvariantCodeMotion();
1543  }
1544  AnalyzeGraph();
1545  return removed_side_effects_;
1546 }
1547 
1548 
1549 void HGlobalValueNumberer::ComputeBlockSideEffects() {
1550  // The Analyze phase of GVN can be called multiple times. Clear loop side
1551  // effects before computing them to erase the contents from previous Analyze
1552  // passes.
1553  for (int i = 0; i < loop_side_effects_.length(); ++i) {
1554  loop_side_effects_[i].RemoveAll();
1555  }
1556  for (int i = graph_->blocks()->length() - 1; i >= 0; --i) {
1557  // Compute side effects for the block.
1558  HBasicBlock* block = graph_->blocks()->at(i);
1559  HInstruction* instr = block->first();
1560  int id = block->block_id();
1561  GVNFlagSet side_effects;
1562  while (instr != NULL) {
1563  side_effects.Add(instr->ChangesFlags());
1564  if (instr->IsSoftDeoptimize()) {
1565  block_side_effects_[id].RemoveAll();
1566  side_effects.RemoveAll();
1567  break;
1568  }
1569  instr = instr->next();
1570  }
1571  block_side_effects_[id].Add(side_effects);
1572 
1573  // Loop headers are part of their loop.
1574  if (block->IsLoopHeader()) {
1575  loop_side_effects_[id].Add(side_effects);
1576  }
1577 
1578  // Propagate loop side effects upwards.
1579  if (block->HasParentLoopHeader()) {
1580  int header_id = block->parent_loop_header()->block_id();
1581  loop_side_effects_[header_id].Add(block->IsLoopHeader()
1582  ? loop_side_effects_[id]
1583  : side_effects);
1584  }
1585  }
1586 }
1587 
1588 
1589 static SmartArrayPointer<char> GetGVNFlagsString(GVNFlagSet flags) {
1590  char underlying_buffer[kLastFlag * 128];
1591  Vector<char> buffer(underlying_buffer, sizeof(underlying_buffer));
1592 #if DEBUG
1593  int offset = 0;
1594  const char* separator = "";
1595  const char* comma = ", ";
1596  buffer[0] = 0;
1597  uint32_t set_depends_on = 0;
1598  uint32_t set_changes = 0;
1599  for (int bit = 0; bit < kLastFlag; ++bit) {
1600  if ((flags.ToIntegral() & (1 << bit)) != 0) {
1601  if (bit % 2 == 0) {
1602  set_changes++;
1603  } else {
1604  set_depends_on++;
1605  }
1606  }
1607  }
1608  bool positive_changes = set_changes < (kLastFlag / 2);
1609  bool positive_depends_on = set_depends_on < (kLastFlag / 2);
1610  if (set_changes > 0) {
1611  if (positive_changes) {
1612  offset += OS::SNPrintF(buffer + offset, "changes [");
1613  } else {
1614  offset += OS::SNPrintF(buffer + offset, "changes all except [");
1615  }
1616  for (int bit = 0; bit < kLastFlag; ++bit) {
1617  if (((flags.ToIntegral() & (1 << bit)) != 0) == positive_changes) {
1618  switch (static_cast<GVNFlag>(bit)) {
1619 #define DECLARE_FLAG(type) \
1620  case kChanges##type: \
1621  offset += OS::SNPrintF(buffer + offset, separator); \
1622  offset += OS::SNPrintF(buffer + offset, #type); \
1623  separator = comma; \
1624  break;
1627 #undef DECLARE_FLAG
1628  default:
1629  break;
1630  }
1631  }
1632  }
1633  offset += OS::SNPrintF(buffer + offset, "]");
1634  }
1635  if (set_depends_on > 0) {
1636  separator = "";
1637  if (set_changes > 0) {
1638  offset += OS::SNPrintF(buffer + offset, ", ");
1639  }
1640  if (positive_depends_on) {
1641  offset += OS::SNPrintF(buffer + offset, "depends on [");
1642  } else {
1643  offset += OS::SNPrintF(buffer + offset, "depends on all except [");
1644  }
1645  for (int bit = 0; bit < kLastFlag; ++bit) {
1646  if (((flags.ToIntegral() & (1 << bit)) != 0) == positive_depends_on) {
1647  switch (static_cast<GVNFlag>(bit)) {
1648 #define DECLARE_FLAG(type) \
1649  case kDependsOn##type: \
1650  offset += OS::SNPrintF(buffer + offset, separator); \
1651  offset += OS::SNPrintF(buffer + offset, #type); \
1652  separator = comma; \
1653  break;
1656 #undef DECLARE_FLAG
1657  default:
1658  break;
1659  }
1660  }
1661  }
1662  offset += OS::SNPrintF(buffer + offset, "]");
1663  }
1664 #else
1665  OS::SNPrintF(buffer, "0x%08X", flags.ToIntegral());
1666 #endif
1667  size_t string_len = strlen(underlying_buffer) + 1;
1668  ASSERT(string_len <= sizeof(underlying_buffer));
1669  char* result = new char[strlen(underlying_buffer) + 1];
1670  memcpy(result, underlying_buffer, string_len);
1671  return SmartArrayPointer<char>(result);
1672 }
1673 
1674 
1675 void HGlobalValueNumberer::LoopInvariantCodeMotion() {
1676  for (int i = graph_->blocks()->length() - 1; i >= 0; --i) {
1677  HBasicBlock* block = graph_->blocks()->at(i);
1678  if (block->IsLoopHeader()) {
1679  GVNFlagSet side_effects = loop_side_effects_[block->block_id()];
1680  TRACE_GVN_2("Try loop invariant motion for block B%d %s\n",
1681  block->block_id(),
1682  *GetGVNFlagsString(side_effects));
1683 
1684  GVNFlagSet accumulated_first_time_depends;
1685  GVNFlagSet accumulated_first_time_changes;
1686  HBasicBlock* last = block->loop_information()->GetLastBackEdge();
1687  for (int j = block->block_id(); j <= last->block_id(); ++j) {
1688  ProcessLoopBlock(graph_->blocks()->at(j), block, side_effects,
1689  &accumulated_first_time_depends,
1690  &accumulated_first_time_changes);
1691  }
1692  }
1693  }
1694 }
1695 
1696 
1697 void HGlobalValueNumberer::ProcessLoopBlock(
1698  HBasicBlock* block,
1699  HBasicBlock* loop_header,
1700  GVNFlagSet loop_kills,
1701  GVNFlagSet* first_time_depends,
1702  GVNFlagSet* first_time_changes) {
1703  HBasicBlock* pre_header = loop_header->predecessors()->at(0);
1704  GVNFlagSet depends_flags = HValue::ConvertChangesToDependsFlags(loop_kills);
1705  TRACE_GVN_2("Loop invariant motion for B%d %s\n",
1706  block->block_id(),
1707  *GetGVNFlagsString(depends_flags));
1708  HInstruction* instr = block->first();
1709  while (instr != NULL) {
1710  HInstruction* next = instr->next();
1711  bool hoisted = false;
1712  if (instr->CheckFlag(HValue::kUseGVN)) {
1713  TRACE_GVN_4("Checking instruction %d (%s) %s. Loop %s\n",
1714  instr->id(),
1715  instr->Mnemonic(),
1716  *GetGVNFlagsString(instr->gvn_flags()),
1717  *GetGVNFlagsString(loop_kills));
1718  bool can_hoist = !instr->gvn_flags().ContainsAnyOf(depends_flags);
1719  if (instr->IsTransitionElementsKind()) {
1720  // It's possible to hoist transitions out of a loop as long as the
1721  // hoisting wouldn't move the transition past an instruction that has a
1722  // DependsOn flag for anything it changes.
1723  GVNFlagSet hoist_depends_blockers =
1724  HValue::ConvertChangesToDependsFlags(instr->ChangesFlags());
1725 
1726  // In addition, the transition must not be hoisted above elements kind
1727  // changes, or if the transition is destructive to the elements buffer,
1728  // changes to array pointer or array contents.
1729  GVNFlagSet hoist_change_blockers;
1730  hoist_change_blockers.Add(kChangesElementsKind);
1731  HTransitionElementsKind* trans = HTransitionElementsKind::cast(instr);
1732  if (trans->original_map()->has_fast_double_elements()) {
1733  hoist_change_blockers.Add(kChangesElementsPointer);
1734  hoist_change_blockers.Add(kChangesDoubleArrayElements);
1735  }
1736  if (trans->transitioned_map()->has_fast_double_elements()) {
1737  hoist_change_blockers.Add(kChangesElementsPointer);
1738  hoist_change_blockers.Add(kChangesArrayElements);
1739  }
1740  if (FLAG_trace_gvn) {
1741  GVNFlagSet hoist_blockers = hoist_depends_blockers;
1742  hoist_blockers.Add(hoist_change_blockers);
1743  GVNFlagSet first_time = *first_time_changes;
1744  first_time.Add(*first_time_depends);
1745  TRACE_GVN_4("Checking dependencies on HTransitionElementsKind "
1746  "%d (%s) hoist blockers: %s; "
1747  "first-time accumulated: %s\n",
1748  instr->id(),
1749  instr->Mnemonic(),
1750  *GetGVNFlagsString(hoist_blockers),
1751  *GetGVNFlagsString(first_time));
1752  }
1753  // It's possible to hoist transitions out of the current loop only if
1754  // they dominate all of the successor blocks in the same loop and there
1755  // are no instructions that have Changes/DependsOn that intervene
1756  // between them and the beginning of the loop header.
1757  bool in_nested_loop = block != loop_header &&
1758  ((block->parent_loop_header() != loop_header) ||
1759  block->IsLoopHeader());
1760  can_hoist = !in_nested_loop &&
1761  block->IsLoopSuccessorDominator() &&
1762  !first_time_depends->ContainsAnyOf(hoist_depends_blockers) &&
1763  !first_time_changes->ContainsAnyOf(hoist_change_blockers);
1764  }
1765 
1766  if (can_hoist) {
1767  bool inputs_loop_invariant = true;
1768  for (int i = 0; i < instr->OperandCount(); ++i) {
1769  if (instr->OperandAt(i)->IsDefinedAfter(pre_header)) {
1770  inputs_loop_invariant = false;
1771  }
1772  }
1773 
1774  if (inputs_loop_invariant && ShouldMove(instr, loop_header)) {
1775  TRACE_GVN_1("Hoisting loop invariant instruction %d\n", instr->id());
1776  // Move the instruction out of the loop.
1777  instr->Unlink();
1778  instr->InsertBefore(pre_header->end());
1779  if (instr->HasSideEffects()) removed_side_effects_ = true;
1780  hoisted = true;
1781  }
1782  }
1783  }
1784  if (!hoisted) {
1785  // If an instruction is not hoisted, we have to account for its side
1786  // effects when hoisting later HTransitionElementsKind instructions.
1787  GVNFlagSet previous_depends = *first_time_depends;
1788  GVNFlagSet previous_changes = *first_time_changes;
1789  first_time_depends->Add(instr->DependsOnFlags());
1790  first_time_changes->Add(instr->ChangesFlags());
1791  if (!(previous_depends == *first_time_depends)) {
1792  TRACE_GVN_1("Updated first-time accumulated %s\n",
1793  *GetGVNFlagsString(*first_time_depends));
1794  }
1795  if (!(previous_changes == *first_time_changes)) {
1796  TRACE_GVN_1("Updated first-time accumulated %s\n",
1797  *GetGVNFlagsString(*first_time_changes));
1798  }
1799  }
1800  instr = next;
1801  }
1802 }
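// A worked instance of the blocker computation above; the maps and flag sets
// are invented for illustration and do not come from a real graph.  For a
// transition whose original map has fast double elements and whose target map
// has plain (non-double) fast elements, the code collects
//   hoist_change_blockers = { kChangesElementsKind,
//                             kChangesElementsPointer,
//                             kChangesDoubleArrayElements }
// so the transition is kept inside the loop whenever some instruction earlier
// in the first trip through the loop changes the elements kind, changes the
// elements pointer (e.g. reallocates the backing store), or writes double
// array elements.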
1803 
1804 
1805 bool HGlobalValueNumberer::AllowCodeMotion() {
1806  return info()->shared_info()->opt_count() + 1 < Compiler::kDefaultMaxOptCount;
1807 }
1808 
1809 
1810 bool HGlobalValueNumberer::ShouldMove(HInstruction* instr,
1811  HBasicBlock* loop_header) {
1812  // If we've disabled code motion or we're in a block that unconditionally
1813  // deoptimizes, don't move any instructions.
1814  return AllowCodeMotion() && !instr->block()->IsDeoptimizing();
1815 }
1816 
1817 
1818 GVNFlagSet HGlobalValueNumberer::CollectSideEffectsOnPathsToDominatedBlock(
1819  HBasicBlock* dominator, HBasicBlock* dominated) {
1820  GVNFlagSet side_effects;
1821  for (int i = 0; i < dominated->predecessors()->length(); ++i) {
1822  HBasicBlock* block = dominated->predecessors()->at(i);
1823  if (dominator->block_id() < block->block_id() &&
1824  block->block_id() < dominated->block_id() &&
1825  visited_on_paths_.Add(block->block_id())) {
1826  side_effects.Add(block_side_effects_[block->block_id()]);
1827  if (block->IsLoopHeader()) {
1828  side_effects.Add(loop_side_effects_[block->block_id()]);
1829  }
1830  side_effects.Add(CollectSideEffectsOnPathsToDominatedBlock(
1831  dominator, block));
1832  }
1833  }
1834  return side_effects;
1835 }
1836 
1837 
1838 // Each instance of this class is like a "stack frame" for the recursive
1839 // traversal of the dominator tree done during GVN (the stack is handled
1840 // as a doubly linked list).
1841 // We reuse frames when possible so the list length is limited by the depth
1842 // of the dominator tree, but this forces us to initialize each frame by
1843 // calling an explicit "Initialize" method instead of using a constructor.
1844 class GvnBasicBlockState: public ZoneObject {
1845  public:
1846  static GvnBasicBlockState* CreateEntry(Zone* zone,
1847  HBasicBlock* entry_block,
1848  HValueMap* entry_map) {
1849  return new(zone)
1850  GvnBasicBlockState(NULL, entry_block, entry_map, NULL, zone);
1851  }
1852 
1853  HBasicBlock* block() { return block_; }
1854  HValueMap* map() { return map_; }
1855  HSideEffectMap* dominators() { return &dominators_; }
1856 
1857  GvnBasicBlockState* next_in_dominator_tree_traversal(
1858  Zone* zone,
1859  HBasicBlock** dominator) {
1860  // This assignment needs to happen before calling next_dominated() because
1861  // that call can reuse "this" if we are at the last dominated block.
1862  *dominator = block();
1863  GvnBasicBlockState* result = next_dominated(zone);
1864  if (result == NULL) {
1865  GvnBasicBlockState* dominator_state = pop();
1866  if (dominator_state != NULL) {
1867  // This branch is guaranteed not to return NULL because pop() never
1868  // returns a state where "is_done() == true".
1869  *dominator = dominator_state->block();
1870  result = dominator_state->next_dominated(zone);
1871  } else {
1872  // Unnecessary (we are returning NULL) but done for cleanness.
1873  *dominator = NULL;
1874  }
1875  }
1876  return result;
1877  }
1878 
1879  private:
1880  void Initialize(HBasicBlock* block,
1881  HValueMap* map,
1882  HSideEffectMap* dominators,
1883  bool copy_map,
1884  Zone* zone) {
1885  block_ = block;
1886  map_ = copy_map ? map->Copy(zone) : map;
1887  dominated_index_ = -1;
1888  length_ = block->dominated_blocks()->length();
1889  if (dominators != NULL) {
1890  dominators_ = *dominators;
1891  }
1892  }
1893  bool is_done() { return dominated_index_ >= length_; }
1894 
1895  GvnBasicBlockState(GvnBasicBlockState* previous,
1896  HBasicBlock* block,
1897  HValueMap* map,
1898  HSideEffectMap* dominators,
1899  Zone* zone)
1900  : previous_(previous), next_(NULL) {
1901  Initialize(block, map, dominators, true, zone);
1902  }
1903 
1904  GvnBasicBlockState* next_dominated(Zone* zone) {
1905  dominated_index_++;
1906  if (dominated_index_ == length_ - 1) {
1907  // No need to copy the map for the last child in the dominator tree.
1908  Initialize(block_->dominated_blocks()->at(dominated_index_),
1909  map(),
1910  dominators(),
1911  false,
1912  zone);
1913  return this;
1914  } else if (dominated_index_ < length_) {
1915  return push(zone,
1916  block_->dominated_blocks()->at(dominated_index_),
1917  dominators());
1918  } else {
1919  return NULL;
1920  }
1921  }
1922 
1923  GvnBasicBlockState* push(Zone* zone,
1924  HBasicBlock* block,
1925  HSideEffectMap* dominators) {
1926  if (next_ == NULL) {
1927  next_ =
1928  new(zone) GvnBasicBlockState(this, block, map(), dominators, zone);
1929  } else {
1930  next_->Initialize(block, map(), dominators, true, zone);
1931  }
1932  return next_;
1933  }
1934  GvnBasicBlockState* pop() {
1935  GvnBasicBlockState* result = previous_;
1936  while (result != NULL && result->is_done()) {
1937  TRACE_GVN_2("Backtracking from block B%d to block b%d\n",
1938  block()->block_id(),
1939  previous_->block()->block_id())
1940  result = result->previous_;
1941  }
1942  return result;
1943  }
1944 
1945  GvnBasicBlockState* previous_;
1946  GvnBasicBlockState* next_;
1947  HBasicBlock* block_;
1948  HValueMap* map_;
1949  HSideEffectMap dominators_;
1950  int dominated_index_;
1951  int length_;
1952 };
1953 
1954 // This is a recursive traversal of the dominator tree but it has been turned
1955 // into a loop to avoid stack overflows.
1956 // The logical "stack frames" of the recursion are kept in a list of
1957 // GvnBasicBlockState instances.
1958 void HGlobalValueNumberer::AnalyzeGraph() {
1959  HBasicBlock* entry_block = graph_->entry_block();
1960  HValueMap* entry_map = new(zone()) HValueMap(zone());
1961  GvnBasicBlockState* current =
1962  GvnBasicBlockState::CreateEntry(zone(), entry_block, entry_map);
1963 
1964  while (current != NULL) {
1965  HBasicBlock* block = current->block();
1966  HValueMap* map = current->map();
1967  HSideEffectMap* dominators = current->dominators();
1968 
1969  TRACE_GVN_2("Analyzing block B%d%s\n",
1970  block->block_id(),
1971  block->IsLoopHeader() ? " (loop header)" : "");
1972 
1973  // If this is a loop header kill everything killed by the loop.
1974  if (block->IsLoopHeader()) {
1975  map->Kill(loop_side_effects_[block->block_id()]);
1976  }
1977 
1978  // Go through all instructions of the current block.
1979  HInstruction* instr = block->first();
1980  while (instr != NULL) {
1981  HInstruction* next = instr->next();
1982  GVNFlagSet flags = instr->ChangesFlags();
1983  if (!flags.IsEmpty()) {
1984  // Clear all instructions in the map that are affected by side effects.
1985  // Store instruction as the dominating one for tracked side effects.
1986  map->Kill(flags);
1987  dominators->Store(flags, instr);
1988  TRACE_GVN_2("Instruction %d %s\n", instr->id(),
1989  *GetGVNFlagsString(flags));
1990  }
1991  if (instr->CheckFlag(HValue::kUseGVN)) {
1992  ASSERT(!instr->HasObservableSideEffects());
1993  HValue* other = map->Lookup(instr);
1994  if (other != NULL) {
1995  ASSERT(instr->Equals(other) && other->Equals(instr));
1996  TRACE_GVN_4("Replacing value %d (%s) with value %d (%s)\n",
1997  instr->id(),
1998  instr->Mnemonic(),
1999  other->id(),
2000  other->Mnemonic());
2001  if (instr->HasSideEffects()) removed_side_effects_ = true;
2002  instr->DeleteAndReplaceWith(other);
2003  } else {
2004  map->Add(instr, zone());
2005  }
2006  }
2007  if (instr->CheckFlag(HValue::kTrackSideEffectDominators)) {
2008  for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
2009  HValue* other = dominators->at(i);
2010  GVNFlag changes_flag = HValue::ChangesFlagFromInt(i);
2011  GVNFlag depends_on_flag = HValue::DependsOnFlagFromInt(i);
2012  if (instr->DependsOnFlags().Contains(depends_on_flag) &&
2013  (other != NULL)) {
2014  TRACE_GVN_5("Side-effect #%d in %d (%s) is dominated by %d (%s)\n",
2015  i,
2016  instr->id(),
2017  instr->Mnemonic(),
2018  other->id(),
2019  other->Mnemonic());
2020  instr->SetSideEffectDominator(changes_flag, other);
2021  }
2022  }
2023  }
2024  instr = next;
2025  }
2026 
2027  HBasicBlock* dominator_block;
2028  GvnBasicBlockState* next =
2029  current->next_in_dominator_tree_traversal(zone(), &dominator_block);
2030 
2031  if (next != NULL) {
2032  HBasicBlock* dominated = next->block();
2033  HValueMap* successor_map = next->map();
2034  HSideEffectMap* successor_dominators = next->dominators();
2035 
2036  // Kill everything killed on any path between this block and the
2037  // dominated block. We don't have to traverse these paths if the
2038  // value map and the dominators list is already empty. If the range
2039  // of block ids (block_id, dominated_id) is empty there are no such
2040  // paths.
2041  if ((!successor_map->IsEmpty() || !successor_dominators->IsEmpty()) &&
2042  dominator_block->block_id() + 1 < dominated->block_id()) {
2043  visited_on_paths_.Clear();
2044  GVNFlagSet side_effects_on_all_paths =
2045  CollectSideEffectsOnPathsToDominatedBlock(dominator_block,
2046  dominated);
2047  successor_map->Kill(side_effects_on_all_paths);
2048  successor_dominators->Kill(side_effects_on_all_paths);
2049  }
2050  }
2051  current = next;
2052  }
2053 }
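// A minimal, self-contained sketch of the same recursion-to-loop idea,
// assuming nothing from hydrogen.cc: "Node" stands in for HBasicBlock and
// "Frame" for GvnBasicBlockState, and the explicit stack replaces the machine
// stack so that deep dominator trees cannot overflow it.

#include <cstddef>
#include <vector>

struct Node {
  std::vector<Node*> dominated;  // children in the dominator tree
};

void Process(Node* node) {
  (void)node;  // placeholder: visit the node exactly once
}

void WalkDominatorTree(Node* root) {
  struct Frame { Node* node; size_t next_child; };
  std::vector<Frame> stack;
  Process(root);
  stack.push_back(Frame{root, 0});
  while (!stack.empty()) {
    Frame& top = stack.back();
    if (top.next_child < top.node->dominated.size()) {
      Node* child = top.node->dominated[top.next_child++];
      Process(child);                    // pre-order: parent before children
      stack.push_back(Frame{child, 0});  // descend into the child
    } else {
      stack.pop_back();                  // backtrack to the parent frame
    }
  }
}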
2054 
2055 
2056 class HInferRepresentation BASE_EMBEDDED {
2057  public:
2058  explicit HInferRepresentation(HGraph* graph)
2059  : graph_(graph),
2060  worklist_(8, graph->zone()),
2061  in_worklist_(graph->GetMaximumValueID(), graph->zone()) { }
2062 
2063  void Analyze();
2064 
2065  private:
2066  Representation TryChange(HValue* current);
2067  void AddToWorklist(HValue* current);
2068  void InferBasedOnInputs(HValue* current);
2069  void AddDependantsToWorklist(HValue* current);
2070  void InferBasedOnUses(HValue* current);
2071 
2072  Zone* zone() const { return graph_->zone(); }
2073 
2074  HGraph* graph_;
2075  ZoneList<HValue*> worklist_;
2076  BitVector in_worklist_;
2077 };
2078 
2079 
2080 void HInferRepresentation::AddToWorklist(HValue* current) {
2081  if (current->representation().IsSpecialization()) return;
2082  if (!current->CheckFlag(HValue::kFlexibleRepresentation)) return;
2083  if (in_worklist_.Contains(current->id())) return;
2084  worklist_.Add(current, zone());
2085  in_worklist_.Add(current->id());
2086 }
2087 
2088 
2089 // This method tries to specialize the representation type of the value
2090 // given as a parameter. The value is asked to infer its representation type
2091 // based on its inputs. If the inferred type is more specialized, then this
2092 // becomes the new representation type of the node.
2093 void HInferRepresentation::InferBasedOnInputs(HValue* current) {
2094  Representation r = current->representation();
2095  if (r.IsSpecialization()) return;
2096  ASSERT(current->CheckFlag(HValue::kFlexibleRepresentation));
2097  Representation inferred = current->InferredRepresentation();
2098  if (inferred.IsSpecialization()) {
2099  if (FLAG_trace_representation) {
2100  PrintF("Changing #%d representation %s -> %s based on inputs\n",
2101  current->id(),
2102  r.Mnemonic(),
2103  inferred.Mnemonic());
2104  }
2105  current->ChangeRepresentation(inferred);
2106  AddDependantsToWorklist(current);
2107  }
2108 }
2109 
2110 
2111 void HInferRepresentation::AddDependantsToWorklist(HValue* value) {
2112  for (HUseIterator it(value->uses()); !it.Done(); it.Advance()) {
2113  AddToWorklist(it.value());
2114  }
2115  for (int i = 0; i < value->OperandCount(); ++i) {
2116  AddToWorklist(value->OperandAt(i));
2117  }
2118 }
2119 
2120 
2121 // This method calculates whether specializing the representation of the value
2122 // given as the parameter has a benefit in terms of less necessary type
2123 // conversions. If there is a benefit, then the representation of the value is
2124 // specialized.
2125 void HInferRepresentation::InferBasedOnUses(HValue* value) {
2126  Representation r = value->representation();
2127  if (r.IsSpecialization() || value->HasNoUses()) return;
2128  ASSERT(value->CheckFlag(HValue::kFlexibleRepresentation));
2129  Representation new_rep = TryChange(value);
2130  if (!new_rep.IsNone()) {
2131  if (!value->representation().Equals(new_rep)) {
2132  if (FLAG_trace_representation) {
2133  PrintF("Changing #%d representation %s -> %s based on uses\n",
2134  value->id(),
2135  r.Mnemonic(),
2136  new_rep.Mnemonic());
2137  }
2138  value->ChangeRepresentation(new_rep);
2139  AddDependantsToWorklist(value);
2140  }
2141  }
2142 }
2143 
2144 
2145 Representation HInferRepresentation::TryChange(HValue* value) {
2146  // Array of use counts for each representation.
2147  int use_count[Representation::kNumRepresentations] = { 0 };
2148 
2149  for (HUseIterator it(value->uses()); !it.Done(); it.Advance()) {
2150  HValue* use = it.value();
2151  Representation rep = use->ObservedInputRepresentation(it.index());
2152  if (rep.IsNone()) continue;
2153  if (FLAG_trace_representation) {
2154  PrintF("%d %s is used by %d %s as %s\n",
2155  value->id(),
2156  value->Mnemonic(),
2157  use->id(),
2158  use->Mnemonic(),
2159  rep.Mnemonic());
2160  }
2161  if (use->IsPhi()) HPhi::cast(use)->AddIndirectUsesTo(&use_count[0]);
2162  use_count[rep.kind()] += use->LoopWeight();
2163  }
2164  int tagged_count = use_count[Representation::kTagged];
2165  int double_count = use_count[Representation::kDouble];
2166  int int32_count = use_count[Representation::kInteger32];
2167  int non_tagged_count = double_count + int32_count;
2168 
2169  // If a non-loop phi has tagged uses, don't convert it to untagged.
2170  if (value->IsPhi() && !value->block()->IsLoopHeader() && tagged_count > 0) {
2171  return Representation::None();
2172  }
2173 
2174  // Prefer unboxing over boxing, the latter is more expensive.
2175  if (tagged_count > non_tagged_count) return Representation::None();
2176 
2177  // Prefer Integer32 over Double, if possible.
2178  if (int32_count > 0 && value->IsConvertibleToInteger()) {
2179  return Representation::Integer32();
2180  }
2181 
2182  if (double_count > 0) return Representation::Double();
2183 
2184  return Representation::None();
2185 }
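// Two illustrative votes (the use counts are invented, not taken from a real
// graph).  For an ordinary, non-phi value with weighted use counts
//   tagged: 1, double: 1, int32: 3   (non_tagged = 4)
// the tagged uses do not outnumber the untagged ones, so if the value is
// convertible to integer TryChange() returns Representation::Integer32().
// With counts
//   tagged: 3, double: 1, int32: 1   (non_tagged = 2)
// the tagged uses win and TryChange() returns Representation::None(), i.e.
// the value keeps its current representation.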
2186 
2187 
2188 void HInferRepresentation::Analyze() {
2189  HPhase phase("H_Infer representations", graph_);
2190 
2191  // (1) Initialize bit vectors and count real uses. Each phi gets a
2192  // bit-vector of length <number of phis>.
2193  const ZoneList<HPhi*>* phi_list = graph_->phi_list();
2194  int phi_count = phi_list->length();
2195  ZoneList<BitVector*> connected_phis(phi_count, graph_->zone());
2196  for (int i = 0; i < phi_count; ++i) {
2197  phi_list->at(i)->InitRealUses(i);
2198  BitVector* connected_set = new(zone()) BitVector(phi_count, graph_->zone());
2199  connected_set->Add(i);
2200  connected_phis.Add(connected_set, zone());
2201  }
2202 
2203  // (2) Do a fixed point iteration to find the set of connected phis. A
2204  // phi is connected to another phi if its value is used either directly or
2205  // indirectly through a transitive closure of the def-use relation.
2206  bool change = true;
2207  while (change) {
2208  change = false;
2209  // We normally have far more "forward edges" than "backward edges",
2210  // so we terminate faster when we walk backwards.
2211  for (int i = phi_count - 1; i >= 0; --i) {
2212  HPhi* phi = phi_list->at(i);
2213  for (HUseIterator it(phi->uses()); !it.Done(); it.Advance()) {
2214  HValue* use = it.value();
2215  if (use->IsPhi()) {
2216  int id = HPhi::cast(use)->phi_id();
2217  if (connected_phis[i]->UnionIsChanged(*connected_phis[id]))
2218  change = true;
2219  }
2220  }
2221  }
2222  }
2223 
2224  // (3a) Use the phi reachability information from step 2 to
2225  // push information about values which can't be converted to integer
2226  // without deoptimization through the phi use-def chains, avoiding
2227  // unnecessary deoptimizations later.
2228  for (int i = 0; i < phi_count; ++i) {
2229  HPhi* phi = phi_list->at(i);
2230  bool cti = phi->AllOperandsConvertibleToInteger();
2231  if (cti) continue;
2232 
2233  for (BitVector::Iterator it(connected_phis.at(i));
2234  !it.Done();
2235  it.Advance()) {
2236  HPhi* phi = phi_list->at(it.Current());
2237  phi->set_is_convertible_to_integer(false);
2238  phi->ResetInteger32Uses();
2239  }
2240  }
2241 
2242  // (3b) Use the phi reachability information from step 2 to
2243  // sum up the non-phi use counts of all connected phis.
2244  for (int i = 0; i < phi_count; ++i) {
2245  HPhi* phi = phi_list->at(i);
2246  for (BitVector::Iterator it(connected_phis.at(i));
2247  !it.Done();
2248  it.Advance()) {
2249  int index = it.Current();
2250  HPhi* it_use = phi_list->at(index);
2251  if (index != i) phi->AddNonPhiUsesFrom(it_use); // Don't count twice.
2252  }
2253  }
2254 
2255  // Initialize work list
2256  for (int i = 0; i < graph_->blocks()->length(); ++i) {
2257  HBasicBlock* block = graph_->blocks()->at(i);
2258  const ZoneList<HPhi*>* phis = block->phis();
2259  for (int j = 0; j < phis->length(); ++j) {
2260  AddToWorklist(phis->at(j));
2261  }
2262 
2263  HInstruction* current = block->first();
2264  while (current != NULL) {
2265  AddToWorklist(current);
2266  current = current->next();
2267  }
2268  }
2269 
2270  // Do a fixed point iteration, trying to improve representations
2271  while (!worklist_.is_empty()) {
2272  HValue* current = worklist_.RemoveLast();
2273  in_worklist_.Remove(current->id());
2274  InferBasedOnInputs(current);
2275  InferBasedOnUses(current);
2276  }
2277 }
2278 
2279 
2280 void HGraph::InitializeInferredTypes() {
2281  HPhase phase("H_Inferring types", this);
2282  InitializeInferredTypes(0, this->blocks_.length() - 1);
2283 }
2284 
2285 
2286 void HGraph::InitializeInferredTypes(int from_inclusive, int to_inclusive) {
2287  for (int i = from_inclusive; i <= to_inclusive; ++i) {
2288  HBasicBlock* block = blocks_[i];
2289 
2290  const ZoneList<HPhi*>* phis = block->phis();
2291  for (int j = 0; j < phis->length(); j++) {
2292  phis->at(j)->UpdateInferredType();
2293  }
2294 
2295  HInstruction* current = block->first();
2296  while (current != NULL) {
2297  current->UpdateInferredType();
2298  current = current->next();
2299  }
2300 
2301  if (block->IsLoopHeader()) {
2302  HBasicBlock* last_back_edge =
2303  block->loop_information()->GetLastBackEdge();
2304  InitializeInferredTypes(i + 1, last_back_edge->block_id());
2305  // Skip all blocks already processed by the recursive call.
2306  i = last_back_edge->block_id();
2307  // Update phis of the loop header now after the whole loop body is
2308  // guaranteed to be processed.
2309  ZoneList<HValue*> worklist(block->phis()->length(), zone());
2310  for (int j = 0; j < block->phis()->length(); ++j) {
2311  worklist.Add(block->phis()->at(j), zone());
2312  }
2313  InferTypes(&worklist);
2314  }
2315  }
2316 }
2317 
2318 
2319 void HGraph::PropagateMinusZeroChecks(HValue* value, BitVector* visited) {
2320  HValue* current = value;
2321  while (current != NULL) {
2322  if (visited->Contains(current->id())) return;
2323 
2324  // For phis, we must propagate the check to all of its inputs.
2325  if (current->IsPhi()) {
2326  visited->Add(current->id());
2327  HPhi* phi = HPhi::cast(current);
2328  for (int i = 0; i < phi->OperandCount(); ++i) {
2329  PropagateMinusZeroChecks(phi->OperandAt(i), visited);
2330  }
2331  break;
2332  }
2333 
2334  // For multiplication and division, we must propagate to the left and
2335  // the right side.
2336  if (current->IsMul()) {
2337  HMul* mul = HMul::cast(current);
2338  mul->EnsureAndPropagateNotMinusZero(visited);
2339  PropagateMinusZeroChecks(mul->left(), visited);
2340  PropagateMinusZeroChecks(mul->right(), visited);
2341  } else if (current->IsDiv()) {
2342  HDiv* div = HDiv::cast(current);
2343  div->EnsureAndPropagateNotMinusZero(visited);
2344  PropagateMinusZeroChecks(div->left(), visited);
2345  PropagateMinusZeroChecks(div->right(), visited);
2346  }
2347 
2348  current = current->EnsureAndPropagateNotMinusZero(visited);
2349  }
2350 }
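// JavaScript background for this pass (an illustrative note, not part of the
// original source):
//   (-1) * 0         evaluates to -0
//   ((-1) * 0) | 0   evaluates to 0
// Integer32 cannot represent -0, so when the result of an int32 multiply or
// divide escapes through an int32->tagged or int32->double conversion (see
// the caller further down), the producing instruction has to bail out if it
// would produce a minus zero.  The walk above arranges that by following phis
// and the operands of HMul/HDiv from the converted value and marking each
// instruction that could produce -0.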
2351 
2352 
2353 void HGraph::InsertRepresentationChangeForUse(HValue* value,
2354  HValue* use_value,
2355  int use_index,
2356  Representation to) {
2357  // Insert the representation change right before its use. For phi-uses we
2358  // insert at the end of the corresponding predecessor.
2359  HInstruction* next = NULL;
2360  if (use_value->IsPhi()) {
2361  next = use_value->block()->predecessors()->at(use_index)->end();
2362  } else {
2363  next = HInstruction::cast(use_value);
2364  }
2365 
2366  // For constants we try to make the representation change at compile
2367  // time. When a representation change is not possible without loss of
2368  // information we treat constants like normal instructions and insert the
2369  // change instructions for them.
2370  HInstruction* new_value = NULL;
2371  bool is_truncating = use_value->CheckFlag(HValue::kTruncatingToInt32);
2372  bool deoptimize_on_undefined =
2373  use_value->CheckFlag(HValue::kDeoptimizeOnUndefined);
2374  if (value->IsConstant()) {
2375  HConstant* constant = HConstant::cast(value);
2376  // Try to create a new copy of the constant with the new representation.
2377  new_value = is_truncating
2378  ? constant->CopyToTruncatedInt32(zone())
2379  : constant->CopyToRepresentation(to, zone());
2380  }
2381 
2382  if (new_value == NULL) {
2383  new_value = new(zone()) HChange(value, to,
2384  is_truncating, deoptimize_on_undefined);
2385  }
2386 
2387  new_value->InsertBefore(next);
2388  use_value->SetOperandAt(use_index, new_value);
2389 }
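// Example placement (the value and block names are hypothetical): if value v
// must become Tagged for a phi in block B3 whose matching input arrives from
// predecessor B1, the new instruction is inserted at the end of B1 rather
// than next to the phi; for an ordinary use it goes directly before the using
// instruction.  When v is an HConstant such as 41 and the conversion loses no
// information, the inserted instruction is a fresh copy of the constant in
// the requested representation instead of an HChange.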
2390 
2391 
2392 void HGraph::InsertRepresentationChangesForValue(HValue* value) {
2393  Representation r = value->representation();
2394  if (r.IsNone()) return;
2395  if (value->HasNoUses()) return;
2396 
2397  for (HUseIterator it(value->uses()); !it.Done(); it.Advance()) {
2398  HValue* use_value = it.value();
2399  int use_index = it.index();
2400  Representation req = use_value->RequiredInputRepresentation(use_index);
2401  if (req.IsNone() || req.Equals(r)) continue;
2402  InsertRepresentationChangeForUse(value, use_value, use_index, req);
2403  }
2404  if (value->HasNoUses()) {
2405  ASSERT(value->IsConstant());
2406  value->DeleteAndReplaceWith(NULL);
2407  }
2408 
2409  // The only purpose of a HForceRepresentation is to represent the value
2410  // after the (possible) HChange instruction. We make it disappear.
2411  if (value->IsForceRepresentation()) {
2412  value->DeleteAndReplaceWith(HForceRepresentation::cast(value)->value());
2413  }
2414 }
2415 
2416 
2417 void HGraph::InsertRepresentationChanges() {
2418  HPhase phase("H_Representation changes", this);
2419 
2420  // Compute truncation flag for phis: Initially assume that all
2421  // int32-phis allow truncation and iteratively remove the ones that
2422  // are used in an operation that does not allow a truncating
2423  // conversion.
2424  // TODO(fschneider): Replace this with a worklist-based iteration.
2425  for (int i = 0; i < phi_list()->length(); i++) {
2426  HPhi* phi = phi_list()->at(i);
2427  if (phi->representation().IsInteger32()) {
2428  phi->SetFlag(HValue::kTruncatingToInt32);
2429  }
2430  }
2431  bool change = true;
2432  while (change) {
2433  change = false;
2434  for (int i = 0; i < phi_list()->length(); i++) {
2435  HPhi* phi = phi_list()->at(i);
2436  if (!phi->CheckFlag(HValue::kTruncatingToInt32)) continue;
2437  if (!phi->CheckUsesForFlag(HValue::kTruncatingToInt32)) {
2438  phi->ClearFlag(HValue::kTruncatingToInt32);
2439  change = true;
2440  }
2441  }
2442  }
2443 
2444  for (int i = 0; i < blocks_.length(); ++i) {
2445  // Process phi instructions first.
2446  const ZoneList<HPhi*>* phis = blocks_[i]->phis();
2447  for (int j = 0; j < phis->length(); j++) {
2448  InsertRepresentationChangesForValue(phis->at(j));
2449  }
2450 
2451  // Process normal instructions.
2452  HInstruction* current = blocks_[i]->first();
2453  while (current != NULL) {
2454  InsertRepresentationChangesForValue(current);
2455  current = current->next();
2456  }
2457  }
2458 }
2459 
2460 
2461 void HGraph::RecursivelyMarkPhiDeoptimizeOnUndefined(HPhi* phi) {
2462  if (phi->CheckFlag(HValue::kDeoptimizeOnUndefined)) return;
2463  phi->SetFlag(HValue::kDeoptimizeOnUndefined);
2464  for (int i = 0; i < phi->OperandCount(); ++i) {
2465  HValue* input = phi->OperandAt(i);
2466  if (input->IsPhi()) {
2467  RecursivelyMarkPhiDeoptimizeOnUndefined(HPhi::cast(input));
2468  }
2469  }
2470 }
2471 
2472 
2473 void HGraph::MarkDeoptimizeOnUndefined() {
2474  HPhase phase("H_MarkDeoptimizeOnUndefined", this);
2475  // Compute DeoptimizeOnUndefined flag for phis.
2476  // Any phi that can reach a use with DeoptimizeOnUndefined set must
2477  // have DeoptimizeOnUndefined set. Currently only HCompareIDAndBranch, with
2478  // double input representation, has this flag set.
2479  // The flag is used by HChange tagged->double, which must deoptimize
2480  // if one of its uses has this flag set.
2481  for (int i = 0; i < phi_list()->length(); i++) {
2482  HPhi* phi = phi_list()->at(i);
2483  if (phi->representation().IsDouble()) {
2484  for (HUseIterator it(phi->uses()); !it.Done(); it.Advance()) {
2485  if (it.value()->CheckFlag(HValue::kDeoptimizeOnUndefined)) {
2486  RecursivelyMarkPhiDeoptimizeOnUndefined(phi);
2487  break;
2488  }
2489  }
2490  }
2491  }
2492 }
2493 
2494 
2495 void HGraph::ComputeMinusZeroChecks() {
2496  BitVector visited(GetMaximumValueID(), zone());
2497  for (int i = 0; i < blocks_.length(); ++i) {
2498  for (HInstruction* current = blocks_[i]->first();
2499  current != NULL;
2500  current = current->next()) {
2501  if (current->IsChange()) {
2502  HChange* change = HChange::cast(current);
2503  // Propagate flags for negative zero checks upwards from conversions
2504  // int32-to-tagged and int32-to-double.
2505  Representation from = change->value()->representation();
2506  ASSERT(from.Equals(change->from()));
2507  if (from.IsInteger32()) {
2508  ASSERT(change->to().IsTagged() || change->to().IsDouble());
2509  ASSERT(visited.IsEmpty());
2510  PropagateMinusZeroChecks(change->value(), &visited);
2511  visited.Clear();
2512  }
2513  }
2514  }
2515  }
2516 }
2517 
2518 
2519 // Implementation of utility class to encapsulate the translation state for
2520 // a (possibly inlined) function.
2521 FunctionState::FunctionState(HGraphBuilder* owner,
2522  CompilationInfo* info,
2523  TypeFeedbackOracle* oracle,
2524  ReturnHandlingFlag return_handling)
2525  : owner_(owner),
2526  compilation_info_(info),
2527  oracle_(oracle),
2528  call_context_(NULL),
2529  return_handling_(return_handling),
2530  function_return_(NULL),
2531  test_context_(NULL),
2532  entry_(NULL),
2533  arguments_elements_(NULL),
2534  outer_(owner->function_state()) {
2535  if (outer_ != NULL) {
2536  // State for an inline function.
2537  if (owner->ast_context()->IsTest()) {
2538  HBasicBlock* if_true = owner->graph()->CreateBasicBlock();
2539  HBasicBlock* if_false = owner->graph()->CreateBasicBlock();
2540  if_true->MarkAsInlineReturnTarget();
2541  if_false->MarkAsInlineReturnTarget();
2542  Expression* cond = TestContext::cast(owner->ast_context())->condition();
2543  // The AstContext constructor pushed on the context stack. This newed
2544  // instance is the reason that AstContext can't be BASE_EMBEDDED.
2545  test_context_ = new TestContext(owner, cond, if_true, if_false);
2546  } else {
2547  function_return_ = owner->graph()->CreateBasicBlock();
2548  function_return_->MarkAsInlineReturnTarget();
2549  }
2550  // Set this after possibly allocating a new TestContext above.
2551  call_context_ = owner->ast_context();
2552  }
2553 
2554  // Push on the state stack.
2555  owner->set_function_state(this);
2556 }
2557 
2558 
2559 FunctionState::~FunctionState() {
2560  delete test_context_;
2561  owner_->set_function_state(outer_);
2562 }
2563 
2564 
2565 // Implementation of utility classes to represent an expression's context in
2566 // the AST.
2567 AstContext::AstContext(HGraphBuilder* owner, Expression::Context kind)
2568  : owner_(owner),
2569  kind_(kind),
2570  outer_(owner->ast_context()),
2571  for_typeof_(false) {
2572  owner->set_ast_context(this); // Push.
2573 #ifdef DEBUG
2574  ASSERT(owner->environment()->frame_type() == JS_FUNCTION);
2575  original_length_ = owner->environment()->length();
2576 #endif
2577 }
2578 
2579 
2580 AstContext::~AstContext() {
2581  owner_->set_ast_context(outer_); // Pop.
2582 }
2583 
2584 
2585 EffectContext::~EffectContext() {
2586  ASSERT(owner()->HasStackOverflow() ||
2587  owner()->current_block() == NULL ||
2588  (owner()->environment()->length() == original_length_ &&
2589  owner()->environment()->frame_type() == JS_FUNCTION));
2590 }
2591 
2592 
2593 ValueContext::~ValueContext() {
2594  ASSERT(owner()->HasStackOverflow() ||
2595  owner()->current_block() == NULL ||
2596  (owner()->environment()->length() == original_length_ + 1 &&
2597  owner()->environment()->frame_type() == JS_FUNCTION));
2598 }
2599 
2600 
2601 void EffectContext::ReturnValue(HValue* value) {
2602  // The value is simply ignored.
2603 }
2604 
2605 
2606 void ValueContext::ReturnValue(HValue* value) {
2607  // The value is tracked in the bailout environment, and communicated
2608  // through the environment as the result of the expression.
2609  if (!arguments_allowed() && value->CheckFlag(HValue::kIsArguments)) {
2610  owner()->Bailout("bad value context for arguments value");
2611  }
2612  owner()->Push(value);
2613 }
2614 
2615 
2616 void TestContext::ReturnValue(HValue* value) {
2617  BuildBranch(value);
2618 }
2619 
2620 
2621 void EffectContext::ReturnInstruction(HInstruction* instr, int ast_id) {
2622  ASSERT(!instr->IsControlInstruction());
2623  owner()->AddInstruction(instr);
2624  if (instr->HasObservableSideEffects()) owner()->AddSimulate(ast_id);
2625 }
2626 
2627 
2628 void EffectContext::ReturnControl(HControlInstruction* instr, int ast_id) {
2629  ASSERT(!instr->HasObservableSideEffects());
2630  HBasicBlock* empty_true = owner()->graph()->CreateBasicBlock();
2631  HBasicBlock* empty_false = owner()->graph()->CreateBasicBlock();
2632  instr->SetSuccessorAt(0, empty_true);
2633  instr->SetSuccessorAt(1, empty_false);
2634  owner()->current_block()->Finish(instr);
2635  HBasicBlock* join = owner()->CreateJoin(empty_true, empty_false, ast_id);
2636  owner()->set_current_block(join);
2637 }
2638 
2639 
2640 void ValueContext::ReturnInstruction(HInstruction* instr, int ast_id) {
2641  ASSERT(!instr->IsControlInstruction());
2642  if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
2643  return owner()->Bailout("bad value context for arguments object value");
2644  }
2645  owner()->AddInstruction(instr);
2646  owner()->Push(instr);
2647  if (instr->HasObservableSideEffects()) owner()->AddSimulate(ast_id);
2648 }
2649 
2650 
2651 void ValueContext::ReturnControl(HControlInstruction* instr, int ast_id) {
2652  ASSERT(!instr->HasObservableSideEffects());
2653  if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
2654  return owner()->Bailout("bad value context for arguments object value");
2655  }
2656  HBasicBlock* materialize_false = owner()->graph()->CreateBasicBlock();
2657  HBasicBlock* materialize_true = owner()->graph()->CreateBasicBlock();
2658  instr->SetSuccessorAt(0, materialize_true);
2659  instr->SetSuccessorAt(1, materialize_false);
2660  owner()->current_block()->Finish(instr);
2661  owner()->set_current_block(materialize_true);
2662  owner()->Push(owner()->graph()->GetConstantTrue());
2663  owner()->set_current_block(materialize_false);
2664  owner()->Push(owner()->graph()->GetConstantFalse());
2665  HBasicBlock* join =
2666  owner()->CreateJoin(materialize_true, materialize_false, ast_id);
2667  owner()->set_current_block(join);
2668 }
2669 
2670 
2671 void TestContext::ReturnInstruction(HInstruction* instr, int ast_id) {
2672  ASSERT(!instr->IsControlInstruction());
2673  HGraphBuilder* builder = owner();
2674  builder->AddInstruction(instr);
2675  // We expect a simulate after every expression with side effects, though
2676  // this one isn't actually needed (and wouldn't work if it were targeted).
2677  if (instr->HasObservableSideEffects()) {
2678  builder->Push(instr);
2679  builder->AddSimulate(ast_id);
2680  builder->Pop();
2681  }
2682  BuildBranch(instr);
2683 }
2684 
2685 
2686 void TestContext::ReturnControl(HControlInstruction* instr, int ast_id) {
2687  ASSERT(!instr->HasObservableSideEffects());
2688  HBasicBlock* empty_true = owner()->graph()->CreateBasicBlock();
2689  HBasicBlock* empty_false = owner()->graph()->CreateBasicBlock();
2690  instr->SetSuccessorAt(0, empty_true);
2691  instr->SetSuccessorAt(1, empty_false);
2692  owner()->current_block()->Finish(instr);
2693  empty_true->Goto(if_true(), owner()->function_state());
2694  empty_false->Goto(if_false(), owner()->function_state());
2695  owner()->set_current_block(NULL);
2696 }
2697 
2698 
2699 void TestContext::BuildBranch(HValue* value) {
2700  // We expect the graph to be in edge-split form: there is no edge that
2701  // connects a branch node to a join node. We conservatively ensure that
2702  // property by always adding an empty block on the outgoing edges of this
2703  // branch.
2704  HGraphBuilder* builder = owner();
2705  if (value != NULL && value->CheckFlag(HValue::kIsArguments)) {
2706  builder->Bailout("arguments object value in a test context");
2707  }
2708  HBasicBlock* empty_true = builder->graph()->CreateBasicBlock();
2709  HBasicBlock* empty_false = builder->graph()->CreateBasicBlock();
2710  unsigned test_id = condition()->test_id();
2711  ToBooleanStub::Types expected(builder->oracle()->ToBooleanTypes(test_id));
2712  HBranch* test = new(zone()) HBranch(value, empty_true, empty_false, expected);
2713  builder->current_block()->Finish(test);
2714 
2715  empty_true->Goto(if_true(), owner()->function_state());
2716  empty_false->Goto(if_false(), owner()->function_state());
2717  builder->set_current_block(NULL);
2718 }
2719 
2720 
2721 // HGraphBuilder infrastructure for bailing out and checking bailouts.
2722 #define CHECK_BAILOUT(call) \
2723  do { \
2724  call; \
2725  if (HasStackOverflow()) return; \
2726  } while (false)
2727 
2728 
2729 #define CHECK_ALIVE(call) \
2730  do { \
2731  call; \
2732  if (HasStackOverflow() || current_block() == NULL) return; \
2733  } while (false)
2734 
2735 
2736 void HGraphBuilder::Bailout(const char* reason) {
2737  if (FLAG_trace_bailout) {
2738  SmartArrayPointer<char> name(
2739  info()->shared_info()->DebugName()->ToCString());
2740  PrintF("Bailout in HGraphBuilder: @\"%s\": %s\n", *name, reason);
2741  }
2742  SetStackOverflow();
2743 }
2744 
2745 
2746 void HGraphBuilder::VisitForEffect(Expression* expr) {
2747  EffectContext for_effect(this);
2748  Visit(expr);
2749 }
2750 
2751 
2752 void HGraphBuilder::VisitForValue(Expression* expr, ArgumentsAllowedFlag flag) {
2753  ValueContext for_value(this, flag);
2754  Visit(expr);
2755 }
2756 
2757 
2758 void HGraphBuilder::VisitForTypeOf(Expression* expr) {
2759  ValueContext for_value(this, ARGUMENTS_NOT_ALLOWED);
2760  for_value.set_for_typeof(true);
2761  Visit(expr);
2762 }
2763 
2764 
2765 
2766 void HGraphBuilder::VisitForControl(Expression* expr,
2767  HBasicBlock* true_block,
2768  HBasicBlock* false_block) {
2769  TestContext for_test(this, expr, true_block, false_block);
2770  Visit(expr);
2771 }
2772 
2773 
2774 HValue* HGraphBuilder::VisitArgument(Expression* expr) {
2775  VisitForValue(expr);
2776  if (HasStackOverflow() || current_block() == NULL) return NULL;
2777  HValue* value = Pop();
2778  Push(AddInstruction(new(zone()) HPushArgument(value)));
2779  return value;
2780 }
2781 
2782 
2783 void HGraphBuilder::VisitArgumentList(ZoneList<Expression*>* arguments) {
2784  for (int i = 0; i < arguments->length(); i++) {
2785  CHECK_ALIVE(VisitArgument(arguments->at(i)));
2786  }
2787 }
2788 
2789 
2790 void HGraphBuilder::VisitExpressions(ZoneList<Expression*>* exprs) {
2791  for (int i = 0; i < exprs->length(); ++i) {
2792  CHECK_ALIVE(VisitForValue(exprs->at(i)));
2793  }
2794 }
2795 
2796 
2797 HGraph* HGraphBuilder::CreateGraph() {
2798  graph_ = new(zone()) HGraph(info(), zone());
2799  if (FLAG_hydrogen_stats) HStatistics::Instance()->Initialize(info());
2800 
2801  {
2802  HPhase phase("H_Block building");
2803  current_block_ = graph()->entry_block();
2804 
2805  Scope* scope = info()->scope();
2806  if (scope->HasIllegalRedeclaration()) {
2807  Bailout("function with illegal redeclaration");
2808  return NULL;
2809  }
2810  if (scope->calls_eval()) {
2811  Bailout("function calls eval");
2812  return NULL;
2813  }
2814  SetUpScope(scope);
2815 
2816  // Add an edge to the body entry. This is warty: the graph's start
2817  // environment will be used by the Lithium translation as the initial
2818  // environment on graph entry, but it has now been mutated by the
2819  // Hydrogen translation of the instructions in the start block. This
2820  // environment uses values which have not been defined yet. These
2821  // Hydrogen instructions will then be replayed by the Lithium
2822  // translation, so they cannot have an environment effect. The edge to
2823  // the body's entry block (along with some special logic for the start
2824  // block in HInstruction::InsertAfter) seals the start block from
2825  // getting unwanted instructions inserted.
2826  //
2827  // TODO(kmillikin): Fix this. Stop mutating the initial environment.
2828  // Make the Hydrogen instructions in the initial block into Hydrogen
2829  // values (but not instructions), present in the initial environment and
2830  // not replayed by the Lithium translation.
2831  HEnvironment* initial_env = environment()->CopyWithoutHistory();
2832  HBasicBlock* body_entry = CreateBasicBlock(initial_env);
2833  current_block()->Goto(body_entry);
2834  body_entry->SetJoinId(AstNode::kFunctionEntryId);
2835  set_current_block(body_entry);
2836 
2837  // Handle implicit declaration of the function name in named function
2838  // expressions before other declarations.
2839  if (scope->is_function_scope() && scope->function() != NULL) {
2840  VisitVariableDeclaration(scope->function());
2841  }
2842  VisitDeclarations(scope->declarations());
2844 
2845  HValue* context = environment()->LookupContext();
2846  AddInstruction(
2847  new(zone()) HStackCheck(context, HStackCheck::kFunctionEntry));
2848 
2849  VisitStatements(info()->function()->body());
2850  if (HasStackOverflow()) return NULL;
2851 
2852  if (current_block() != NULL) {
2853  HReturn* instr = new(zone()) HReturn(graph()->GetConstantUndefined());
2854  current_block()->FinishExit(instr);
2855  set_current_block(NULL);
2856  }
2857  }
2858 
2859  graph()->OrderBlocks();
2860  graph()->AssignDominators();
2861 
2862 #ifdef DEBUG
2863  // Do a full verify after building the graph and computing dominators.
2864  graph()->Verify(true);
2865 #endif
2866 
2868  if (!graph()->CheckConstPhiUses()) {
2869  Bailout("Unsupported phi use of const variable");
2870  return NULL;
2871  }
2873  if (!graph()->CheckArgumentsPhiUses()) {
2874  Bailout("Unsupported phi use of arguments");
2875  return NULL;
2876  }
2877  if (FLAG_eliminate_dead_phis) graph()->EliminateUnreachablePhis();
2878  graph()->CollectPhis();
2879 
2880  if (graph()->has_osr_loop_entry()) {
2881  const ZoneList<HPhi*>* phis = graph()->osr_loop_entry()->phis();
2882  for (int j = 0; j < phis->length(); j++) {
2883  HPhi* phi = phis->at(j);
2884  graph()->osr_values()->at(phi->merged_index())->set_incoming_value(phi);
2885  }
2886  }
2887 
2888  HInferRepresentation rep(graph());
2889  rep.Analyze();
2890 
2891  graph()->MarkDeoptimizeOnUndefined();
2892  graph()->InsertRepresentationChanges();
2893 
2894  graph()->InitializeInferredTypes();
2895  graph()->Canonicalize();
2896 
2897  // Perform common subexpression elimination and loop-invariant code motion.
2898  if (FLAG_use_gvn) {
2899  HPhase phase("H_Global value numbering", graph());
2900  HGlobalValueNumberer gvn(graph(), info());
2901  bool removed_side_effects = gvn.Analyze();
2902  // Trigger a second analysis pass to further eliminate duplicate values that
2903  // could only be discovered by removing side-effect-generating instructions
2904  // during the first pass.
2905  if (FLAG_smi_only_arrays && removed_side_effects) {
2906  removed_side_effects = gvn.Analyze();
2907  ASSERT(!removed_side_effects);
2908  }
2909  }
2910 
2911  if (FLAG_use_range) {
2912  HRangeAnalysis rangeAnalysis(graph());
2913  rangeAnalysis.Analyze();
2914  }
2915  graph()->ComputeMinusZeroChecks();
2916 
2917  // Eliminate redundant stack checks on backwards branches.
2918  HStackCheckEliminator sce(graph());
2919  sce.Process();
2920 
2921  graph()->EliminateRedundantBoundsChecks();
2922  graph()->DehoistSimpleArrayIndexComputations();
2923 
2924  return graph();
2925 }
2926 
2927 
2928 // We try to "factor up" HBoundsCheck instructions towards the root of the
2929 // dominator tree.
2930 // For now we handle checks where the index is like "exp + int32value".
2931 // If in the dominator tree we check "exp + v1" and later (dominated)
2932 // "exp + v2", if v2 <= v1 we can safely remove the second check, and if
2933 // v2 > v1 we can use v2 in the 1st check and again remove the second.
2934 // To do so we keep a dictionary of all checks where the key is the pair
2935 // "exp, length".
2936 // The class BoundsCheckKey represents this key.
2937 class BoundsCheckKey : public ZoneObject {
2938  public:
2939  HValue* IndexBase() const { return index_base_; }
2940  HValue* Length() const { return length_; }
2941 
2942  uint32_t Hash() {
2943  return static_cast<uint32_t>(index_base_->Hashcode() ^ length_->Hashcode());
2944  }
2945 
2946  static BoundsCheckKey* Create(Zone* zone,
2947  HBoundsCheck* check,
2948  int32_t* offset) {
2949  HValue* index_base = NULL;
2950  HConstant* constant = NULL;
2951  bool is_sub = false;
2952 
2953  if (check->index()->IsAdd()) {
2954  HAdd* index = HAdd::cast(check->index());
2955  if (index->left()->IsConstant()) {
2956  constant = HConstant::cast(index->left());
2957  index_base = index->right();
2958  } else if (index->right()->IsConstant()) {
2959  constant = HConstant::cast(index->right());
2960  index_base = index->left();
2961  }
2962  } else if (check->index()->IsSub()) {
2963  HSub* index = HSub::cast(check->index());
2964  is_sub = true;
2965  if (index->left()->IsConstant()) {
2966  constant = HConstant::cast(index->left());
2967  index_base = index->right();
2968  } else if (index->right()->IsConstant()) {
2969  constant = HConstant::cast(index->right());
2970  index_base = index->left();
2971  }
2972  }
2973 
2974  if (constant != NULL && constant->HasInteger32Value()) {
2975  *offset = is_sub ? - constant->Integer32Value()
2976  : constant->Integer32Value();
2977  } else {
2978  *offset = 0;
2979  index_base = check->index();
2980  }
2981 
2982  return new(zone) BoundsCheckKey(index_base, check->length());
2983  }
2984 
2985  private:
2986  BoundsCheckKey(HValue* index_base, HValue* length)
2987  : index_base_(index_base),
2988  length_(length) { }
2989 
2990  HValue* index_base_;
2991  HValue* length_;
2992 };
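// Illustrative decomposition (the index values are hypothetical Hydrogen
// values, not taken from a real graph):
//   check #1: index = i + 4, length = len  ->  key (i, len), offset +4
//   check #2: index = i - 2, length = len  ->  key (i, len), offset -2
//   check #3: index = i,     length = len  ->  key (i, len), offset  0
// All three checks share one BoundsCheckKey, so the elimination pass below
// can reason about them as a single index expression with offsets in the
// range [-2, +4].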
2993 
2994 
2995 // Data about each HBoundsCheck that can be eliminated or moved.
2996 // It is the "value" in the dictionary indexed by "base-index, length"
2997 // (the key is BoundsCheckKey).
2998 // We scan the code with a dominator tree traversal.
2999 // Traversing the dominator tree we keep a stack (implemented as a singly
3000 // linked list) of "data" for each basic block that contains a relevant check
3001 // with the same key (the dictionary holds the head of the list).
3002 // We also keep all the "data" created for a given basic block in a list, and
3003 // use it to "clean up" the dictionary when backtracking in the dominator tree
3004 // traversal.
3005 // This way each dictionary entry always directly points to the check that
3006 // is dominating the code being examined now.
3007 // We also track the current "offset" of the index expression and use it to
3008 // decide if any check is already "covered" (so it can be removed) or not.
3009 class BoundsCheckBbData: public ZoneObject {
3010  public:
3011  BoundsCheckKey* Key() const { return key_; }
3012  int32_t LowerOffset() const { return lower_offset_; }
3013  int32_t UpperOffset() const { return upper_offset_; }
3014  HBasicBlock* BasicBlock() const { return basic_block_; }
3015  HBoundsCheck* Check() const { return check_; }
3016  BoundsCheckBbData* NextInBasicBlock() const { return next_in_bb_; }
3017  BoundsCheckBbData* FatherInDominatorTree() const { return father_in_dt_; }
3018 
3019  bool OffsetIsCovered(int32_t offset) const {
3020  return offset >= LowerOffset() && offset <= UpperOffset();
3021  }
3022 
3023  // This method removes new_check and modifies the current check so that it
3024  // also "covers" what new_check covered.
3025  // The obvious precondition is that new_check follows Check() in the
3026  // same basic block, and that new_offset is not covered (otherwise we
3027  // could simply remove new_check).
3028  // As a consequence LowerOffset() or UpperOffset() change (the covered
3029  // range grows).
3030  //
3031  // In the general case the check covering the current range should be like
3032  // these two checks:
3033  // 0 <= Key()->IndexBase() + LowerOffset()
3034  // Key()->IndexBase() + UpperOffset() < Key()->Length()
3035  //
3036  // We can transform the second check like this:
3037  // Key()->IndexBase() + LowerOffset() <
3038  // Key()->Length() + (LowerOffset() - UpperOffset())
3039  // so we can handle both checks with a single unsigned comparison.
3040  //
3041  // The bulk of this method changes Check()->index() and Check()->length()
3042  // replacing them with new HAdd instructions to perform the transformation
3043  // described above.
3044  void CoverCheck(HBoundsCheck* new_check,
3045  int32_t new_offset) {
3046  ASSERT(new_check->index()->representation().IsInteger32());
3047 
3048  if (new_offset > upper_offset_) {
3049  upper_offset_ = new_offset;
3050  } else if (new_offset < lower_offset_) {
3051  lower_offset_ = new_offset;
3052  } else {
3053  ASSERT(false);
3054  }
3055 
3056  BuildOffsetAdd(&added_index_,
3057  &added_index_offset_,
3058  Key()->IndexBase(),
3059  new_check->index()->representation(),
3060  lower_offset_);
3061  Check()->SetOperandAt(0, added_index_);
3062  BuildOffsetAdd(&added_length_,
3063  &added_length_offset_,
3064  Key()->Length(),
3065  new_check->length()->representation(),
3066  lower_offset_ - upper_offset_);
3067  Check()->SetOperandAt(1, added_length_);
3068 
3069  new_check->DeleteAndReplaceWith(NULL);
3070  }
3071 
3072  void RemoveZeroOperations() {
3073  RemoveZeroAdd(&added_index_, &added_index_offset_);
3074  RemoveZeroAdd(&added_length_, &added_length_offset_);
3075  }
3076 
3077  BoundsCheckBbData(BoundsCheckKey* key,
3078  int32_t lower_offset,
3079  int32_t upper_offset,
3080  HBasicBlock* bb,
3081  HBoundsCheck* check,
3082  BoundsCheckBbData* next_in_bb,
3083  BoundsCheckBbData* father_in_dt)
3084  : key_(key),
3085  lower_offset_(lower_offset),
3086  upper_offset_(upper_offset),
3087  basic_block_(bb),
3088  check_(check),
3089  added_index_offset_(NULL),
3090  added_index_(NULL),
3091  added_length_offset_(NULL),
3092  added_length_(NULL),
3093  next_in_bb_(next_in_bb),
3094  father_in_dt_(father_in_dt) { }
3095 
3096  private:
3097  BoundsCheckKey* key_;
3098  int32_t lower_offset_;
3099  int32_t upper_offset_;
3100  HBasicBlock* basic_block_;
3101  HBoundsCheck* check_;
3102  HConstant* added_index_offset_;
3103  HAdd* added_index_;
3104  HConstant* added_length_offset_;
3105  HAdd* added_length_;
3106  BoundsCheckBbData* next_in_bb_;
3107  BoundsCheckBbData* father_in_dt_;
3108 
3109  void BuildOffsetAdd(HAdd** add,
3110  HConstant** constant,
3111  HValue* original_value,
3112  Representation representation,
3113  int32_t new_offset) {
3114  HConstant* new_constant = new(BasicBlock()->zone())
3115  HConstant(Handle<Object>(Smi::FromInt(new_offset)),
3116  Representation::Integer32());
3117  if (*add == NULL) {
3118  new_constant->InsertBefore(Check());
3119  *add = new(BasicBlock()->zone()) HAdd(NULL,
3120  original_value,
3121  new_constant);
3122  (*add)->AssumeRepresentation(representation);
3123  (*add)->InsertBefore(Check());
3124  } else {
3125  new_constant->InsertBefore(*add);
3126  (*constant)->DeleteAndReplaceWith(new_constant);
3127  }
3128  *constant = new_constant;
3129  }
3130 
3131  void RemoveZeroAdd(HAdd** add, HConstant** constant) {
3132  if (*add != NULL && (*constant)->Integer32Value() == 0) {
3133  (*add)->DeleteAndReplaceWith((*add)->left());
3134  (*constant)->DeleteAndReplaceWith(NULL);
3135  }
3136  }
3137 };
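// A worked instance of the transformation described above CoverCheck(); the
// offsets are chosen for illustration only.  With LowerOffset() == -2 and
// UpperOffset() == +4 the two covered conditions are
//   0 <= base - 2            and            base + 4 < len
// Adding (LowerOffset() - UpperOffset()) == -6 to both sides of the second
// condition gives
//   base - 2 < len - 6
// so a single unsigned comparison of (base - 2) against (len - 6) rejects
// both a negative index and an index beyond the array length.  This is why
// CoverCheck() rebuilds Check()->index() as base + LowerOffset() and
// Check()->length() as len + (LowerOffset() - UpperOffset()).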
3138 
3139 
3140 static bool BoundsCheckKeyMatch(void* key1, void* key2) {
3141  BoundsCheckKey* k1 = static_cast<BoundsCheckKey*>(key1);
3142  BoundsCheckKey* k2 = static_cast<BoundsCheckKey*>(key2);
3143  return k1->IndexBase() == k2->IndexBase() && k1->Length() == k2->Length();
3144 }
3145 
3146 
3147 class BoundsCheckTable : public ZoneHashMap {
3148  public:
3149  BoundsCheckBbData** LookupOrInsert(BoundsCheckKey* key, Zone* zone) {
3150  return reinterpret_cast<BoundsCheckBbData**>(
3151  &(Lookup(key, key->Hash(), true, ZoneAllocationPolicy(zone))->value));
3152  }
3153 
3154  void Insert(BoundsCheckKey* key, BoundsCheckBbData* data, Zone* zone) {
3155  Lookup(key, key->Hash(), true, ZoneAllocationPolicy(zone))->value = data;
3156  }
3157 
3158  void Delete(BoundsCheckKey* key) {
3159  Remove(key, key->Hash());
3160  }
3161 
3162  explicit BoundsCheckTable(Zone* zone)
3163  : ZoneHashMap(BoundsCheckKeyMatch, ZoneHashMap::kDefaultHashMapCapacity,
3164  ZoneAllocationPolicy(zone)) { }
3165 };
3166 
3167 
3168 // Eliminates checks in bb and recursively in the dominated blocks.
3169 // Also replace the results of check instructions with the original value, if
3170 // the result is used. This is safe now, since we don't do code motion after
3171 // this point. It enables better register allocation since the value produced
3172 // by check instructions is really a copy of the original value.
3173 void HGraph::EliminateRedundantBoundsChecks(HBasicBlock* bb,
3174  BoundsCheckTable* table) {
3175  BoundsCheckBbData* bb_data_list = NULL;
3176 
3177  for (HInstruction* i = bb->first(); i != NULL; i = i->next()) {
3178  if (!i->IsBoundsCheck()) continue;
3179 
3180  HBoundsCheck* check = HBoundsCheck::cast(i);
3181  check->ReplaceAllUsesWith(check->index());
3182 
3183  if (!FLAG_array_bounds_checks_elimination) continue;
3184 
3185  int32_t offset;
3186  BoundsCheckKey* key =
3187  BoundsCheckKey::Create(zone(), check, &offset);
3188  BoundsCheckBbData** data_p = table->LookupOrInsert(key, zone());
3189  BoundsCheckBbData* data = *data_p;
3190  if (data == NULL) {
3191  bb_data_list = new(zone()) BoundsCheckBbData(key,
3192  offset,
3193  offset,
3194  bb,
3195  check,
3196  bb_data_list,
3197  NULL);
3198  *data_p = bb_data_list;
3199  } else if (data->OffsetIsCovered(offset)) {
3200  check->DeleteAndReplaceWith(NULL);
3201  } else if (data->BasicBlock() == bb) {
3202  data->CoverCheck(check, offset);
3203  } else {
3204  int32_t new_lower_offset = offset < data->LowerOffset()
3205  ? offset
3206  : data->LowerOffset();
3207  int32_t new_upper_offset = offset > data->UpperOffset()
3208  ? offset
3209  : data->UpperOffset();
3210  bb_data_list = new(zone()) BoundsCheckBbData(key,
3211  new_lower_offset,
3212  new_upper_offset,
3213  bb,
3214  check,
3215  bb_data_list,
3216  data);
3217  table->Insert(key, bb_data_list, zone());
3218  }
3219  }
3220 
3221  for (int i = 0; i < bb->dominated_blocks()->length(); ++i) {
3222  EliminateRedundantBoundsChecks(bb->dominated_blocks()->at(i), table);
3223  }
3224 
3225  for (BoundsCheckBbData* data = bb_data_list;
3226  data != NULL;
3227  data = data->NextInBasicBlock()) {
3228  data->RemoveZeroOperations();
3229  if (data->FatherInDominatorTree()) {
3230  table->Insert(data->Key(), data->FatherInDominatorTree(), zone());
3231  } else {
3232  table->Delete(data->Key());
3233  }
3234  }
3235 }
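// Illustrative run (the blocks and indices are hypothetical): suppose B1
// dominates B2, and B1 checks i + 0 and later i + 7 against the same length.
// The first check creates a BoundsCheckBbData covering the range [0, 0]; the
// second one is not covered and sits in the same block, so CoverCheck()
// widens the first check to [0, 7] and deletes the second.  A later check on
// i + 3 in B2 finds the B1 entry in the table, sees that offset 3 is already
// covered, and is removed outright.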
3236 
3237 
3238 void HGraph::EliminateRedundantBoundsChecks() {
3239  HPhase phase("H_Eliminate bounds checks", this);
3240  AssertNoAllocation no_gc;
3241  BoundsCheckTable checks_table(zone());
3242  EliminateRedundantBoundsChecks(entry_block(), &checks_table);
3243 }
3244 
3245 
3246 static void DehoistArrayIndex(ArrayInstructionInterface* array_operation) {
3247  HValue* index = array_operation->GetKey();
3248 
3249  HConstant* constant;
3250  HValue* subexpression;
3251  int32_t sign;
3252  if (index->IsAdd()) {
3253  sign = 1;
3254  HAdd* add = HAdd::cast(index);
3255  if (add->left()->IsConstant()) {
3256  subexpression = add->right();
3257  constant = HConstant::cast(add->left());
3258  } else if (add->right()->IsConstant()) {
3259  subexpression = add->left();
3260  constant = HConstant::cast(add->right());
3261  } else {
3262  return;
3263  }
3264  } else if (index->IsSub()) {
3265  sign = -1;
3266  HSub* sub = HSub::cast(index);
3267  if (sub->left()->IsConstant()) {
3268  subexpression = sub->right();
3269  constant = HConstant::cast(sub->left());
3270  } else if (sub->right()->IsConstant()) {
3271  subexpression = sub->left();
3272  constant = HConstant::cast(sub->right());
3273  } else { return; }  // Neither operand is a constant; nothing to dehoist.
3274  } else {
3275  return;
3276  }
3277 
3278  if (!constant->HasInteger32Value()) return;
3279  int32_t value = constant->Integer32Value() * sign;
3280  // We limit offset values to 30 bits because we want to avoid the risk of
3281  // overflows when the offset is added to the object header size.
3282  if (value >= 1 << 30 || value < 0) return;
3283  array_operation->SetKey(subexpression);
3284  if (index->HasNoUses()) {
3285  index->DeleteAndReplaceWith(NULL);
3286  }
3287  ASSERT(value >= 0);
3288  array_operation->SetIndexOffset(static_cast<uint32_t>(value));
3289  array_operation->SetDehoisted(true);
3290 }
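// Effect of DehoistArrayIndex on a keyed access whose key is "i + 4" (the
// names are hypothetical):
//   before:  key = HAdd(i, #4),  index offset = 0
//   after:   key = i,            index offset = 4,  dehoisted = true
// The +4 is folded into the constant displacement of the eventual memory
// operand, and the now-unused HAdd is deleted if nothing else refers to it.
// Negative offsets and offsets of 2^30 or more are left alone, as checked
// above.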
3291 
3292 
3293 void HGraph::DehoistSimpleArrayIndexComputations() {
3294  if (!FLAG_array_index_dehoisting) return;
3295 
3296  HPhase phase("H_Dehoist index computations", this);
3297  for (int i = 0; i < blocks()->length(); ++i) {
3298  for (HInstruction* instr = blocks()->at(i)->first();
3299  instr != NULL;
3300  instr = instr->next()) {
3301  ArrayInstructionInterface* array_instruction = NULL;
3302  if (instr->IsLoadKeyedFastElement()) {
3303  HLoadKeyedFastElement* op = HLoadKeyedFastElement::cast(instr);
3304  array_instruction = static_cast<ArrayInstructionInterface*>(op);
3305  } else if (instr->IsLoadKeyedFastDoubleElement()) {
3306  HLoadKeyedFastDoubleElement* op =
3307  HLoadKeyedFastDoubleElement::cast(instr);
3308  array_instruction = static_cast<ArrayInstructionInterface*>(op);
3309  } else if (instr->IsLoadKeyedSpecializedArrayElement()) {
3310  HLoadKeyedSpecializedArrayElement* op =
3311  HLoadKeyedSpecializedArrayElement::cast(instr);
3312  array_instruction = static_cast<ArrayInstructionInterface*>(op);
3313  } else if (instr->IsStoreKeyedFastElement()) {
3314  HStoreKeyedFastElement* op = HStoreKeyedFastElement::cast(instr);
3315  array_instruction = static_cast<ArrayInstructionInterface*>(op);
3316  } else if (instr->IsStoreKeyedFastDoubleElement()) {
3317  HStoreKeyedFastDoubleElement* op =
3318  HStoreKeyedFastDoubleElement::cast(instr);
3319  array_instruction = static_cast<ArrayInstructionInterface*>(op);
3320  } else if (instr->IsStoreKeyedSpecializedArrayElement()) {
3321  HStoreKeyedSpecializedArrayElement* op =
3322  HStoreKeyedSpecializedArrayElement::cast(instr);
3323  array_instruction = static_cast<ArrayInstructionInterface*>(op);
3324  } else {
3325  continue;
3326  }
3327  DehoistArrayIndex(array_instruction);
3328  }
3329  }
3330 }
3331 
3332 
3333 HInstruction* HGraphBuilder::AddInstruction(HInstruction* instr) {
3334  ASSERT(current_block() != NULL);
3335  current_block()->AddInstruction(instr);
3336  return instr;
3337 }
3338 
3339 
3340 void HGraphBuilder::AddSimulate(int ast_id) {
3341  ASSERT(current_block() != NULL);
3342  current_block()->AddSimulate(ast_id);
3343 }
3344 
3345 
3346 void HGraphBuilder::AddPhi(HPhi* instr) {
3347  ASSERT(current_block() != NULL);
3348  current_block()->AddPhi(instr);
3349 }
3350 
3351 
3352 void HGraphBuilder::PushAndAdd(HInstruction* instr) {
3353  Push(instr);
3354  AddInstruction(instr);
3355 }
3356 
3357 
3358 template <class Instruction>
3359 HInstruction* HGraphBuilder::PreProcessCall(Instruction* call) {
3360  int count = call->argument_count();
3361  ZoneList<HValue*> arguments(count, zone());
3362  for (int i = 0; i < count; ++i) {
3363  arguments.Add(Pop(), zone());
3364  }
3365 
3366  while (!arguments.is_empty()) {
3367  AddInstruction(new(zone()) HPushArgument(arguments.RemoveLast()));
3368  }
3369  return call;
3370 }
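// For a call f(a, b, c) (an illustrative expression, not from the source)
// the environment stack holds [... a b c] with c on top.  The first loop
// pops c, b, a into "arguments"; the second loop then emits
// HPushArgument(a), HPushArgument(b), HPushArgument(c), restoring the
// original left-to-right argument order before the call instruction itself
// is returned to the caller for insertion.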
3371 
3372 
3373 void HGraphBuilder::SetUpScope(Scope* scope) {
3374  HConstant* undefined_constant = new(zone()) HConstant(
3375  isolate()->factory()->undefined_value(), Representation::Tagged());
3376  AddInstruction(undefined_constant);
3377  graph_->set_undefined_constant(undefined_constant);
3378 
3379  HArgumentsObject* object = new(zone()) HArgumentsObject;
3380  AddInstruction(object);
3381  graph()->SetArgumentsObject(object);
3382 
3383  // Set the initial values of parameters including "this". "This" has
3384  // parameter index 0.
3385  ASSERT_EQ(scope->num_parameters() + 1, environment()->parameter_count());
3386 
3387  for (int i = 0; i < environment()->parameter_count(); ++i) {
3388  HInstruction* parameter = AddInstruction(new(zone()) HParameter(i));
3389  environment()->Bind(i, parameter);
3390  }
3391 
3392  // First special is HContext.
3393  HInstruction* context = AddInstruction(new(zone()) HContext);
3394  environment()->BindContext(context);
3395 
3396  // Initialize specials and locals to undefined.
3397  for (int i = environment()->parameter_count() + 1;
3398  i < environment()->length();
3399  ++i) {
3400  environment()->Bind(i, undefined_constant);
3401  }
3402 
3403  // Handle the arguments and arguments shadow variables specially (they do
3404  // not have declarations).
3405  if (scope->arguments() != NULL) {
3406  if (!scope->arguments()->IsStackAllocated()) {
3407  return Bailout("context-allocated arguments");
3408  }
3409 
3410  environment()->Bind(scope->arguments(),
3411  graph()->GetArgumentsObject());
3412  }
3413 }
3414 
3415 
3416 void HGraphBuilder::VisitStatements(ZoneList<Statement*>* statements) {
3417  for (int i = 0; i < statements->length(); i++) {
3418  CHECK_ALIVE(Visit(statements->at(i)));
3419  }
3420 }
3421 
3422 
3423 HBasicBlock* HGraphBuilder::CreateBasicBlock(HEnvironment* env) {
3424  HBasicBlock* b = graph()->CreateBasicBlock();
3425  b->SetInitialEnvironment(env);
3426  return b;
3427 }
3428 
3429 
3430 HBasicBlock* HGraphBuilder::CreateLoopHeaderBlock() {
3431  HBasicBlock* header = graph()->CreateBasicBlock();
3432  HEnvironment* entry_env = environment()->CopyAsLoopHeader(header);
3433  header->SetInitialEnvironment(entry_env);
3434  header->AttachLoopInformation();
3435  return header;
3436 }
3437 
3438 
3439 void HGraphBuilder::VisitBlock(Block* stmt) {
3440  ASSERT(!HasStackOverflow());
3441  ASSERT(current_block() != NULL);
3442  ASSERT(current_block()->HasPredecessor());
3443  if (stmt->scope() != NULL) {
3444  return Bailout("ScopedBlock");
3445  }
3446  BreakAndContinueInfo break_info(stmt);
3447  { BreakAndContinueScope push(&break_info, this);
3448  CHECK_BAILOUT(VisitStatements(stmt->statements()));
3449  }
3450  HBasicBlock* break_block = break_info.break_block();
3451  if (break_block != NULL) {
3452  if (current_block() != NULL) current_block()->Goto(break_block);
3453  break_block->SetJoinId(stmt->ExitId());
3454  set_current_block(break_block);
3455  }
3456 }
3457 
3458 
3459 void HGraphBuilder::VisitExpressionStatement(ExpressionStatement* stmt) {
3460  ASSERT(!HasStackOverflow());
3461  ASSERT(current_block() != NULL);
3462  ASSERT(current_block()->HasPredecessor());
3463  VisitForEffect(stmt->expression());
3464 }
3465 
3466 
3467 void HGraphBuilder::VisitEmptyStatement(EmptyStatement* stmt) {
3468  ASSERT(!HasStackOverflow());
3469  ASSERT(current_block() != NULL);
3470  ASSERT(current_block()->HasPredecessor());
3471 }
3472 
3473 
3474 void HGraphBuilder::VisitIfStatement(IfStatement* stmt) {
3475  ASSERT(!HasStackOverflow());
3476  ASSERT(current_block() != NULL);
3477  ASSERT(current_block()->HasPredecessor());
3478  if (stmt->condition()->ToBooleanIsTrue()) {
3479  AddSimulate(stmt->ThenId());
3480  Visit(stmt->then_statement());
3481  } else if (stmt->condition()->ToBooleanIsFalse()) {
3482  AddSimulate(stmt->ElseId());
3483  Visit(stmt->else_statement());
3484  } else {
3485  HBasicBlock* cond_true = graph()->CreateBasicBlock();
3486  HBasicBlock* cond_false = graph()->CreateBasicBlock();
3487  CHECK_BAILOUT(VisitForControl(stmt->condition(), cond_true, cond_false));
3488 
3489  if (cond_true->HasPredecessor()) {
3490  cond_true->SetJoinId(stmt->ThenId());
3491  set_current_block(cond_true);
3492  CHECK_BAILOUT(Visit(stmt->then_statement()));
3493  cond_true = current_block();
3494  } else {
3495  cond_true = NULL;
3496  }
3497 
3498  if (cond_false->HasPredecessor()) {
3499  cond_false->SetJoinId(stmt->ElseId());
3500  set_current_block(cond_false);
3501  CHECK_BAILOUT(Visit(stmt->else_statement()));
3502  cond_false = current_block();
3503  } else {
3504  cond_false = NULL;
3505  }
3506 
3507  HBasicBlock* join = CreateJoin(cond_true, cond_false, stmt->IfId());
3508  set_current_block(join);
3509  }
3510 }
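// Note (added commentary, not in the original source): a statically known
// condition (ToBooleanIsTrue/ToBooleanIsFalse) compiles only the taken arm, so
// for example "if (true) a(); else b();" emits no branch at all. Otherwise the
// condition is compiled for control into two fresh blocks and the arms are
// rejoined at the statement's IfId; an arm whose block never gained a
// predecessor is unreachable and is passed to CreateJoin as NULL.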
3511 
3512 
3513 HBasicBlock* HGraphBuilder::BreakAndContinueScope::Get(
3514  BreakableStatement* stmt,
3515  BreakType type,
3516  int* drop_extra) {
3517  *drop_extra = 0;
3518  BreakAndContinueScope* current = this;
3519  while (current != NULL && current->info()->target() != stmt) {
3520  *drop_extra += current->info()->drop_extra();
3521  current = current->next();
3522  }
3523  ASSERT(current != NULL); // Always found (unless stack is malformed).
3524 
3525  if (type == BREAK) {
3526  *drop_extra += current->info()->drop_extra();
3527  }
3528 
3529  HBasicBlock* block = NULL;
3530  switch (type) {
3531  case BREAK:
3532  block = current->info()->break_block();
3533  if (block == NULL) {
3534  block = current->owner()->graph()->CreateBasicBlock();
3535  current->info()->set_break_block(block);
3536  }
3537  break;
3538 
3539  case CONTINUE:
3540  block = current->info()->continue_block();
3541  if (block == NULL) {
3542  block = current->owner()->graph()->CreateBasicBlock();
3543  current->info()->set_continue_block(block);
3544  }
3545  break;
3546  }
3547 
3548  return block;
3549 }
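// Note (added commentary, not in the original source): Get walks outward
// through the enclosing BreakAndContinueScopes until it finds the one targeting
// the given statement, summing each scope's drop_extra so the caller knows how
// many loop-internal stack values to Drop() before jumping (the for-in loop
// below registers 5 such values). For a break, the target's own extra values
// are dropped as well. The break or continue block is created lazily on first
// use.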
3550 
3551 
3552 void HGraphBuilder::VisitContinueStatement(ContinueStatement* stmt) {
3553  ASSERT(!HasStackOverflow());
3554  ASSERT(current_block() != NULL);
3555  ASSERT(current_block()->HasPredecessor());
3556  int drop_extra = 0;
3557  HBasicBlock* continue_block = break_scope()->Get(stmt->target(),
3558  CONTINUE,
3559  &drop_extra);
3560  Drop(drop_extra);
3561  current_block()->Goto(continue_block);
3562  set_current_block(NULL);
3563 }
3564 
3565 
3566 void HGraphBuilder::VisitBreakStatement(BreakStatement* stmt) {
3567  ASSERT(!HasStackOverflow());
3568  ASSERT(current_block() != NULL);
3569  ASSERT(current_block()->HasPredecessor());
3570  int drop_extra = 0;
3571  HBasicBlock* break_block = break_scope()->Get(stmt->target(),
3572  BREAK,
3573  &drop_extra);
3574  Drop(drop_extra);
3575  current_block()->Goto(break_block);
3576  set_current_block(NULL);
3577 }
3578 
3579 
3580 void HGraphBuilder::VisitReturnStatement(ReturnStatement* stmt) {
3581  ASSERT(!HasStackOverflow());
3582  ASSERT(current_block() != NULL);
3583  ASSERT(current_block()->HasPredecessor());
3584  AstContext* context = call_context();
3585  if (context == NULL) {
3586  // Not an inlined return, so an actual one.
3587  CHECK_ALIVE(VisitForValue(stmt->expression()));
3588  HValue* result = environment()->Pop();
3589  current_block()->FinishExit(new(zone()) HReturn(result));
3590  } else if (function_state()->is_construct()) {
3591  // Return from an inlined construct call. In a test context the return
3592  // value will always evaluate to true, in a value context the return value
3593  // needs to be a JSObject.
3594  if (context->IsTest()) {
3595  TestContext* test = TestContext::cast(context);
3596  CHECK_ALIVE(VisitForEffect(stmt->expression()));
3597  current_block()->Goto(test->if_true(), function_state());
3598  } else if (context->IsEffect()) {
3599  CHECK_ALIVE(VisitForEffect(stmt->expression()));
3600  current_block()->Goto(function_return(), function_state());
3601  } else {
3602  ASSERT(context->IsValue());
3603  CHECK_ALIVE(VisitForValue(stmt->expression()));
3604  HValue* return_value = Pop();
3605  HValue* receiver = environment()->Lookup(0);
3606  HHasInstanceTypeAndBranch* typecheck =
3607  new(zone()) HHasInstanceTypeAndBranch(return_value,
3608  FIRST_SPEC_OBJECT_TYPE,
3609  LAST_SPEC_OBJECT_TYPE);
3610  HBasicBlock* if_spec_object = graph()->CreateBasicBlock();
3611  HBasicBlock* not_spec_object = graph()->CreateBasicBlock();
3612  typecheck->SetSuccessorAt(0, if_spec_object);
3613  typecheck->SetSuccessorAt(1, not_spec_object);
3614  current_block()->Finish(typecheck);
3615  if_spec_object->AddLeaveInlined(return_value,
3616  function_return(),
3617  function_state());
3618  not_spec_object->AddLeaveInlined(receiver,
3619  function_return(),
3620  function_state());
3621  }
3622  } else {
3623  // Return from an inlined function, visit the subexpression in the
3624  // expression context of the call.
3625  if (context->IsTest()) {
3626  TestContext* test = TestContext::cast(context);
3627  VisitForControl(stmt->expression(),
3628  test->if_true(),
3629  test->if_false());
3630  } else if (context->IsEffect()) {
3631  CHECK_ALIVE(VisitForEffect(stmt->expression()));
3632  current_block()->Goto(function_return(), function_state());
3633  } else {
3634  ASSERT(context->IsValue());
3635  CHECK_ALIVE(VisitForValue(stmt->expression()));
3636  HValue* return_value = Pop();
3637  current_block()->AddLeaveInlined(return_value,
3638  function_return(),
3639  function_state());
3640  }
3641  }
3642  set_current_block(NULL);
3643 }
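// Note (added commentary, not in the original source): three return shapes are
// handled above. A return in non-inlined code finishes the block with HReturn.
// A return from an inlined construct call must yield a spec object, so the
// returned value is type-checked and the implicit receiver is used on the
// failing path (in a test context the constructed result is known to evaluate
// to true, so only effects are kept). A return from an ordinary inlined call
// leaves the inlined frame in whatever context the call site imposed.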
3644 
3645 
3646 void HGraphBuilder::VisitWithStatement(WithStatement* stmt) {
3647  ASSERT(!HasStackOverflow());
3648  ASSERT(current_block() != NULL);
3649  ASSERT(current_block()->HasPredecessor());
3650  return Bailout("WithStatement");
3651 }
3652 
3653 
3654 void HGraphBuilder::VisitSwitchStatement(SwitchStatement* stmt) {
3655  ASSERT(!HasStackOverflow());
3656  ASSERT(current_block() != NULL);
3657  ASSERT(current_block()->HasPredecessor());
3658  // We only optimize switch statements with smi-literal smi comparisons,
3659  // with a bounded number of clauses.
3660  const int kCaseClauseLimit = 128;
3661  ZoneList<CaseClause*>* clauses = stmt->cases();
3662  int clause_count = clauses->length();
3663  if (clause_count > kCaseClauseLimit) {
3664  return Bailout("SwitchStatement: too many clauses");
3665  }
3666 
3667  HValue* context = environment()->LookupContext();
3668 
3669  CHECK_ALIVE(VisitForValue(stmt->tag()));
3670  AddSimulate(stmt->EntryId());
3671  HValue* tag_value = Pop();
3672  HBasicBlock* first_test_block = current_block();
3673 
3674  SwitchType switch_type = UNKNOWN_SWITCH;
3675 
3676  // 1. Extract clause type
3677  for (int i = 0; i < clause_count; ++i) {
3678  CaseClause* clause = clauses->at(i);
3679  if (clause->is_default()) continue;
3680 
3681  if (switch_type == UNKNOWN_SWITCH) {
3682  if (clause->label()->IsSmiLiteral()) {
3683  switch_type = SMI_SWITCH;
3684  } else if (clause->label()->IsStringLiteral()) {
3685  switch_type = STRING_SWITCH;
3686  } else {
3687  return Bailout("SwitchStatement: non-literal switch label");
3688  }
3689  } else if ((switch_type == STRING_SWITCH &&
3690  !clause->label()->IsStringLiteral()) ||
3691  (switch_type == SMI_SWITCH &&
3692  !clause->label()->IsSmiLiteral())) {
3693  return Bailout("SwitchStatement: mixed label types are not supported");
3694  }
3695  }
3696 
3697  HUnaryControlInstruction* string_check = NULL;
3698  HBasicBlock* not_string_block = NULL;
3699 
3700  // Test switch's tag value if all clauses are string literals
3701  if (switch_type == STRING_SWITCH) {
3702  string_check = new(zone()) HIsStringAndBranch(tag_value);
3703  first_test_block = graph()->CreateBasicBlock();
3704  not_string_block = graph()->CreateBasicBlock();
3705 
3706  string_check->SetSuccessorAt(0, first_test_block);
3707  string_check->SetSuccessorAt(1, not_string_block);
3708  current_block()->Finish(string_check);
3709 
3710  set_current_block(first_test_block);
3711  }
3712 
3713  // 2. Build all the tests, with dangling true branches
3714  int default_id = AstNode::kNoNumber;
3715  for (int i = 0; i < clause_count; ++i) {
3716  CaseClause* clause = clauses->at(i);
3717  if (clause->is_default()) {
3718  default_id = clause->EntryId();
3719  continue;
3720  }
3721  if (switch_type == SMI_SWITCH) {
3722  clause->RecordTypeFeedback(oracle());
3723  }
3724 
3725  // Generate a compare and branch.
3726  CHECK_ALIVE(VisitForValue(clause->label()));
3727  HValue* label_value = Pop();
3728 
3729  HBasicBlock* next_test_block = graph()->CreateBasicBlock();
3730  HBasicBlock* body_block = graph()->CreateBasicBlock();
3731 
3732  HControlInstruction* compare;
3733 
3734  if (switch_type == SMI_SWITCH) {
3735  if (!clause->IsSmiCompare()) {
3736  // Finish with deoptimize and add uses of environment values to
3737  // account for invisible uses.
3738  current_block()->FinishExitWithDeoptimization(HDeoptimize::kUseAll);
3739  set_current_block(NULL);
3740  break;
3741  }
3742 
3743  HCompareIDAndBranch* compare_ =
3744  new(zone()) HCompareIDAndBranch(tag_value,
3745  label_value,
3746  Token::EQ_STRICT);
3747  compare_->SetInputRepresentation(Representation::Integer32());
3748  compare = compare_;
3749  } else {
3750  compare = new(zone()) HStringCompareAndBranch(context, tag_value,
3751  label_value,
3752  Token::EQ_STRICT);
3753  }
3754 
3755  compare->SetSuccessorAt(0, body_block);
3756  compare->SetSuccessorAt(1, next_test_block);
3757  current_block()->Finish(compare);
3758 
3759  set_current_block(next_test_block);
3760  }
3761 
3762  // Save the current block to use for the default or to join with the
3763  // exit. This block is NULL if we deoptimized.
3764  HBasicBlock* last_block = current_block();
3765 
3766  if (not_string_block != NULL) {
3767  int join_id = (default_id != AstNode::kNoNumber)
3768  ? default_id
3769  : stmt->ExitId();
3770  last_block = CreateJoin(last_block, not_string_block, join_id);
3771  }
3772 
3773  // 3. Loop over the clauses and the linked list of tests in lockstep,
3774  // translating the clause bodies.
3775  HBasicBlock* curr_test_block = first_test_block;
3776  HBasicBlock* fall_through_block = NULL;
3777 
3778  BreakAndContinueInfo break_info(stmt);
3779  { BreakAndContinueScope push(&break_info, this);
3780  for (int i = 0; i < clause_count; ++i) {
3781  CaseClause* clause = clauses->at(i);
3782 
3783  // Identify the block where normal (non-fall-through) control flow
3784  // goes to.
3785  HBasicBlock* normal_block = NULL;
3786  if (clause->is_default()) {
3787  if (last_block != NULL) {
3788  normal_block = last_block;
3789  last_block = NULL; // Cleared to indicate we've handled it.
3790  }
3791  } else if (!curr_test_block->end()->IsDeoptimize()) {
3792  normal_block = curr_test_block->end()->FirstSuccessor();
3793  curr_test_block = curr_test_block->end()->SecondSuccessor();
3794  }
3795 
3796  // Identify a block to emit the body into.
3797  if (normal_block == NULL) {
3798  if (fall_through_block == NULL) {
3799  // (a) Unreachable.
3800  if (clause->is_default()) {
3801  continue; // Might still be reachable clause bodies.
3802  } else {
3803  break;
3804  }
3805  } else {
3806  // (b) Reachable only as fall through.
3807  set_current_block(fall_through_block);
3808  }
3809  } else if (fall_through_block == NULL) {
3810  // (c) Reachable only normally.
3811  set_current_block(normal_block);
3812  } else {
3813  // (d) Reachable both ways.
3814  HBasicBlock* join = CreateJoin(fall_through_block,
3815  normal_block,
3816  clause->EntryId());
3817  set_current_block(join);
3818  }
3819 
3820  CHECK_BAILOUT(VisitStatements(clause->statements()));
3821  fall_through_block = current_block();
3822  }
3823  }
3824 
3825  // Create an up-to-3-way join. Use the break block if it exists since
3826  // it's already a join block.
3827  HBasicBlock* break_block = break_info.break_block();
3828  if (break_block == NULL) {
3829  set_current_block(CreateJoin(fall_through_block,
3830  last_block,
3831  stmt->ExitId()));
3832  } else {
3833  if (fall_through_block != NULL) fall_through_block->Goto(break_block);
3834  if (last_block != NULL) last_block->Goto(break_block);
3835  break_block->SetJoinId(stmt->ExitId());
3836  set_current_block(break_block);
3837  }
3838 }
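// Note (added commentary, not in the original source): only two switch shapes
// take this optimized path, e.g.
//   switch (x) { case 1: ...; case 2: ...; default: ... }   // all-smi labels
//   switch (s) { case "a": ...; case "b": ...; }            // all-string labels
// with at most kCaseClauseLimit (128) clauses; mixed or non-literal labels bail
// out. The label tests are chained as two-way branches in clause order, and the
// clause bodies are then wired up to model fall-through, the default clause,
// and break.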
3839 
3840 
3841 bool HGraphBuilder::HasOsrEntryAt(IterationStatement* statement) {
3842  return statement->OsrEntryId() == info()->osr_ast_id();
3843 }
3844 
3845 
3846 bool HGraphBuilder::PreProcessOsrEntry(IterationStatement* statement) {
3847  if (!HasOsrEntryAt(statement)) return false;
3848 
3849  HBasicBlock* non_osr_entry = graph()->CreateBasicBlock();
3850  HBasicBlock* osr_entry = graph()->CreateBasicBlock();
3851  HValue* true_value = graph()->GetConstantTrue();
3852  HBranch* test = new(zone()) HBranch(true_value, non_osr_entry, osr_entry);
3853  current_block()->Finish(test);
3854 
3855  HBasicBlock* loop_predecessor = graph()->CreateBasicBlock();
3856  non_osr_entry->Goto(loop_predecessor);
3857 
3858  set_current_block(osr_entry);
3859  int osr_entry_id = statement->OsrEntryId();
3860  int first_expression_index = environment()->first_expression_index();
3861  int length = environment()->length();
3862  ZoneList<HUnknownOSRValue*>* osr_values =
3863  new(zone()) ZoneList<HUnknownOSRValue*>(length, zone());
3864 
3865  for (int i = 0; i < first_expression_index; ++i) {
3866  HUnknownOSRValue* osr_value = new(zone()) HUnknownOSRValue;
3867  AddInstruction(osr_value);
3868  environment()->Bind(i, osr_value);
3869  osr_values->Add(osr_value, zone());
3870  }
3871 
3872  if (first_expression_index != length) {
3873  environment()->Drop(length - first_expression_index);
3874  for (int i = first_expression_index; i < length; ++i) {
3875  HUnknownOSRValue* osr_value = new(zone()) HUnknownOSRValue;
3876  AddInstruction(osr_value);
3877  environment()->Push(osr_value);
3878  osr_values->Add(osr_value, zone());
3879  }
3880  }
3881 
3882  graph()->set_osr_values(osr_values);
3883 
3884  AddSimulate(osr_entry_id);
3885  AddInstruction(new(zone()) HOsrEntry(osr_entry_id));
3886  HContext* context = new(zone()) HContext;
3887  AddInstruction(context);
3888  environment()->BindContext(context);
3889  current_block()->Goto(loop_predecessor);
3890  loop_predecessor->SetJoinId(statement->EntryId());
3891  set_current_block(loop_predecessor);
3892  return true;
3893 }
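// Note (added commentary, not in the original source): for the loop matching
// the OSR AST id, the graph is split with a branch on constant true whose false
// successor is the OSR entry, so the OSR path exists in the graph but is never
// taken when the function is entered normally. Every environment slot is
// rebound to an HUnknownOSRValue, since the real values are supplied from the
// unoptimized frame when on-stack replacement actually happens.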
3894 
3895 
3896 void HGraphBuilder::VisitLoopBody(IterationStatement* stmt,
3897  HBasicBlock* loop_entry,
3898  BreakAndContinueInfo* break_info) {
3899  BreakAndContinueScope push(break_info, this);
3900  AddSimulate(stmt->StackCheckId());
3901  HValue* context = environment()->LookupContext();
3902  HStackCheck* stack_check =
3903  new(zone()) HStackCheck(context, HStackCheck::kBackwardsBranch);
3904  AddInstruction(stack_check);
3905  ASSERT(loop_entry->IsLoopHeader());
3906  loop_entry->loop_information()->set_stack_check(stack_check);
3907  CHECK_BAILOUT(Visit(stmt->body()));
3908 }
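// Note (added commentary, not in the original source): every loop body is
// entered through an HStackCheck tagged kBackwardsBranch, preceded by a
// simulate at the statement's StackCheckId so the check has a deoptimization
// environment; the check is also recorded on the loop header's
// HLoopInformation, giving the runtime a point at which it can interrupt
// long-running optimized loops.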
3909 
3910 
3911 void HGraphBuilder::VisitDoWhileStatement(DoWhileStatement* stmt) {
3912  ASSERT(!HasStackOverflow());
3913  ASSERT(current_block() != NULL);
3914  ASSERT(current_block()->HasPredecessor());
3915  ASSERT(current_block() != NULL);
3916  bool osr_entry = PreProcessOsrEntry(stmt);
3917  HBasicBlock* loop_entry = CreateLoopHeaderBlock();
3918  current_block()->Goto(loop_entry);
3919  set_current_block(loop_entry);
3920  if (osr_entry) graph()->set_osr_loop_entry(loop_entry);
3921 
3922  BreakAndContinueInfo break_info(stmt);
3923  CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info));
3924  HBasicBlock* body_exit =
3925  JoinContinue(stmt, current_block(), break_info.continue_block());
3926  HBasicBlock* loop_successor = NULL;
3927  if (body_exit != NULL && !stmt->cond()->ToBooleanIsTrue()) {
3928  set_current_block(body_exit);
3929  // The block for a true condition, the actual predecessor block of the
3930  // back edge.
3931  body_exit = graph()->CreateBasicBlock();
3932  loop_successor = graph()->CreateBasicBlock();
3933  CHECK_BAILOUT(VisitForControl(stmt->cond(), body_exit, loop_successor));
3934  if (body_exit->HasPredecessor()) {
3935  body_exit->SetJoinId(stmt->BackEdgeId());
3936  } else {
3937  body_exit = NULL;
3938  }
3939  if (loop_successor->HasPredecessor()) {
3940  loop_successor->SetJoinId(stmt->ExitId());
3941  } else {
3942  loop_successor = NULL;
3943  }
3944  }
3945  HBasicBlock* loop_exit = CreateLoop(stmt,
3946  loop_entry,
3947  body_exit,
3948  loop_successor,
3949  break_info.break_block());
3950  set_current_block(loop_exit);
3951 }
3952 
3953 
3954 void HGraphBuilder::VisitWhileStatement(WhileStatement* stmt) {
3955  ASSERT(!HasStackOverflow());
3956  ASSERT(current_block() != NULL);
3957  ASSERT(current_block()->HasPredecessor());
3958  ASSERT(current_block() != NULL);
3959  bool osr_entry = PreProcessOsrEntry(stmt);
3960  HBasicBlock* loop_entry = CreateLoopHeaderBlock();
3961  current_block()->Goto(loop_entry);
3962  set_current_block(loop_entry);
3963  if (osr_entry) graph()->set_osr_loop_entry(loop_entry);
3964 
3965 
3966  // If the condition is constant true, do not generate a branch.
3967  HBasicBlock* loop_successor = NULL;
3968  if (!stmt->cond()->ToBooleanIsTrue()) {
3969  HBasicBlock* body_entry = graph()->CreateBasicBlock();
3970  loop_successor = graph()->CreateBasicBlock();
3971  CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor));
3972  if (body_entry->HasPredecessor()) {
3973  body_entry->SetJoinId(stmt->BodyId());
3974  set_current_block(body_entry);
3975  }
3976  if (loop_successor->HasPredecessor()) {
3977  loop_successor->SetJoinId(stmt->ExitId());
3978  } else {
3979  loop_successor = NULL;
3980  }
3981  }
3982 
3983  BreakAndContinueInfo break_info(stmt);
3984  if (current_block() != NULL) {
3985  CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info));
3986  }
3987  HBasicBlock* body_exit =
3988  JoinContinue(stmt, current_block(), break_info.continue_block());
3989  HBasicBlock* loop_exit = CreateLoop(stmt,
3990  loop_entry,
3991  body_exit,
3992  loop_successor,
3993  break_info.break_block());
3994  set_current_block(loop_exit);
3995 }
3996 
3997 
3998 void HGraphBuilder::VisitForStatement(ForStatement* stmt) {
3999  ASSERT(!HasStackOverflow());
4000  ASSERT(current_block() != NULL);
4001  ASSERT(current_block()->HasPredecessor());
4002  if (stmt->init() != NULL) {
4003  CHECK_ALIVE(Visit(stmt->init()));
4004  }
4005  ASSERT(current_block() != NULL);
4006  bool osr_entry = PreProcessOsrEntry(stmt);
4007  HBasicBlock* loop_entry = CreateLoopHeaderBlock();
4008  current_block()->Goto(loop_entry);
4009  set_current_block(loop_entry);
4010  if (osr_entry) graph()->set_osr_loop_entry(loop_entry);
4011 
4012  HBasicBlock* loop_successor = NULL;
4013  if (stmt->cond() != NULL) {
4014  HBasicBlock* body_entry = graph()->CreateBasicBlock();
4015  loop_successor = graph()->CreateBasicBlock();
4016  CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor));
4017  if (body_entry->HasPredecessor()) {
4018  body_entry->SetJoinId(stmt->BodyId());
4019  set_current_block(body_entry);
4020  }
4021  if (loop_successor->HasPredecessor()) {
4022  loop_successor->SetJoinId(stmt->ExitId());
4023  } else {
4024  loop_successor = NULL;
4025  }
4026  }
4027 
4028  BreakAndContinueInfo break_info(stmt);
4029  if (current_block() != NULL) {
4030  CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info));
4031  }
4032  HBasicBlock* body_exit =
4033  JoinContinue(stmt, current_block(), break_info.continue_block());
4034 
4035  if (stmt->next() != NULL && body_exit != NULL) {
4036  set_current_block(body_exit);
4037  CHECK_BAILOUT(Visit(stmt->next()));
4038  body_exit = current_block();
4039  }
4040 
4041  HBasicBlock* loop_exit = CreateLoop(stmt,
4042  loop_entry,
4043  body_exit,
4044  loop_successor,
4045  break_info.break_block());
4046  set_current_block(loop_exit);
4047 }
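// Note (added commentary, not in the original source): the do-while, while and
// for visitors above share one scaffold: optionally pre-process an OSR entry,
// create a loop header block with a loop-header copy of the environment, emit
// the condition (unless it is constant true or absent), visit the body under a
// BreakAndContinueScope, join any continue edges, and let CreateLoop close the
// back edge and produce the loop exit block. They differ only in where the
// condition and the "next" expression are evaluated relative to the body.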
4048 
4049 
4050 void HGraphBuilder::VisitForInStatement(ForInStatement* stmt) {
4051  ASSERT(!HasStackOverflow());
4052  ASSERT(current_block() != NULL);
4053  ASSERT(current_block()->HasPredecessor());
4054 
4055  if (!FLAG_optimize_for_in) {
4056  return Bailout("ForInStatement optimization is disabled");
4057  }
4058 
4059  if (!oracle()->IsForInFastCase(stmt)) {
4060  return Bailout("ForInStatement is not fast case");
4061  }
4062 
4063  if (!stmt->each()->IsVariableProxy() ||
4064  !stmt->each()->AsVariableProxy()->var()->IsStackLocal()) {
4065  return Bailout("ForInStatement with non-local each variable");
4066  }
4067 
4068  Variable* each_var = stmt->each()->AsVariableProxy()->var();
4069 
4070  CHECK_ALIVE(VisitForValue(stmt->enumerable()));
4071  HValue* enumerable = Top(); // Leave enumerable at the top.
4072 
4073  HInstruction* map = AddInstruction(new(zone()) HForInPrepareMap(
4074  environment()->LookupContext(), enumerable));
4075  AddSimulate(stmt->PrepareId());
4076 
4077  HInstruction* array = AddInstruction(
4078  new(zone()) HForInCacheArray(
4079  enumerable,
4080  map,
4081  DescriptorArray::kEnumCacheBridgeCacheIndex));
4082 
4083  HInstruction* array_length = AddInstruction(
4084  new(zone()) HFixedArrayBaseLength(array));
4085 
4086  HInstruction* start_index = AddInstruction(new(zone()) HConstant(
4087  Handle<Object>(Smi::FromInt(0)), Representation::Integer32()));
4088 
4089  Push(map);
4090  Push(array);
4091  Push(array_length);
4092  Push(start_index);
4093 
4094  HInstruction* index_cache = AddInstruction(
4095  new(zone()) HForInCacheArray(
4096  enumerable,
4097  map,
4098  DescriptorArray::kEnumCacheBridgeIndicesCacheIndex));
4099  HForInCacheArray::cast(array)->set_index_cache(
4100  HForInCacheArray::cast(index_cache));
4101 
4102  bool osr_entry = PreProcessOsrEntry(stmt);
4103  HBasicBlock* loop_entry = CreateLoopHeaderBlock();
4104  current_block()->Goto(loop_entry);
4105  set_current_block(loop_entry);
4106  if (osr_entry) graph()->set_osr_loop_entry(loop_entry);
4107 
4108  HValue* index = environment()->ExpressionStackAt(0);
4109  HValue* limit = environment()->ExpressionStackAt(1);
4110 
4111  // Check that we still have more keys.
4112  HCompareIDAndBranch* compare_index =
4113  new(zone()) HCompareIDAndBranch(index, limit, Token::LT);
4114  compare_index->SetInputRepresentation(Representation::Integer32());
4115 
4116  HBasicBlock* loop_body = graph()->CreateBasicBlock();
4117  HBasicBlock* loop_successor = graph()->CreateBasicBlock();
4118 
4119  compare_index->SetSuccessorAt(0, loop_body);
4120  compare_index->SetSuccessorAt(1, loop_successor);
4121  current_block()->Finish(compare_index);
4122 
4123  set_current_block(loop_successor);
4124  Drop(5);
4125 
4126  set_current_block(loop_body);
4127 
4128  HValue* key = AddInstruction(
4129  new(zone()) HLoadKeyedFastElement(
4130  environment()->ExpressionStackAt(2), // Enum cache.
4131  environment()->ExpressionStackAt(0))); // Iteration index.
4132 
4133  // Check if the expected map still matches that of the enumerable.
4134  // If not, just deoptimize.
4135  AddInstruction(new(zone()) HCheckMapValue(
4136  environment()->ExpressionStackAt(4),
4137  environment()->ExpressionStackAt(3)));
4138 
4139  Bind(each_var, key);
4140 
4141  BreakAndContinueInfo break_info(stmt, 5);
4142  CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info));
4143 
4144  HBasicBlock* body_exit =
4145  JoinContinue(stmt, current_block(), break_info.continue_block());
4146 
4147  if (body_exit != NULL) {
4148  set_current_block(body_exit);
4149 
4150  HValue* current_index = Pop();
4151  HInstruction* new_index = new(zone()) HAdd(environment()->LookupContext(),
4152  current_index,
4153  graph()->GetConstant1());
4154  new_index->AssumeRepresentation(Representation::Integer32());
4155  PushAndAdd(new_index);
4156  body_exit = current_block();
4157  }
4158 
4159  HBasicBlock* loop_exit = CreateLoop(stmt,
4160  loop_entry,
4161  body_exit,
4162  loop_successor,
4163  break_info.break_block());
4164 
4165  set_current_block(loop_exit);
4166 }
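// Note (added commentary, not in the original source): only the fast case, e.g.
//   for (var k in obj) { ... }
// where the type oracle marks the for-in as fast and "k" is a stack-allocated
// local, is compiled here; everything else bails out. The builder keeps five
// values live on the expression stack (enumerable, its map, the enum cache
// array, the cache length, and the running index), reloads the key from the
// enum cache each iteration, re-checks the enumerable's map against the
// recorded map, and increments the index on the back edge; break and continue
// drop those five values via BreakAndContinueInfo(stmt, 5).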
4167 
4168 
4169 void HGraphBuilder::VisitTryCatchStatement(TryCatchStatement* stmt) {
4170  ASSERT(!HasStackOverflow());
4171  ASSERT(current_block() != NULL);
4172  ASSERT(current_block()->HasPredecessor());
4173  return Bailout("TryCatchStatement");
4174 }
4175 
4176 
4177 void HGraphBuilder::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
4178  ASSERT(!HasStackOverflow());
4179  ASSERT(current_block() != NULL);
4180  ASSERT(current_block()->HasPredecessor());
4181  return Bailout("TryFinallyStatement");
4182 }
4183 
4184 
4185 void HGraphBuilder::VisitDebuggerStatement(DebuggerStatement* stmt) {
4186  ASSERT(!HasStackOverflow());
4187  ASSERT(current_block() != NULL);
4188  ASSERT(current_block()->HasPredecessor());
4189  return Bailout("DebuggerStatement");
4190 }
4191 
4192 
4193 static Handle<SharedFunctionInfo> SearchSharedFunctionInfo(
4194  Code* unoptimized_code, FunctionLiteral* expr) {
4195  int start_position = expr->start_position();
4196  RelocIterator it(unoptimized_code);
4197  for (;!it.done(); it.next()) {
4198  RelocInfo* rinfo = it.rinfo();
4199  if (rinfo->rmode() != RelocInfo::EMBEDDED_OBJECT) continue;
4200  Object* obj = rinfo->target_object();
4201  if (obj->IsSharedFunctionInfo()) {
4202  SharedFunctionInfo* shared = SharedFunctionInfo::cast(obj);
4203  if (shared->start_position() == start_position) {
4204  return Handle<SharedFunctionInfo>(shared);
4205  }
4206  }
4207  }
4208 
4209  return Handle<SharedFunctionInfo>();
4210 }
4211 
4212 
4213 void HGraphBuilder::VisitFunctionLiteral(FunctionLiteral* expr) {
4214  ASSERT(!HasStackOverflow());
4215  ASSERT(current_block() != NULL);
4216  ASSERT(current_block()->HasPredecessor());
4217  Handle<SharedFunctionInfo> shared_info =
4218  SearchSharedFunctionInfo(info()->shared_info()->code(),
4219  expr);
4220  if (shared_info.is_null()) {
4221  shared_info = Compiler::BuildFunctionInfo(expr, info()->script());
4222  }
4223  // We also have a stack overflow if the recursive compilation did.
4224  if (HasStackOverflow()) return;
4225  HValue* context = environment()->LookupContext();
4226  HFunctionLiteral* instr =
4227  new(zone()) HFunctionLiteral(context, shared_info, expr->pretenure());
4228  return ast_context()->ReturnInstruction(instr, expr->id());
4229 }
4230 
4231 
4232 void HGraphBuilder::VisitSharedFunctionInfoLiteral(
4233  SharedFunctionInfoLiteral* expr) {
4234  ASSERT(!HasStackOverflow());
4235  ASSERT(current_block() != NULL);
4236  ASSERT(current_block()->HasPredecessor());
4237  return Bailout("SharedFunctionInfoLiteral");
4238 }
4239 
4240 
4241 void HGraphBuilder::VisitConditional(Conditional* expr) {
4242  ASSERT(!HasStackOverflow());
4243  ASSERT(current_block() != NULL);
4244  ASSERT(current_block()->HasPredecessor());
4245  HBasicBlock* cond_true = graph()->CreateBasicBlock();
4246  HBasicBlock* cond_false = graph()->CreateBasicBlock();
4247  CHECK_BAILOUT(VisitForControl(expr->condition(), cond_true, cond_false));
4248 
4249  // Visit the true and false subexpressions in the same AST context as the
4250  // whole expression.
4251  if (cond_true->HasPredecessor()) {
4252  cond_true->SetJoinId(expr->ThenId());
4253  set_current_block(cond_true);
4254  CHECK_BAILOUT(Visit(expr->then_expression()));
4255  cond_true = current_block();
4256  } else {
4257  cond_true = NULL;
4258  }
4259 
4260  if (cond_false->HasPredecessor()) {
4261  cond_false->SetJoinId(expr->ElseId());
4262  set_current_block(cond_false);
4263  CHECK_BAILOUT(Visit(expr->else_expression()));
4264  cond_false = current_block();
4265  } else {
4266  cond_false = NULL;
4267  }
4268 
4269  if (!ast_context()->IsTest()) {
4270  HBasicBlock* join = CreateJoin(cond_true, cond_false, expr->id());
4271  set_current_block(join);
4272  if (join != NULL && !ast_context()->IsEffect()) {
4273  return ast_context()->ReturnValue(Pop());
4274  }
4275  }
4276 }
4277 
4278 
4279 HGraphBuilder::GlobalPropertyAccess HGraphBuilder::LookupGlobalProperty(
4280  Variable* var, LookupResult* lookup, bool is_store) {
4281  if (var->is_this() || !info()->has_global_object()) {
4282  return kUseGeneric;
4283  }
4284  Handle<GlobalObject> global(info()->global_object());
4285  global->Lookup(*var->name(), lookup);
4286  if (!lookup->IsFound() ||
4287  lookup->type() != NORMAL ||
4288  (is_store && lookup->IsReadOnly()) ||
4289  lookup->holder() != *global) {
4290  return kUseGeneric;
4291  }
4292 
4293  return kUseCell;
4294 }
4295 
4296 
4297 HValue* HGraphBuilder::BuildContextChainWalk(Variable* var) {
4298  ASSERT(var->IsContextSlot());
4299  HValue* context = environment()->LookupContext();
4300  int length = info()->scope()->ContextChainLength(var->scope());
4301  while (length-- > 0) {
4302  HInstruction* context_instruction = new(zone()) HOuterContext(context);
4303  AddInstruction(context_instruction);
4304  context = context_instruction;
4305  }
4306  return context;
4307 }
4308 
4309 
4310 void HGraphBuilder::VisitVariableProxy(VariableProxy* expr) {
4311  ASSERT(!HasStackOverflow());
4312  ASSERT(current_block() != NULL);
4313  ASSERT(current_block()->HasPredecessor());
4314  Variable* variable = expr->var();
4315  switch (variable->location()) {
4316  case Variable::UNALLOCATED: {
4317  if (variable->mode() == LET || variable->mode() == CONST_HARMONY) {
4318  return Bailout("reference to global harmony declared variable");
4319  }
4320  // Handle known global constants like 'undefined' specially to avoid a
4321  // load from a global cell for them.
4322  Handle<Object> constant_value =
4323  isolate()->factory()->GlobalConstantFor(variable->name());
4324  if (!constant_value.is_null()) {
4325  HConstant* instr =
4326  new(zone()) HConstant(constant_value, Representation::Tagged());
4327  return ast_context()->ReturnInstruction(instr, expr->id());
4328  }
4329 
4330  LookupResult lookup(isolate());
4331  GlobalPropertyAccess type =
4332  LookupGlobalProperty(variable, &lookup, false);
4333 
4334  if (type == kUseCell &&
4335  info()->global_object()->IsAccessCheckNeeded()) {
4336  type = kUseGeneric;
4337  }
4338 
4339  if (type == kUseCell) {
4340  Handle<GlobalObject> global(info()->global_object());
4341  Handle<JSGlobalPropertyCell> cell(global->GetPropertyCell(&lookup));
4342  HLoadGlobalCell* instr =
4343  new(zone()) HLoadGlobalCell(cell, lookup.GetPropertyDetails());
4344  return ast_context()->ReturnInstruction(instr, expr->id());
4345  } else {
4346  HValue* context = environment()->LookupContext();
4347  HGlobalObject* global_object = new(zone()) HGlobalObject(context);
4348  AddInstruction(global_object);
4349  HLoadGlobalGeneric* instr =
4350  new(zone()) HLoadGlobalGeneric(context,
4351  global_object,
4352  variable->name(),
4353  ast_context()->is_for_typeof());
4354  instr->set_position(expr->position());
4355  return ast_context()->ReturnInstruction(instr, expr->id());
4356  }
4357  }
4358 
4359  case Variable::PARAMETER:
4360  case Variable::LOCAL: {
4361  HValue* value = environment()->Lookup(variable);
4362  if (value == graph()->GetConstantHole()) {
4363  ASSERT(variable->mode() == CONST ||
4364  variable->mode() == CONST_HARMONY ||
4365  variable->mode() == LET);
4366  return Bailout("reference to uninitialized variable");
4367  }
4368  return ast_context()->ReturnValue(value);
4369  }
4370 
4371  case Variable::CONTEXT: {
4372  HValue* context = BuildContextChainWalk(variable);
4373  HLoadContextSlot* instr = new(zone()) HLoadContextSlot(context, variable);
4374  return ast_context()->ReturnInstruction(instr, expr->id());
4375  }
4376 
4377  case Variable::LOOKUP:
4378  return Bailout("reference to a variable which requires dynamic lookup");
4379  }
4380 }
4381 
4382 
4383 void HGraphBuilder::VisitLiteral(Literal* expr) {
4384  ASSERT(!HasStackOverflow());
4385  ASSERT(current_block() != NULL);
4386  ASSERT(current_block()->HasPredecessor());
4387  HConstant* instr =
4388  new(zone()) HConstant(expr->handle(), Representation::Tagged());
4389  return ast_context()->ReturnInstruction(instr, expr->id());
4390 }
4391 
4392 
4393 void HGraphBuilder::VisitRegExpLiteral(RegExpLiteral* expr) {
4394  ASSERT(!HasStackOverflow());
4395  ASSERT(current_block() != NULL);
4396  ASSERT(current_block()->HasPredecessor());
4397  HValue* context = environment()->LookupContext();
4398 
4399  HRegExpLiteral* instr = new(zone()) HRegExpLiteral(context,
4400  expr->pattern(),
4401  expr->flags(),
4402  expr->literal_index());
4403  return ast_context()->ReturnInstruction(instr, expr->id());
4404 }
4405 
4406 
4407 // Determines whether the given array or object literal boilerplate satisfies
4408 // all limits to be considered for fast deep-copying and computes the total
4409 // size of all objects that are part of the graph.
4410 static bool IsFastLiteral(Handle<JSObject> boilerplate,
4411  int max_depth,
4412  int* max_properties,
4413  int* total_size) {
4414  ASSERT(max_depth >= 0 && *max_properties >= 0);
4415  if (max_depth == 0) return false;
4416 
4417  Handle<FixedArrayBase> elements(boilerplate->elements());
4418  if (elements->length() > 0 &&
4419  elements->map() != boilerplate->GetHeap()->fixed_cow_array_map()) {
4420  if (boilerplate->HasFastDoubleElements()) {
4421  *total_size += FixedDoubleArray::SizeFor(elements->length());
4422  } else if (boilerplate->HasFastObjectElements()) {
4423  Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements);
4424  int length = elements->length();
4425  for (int i = 0; i < length; i++) {
4426  if ((*max_properties)-- == 0) return false;
4427  Handle<Object> value(fast_elements->get(i));
4428  if (value->IsJSObject()) {
4429  Handle<JSObject> value_object = Handle<JSObject>::cast(value);
4430  if (!IsFastLiteral(value_object,
4431  max_depth - 1,
4432  max_properties,
4433  total_size)) {
4434  return false;
4435  }
4436  }
4437  }
4438  *total_size += FixedArray::SizeFor(length);
4439  } else {
4440  return false;
4441  }
4442  }
4443 
4444  Handle<FixedArray> properties(boilerplate->properties());
4445  if (properties->length() > 0) {
4446  return false;
4447  } else {
4448  int nof = boilerplate->map()->inobject_properties();
4449  for (int i = 0; i < nof; i++) {
4450  if ((*max_properties)-- == 0) return false;
4451  Handle<Object> value(boilerplate->InObjectPropertyAt(i));
4452  if (value->IsJSObject()) {
4453  Handle<JSObject> value_object = Handle<JSObject>::cast(value);
4454  if (!IsFastLiteral(value_object,
4455  max_depth - 1,
4456  max_properties,
4457  total_size)) {
4458  return false;
4459  }
4460  }
4461  }
4462  }
4463 
4464  *total_size += boilerplate->map()->instance_size();
4465  return true;
4466 }
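// Note (added commentary, not in the original source): a boilerplate qualifies
// for fast deep-copying only while the recursion depth and the running property
// budget (*max_properties) hold out; copy-on-write element backing stores are
// skipped rather than counted, any out-of-object properties disqualify the
// object, and *total_size accumulates the instance sizes plus element arrays so
// the copier knows how much space the copy needs.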
4467 
4468 
4469 void HGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) {
4470  ASSERT(!HasStackOverflow());
4471  ASSERT(current_block() != NULL);
4472  ASSERT(current_block()->HasPredecessor());
4473  Handle<JSFunction> closure = function_state()->compilation_info()->closure();
4474  HValue* context = environment()->LookupContext();
4475  HInstruction* literal;
4476 
4477  // Check whether to use fast or slow deep-copying for boilerplate.
4478  int total_size = 0;
4479  int max_properties = HFastLiteral::kMaxLiteralProperties;
4480  Handle<Object> boilerplate(closure->literals()->get(expr->literal_index()));
4481  if (boilerplate->IsJSObject() &&
4482  IsFastLiteral(Handle<JSObject>::cast(boilerplate),
4483  HFastLiteral::kMaxLiteralDepth,
4484  &max_properties,
4485  &total_size)) {
4486  Handle<JSObject> boilerplate_object = Handle<JSObject>::cast(boilerplate);
4487  literal = new(zone()) HFastLiteral(context,
4488  boilerplate_object,
4489  total_size,
4490  expr->literal_index(),
4491  expr->depth());
4492  } else {
4493  literal = new(zone()) HObjectLiteral(context,
4494  expr->constant_properties(),
4495  expr->fast_elements(),
4496  expr->literal_index(),
4497  expr->depth(),
4498  expr->has_function());
4499  }
4500 
4501  // The object is expected in the bailout environment during computation
4502  // of the property values and is the value of the entire expression.
4503  PushAndAdd(literal);
4504 
4505  expr->CalculateEmitStore(zone());
4506 
4507  for (int i = 0; i < expr->properties()->length(); i++) {
4508  ObjectLiteral::Property* property = expr->properties()->at(i);
4509  if (property->IsCompileTimeValue()) continue;
4510 
4511  Literal* key = property->key();
4512  Expression* value = property->value();
4513 
4514  switch (property->kind()) {
4515  case ObjectLiteral::Property::MATERIALIZED_LITERAL:
4516  ASSERT(!CompileTimeValue::IsCompileTimeValue(value));
4517  // Fall through.
4518  case ObjectLiteral::Property::COMPUTED:
4519  if (key->handle()->IsSymbol()) {
4520  if (property->emit_store()) {
4521  property->RecordTypeFeedback(oracle());
4522  CHECK_ALIVE(VisitForValue(value));
4523  HValue* value = Pop();
4524  HInstruction* store;
4525  CHECK_ALIVE(store = BuildStoreNamed(literal, value, property));
4526  AddInstruction(store);
4527  if (store->HasObservableSideEffects()) AddSimulate(key->id());
4528  } else {
4529  CHECK_ALIVE(VisitForEffect(value));
4530  }
4531  break;
4532  }
4533  // Fall through.
4534  case ObjectLiteral::Property::PROTOTYPE:
4535  case ObjectLiteral::Property::SETTER:
4536  case ObjectLiteral::Property::GETTER:
4537  return Bailout("Object literal with complex property");
4538  default: UNREACHABLE();
4539  }
4540  }
4541 
4542  if (expr->has_function()) {
4543  // Return the result of the transformation to fast properties
4544  // instead of the original since this operation changes the map
4545  // of the object. This makes sure that the original object won't
4546  // be used by other optimized code before it is transformed
4547  // (e.g. because of code motion).
4548  HToFastProperties* result = new(zone()) HToFastProperties(Pop());
4549  AddInstruction(result);
4550  return ast_context()->ReturnValue(result);
4551  } else {
4552  return ast_context()->ReturnValue(Pop());
4553  }
4554 }
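// Note (added commentary, not in the original source): an object literal whose
// boilerplate passes IsFastLiteral is materialized with a single HFastLiteral
// deep copy; otherwise HObjectLiteral falls back to the runtime. Afterwards
// only plain computed or materialized properties with symbol (string) keys are
// stored inline; compile-time values are already in the boilerplate, and
// getters, setters and __proto__ force a bailout, e.g.
//   var o = { a: 1, get b() { return 2; } };   // bails out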
4555 
4556 
4557 void HGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
4558  ASSERT(!HasStackOverflow());
4559  ASSERT(current_block() != NULL);
4560  ASSERT(current_block()->HasPredecessor());
4561  ZoneList<Expression*>* subexprs = expr->values();
4562  int length = subexprs->length();
4563  HValue* context = environment()->LookupContext();
4564  HInstruction* literal;
4565 
4566  Handle<FixedArray> literals(environment()->closure()->literals());
4567  Handle<Object> raw_boilerplate(literals->get(expr->literal_index()));
4568 
4569  if (raw_boilerplate->IsUndefined()) {
4570  raw_boilerplate = Runtime::CreateArrayLiteralBoilerplate(
4571  isolate(), literals, expr->constant_elements());
4572  if (raw_boilerplate.is_null()) {
4573  return Bailout("array boilerplate creation failed");
4574  }
4575  literals->set(expr->literal_index(), *raw_boilerplate);
4576  if (JSObject::cast(*raw_boilerplate)->elements()->map() ==
4577  isolate()->heap()->fixed_cow_array_map()) {
4578  isolate()->counters()->cow_arrays_created_runtime()->Increment();
4579  }
4580  }
4581 
4582  Handle<JSObject> boilerplate = Handle<JSObject>::cast(raw_boilerplate);
4583  ElementsKind boilerplate_elements_kind =
4584  Handle<JSObject>::cast(boilerplate)->GetElementsKind();
4585 
4586  // Check whether to use fast or slow deep-copying for boilerplate.
4587  int total_size = 0;
4588  int max_properties = HFastLiteral::kMaxLiteralProperties;
4589  if (IsFastLiteral(boilerplate,
4590  HFastLiteral::kMaxLiteralDepth,
4591  &max_properties,
4592  &total_size)) {
4593  literal = new(zone()) HFastLiteral(context,
4594  boilerplate,
4595  total_size,
4596  expr->literal_index(),
4597  expr->depth());
4598  } else {
4599  literal = new(zone()) HArrayLiteral(context,
4600  boilerplate,
4601  length,
4602  expr->literal_index(),
4603  expr->depth());
4604  }
4605 
4606  // The array is expected in the bailout environment during computation
4607  // of the property values and is the value of the entire expression.
4608  PushAndAdd(literal);
4609 
4610  HLoadElements* elements = NULL;
4611 
4612  for (int i = 0; i < length; i++) {
4613  Expression* subexpr = subexprs->at(i);
4614  // If the subexpression is a literal or a simple materialized literal it
4615  // is already set in the cloned array.
4616  if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
4617 
4618  CHECK_ALIVE(VisitForValue(subexpr));
4619  HValue* value = Pop();
4620  if (!Smi::IsValid(i)) return Bailout("Non-smi key in array literal");
4621 
4622  elements = new(zone()) HLoadElements(literal);
4623  AddInstruction(elements);
4624 
4625  HValue* key = AddInstruction(
4626  new(zone()) HConstant(Handle<Object>(Smi::FromInt(i)),
4627  Representation::Integer32()));
4628 
4629  switch (boilerplate_elements_kind) {
4630  case FAST_SMI_ELEMENTS:
4631  case FAST_HOLEY_SMI_ELEMENTS:
4632  // Smi-only arrays need a smi check.
4633  AddInstruction(new(zone()) HCheckSmi(value));
4634  // Fall through.
4635  case FAST_ELEMENTS:
4636  case FAST_HOLEY_ELEMENTS:
4637  AddInstruction(new(zone()) HStoreKeyedFastElement(
4638  elements,
4639  key,
4640  value,
4641  boilerplate_elements_kind));
4642  break;
4643  case FAST_DOUBLE_ELEMENTS:
4644  case FAST_HOLEY_DOUBLE_ELEMENTS:
4645  AddInstruction(new(zone()) HStoreKeyedFastDoubleElement(elements,
4646  key,
4647  value));
4648  break;
4649  default:
4650  UNREACHABLE();
4651  break;
4652  }
4653 
4654  AddSimulate(expr->GetIdForElement(i));
4655  }
4656  return ast_context()->ReturnValue(Pop());
4657 }
4658 
4659 
4660 // Sets the lookup result and returns true if the load/store can be inlined.
4661 static bool ComputeLoadStoreField(Handle<Map> type,
4662  Handle<String> name,
4663  LookupResult* lookup,
4664  bool is_store) {
4665  type->LookupInDescriptors(NULL, *name, lookup);
4666  if (!lookup->IsFound()) return false;
4667  if (lookup->type() == FIELD) return true;
4668  return is_store && (lookup->type() == MAP_TRANSITION) &&
4669  (type->unused_property_fields() > 0);
4670 }
4671 
4672 
4673 static int ComputeLoadStoreFieldIndex(Handle<Map> type,
4674  Handle<String> name,
4675  LookupResult* lookup) {
4676  ASSERT(lookup->type() == FIELD || lookup->type() == MAP_TRANSITION);
4677  if (lookup->type() == FIELD) {
4678  return lookup->GetLocalFieldIndexFromMap(*type);
4679  } else {
4680  Map* transition = lookup->GetTransitionMapFromMap(*type);
4681  return transition->PropertyIndexFor(*name) - type->inobject_properties();
4682  }
4683 }
4684 
4685 
4686 HInstruction* HGraphBuilder::BuildStoreNamedField(HValue* object,
4687  Handle<String> name,
4688  HValue* value,
4689  Handle<Map> type,
4690  LookupResult* lookup,
4691  bool smi_and_map_check) {
4692  ASSERT(lookup->IsFound());
4693  if (smi_and_map_check) {
4694  AddInstruction(new(zone()) HCheckNonSmi(object));
4695  AddInstruction(HCheckMaps::NewWithTransitions(object, type, zone()));
4696  }
4697 
4698  // If the property does not exist yet, we have to check that it wasn't made
4699  // read-only or turned into a setter by modifications on the prototype
4700  // chain in the meantime.
4701  if (!lookup->IsProperty()) {
4702  Object* proto = type->prototype();
4703  // First check that the prototype chain isn't affected already.
4704  LookupResult proto_result(isolate());
4705  proto->Lookup(*name, &proto_result);
4706  if (proto_result.IsProperty()) {
4707  // If the inherited property could induce readonly-ness, bail out.
4708  if (proto_result.IsReadOnly() || !proto_result.IsCacheable()) {
4709  Bailout("improper object on prototype chain for store");
4710  return NULL;
4711  }
4712  // We only need to check up to the preexisting property.
4713  proto = proto_result.holder();
4714  } else {
4715  // Otherwise, find the top prototype.
4716  while (proto->GetPrototype()->IsJSObject()) proto = proto->GetPrototype();
4717  ASSERT(proto->GetPrototype()->IsNull());
4718  }
4719  ASSERT(proto->IsJSObject());
4720  AddInstruction(new(zone()) HCheckPrototypeMaps(
4721  Handle<JSObject>(JSObject::cast(type->prototype())),
4722  Handle<JSObject>(JSObject::cast(proto))));
4723  }
4724 
4725  int index = ComputeLoadStoreFieldIndex(type, name, lookup);
4726  bool is_in_object = index < 0;
4727  int offset = index * kPointerSize;
4728  if (index < 0) {
4729  // Negative property indices are in-object properties, indexed
4730  // from the end of the fixed part of the object.
4731  offset += type->instance_size();
4732  } else {
4733  offset += FixedArray::kHeaderSize;
4734  }
4735  HStoreNamedField* instr =
4736  new(zone()) HStoreNamedField(object, name, value, is_in_object, offset);
4737  if (lookup->type() == MAP_TRANSITION) {
4738  Handle<Map> transition(lookup->GetTransitionMapFromMap(*type));
4739  instr->set_transition(transition);
4740  // TODO(fschneider): Record the new map type of the object in the IR to
4741  // enable elimination of redundant checks after the transition store.
4742  instr->SetGVNFlag(kChangesMaps);
4743  }
4744  return instr;
4745 }
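// Note (added commentary, not in the original source): the field index computed
// above is negative for in-object fields (the offset is taken backwards from
// the instance size) and non-negative for fields kept in the properties backing
// store (offset from FixedArray::kHeaderSize). A store that performs a map
// transition records the transition map on the HStoreNamedField and sets
// kChangesMaps, so map-dependent values are not treated as valid across it.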
4746 
4747 
4748 HInstruction* HGraphBuilder::BuildStoreNamedGeneric(HValue* object,
4749  Handle<String> name,
4750  HValue* value) {
4751  HValue* context = environment()->LookupContext();
4752  return new(zone()) HStoreNamedGeneric(
4753  context,
4754  object,
4755  name,
4756  value,
4757  function_strict_mode_flag());
4758 }
4759 
4760 
4761 HInstruction* HGraphBuilder::BuildStoreNamed(HValue* object,
4762  HValue* value,
4763  ObjectLiteral::Property* prop) {
4764  Literal* key = prop->key()->AsLiteral();
4765  Handle<String> name = Handle<String>::cast(key->handle());
4766  ASSERT(!name.is_null());
4767 
4768  LookupResult lookup(isolate());
4769  Handle<Map> type = prop->GetReceiverType();
4770  bool is_monomorphic = prop->IsMonomorphic() &&
4771  ComputeLoadStoreField(type, name, &lookup, true);
4772 
4773  return is_monomorphic
4774  ? BuildStoreNamedField(object, name, value, type, &lookup,
4775  true) // Needs smi and map check.
4776  : BuildStoreNamedGeneric(object, name, value);
4777 }
4778 
4779 
4780 HInstruction* HGraphBuilder::BuildStoreNamed(HValue* object,
4781  HValue* value,
4782  Expression* expr) {
4783  Property* prop = (expr->AsProperty() != NULL)
4784  ? expr->AsProperty()
4785  : expr->AsAssignment()->target()->AsProperty();
4786  Literal* key = prop->key()->AsLiteral();
4787  Handle<String> name = Handle<String>::cast(key->handle());
4788  ASSERT(!name.is_null());
4789 
4790  LookupResult lookup(isolate());
4791  SmallMapList* types = expr->GetReceiverTypes();
4792  bool is_monomorphic = expr->IsMonomorphic() &&
4793  ComputeLoadStoreField(types->first(), name, &lookup, true);
4794 
4795  return is_monomorphic
4796  ? BuildStoreNamedField(object, name, value, types->first(), &lookup,
4797  true) // Needs smi and map check.
4798  : BuildStoreNamedGeneric(object, name, value);
4799 }
4800 
4801 
4802 void HGraphBuilder::HandlePolymorphicLoadNamedField(Property* expr,
4803  HValue* object,
4804  SmallMapList* types,
4805  Handle<String> name) {
4806  int count = 0;
4807  int previous_field_offset = 0;
4808  bool previous_field_is_in_object = false;
4809  bool is_monomorphic_field = true;
4810  Handle<Map> map;
4811  LookupResult lookup(isolate());
4812  for (int i = 0; i < types->length() && count < kMaxLoadPolymorphism; ++i) {
4813  map = types->at(i);
4814  if (ComputeLoadStoreField(map, name, &lookup, false)) {
4815  int index = ComputeLoadStoreFieldIndex(map, name, &lookup);
4816  bool is_in_object = index < 0;
4817  int offset = index * kPointerSize;
4818  if (index < 0) {
4819  // Negative property indices are in-object properties, indexed
4820  // from the end of the fixed part of the object.
4821  offset += map->instance_size();
4822  } else {
4823  offset += FixedArray::kHeaderSize;
4824  }
4825  if (count == 0) {
4826  previous_field_offset = offset;
4827  previous_field_is_in_object = is_in_object;
4828  } else if (is_monomorphic_field) {
4829  is_monomorphic_field = (offset == previous_field_offset) &&
4830  (is_in_object == previous_field_is_in_object);
4831  }
4832  ++count;
4833  }
4834  }
4835 
4836  // Use monomorphic load if property lookup results in the same field index
4837  // for all maps. Requires special map check on the set of all handled maps.
4838  HInstruction* instr;
4839  if (count == types->length() && is_monomorphic_field) {
4840  AddInstruction(new(zone()) HCheckMaps(object, types, zone()));
4841  instr = BuildLoadNamedField(object, expr, map, &lookup, false);
4842  } else {
4843  HValue* context = environment()->LookupContext();
4844  instr = new(zone()) HLoadNamedFieldPolymorphic(context,
4845  object,
4846  types,
4847  name,
4848  zone());
4849  }
4850 
4851  instr->set_position(expr->position());
4852  return ast_context()->ReturnInstruction(instr, expr->id());
4853 }
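// Note (added commentary, not in the original source): if every map in the type
// feedback resolves the property to the same field slot (same offset and same
// in-object-ness), the polymorphic load degenerates to one HCheckMaps over the
// whole map set plus a single monomorphic field load; otherwise the generic
// HLoadNamedFieldPolymorphic instruction handles the dispatch at run time.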
4854 
4855 
4856 void HGraphBuilder::HandlePolymorphicStoreNamedField(Assignment* expr,
4857  HValue* object,
4858  HValue* value,
4859  SmallMapList* types,
4860  Handle<String> name) {
4861  // TODO(ager): We should recognize when the prototype chains for different
4862  // maps are identical. In that case we can avoid repeatedly generating the
4863  // same prototype map checks.
4864  int count = 0;
4865  HBasicBlock* join = NULL;
4866  for (int i = 0; i < types->length() && count < kMaxStorePolymorphism; ++i) {
4867  Handle<Map> map = types->at(i);
4868  LookupResult lookup(isolate());
4869  if (ComputeLoadStoreField(map, name, &lookup, true)) {
4870  if (count == 0) {
4871  AddInstruction(new(zone()) HCheckNonSmi(object)); // Only needed once.
4872  join = graph()->CreateBasicBlock();
4873  }
4874  ++count;
4875  HBasicBlock* if_true = graph()->CreateBasicBlock();
4876  HBasicBlock* if_false = graph()->CreateBasicBlock();
4877  HCompareMap* compare =
4878  new(zone()) HCompareMap(object, map, if_true, if_false);
4879  current_block()->Finish(compare);
4880 
4881  set_current_block(if_true);
4882  HInstruction* instr;
4883  CHECK_ALIVE(instr =
4884  BuildStoreNamedField(object, name, value, map, &lookup, false));
4885  instr->set_position(expr->position());
4886  // Goto will add the HSimulate for the store.
4887  AddInstruction(instr);
4888  if (!ast_context()->IsEffect()) Push(value);
4889  current_block()->Goto(join);
4890 
4891  set_current_block(if_false);
4892  }
4893  }
4894 
4895  // Finish up. Unconditionally deoptimize if we've handled all the maps we
4896  // know about and do not want to handle ones we've never seen. Otherwise
4897  // use a generic IC.
4898  if (count == types->length() && FLAG_deoptimize_uncommon_cases) {
4899  current_block()->FinishExitWithDeoptimization(HDeoptimize::kNoUses);
4900  } else {
4901  HInstruction* instr = BuildStoreNamedGeneric(object, name, value);
4902  instr->set_position(expr->position());
4903  AddInstruction(instr);
4904 
4905  if (join != NULL) {
4906  if (!ast_context()->IsEffect()) Push(value);
4907  current_block()->Goto(join);
4908  } else {
4909  // The HSimulate for the store should not see the stored value in
4910  // effect contexts (it is not materialized at expr->id() in the
4911  // unoptimized code).
4912  if (instr->HasObservableSideEffects()) {
4913  if (ast_context()->IsEffect()) {
4914  AddSimulate(expr->id());
4915  } else {
4916  Push(value);
4917  AddSimulate(expr->id());
4918  Drop(1);
4919  }
4920  }
4921  return ast_context()->ReturnValue(value);
4922  }
4923  }
4924 
4925  ASSERT(join != NULL);
4926  join->SetJoinId(expr->id());
4927  set_current_block(join);
4928  if (!ast_context()->IsEffect()) return ast_context()->ReturnValue(Pop());
4929 }
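// Note (added commentary, not in the original source): the polymorphic store is
// lowered to a chain of HCompareMap branches, one per feedback map with a
// usable field lookup, each storing through BuildStoreNamedField and jumping to
// a common join. If all feedback maps were handled and
// --deoptimize-uncommon-cases is on, the fall-through arm deoptimizes;
// otherwise it falls back to a generic named store.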
4930 
4931 
4932 void HGraphBuilder::HandlePropertyAssignment(Assignment* expr) {
4933  Property* prop = expr->target()->AsProperty();
4934  ASSERT(prop != NULL);
4935  expr->RecordTypeFeedback(oracle(), zone());
4936  CHECK_ALIVE(VisitForValue(prop->obj()));
4937 
4938  HValue* value = NULL;
4939  HInstruction* instr = NULL;
4940 
4941  if (prop->key()->IsPropertyName()) {
4942  // Named store.
4943  CHECK_ALIVE(VisitForValue(expr->value()));
4944  value = Pop();
4945  HValue* object = Pop();
4946 
4947  Literal* key = prop->key()->AsLiteral();
4948  Handle<String> name = Handle<String>::cast(key->handle());
4949  ASSERT(!name.is_null());
4950 
4951  SmallMapList* types = expr->GetReceiverTypes();
4952  if (expr->IsMonomorphic()) {
4953  CHECK_ALIVE(instr = BuildStoreNamed(object, value, expr));
4954 
4955  } else if (types != NULL && types->length() > 1) {
4956  HandlePolymorphicStoreNamedField(expr, object, value, types, name);
4957  return;
4958 
4959  } else {
4960  instr = BuildStoreNamedGeneric(object, name, value);
4961  }
4962 
4963  } else {
4964  // Keyed store.
4965  CHECK_ALIVE(VisitForValue(prop->key()));
4966  CHECK_ALIVE(VisitForValue(expr->value()));
4967  value = Pop();
4968  HValue* key = Pop();
4969  HValue* object = Pop();
4970  bool has_side_effects = false;
4971  HandleKeyedElementAccess(object, key, value, expr, expr->AssignmentId(),
4972  expr->position(),
4973  true, // is_store
4974  &has_side_effects);
4975  Push(value);
4976  ASSERT(has_side_effects); // Stores always have side effects.
4977  AddSimulate(expr->AssignmentId());
4978  return ast_context()->ReturnValue(Pop());
4979  }
4980  Push(value);
4981  instr->set_position(expr->position());
4982  AddInstruction(instr);
4983  if (instr->HasObservableSideEffects()) AddSimulate(expr->AssignmentId());
4984  return ast_context()->ReturnValue(Pop());
4985 }
4986 
4987 
4988 // Because not every expression has a position and there is no common
4989 // superclass of Assignment and CountOperation, we cannot just pass the
4990 // owning expression instead of position and ast_id separately.
4991 void HGraphBuilder::HandleGlobalVariableAssignment(Variable* var,
4992  HValue* value,
4993  int position,
4994  int ast_id) {
4995  LookupResult lookup(isolate());
4996  GlobalPropertyAccess type = LookupGlobalProperty(var, &lookup, true);
4997  if (type == kUseCell) {
4998  Handle<GlobalObject> global(info()->global_object());
4999  Handle<JSGlobalPropertyCell> cell(global->GetPropertyCell(&lookup));
5000  HInstruction* instr =
5001  new(zone()) HStoreGlobalCell(value, cell, lookup.GetPropertyDetails());
5002  instr->set_position(position);
5003  AddInstruction(instr);
5004  if (instr->HasObservableSideEffects()) AddSimulate(ast_id);
5005  } else {
5006  HValue* context = environment()->LookupContext();
5007  HGlobalObject* global_object = new(zone()) HGlobalObject(context);
5008  AddInstruction(global_object);
5009  HStoreGlobalGeneric* instr =
5010  new(zone()) HStoreGlobalGeneric(context,
5011  global_object,
5012  var->name(),
5013  value,
5014  function_strict_mode_flag());
5015  instr->set_position(position);
5016  AddInstruction(instr);
5017  ASSERT(instr->HasObservableSideEffects());
5018  if (instr->HasObservableSideEffects()) AddSimulate(ast_id);
5019  }
5020 }
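// Note (added commentary, not in the original source): when
// LookupGlobalProperty resolves the variable to a writable, normal own property
// of the global object, the store goes straight to its JSGlobalPropertyCell via
// HStoreGlobalCell; anything else falls back to HStoreGlobalGeneric, which
// always has observable side effects and therefore always gets a simulate.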
5021 
5022 
5023 void HGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
5024  Expression* target = expr->target();
5025  VariableProxy* proxy = target->AsVariableProxy();
5026  Property* prop = target->AsProperty();
5027  ASSERT(proxy == NULL || prop == NULL);
5028 
5029  // We have a second position recorded in the FullCodeGenerator to have
5030  // type feedback for the binary operation.
5031  BinaryOperation* operation = expr->binary_operation();
5032 
5033  if (proxy != NULL) {
5034  Variable* var = proxy->var();
5035  if (var->mode() == LET) {
5036  return Bailout("unsupported let compound assignment");
5037  }
5038 
5039  CHECK_ALIVE(VisitForValue(operation));
5040 
5041  switch (var->location()) {
5042  case Variable::UNALLOCATED:
5043  HandleGlobalVariableAssignment(var,
5044  Top(),
5045  expr->position(),
5046  expr->AssignmentId());
5047  break;
5048 
5049  case Variable::PARAMETER:
5050  case Variable::LOCAL:
5051  if (var->mode() == CONST) {
5052  return Bailout("unsupported const compound assignment");
5053  }
5054  Bind(var, Top());
5055  break;
5056 
5057  case Variable::CONTEXT: {
5058  // Bail out if we try to mutate a parameter value in a function
5059  // using the arguments object. We do not (yet) correctly handle the
5060  // arguments property of the function.
5061  if (info()->scope()->arguments() != NULL) {
5062  // Parameters will be allocated to context slots. We have no
5063  // direct way to detect that the variable is a parameter so we do
5064  // a linear search of the parameter variables.
5065  int count = info()->scope()->num_parameters();
5066  for (int i = 0; i < count; ++i) {
5067  if (var == info()->scope()->parameter(i)) {
5068  Bailout(
5069  "assignment to parameter, function uses arguments object");
5070  }
5071  }
5072  }
5073 
5074  HStoreContextSlot::Mode mode;
5075 
5076  switch (var->mode()) {
5077  case LET:
5078  mode = HStoreContextSlot::kCheckDeoptimize;
5079  break;
5080  case CONST:
5081  return ast_context()->ReturnValue(Pop());
5082  case CONST_HARMONY:
5083  // This case is checked statically so no need to
5084  // perform checks here
5085  UNREACHABLE();
5086  default:
5087  mode = HStoreContextSlot::kNoCheck;
5088  }
5089 
5090  HValue* context = BuildContextChainWalk(var);
5091  HStoreContextSlot* instr =
5092  new(zone()) HStoreContextSlot(context, var->index(), mode, Top());
5093  AddInstruction(instr);
5094  if (instr->HasObservableSideEffects()) {
5095  AddSimulate(expr->AssignmentId());
5096  }
5097  break;
5098  }
5099 
5100  case Variable::LOOKUP:
5101  return Bailout("compound assignment to lookup slot");
5102  }
5103  return ast_context()->ReturnValue(Pop());
5104 
5105  } else if (prop != NULL) {
5106  prop->RecordTypeFeedback(oracle(), zone());
5107 
5108  if (prop->key()->IsPropertyName()) {
5109  // Named property.
5110  CHECK_ALIVE(VisitForValue(prop->obj()));
5111  HValue* obj = Top();
5112 
5113  HInstruction* load = NULL;
5114  if (prop->IsMonomorphic()) {
5115  Handle<String> name = prop->key()->AsLiteral()->AsPropertyName();
5116  Handle<Map> map = prop->GetReceiverTypes()->first();
5117  load = BuildLoadNamed(obj, prop, map, name);
5118  } else {
5119  load = BuildLoadNamedGeneric(obj, prop);
5120  }
5121  PushAndAdd(load);
5122  if (load->HasObservableSideEffects()) AddSimulate(expr->CompoundLoadId());
5123 
5124  CHECK_ALIVE(VisitForValue(expr->value()));
5125  HValue* right = Pop();
5126  HValue* left = Pop();
5127 
5128  HInstruction* instr = BuildBinaryOperation(operation, left, right);
5129  PushAndAdd(instr);
5130  if (instr->HasObservableSideEffects()) AddSimulate(operation->id());
5131 
5132  HInstruction* store;
5133  CHECK_ALIVE(store = BuildStoreNamed(obj, instr, prop));
5134  AddInstruction(store);
5135  // Drop the simulated receiver and value. Return the value.
5136  Drop(2);
5137  Push(instr);
5138  if (store->HasObservableSideEffects()) AddSimulate(expr->AssignmentId());
5139  return ast_context()->ReturnValue(Pop());
5140 
5141  } else {
5142  // Keyed property.
5143  CHECK_ALIVE(VisitForValue(prop->obj()));
5144  CHECK_ALIVE(VisitForValue(prop->key()));
5145  HValue* obj = environment()->ExpressionStackAt(1);
5146  HValue* key = environment()->ExpressionStackAt(0);
5147 
5148  bool has_side_effects = false;
5149  HValue* load = HandleKeyedElementAccess(
5150  obj, key, NULL, prop, expr->CompoundLoadId(), RelocInfo::kNoPosition,
5151  false, // is_store
5152  &has_side_effects);
5153  Push(load);
5154  if (has_side_effects) AddSimulate(expr->CompoundLoadId());
5155 
5156 
5157  CHECK_ALIVE(VisitForValue(expr->value()));
5158  HValue* right = Pop();
5159  HValue* left = Pop();
5160 
5161  HInstruction* instr = BuildBinaryOperation(operation, left, right);
5162  PushAndAdd(instr);
5163  if (instr->HasObservableSideEffects()) AddSimulate(operation->id());
5164 
5165  expr->RecordTypeFeedback(oracle(), zone());
5166  HandleKeyedElementAccess(obj, key, instr, expr, expr->AssignmentId(),
5167  RelocInfo::kNoPosition,
5168  true, // is_store
5169  &has_side_effects);
5170 
5171  // Drop the simulated receiver, key, and value. Return the value.
5172  Drop(3);
5173  Push(instr);
5174  ASSERT(has_side_effects); // Stores always have side effects.
5175  AddSimulate(expr->AssignmentId());
5176  return ast_context()->ReturnValue(Pop());
5177  }
5178 
5179  } else {
5180  return Bailout("invalid lhs in compound assignment");
5181  }
5182 }
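// Illustrative summary, not from the original source, of the three target
// shapes handled above, in JavaScript terms:
//
//   x += 1;       // variable target: visit the BinaryOperation, then store
//                 //   through the global/local/context paths
//   o.name += 1;  // named property: named load, BuildBinaryOperation, named store
//   a[i] += 1;    // keyed property: keyed load, BuildBinaryOperation, keyed store
//
// In the property cases the receiver (and key) are kept on the expression
// stack across the load and the binary operation, and dropped only after the
// store, so a deoptimization can resume in the middle of the compound
// assignment.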
5183 
5184 
5185 void HGraphBuilder::VisitAssignment(Assignment* expr) {
5186  ASSERT(!HasStackOverflow());
5187  ASSERT(current_block() != NULL);
5188  ASSERT(current_block()->HasPredecessor());
5189  VariableProxy* proxy = expr->target()->AsVariableProxy();
5190  Property* prop = expr->target()->AsProperty();
5191  ASSERT(proxy == NULL || prop == NULL);
5192 
5193  if (expr->is_compound()) {
5194  HandleCompoundAssignment(expr);
5195  return;
5196  }
5197 
5198  if (prop != NULL) {
5199  HandlePropertyAssignment(expr);
5200  } else if (proxy != NULL) {
5201  Variable* var = proxy->var();
5202 
5203  if (var->mode() == CONST) {
5204  if (expr->op() != Token::INIT_CONST) {
5205  CHECK_ALIVE(VisitForValue(expr->value()));
5206  return ast_context()->ReturnValue(Pop());
5207  }
5208 
5209  if (var->IsStackAllocated()) {
5210  // We insert a use of the old value to detect unsupported uses of const
5211  // variables (e.g. initialization inside a loop).
5212  HValue* old_value = environment()->Lookup(var);
5213  AddInstruction(new(zone()) HUseConst(old_value));
5214  }
5215  } else if (var->mode() == CONST_HARMONY) {
5216  if (expr->op() != Token::INIT_CONST_HARMONY) {
5217  return Bailout("non-initializer assignment to const");
5218  }
5219  }
5220 
5221  if (proxy->IsArguments()) return Bailout("assignment to arguments");
5222 
5223  // Handle the assignment.
5224  switch (var->location()) {
5225  case Variable::UNALLOCATED:
5226  CHECK_ALIVE(VisitForValue(expr->value()));
5227  HandleGlobalVariableAssignment(var,
5228  Top(),
5229  expr->position(),
5230  expr->AssignmentId());
5231  return ast_context()->ReturnValue(Pop());
5232 
5233  case Variable::PARAMETER:
5234  case Variable::LOCAL: {
5235  // Perform an initialization check for let declared variables
5236  // or parameters.
5237  if (var->mode() == LET && expr->op() == Token::ASSIGN) {
5238  HValue* env_value = environment()->Lookup(var);
5239  if (env_value == graph()->GetConstantHole()) {
5240  return Bailout("assignment to let variable before initialization");
5241  }
5242  }
5243  // We do not allow the arguments object to occur in a context where it
5244  // may escape, but assignments to stack-allocated locals are
5245  // permitted.
5246  CHECK_ALIVE(VisitForValue(expr->value(), ARGUMENTS_ALLOWED));
5247  HValue* value = Pop();
5248  Bind(var, value);
5249  return ast_context()->ReturnValue(value);
5250  }
5251 
5252  case Variable::CONTEXT: {
5253  // Bail out if we try to mutate a parameter value in a function using
5254  // the arguments object. We do not (yet) correctly handle the
5255  // arguments property of the function.
5256  if (info()->scope()->arguments() != NULL) {
5257  // Parameters will be rewritten to context slots. We have no direct way
5258  // to detect that the variable is a parameter.
5259  int count = info()->scope()->num_parameters();
5260  for (int i = 0; i < count; ++i) {
5261  if (var == info()->scope()->parameter(i)) {
5262  return Bailout("assignment to parameter in arguments object");
5263  }
5264  }
5265  }
5266 
5267  CHECK_ALIVE(VisitForValue(expr->value()));
5268  HStoreContextSlot::Mode mode;
5269  if (expr->op() == Token::ASSIGN) {
5270  switch (var->mode()) {
5271  case LET:
5272  mode = HStoreContextSlot::kCheckDeoptimize;
5273  break;
5274  case CONST:
5275  return ast_context()->ReturnValue(Pop());
5276  case CONST_HARMONY:
5277  // This case is checked statically so no need to
5278  // perform checks here
5279  UNREACHABLE();
5280  default:
5281  mode = HStoreContextSlot::kNoCheck;
5282  }
5283  } else if (expr->op() == Token::INIT_VAR ||
5284  expr->op() == Token::INIT_LET ||
5285  expr->op() == Token::INIT_CONST_HARMONY) {
5286  mode = HStoreContextSlot::kNoCheck;
5287  } else {
5288  ASSERT(expr->op() == Token::INIT_CONST);
5289 
5290  mode = HStoreContextSlot::kCheckIgnoreAssignment;
5291  }
5292 
5293  HValue* context = BuildContextChainWalk(var);
5294  HStoreContextSlot* instr = new(zone()) HStoreContextSlot(
5295  context, var->index(), mode, Top());
5296  AddInstruction(instr);
5297  if (instr->HasObservableSideEffects()) {
5298  AddSimulate(expr->AssignmentId());
5299  }
5300  return ast_context()->ReturnValue(Pop());
5301  }
5302 
5303  case Variable::LOOKUP:
5304  return Bailout("assignment to LOOKUP variable");
5305  }
5306  } else {
5307  return Bailout("invalid left-hand side in assignment");
5308  }
5309 }
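// Illustrative note, not from the original source: two of the early checks in
// VisitAssignment correspond to these situations:
//
//   const c = 1; c = 2;   // classic (non-Harmony) const: the right-hand side
//                         //   is evaluated and returned, but never stored
//
// and, for LET parameters/locals, an assignment that is reached while the
// environment slot still holds the hole constant (the declaration has not run
// yet) causes a bailout rather than an unchecked store.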
5310 
5311 
5312 void HGraphBuilder::VisitThrow(Throw* expr) {
5313  ASSERT(!HasStackOverflow());
5314  ASSERT(current_block() != NULL);
5315  ASSERT(current_block()->HasPredecessor());
5316  // We don't optimize functions with invalid left-hand sides in
5317  // assignments, count operations, or for-in. Consequently throw can
5318  // currently only occur in an effect context.
5319  ASSERT(ast_context()->IsEffect());
5320  CHECK_ALIVE(VisitForValue(expr->exception()));
5321 
5322  HValue* context = environment()->LookupContext();
5323  HValue* value = environment()->Pop();
5324  HThrow* instr = new(zone()) HThrow(context, value);
5325  instr->set_position(expr->position());
5326  AddInstruction(instr);
5327  AddSimulate(expr->id());
5328  current_block()->FinishExit(new(zone()) HAbnormalExit);
5329  set_current_block(NULL);
5330 }
5331 
5332 
5333 HLoadNamedField* HGraphBuilder::BuildLoadNamedField(HValue* object,
5334  Property* expr,
5335  Handle<Map> type,
5336  LookupResult* lookup,
5337  bool smi_and_map_check) {
5338  if (smi_and_map_check) {
5339  AddInstruction(new(zone()) HCheckNonSmi(object));
5340  AddInstruction(HCheckMaps::NewWithTransitions(object, type, zone()));
5341  }
5342 
5343  int index = lookup->GetLocalFieldIndexFromMap(*type);
5344  if (index < 0) {
5345  // Negative property indices are in-object properties, indexed
5346  // from the end of the fixed part of the object.
5347  int offset = (index * kPointerSize) + type->instance_size();
5348  return new(zone()) HLoadNamedField(object, true, offset);
5349  } else {
5350  // Non-negative property indices are in the properties array.
5351  int offset = (index * kPointerSize) + FixedArray::kHeaderSize;
5352  return new(zone()) HLoadNamedField(object, false, offset);
5353  }
5354 }
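// Worked example, not from the original source, of the offset arithmetic in
// BuildLoadNamedField, assuming a 32-bit build where kPointerSize == 4:
//
//   in-object field, index == -1:
//     offset = (-1 * 4) + type->instance_size()     // last in-object slot
//   properties-array field, index == 0:
//     offset = (0 * 4) + FixedArray::kHeaderSize    // first out-of-line slot
//
// The boolean passed to HLoadNamedField records which case applies: true reads
// from the object itself, false reads from its out-of-line properties array.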
5355 
5356 
5357 HInstruction* HGraphBuilder::BuildLoadNamedGeneric(HValue* obj,
5358  Property* expr) {
5359  if (expr->IsUninitialized() && !FLAG_always_opt) {
5360  AddInstruction(new(zone()) HSoftDeoptimize);
5361  current_block()->MarkAsDeoptimizing();
5362  }
5363  ASSERT(expr->key()->IsPropertyName());
5364  Handle<Object> name = expr->key()->AsLiteral()->handle();
5365  HValue* context = environment()->LookupContext();
5366  return new(zone()) HLoadNamedGeneric(context, obj, name);
5367 }
5368 
5369 
5370 HInstruction* HGraphBuilder::BuildLoadNamed(HValue* obj,
5371  Property* expr,
5372  Handle<Map> map,
5373  Handle<String> name) {
5374  LookupResult lookup(isolate());
5375  map->LookupInDescriptors(NULL, *name, &lookup);
5376  if (lookup.IsFound() && lookup.type() == FIELD) {
5377  return BuildLoadNamedField(obj,
5378  expr,
5379  map,
5380  &lookup,
5381  true);
5382  } else if (lookup.IsFound() && lookup.type() == CONSTANT_FUNCTION) {
5383  AddInstruction(new(zone()) HCheckNonSmi(obj));
5384  AddInstruction(HCheckMaps::NewWithTransitions(obj, map, zone()));
5385  Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*map));
5386  return new(zone()) HConstant(function, Representation::Tagged());
5387  } else {
5388  return BuildLoadNamedGeneric(obj, expr);
5389  }
5390 }
5391 
5392 
5393 HInstruction* HGraphBuilder::BuildLoadKeyedGeneric(HValue* object,
5394  HValue* key) {
5395  HValue* context = environment()->LookupContext();
5396  return new(zone()) HLoadKeyedGeneric(context, object, key);
5397 }
5398 
5399 
5400 HInstruction* HGraphBuilder::BuildExternalArrayElementAccess(
5401  HValue* external_elements,
5402  HValue* checked_key,
5403  HValue* val,
5404  ElementsKind elements_kind,
5405  bool is_store) {
5406  if (is_store) {
5407  ASSERT(val != NULL);
5408  switch (elements_kind) {
5409  case EXTERNAL_PIXEL_ELEMENTS: {
5410  val = AddInstruction(new(zone()) HClampToUint8(val));
5411  break;
5412  }
5413  case EXTERNAL_BYTE_ELEMENTS:
5414  case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
5415  case EXTERNAL_SHORT_ELEMENTS:
5416  case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
5417  case EXTERNAL_INT_ELEMENTS:
5418  case EXTERNAL_UNSIGNED_INT_ELEMENTS: {
5419  if (!val->representation().IsInteger32()) {
5420  val = AddInstruction(new(zone()) HChange(
5421  val,
5422  Representation::Integer32(),
5423  true, // Truncate to int32.
5424  false)); // Don't deoptimize undefined (irrelevant here).
5425  }
5426  break;
5427  }
5428  case EXTERNAL_FLOAT_ELEMENTS:
5429  case EXTERNAL_DOUBLE_ELEMENTS:
5430  break;
5431  case FAST_SMI_ELEMENTS:
5432  case FAST_ELEMENTS:
5433  case FAST_DOUBLE_ELEMENTS:
5434  case FAST_HOLEY_SMI_ELEMENTS:
5435  case FAST_HOLEY_ELEMENTS:
5436  case FAST_HOLEY_DOUBLE_ELEMENTS:
5437  case DICTIONARY_ELEMENTS:
5438  case NON_STRICT_ARGUMENTS_ELEMENTS:
5439  UNREACHABLE();
5440  break;
5441  }
5442  return new(zone()) HStoreKeyedSpecializedArrayElement(
5443  external_elements, checked_key, val, elements_kind);
5444  } else {
5445  ASSERT(val == NULL);
5446  return new(zone()) HLoadKeyedSpecializedArrayElement(
5447  external_elements, checked_key, elements_kind);
5448  }
5449 }
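// Illustrative note, not from the original source: on the store path the value
// is first coerced to what the external (typed) array can represent -- pixel
// arrays clamp to 0..255 via HClampToUint8, and the integer kinds truncate to
// int32 via HChange -- before HStoreKeyedSpecializedArrayElement is emitted.
// Roughly, in JavaScript terms:
//
//   pixels[0] = 300;   // stored as 255 (clamped)
//   int32s[0] = 1.7;   // stored as the truncated int32 value 1
//
// Loads take the other branch and carry no value operand at all (val == NULL).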
5450 
5451 
5452 HInstruction* HGraphBuilder::BuildFastElementAccess(HValue* elements,
5453  HValue* checked_key,
5454  HValue* val,
5455  ElementsKind elements_kind,
5456  bool is_store) {
5457  if (is_store) {
5458  ASSERT(val != NULL);
5459  switch (elements_kind) {
5460  case FAST_DOUBLE_ELEMENTS:
5461  case FAST_HOLEY_DOUBLE_ELEMENTS:
5462  return new(zone()) HStoreKeyedFastDoubleElement(
5463  elements, checked_key, val);
5464  case FAST_SMI_ELEMENTS:
5465  case FAST_HOLEY_SMI_ELEMENTS:
5466  // Smi-only arrays need a smi check.
5467  AddInstruction(new(zone()) HCheckSmi(val));
5468  // Fall through.
5469  case FAST_ELEMENTS:
5470  case FAST_HOLEY_ELEMENTS:
5471  return new(zone()) HStoreKeyedFastElement(
5472  elements, checked_key, val, elements_kind);
5473  default:
5474  UNREACHABLE();
5475  return NULL;
5476  }
5477  }
5478  // It's an element load (!is_store).
5479  HoleCheckMode mode = IsFastPackedElementsKind(elements_kind) ?
5480  OMIT_HOLE_CHECK :
5481  PERFORM_HOLE_CHECK;
5482  if (IsFastDoubleElementsKind(elements_kind)) {
5483  return new(zone()) HLoadKeyedFastDoubleElement(elements, checked_key, mode);
5484  } else { // Smi or Object elements.
5485  return new(zone()) HLoadKeyedFastElement(elements, checked_key,
5486  elements_kind);
5487  }
5488 }
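// Illustrative note, not from the original source: the HoleCheckMode above
// only matters for loads from arrays whose elements kind is "holey", e.g.
//
//   var packed = [1, 2, 3];   // packed kind: the hole check can be omitted
//   var holey  = [1, , 3];    // holey kind: a double load must detect holes
//
// which is why the mode is derived from IsFastPackedElementsKind and is only
// threaded into the fast-double load instruction.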
5489 
5490 
5491 HInstruction* HGraphBuilder::BuildMonomorphicElementAccess(HValue* object,
5492  HValue* key,
5493  HValue* val,
5494  HValue* dependency,
5495  Handle<Map> map,
5496  bool is_store) {
5497  HInstruction* mapcheck =
5498  AddInstruction(new(zone()) HCheckMaps(object, map, zone(), dependency));
5499  // No GVNFlag is necessary for ElementsKind if there is an explicit dependency
5500  // on a HElementsTransition instruction. The flag can also be removed if the
5501  // map to check has FAST_HOLEY_ELEMENTS, since there can be no further
5502  // ElementsKind transitions. Finally, the dependency can be removed for stores
5503  // for FAST_ELEMENTS, since a transition to HOLEY elements won't change the
5504  // generated store code.
5505  if (dependency ||
5506  (map->elements_kind() == FAST_HOLEY_ELEMENTS) ||
5507  (map->elements_kind() == FAST_ELEMENTS && is_store)) {
5508  mapcheck->ClearGVNFlag(kDependsOnElementsKind);
5509  }
5510  bool fast_smi_only_elements = map->has_fast_smi_elements();
5511  bool fast_elements = map->has_fast_object_elements();
5512  HInstruction* elements = AddInstruction(new(zone()) HLoadElements(object));
5513  if (is_store && (fast_elements || fast_smi_only_elements)) {
5514  HCheckMaps* check_cow_map = new(zone()) HCheckMaps(
5515  elements, isolate()->factory()->fixed_array_map(), zone());
5516  check_cow_map->ClearGVNFlag(kDependsOnElementsKind);
5517  AddInstruction(check_cow_map);
5518  }
5519  HInstruction* length = NULL;
5520  HInstruction* checked_key = NULL;
5521  if (map->has_external_array_elements()) {
5522  length = AddInstruction(new(zone()) HFixedArrayBaseLength(elements));
5523  checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length));
5524  HLoadExternalArrayPointer* external_elements =
5525  new(zone()) HLoadExternalArrayPointer(elements);
5526  AddInstruction(external_elements);
5527  return BuildExternalArrayElementAccess(external_elements, checked_key,
5528  val, map->elements_kind(), is_store);
5529  }
5530  ASSERT(fast_smi_only_elements ||
5531  fast_elements ||
5532  map->has_fast_double_elements());
5533  if (map->instance_type() == JS_ARRAY_TYPE) {
5534  length = AddInstruction(new(zone()) HJSArrayLength(object, mapcheck,
5535  HType::Smi()));
5536  } else {
5537  length = AddInstruction(new(zone()) HFixedArrayBaseLength(elements));
5538  }
5539  checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length));
5540  return BuildFastElementAccess(elements, checked_key, val,
5541  map->elements_kind(), is_store);
5542 }
5543 
5544 
5545 HValue* HGraphBuilder::HandlePolymorphicElementAccess(HValue* object,
5546  HValue* key,
5547  HValue* val,
5548  Expression* prop,
5549  int ast_id,
5550  int position,
5551  bool is_store,
5552  bool* has_side_effects) {
5553  *has_side_effects = false;
5554  AddInstruction(new(zone()) HCheckNonSmi(object));
5555  SmallMapList* maps = prop->GetReceiverTypes();
5556  bool todo_external_array = false;
5557 
5558  static const int kNumElementTypes = kElementsKindCount;
5559  bool type_todo[kNumElementTypes];
5560  for (int i = 0; i < kNumElementTypes; ++i) {
5561  type_todo[i] = false;
5562  }
5563 
5564  // Elements_kind transition support.
5565  MapHandleList transition_target(maps->length());
5566  // Collect possible transition targets.
5567  MapHandleList possible_transitioned_maps(maps->length());
5568  for (int i = 0; i < maps->length(); ++i) {
5569  Handle<Map> map = maps->at(i);
5570  ElementsKind elements_kind = map->elements_kind();
5571  if (IsFastElementsKind(elements_kind) &&
5572  elements_kind != GetInitialFastElementsKind()) {
5573  possible_transitioned_maps.Add(map);
5574  }
5575  }
5576  // Get transition target for each map (NULL == no transition).
5577  for (int i = 0; i < maps->length(); ++i) {
5578  Handle<Map> map = maps->at(i);
5579  Handle<Map> transitioned_map =
5580  map->FindTransitionedMap(&possible_transitioned_maps);
5581  transition_target.Add(transitioned_map);
5582  }
5583 
5584  int num_untransitionable_maps = 0;
5585  Handle<Map> untransitionable_map;
5586  HTransitionElementsKind* transition = NULL;
5587  for (int i = 0; i < maps->length(); ++i) {
5588  Handle<Map> map = maps->at(i);
5589  ASSERT(map->IsMap());
5590  if (!transition_target.at(i).is_null()) {
5591  ASSERT(Map::IsValidElementsTransition(
5592  map->elements_kind(),
5593  transition_target.at(i)->elements_kind()));
5594  transition = new(zone()) HTransitionElementsKind(
5595  object, map, transition_target.at(i));
5596  AddInstruction(transition);
5597  } else {
5598  type_todo[map->elements_kind()] = true;
5599  if (IsExternalArrayElementsKind(map->elements_kind())) {
5600  todo_external_array = true;
5601  }
5602  num_untransitionable_maps++;
5603  untransitionable_map = map;
5604  }
5605  }
5606 
5607  // If only one map is left after transitioning, handle this case
5608  // monomorphically.
5609  if (num_untransitionable_maps == 1) {
5610  HInstruction* instr = NULL;
5611  if (untransitionable_map->has_slow_elements_kind()) {
5612  instr = AddInstruction(is_store ? BuildStoreKeyedGeneric(object, key, val)
5613  : BuildLoadKeyedGeneric(object, key));
5614  } else {
5615  instr = AddInstruction(BuildMonomorphicElementAccess(
5616  object, key, val, transition, untransitionable_map, is_store));
5617  }
5618  *has_side_effects |= instr->HasObservableSideEffects();
5619  instr->set_position(position);
5620  return is_store ? NULL : instr;
5621  }
5622 
5624  HBasicBlock* join = graph()->CreateBasicBlock();
5625 
5626  HInstruction* elements_kind_instr =
5627  AddInstruction(new(zone()) HElementsKind(object));
5628  HCompareConstantEqAndBranch* elements_kind_branch = NULL;
5629  HInstruction* elements = AddInstruction(new(zone()) HLoadElements(object));
5630  HLoadExternalArrayPointer* external_elements = NULL;
5631  HInstruction* checked_key = NULL;
5632 
5633  // Generated code assumes that FAST_* and DICTIONARY_ELEMENTS ElementsKinds
5634  // are handled before external arrays.
5639 
5640  for (ElementsKind elements_kind = FIRST_ELEMENTS_KIND;
5641  elements_kind <= LAST_ELEMENTS_KIND;
5642  elements_kind = ElementsKind(elements_kind + 1)) {
5643  // After having handled FAST_* and DICTIONARY_ELEMENTS, we need to add some
5644  // code that's executed for all external array cases.
5647  if (elements_kind == FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND
5648  && todo_external_array) {
5649  HInstruction* length =
5650  AddInstruction(new(zone()) HFixedArrayBaseLength(elements));
5651  checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length));
5652  external_elements = new(zone()) HLoadExternalArrayPointer(elements);
5653  AddInstruction(external_elements);
5654  }
5655  if (type_todo[elements_kind]) {
5656  HBasicBlock* if_true = graph()->CreateBasicBlock();
5657  HBasicBlock* if_false = graph()->CreateBasicBlock();
5658  elements_kind_branch = new(zone()) HCompareConstantEqAndBranch(
5659  elements_kind_instr, elements_kind, Token::EQ_STRICT);
5660  elements_kind_branch->SetSuccessorAt(0, if_true);
5661  elements_kind_branch->SetSuccessorAt(1, if_false);
5662  current_block()->Finish(elements_kind_branch);
5663 
5664  set_current_block(if_true);
5665  HInstruction* access;
5666  if (IsFastElementsKind(elements_kind)) {
5667  if (is_store && !IsFastDoubleElementsKind(elements_kind)) {
5668  AddInstruction(new(zone()) HCheckMaps(
5669  elements, isolate()->factory()->fixed_array_map(),
5670  zone(), elements_kind_branch));
5671  }
5672  // TODO(jkummerow): The need for these two blocks could be avoided
5673  // in one of two ways:
5674  // (1) Introduce ElementsKinds for JSArrays that are distinct from
5675  // those for fast objects.
5676  // (2) Put the common instructions into a third "join" block. This
5677  // requires additional AST IDs that we can deopt to from inside
5678  // that join block. They must be added to the Property class (when
5679  // it's a keyed property) and registered in the full codegen.
5680  HBasicBlock* if_jsarray = graph()->CreateBasicBlock();
5681  HBasicBlock* if_fastobject = graph()->CreateBasicBlock();
5682  HHasInstanceTypeAndBranch* typecheck =
5683  new(zone()) HHasInstanceTypeAndBranch(object, JS_ARRAY_TYPE);
5684  typecheck->SetSuccessorAt(0, if_jsarray);
5685  typecheck->SetSuccessorAt(1, if_fastobject);
5686  current_block()->Finish(typecheck);
5687 
5688  set_current_block(if_jsarray);
5689  HInstruction* length;
5690  length = AddInstruction(new(zone()) HJSArrayLength(object, typecheck,
5691  HType::Smi()));
5692  checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length));
5693  access = AddInstruction(BuildFastElementAccess(
5694  elements, checked_key, val, elements_kind, is_store));
5695  if (!is_store) {
5696  Push(access);
5697  }
5698 
5699  *has_side_effects |= access->HasObservableSideEffects();
5700  if (position != -1) {
5701  access->set_position(position);
5702  }
5703  if_jsarray->Goto(join);
5704 
5705  set_current_block(if_fastobject);
5706  length = AddInstruction(new(zone()) HFixedArrayBaseLength(elements));
5707  checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length));
5708  access = AddInstruction(BuildFastElementAccess(
5709  elements, checked_key, val, elements_kind, is_store));
5710  } else if (elements_kind == DICTIONARY_ELEMENTS) {
5711  if (is_store) {
5712  access = AddInstruction(BuildStoreKeyedGeneric(object, key, val));
5713  } else {
5714  access = AddInstruction(BuildLoadKeyedGeneric(object, key));
5715  }
5716  } else { // External array elements.
5717  access = AddInstruction(BuildExternalArrayElementAccess(
5718  external_elements, checked_key, val, elements_kind, is_store));
5719  }
5720  *has_side_effects |= access->HasObservableSideEffects();
5721  access->set_position(position);
5722  if (!is_store) {
5723  Push(access);
5724  }
5725  current_block()->Goto(join);
5726  set_current_block(if_false);
5727  }
5728  }
5729 
5730  // Deopt if none of the cases matched.
5731  current_block()->FinishExitWithDeoptimization(HDeoptimize::kNoUses);
5732  join->SetJoinId(ast_id);
5733  set_current_block(join);
5734  return is_store ? NULL : Pop();
5735 }
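// Illustrative sketch, not from the original source, of the control flow the
// loop above emits when several element kinds survive the transition pass:
//
//   kind = HElementsKind(object)
//   compare kind == FAST_* kinds      -> fast element access   -> goto join
//   compare kind == DICTIONARY        -> generic keyed IC      -> goto join
//   compare kind == EXTERNAL_* kinds  -> typed-array access    -> goto join
//   no compare matched                -> deoptimize
//   join: SetJoinId(ast_id)
//
// Each load arm pushes its result and the join pops it; for stores the
// function returns NULL instead of a value.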
5736 
5737 
5738 HValue* HGraphBuilder::HandleKeyedElementAccess(HValue* obj,
5739  HValue* key,
5740  HValue* val,
5741  Expression* expr,
5742  int ast_id,
5743  int position,
5744  bool is_store,
5745  bool* has_side_effects) {
5746  ASSERT(!expr->IsPropertyName());
5747  HInstruction* instr = NULL;
5748  if (expr->IsMonomorphic()) {
5749  Handle<Map> map = expr->GetMonomorphicReceiverType();
5750  if (map->has_slow_elements_kind()) {
5751  instr = is_store ? BuildStoreKeyedGeneric(obj, key, val)
5752  : BuildLoadKeyedGeneric(obj, key);
5753  } else {
5754  AddInstruction(new(zone()) HCheckNonSmi(obj));
5755  instr = BuildMonomorphicElementAccess(obj, key, val, NULL, map, is_store);
5756  }
5757  } else if (expr->GetReceiverTypes() != NULL &&
5758  !expr->GetReceiverTypes()->is_empty()) {
5759  return HandlePolymorphicElementAccess(
5760  obj, key, val, expr, ast_id, position, is_store, has_side_effects);
5761  } else {
5762  if (is_store) {
5763  instr = BuildStoreKeyedGeneric(obj, key, val);
5764  } else {
5765  instr = BuildLoadKeyedGeneric(obj, key);
5766  }
5767  }
5768  instr->set_position(position);
5769  AddInstruction(instr);
5770  *has_side_effects = instr->HasObservableSideEffects();
5771  return instr;
5772 }
5773 
5774 
5775 HInstruction* HGraphBuilder::BuildStoreKeyedGeneric(HValue* object,
5776  HValue* key,
5777  HValue* value) {
5778  HValue* context = environment()->LookupContext();
5779  return new(zone()) HStoreKeyedGeneric(
5780  context,
5781  object,
5782  key,
5783  value,
5784  function_strict_mode_flag());
5785 }
5786 
5787 
5788 void HGraphBuilder::EnsureArgumentsArePushedForAccess() {
5789  // Outermost function already has arguments on the stack.
5790  if (function_state()->outer() == NULL) return;
5791 
5792  if (function_state()->arguments_pushed()) return;
5793 
5794  // Push arguments when entering inlined function.
5795  HEnterInlined* entry = function_state()->entry();
5796 
5797  ZoneList<HValue*>* arguments_values = entry->arguments_values();
5798 
5799  HInstruction* insert_after = entry;
5800  for (int i = 0; i < arguments_values->length(); i++) {
5801  HValue* argument = arguments_values->at(i);
5802  HInstruction* push_argument = new(zone()) HPushArgument(argument);
5803  push_argument->InsertAfter(insert_after);
5804  insert_after = push_argument;
5805  }
5806 
5807  HArgumentsElements* arguments_elements =
5808  new(zone()) HArgumentsElements(true);
5809  arguments_elements->ClearFlag(HValue::kUseGVN);
5810  arguments_elements->InsertAfter(insert_after);
5811  function_state()->set_arguments_elements(arguments_elements);
5812 }
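// Illustrative note, not from the original source: inside an inlined function
// the actual arguments were never pushed onto the stack, so this helper
// replays an HPushArgument for each incoming value right after the
// HEnterInlined entry and records an HArgumentsElements (with GVN disabled)
// that later arguments accesses, such as TryArgumentsAccess below, can index
// into.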
5813 
5814 
5815 bool HGraphBuilder::TryArgumentsAccess(Property* expr) {
5816  VariableProxy* proxy = expr->obj()->AsVariableProxy();
5817  if (proxy == NULL) return false;
5818  if (!proxy->var()->IsStackAllocated()) return false;
5819  if (!environment()->Lookup(proxy->var())->CheckFlag(HValue::kIsArguments)) {
5820  return false;
5821  }
5822 
5823  HInstruction* result = NULL;
5824  if (expr->key()->IsPropertyName()) {
5825  Handle<String> name = expr->key()->AsLiteral()->AsPropertyName();
5826  if (!name->IsEqualTo(CStrVector("length"))) return false;
5827 
5828  if (function_state()->outer() == NULL) {
5829  HInstruction* elements = AddInstruction(
5830  new(zone()) HArgumentsElements(false));
5831  result = new(zone()) HArgumentsLength(elements);
5832  } else {
5833  // Number of arguments without receiver.
5834  int argument_count = environment()->
5835  arguments_environment()->parameter_count() - 1;
5836  result = new(zone()) HConstant(
5837  Handle<Object>(Smi::FromInt(argument_count)),
5838  Representation::Integer32());
5839  }
5840  } else {
5841  Push(graph()->GetArgumentsObject());
5842  VisitForValue(expr->key());
5843  if (HasStackOverflow() || current_block() == NULL) return true;
5844  HValue* key = Pop();
5845  Drop(1); // Arguments object.
5846  if (function_state()->outer() == NULL) {
5847  HInstruction* elements = AddInstruction(
5848  new(zone()) HArgumentsElements(false));
5849  HInstruction* length = AddInstruction(
5850  new(zone()) HArgumentsLength(elements));
5851  HInstruction* checked_key =
5852  AddInstruction(new(zone()) HBoundsCheck(key, length));
5853  result = new(zone()) HAccessArgumentsAt(elements, length, checked_key);
5854  } else {
5855  EnsureArgumentsArePushedForAccess();
5856 
5857  // Number of arguments without receiver.
5858  HInstruction* elements = function_state()->arguments_elements();
5859  int argument_count = environment()->
5860  arguments_environment()->parameter_count() - 1;
5861  HInstruction* length = AddInstruction(new(zone()) HConstant(
5862  Handle<Object>(Smi::FromInt(argument_count)),
5863  Representation::Integer32()));
5864  HInstruction* checked_key =
5865  AddInstruction(new(zone()) HBoundsCheck(key, length));
5866  result = new(zone()) HAccessArgumentsAt(elements, length, checked_key);
5867  }
5868  }
5869  ast_context()->ReturnInstruction(result, expr->id());
5870  return true;
5871 }
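// Illustrative note, not from the original source: the two patterns recognized
// above are, in JavaScript terms,
//
//   arguments.length   // -> HArgumentsLength, or a constant inside an inlined
//                      //    function where the count is known
//   arguments[i]       // -> bounds-checked HAccessArgumentsAt
//
// and only while `arguments` is still the untouched, stack-allocated arguments
// object (the kIsArguments flag). Anything else returns false and the regular
// property path is used instead.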
5872 
5873 
5874 void HGraphBuilder::VisitProperty(Property* expr) {
5875  ASSERT(!HasStackOverflow());
5876  ASSERT(current_block() != NULL);
5877  ASSERT(current_block()->HasPredecessor());
5878  expr->RecordTypeFeedback(oracle(), zone());
5879 
5880  if (TryArgumentsAccess(expr)) return;
5881 
5882  CHECK_ALIVE(VisitForValue(expr->obj()));
5883 
5884  HInstruction* instr = NULL;
5885  if (expr->AsProperty()->IsArrayLength()) {
5886  HValue* array = Pop();
5887  AddInstruction(new(zone()) HCheckNonSmi(array));
5888  HInstruction* mapcheck =
5889  AddInstruction(HCheckInstanceType::NewIsJSArray(array, zone()));
5890  instr = new(zone()) HJSArrayLength(array, mapcheck);
5891  } else if (expr->IsStringLength()) {
5892  HValue* string = Pop();
5893  AddInstruction(new(zone()) HCheckNonSmi(string));
5894  AddInstruction(HCheckInstanceType::NewIsString(string, zone()));
5895  instr = new(zone()) HStringLength(string);
5896  } else if (expr->IsStringAccess()) {
5897  CHECK_ALIVE(VisitForValue(expr->key()));
5898  HValue* index = Pop();
5899  HValue* string = Pop();
5900  HValue* context = environment()->LookupContext();
5901  HStringCharCodeAt* char_code =
5902  BuildStringCharCodeAt(context, string, index);
5903  AddInstruction(char_code);
5904  instr = new(zone()) HStringCharFromCode(context, char_code);
5905 
5906  } else if (expr->IsFunctionPrototype()) {
5907  HValue* function = Pop();
5908  AddInstruction(new(zone()) HCheckNonSmi(function));
5909  instr = new(zone()) HLoadFunctionPrototype(function);
5910 
5911  } else if (expr->key()->IsPropertyName()) {
5912  Handle<String> name = expr->key()->AsLiteral()->AsPropertyName();
5913  SmallMapList* types = expr->GetReceiverTypes();
5914 
5915  HValue* obj = Pop();
5916  if (expr->IsMonomorphic()) {
5917  instr = BuildLoadNamed(obj, expr, types->first(), name);
5918  } else if (types != NULL && types->length() > 1) {
5919  AddInstruction(new(zone()) HCheckNonSmi(obj));
5920  HandlePolymorphicLoadNamedField(expr, obj, types, name);
5921  return;
5922  } else {
5923  instr = BuildLoadNamedGeneric(obj, expr);
5924  }
5925 
5926  } else {
5927  CHECK_ALIVE(VisitForValue(expr->key()));
5928 
5929  HValue* key = Pop();
5930  HValue* obj = Pop();
5931 
5932  bool has_side_effects = false;
5933  HValue* load = HandleKeyedElementAccess(
5934  obj, key, NULL, expr, expr->id(), expr->position(),
5935  false, // is_store
5936  &has_side_effects);
5937  if (has_side_effects) {
5938  if (ast_context()->IsEffect()) {
5939  AddSimulate(expr->id());
5940  } else {
5941  Push(load);
5942  AddSimulate(expr->id());
5943  Drop(1);
5944  }
5945  }
5946  return ast_context()->ReturnValue(load);
5947  }
5948  instr->set_position(expr->position());
5949  return ast_context()->ReturnInstruction(instr, expr->id());
5950 }
5951 
5952 
5953 void HGraphBuilder::AddCheckConstantFunction(Call* expr,
5954  HValue* receiver,
5955  Handle<Map> receiver_map,
5956  bool smi_and_map_check) {
5957  // Constant functions have the nice property that the map will change if they
5958  // are overwritten. Therefore it is enough to check the map of the holder and
5959  // its prototypes.
5960  if (smi_and_map_check) {
5961  AddInstruction(new(zone()) HCheckNonSmi(receiver));
5962  AddInstruction(HCheckMaps::NewWithTransitions(receiver, receiver_map,
5963  zone()));
5964  }
5965  if (!expr->holder().is_null()) {
5966  AddInstruction(new(zone()) HCheckPrototypeMaps(
5967  Handle<JSObject>(JSObject::cast(receiver_map->prototype())),
5968  expr->holder()));
5969  }
5970 }
5971 
5972 
5973 class FunctionSorter {
5974  public:
5975  FunctionSorter() : index_(0), ticks_(0), ast_length_(0), src_length_(0) { }
5976  FunctionSorter(int index, int ticks, int ast_length, int src_length)
5977  : index_(index),
5978  ticks_(ticks),
5979  ast_length_(ast_length),
5980  src_length_(src_length) { }
5981 
5982  int index() const { return index_; }
5983  int ticks() const { return ticks_; }
5984  int ast_length() const { return ast_length_; }
5985  int src_length() const { return src_length_; }
5986 
5987  private:
5988  int index_;
5989  int ticks_;
5990  int ast_length_;
5991  int src_length_;
5992 };
5993 
5994 
5995 static int CompareHotness(void const* a, void const* b) {
5996  FunctionSorter const* function1 = reinterpret_cast<FunctionSorter const*>(a);
5997  FunctionSorter const* function2 = reinterpret_cast<FunctionSorter const*>(b);
5998  int diff = function1->ticks() - function2->ticks();
5999  if (diff != 0) return -diff;
6000  diff = function1->ast_length() - function2->ast_length();
6001  if (diff != 0) return diff;
6002  return function1->src_length() - function2->src_length();
6003 }
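// Illustrative note, not from the original source: CompareHotness orders
// candidates by descending profiler ticks, then ascending AST size, then
// ascending source size, so the hottest and smallest targets are tried first.
// For example, under this ordering:
//
//   {ticks: 10, ast_length: 50}  sorts before  {ticks: 3,  ast_length: 5}
//   {ticks: 10, ast_length: 5}   sorts before  {ticks: 10, ast_length: 50}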
6004 
6005 
6006 void HGraphBuilder::HandlePolymorphicCallNamed(Call* expr,
6007  HValue* receiver,
6008  SmallMapList* types,
6009  Handle<String> name) {
6010  // TODO(ager): We should recognize when the prototype chains for different
6011  // maps are identical. In that case we can avoid repeatedly generating the
6012  // same prototype map checks.
6013  int argument_count = expr->arguments()->length() + 1; // Includes receiver.
6014  HBasicBlock* join = NULL;
6015  FunctionSorter order[kMaxCallPolymorphism];
6016  int ordered_functions = 0;
6017  for (int i = 0;
6018  i < types->length() && ordered_functions < kMaxCallPolymorphism;
6019  ++i) {
6020  Handle<Map> map = types->at(i);
6021  if (expr->ComputeTarget(map, name)) {
6022  order[ordered_functions++] =
6023  FunctionSorter(i,
6024  expr->target()->shared()->profiler_ticks(),
6025  InliningAstSize(expr->target()),
6026  expr->target()->shared()->SourceSize());
6027  }
6028  }
6029 
6030  qsort(reinterpret_cast<void*>(&order[0]),
6031  ordered_functions,
6032  sizeof(order[0]),
6033  &CompareHotness);
6034 
6035  for (int fn = 0; fn < ordered_functions; ++fn) {
6036  int i = order[fn].index();
6037  Handle<Map> map = types->at(i);
6038  if (fn == 0) {
6039  // Only needed once.
6040  AddInstruction(new(zone()) HCheckNonSmi(receiver));
6041  join = graph()->CreateBasicBlock();
6042  }
6043  HBasicBlock* if_true = graph()->CreateBasicBlock();
6044  HBasicBlock* if_false = graph()->CreateBasicBlock();
6045  HCompareMap* compare =
6046  new(zone()) HCompareMap(receiver, map, if_true, if_false);
6047  current_block()->Finish(compare);
6048 
6049  set_current_block(if_true);
6050  expr->ComputeTarget(map, name);
6051  AddCheckConstantFunction(expr, receiver, map, false);
6052  if (FLAG_trace_inlining && FLAG_polymorphic_inlining) {
6053  Handle<JSFunction> caller = info()->closure();
6054  SmartArrayPointer<char> caller_name =
6055  caller->shared()->DebugName()->ToCString();
6056  PrintF("Trying to inline the polymorphic call to %s from %s\n",
6057  *name->ToCString(),
6058  *caller_name);
6059  }
6060  if (FLAG_polymorphic_inlining && TryInlineCall(expr)) {
6061  // Trying to inline will signal that we should bailout from the
6062  // entire compilation by setting stack overflow on the visitor.
6063  if (HasStackOverflow()) return;
6064  } else {
6065  HCallConstantFunction* call =
6066  new(zone()) HCallConstantFunction(expr->target(), argument_count);
6067  call->set_position(expr->position());
6068  PreProcessCall(call);
6069  AddInstruction(call);
6070  if (!ast_context()->IsEffect()) Push(call);
6071  }
6072 
6073  if (current_block() != NULL) current_block()->Goto(join);
6074  set_current_block(if_false);
6075  }
6076 
6077  // Finish up. Unconditionally deoptimize if we've handled all the maps we
6078  // know about and do not want to handle ones we've never seen. Otherwise
6079  // use a generic IC.
6080  if (ordered_functions == types->length() && FLAG_deoptimize_uncommon_cases) {
6081  current_block()->FinishExitWithDeoptimization(HDeoptimize::kNoUses);
6082  } else {
6083  HValue* context = environment()->LookupContext();
6084  HCallNamed* call = new(zone()) HCallNamed(context, name, argument_count);
6085  call->set_position(expr->position());
6086  PreProcessCall(call);
6087 
6088  if (join != NULL) {
6089  AddInstruction(call);
6090  if (!ast_context()->IsEffect()) Push(call);
6091  current_block()->Goto(join);
6092  } else {
6093  return ast_context()->ReturnInstruction(call, expr->id());
6094  }
6095  }
6096 
6097  // We assume that control flow is always live after an expression. So
6098  // even without predecessors to the join block, we set it as the exit
6099  // block and continue by adding instructions there.
6100  ASSERT(join != NULL);
6101  if (join->HasPredecessor()) {
6102  set_current_block(join);
6103  join->SetJoinId(expr->id());
6104  if (!ast_context()->IsEffect()) return ast_context()->ReturnValue(Pop());
6105  } else {
6106  set_current_block(NULL);
6107  }
6108 }
6109 
6110 
6111 void HGraphBuilder::TraceInline(Handle<JSFunction> target,
6112  Handle<JSFunction> caller,
6113  const char* reason) {
6114  if (FLAG_trace_inlining) {
6115  SmartArrayPointer<char> target_name =
6116  target->shared()->DebugName()->ToCString();
6117  SmartArrayPointer<char> caller_name =
6118  caller->shared()->DebugName()->ToCString();
6119  if (reason == NULL) {
6120  PrintF("Inlined %s called from %s.\n", *target_name, *caller_name);
6121  } else {
6122  PrintF("Did not inline %s called from %s (%s).\n",
6123  *target_name, *caller_name, reason);
6124  }
6125  }
6126 }
6127 
6128 
6129 static const int kNotInlinable = 1000000000;
6130 
6131 
6132 int HGraphBuilder::InliningAstSize(Handle<JSFunction> target) {
6133  if (!FLAG_use_inlining) return kNotInlinable;
6134 
6135  // Precondition: call is monomorphic and we have found a target with the
6136  // appropriate arity.
6137  Handle<JSFunction> caller = info()->closure();
6138  Handle<SharedFunctionInfo> target_shared(target->shared());
6139 
6140  // Do a quick check on source code length to avoid parsing large
6141  // inlining candidates.
6142  if (target_shared->SourceSize() >
6143  Min(FLAG_max_inlined_source_size, kUnlimitedMaxInlinedSourceSize)) {
6144  TraceInline(target, caller, "target text too big");
6145  return kNotInlinable;
6146  }
6147 
6148  // Target must be inlineable.
6149  if (!target->IsInlineable()) {
6150  TraceInline(target, caller, "target not inlineable");
6151  return kNotInlinable;
6152  }
6153  if (target_shared->dont_inline() || target_shared->dont_optimize()) {
6154  TraceInline(target, caller, "target contains unsupported syntax [early]");
6155  return kNotInlinable;
6156  }
6157 
6158  int nodes_added = target_shared->ast_node_count();
6159  return nodes_added;
6160 }
6161 
6162 
6163 bool HGraphBuilder::TryInline(CallKind call_kind,
6164  Handle<JSFunction> target,
6165  ZoneList<Expression*>* arguments,
6166  HValue* receiver,
6167  int ast_id,
6168  int return_id,
6169  ReturnHandlingFlag return_handling) {
6170  int nodes_added = InliningAstSize(target);
6171  if (nodes_added == kNotInlinable) return false;
6172 
6173  Handle<JSFunction> caller = info()->closure();
6174 
6175  if (nodes_added > Min(FLAG_max_inlined_nodes, kUnlimitedMaxInlinedNodes)) {
6176  TraceInline(target, caller, "target AST is too large [early]");
6177  return false;
6178  }
6179 
6180  Handle<SharedFunctionInfo> target_shared(target->shared());
6181 
6182 #if !defined(V8_TARGET_ARCH_IA32)
6183  // Target must be able to use caller's context.
6184  CompilationInfo* outer_info = info();
6185  if (target->context() != outer_info->closure()->context() ||
6186  outer_info->scope()->contains_with() ||
6187  outer_info->scope()->num_heap_slots() > 0) {
6188  TraceInline(target, caller, "target requires context change");
6189  return false;
6190  }
6191 #endif
6192 
6193 
6194  // Don't inline deeper than kMaxInliningLevels calls.
6195  HEnvironment* env = environment();
6196  int current_level = 1;
6197  while (env->outer() != NULL) {
6198  if (current_level == Compiler::kMaxInliningLevels) {
6199  TraceInline(target, caller, "inline depth limit reached");
6200  return false;
6201  }
6202  if (env->outer()->frame_type() == JS_FUNCTION) {
6203  current_level++;
6204  }
6205  env = env->outer();
6206  }
6207 
6208  // Don't inline recursive functions.
6209  for (FunctionState* state = function_state();
6210  state != NULL;
6211  state = state->outer()) {
6212  if (state->compilation_info()->closure()->shared() == *target_shared) {
6213  TraceInline(target, caller, "target is recursive");
6214  return false;
6215  }
6216  }
6217 
6218  // We don't want to add more than a certain number of nodes from inlining.
6219  if (inlined_count_ > Min(FLAG_max_inlined_nodes_cumulative,
6220  kUnlimitedMaxInlinedNodesCumulative)) {
6221  TraceInline(target, caller, "cumulative AST node limit reached");
6222  return false;
6223  }
6224 
6225  // Parse and allocate variables.
6226  CompilationInfo target_info(target);
6227  if (!ParserApi::Parse(&target_info, kNoParsingFlags) ||
6228  !Scope::Analyze(&target_info)) {
6229  if (target_info.isolate()->has_pending_exception()) {
6230  // Parse or scope error, never optimize this function.
6231  SetStackOverflow();
6232  target_shared->DisableOptimization();
6233  }
6234  TraceInline(target, caller, "parse failure");
6235  return false;
6236  }
6237 
6238  if (target_info.scope()->num_heap_slots() > 0) {
6239  TraceInline(target, caller, "target has context-allocated variables");
6240  return false;
6241  }
6242  FunctionLiteral* function = target_info.function();
6243 
6244  // The following conditions must be checked again after re-parsing, because
6245  // earlier the information might not have been complete due to lazy parsing.
6246  nodes_added = function->ast_node_count();
6247  if (nodes_added > Min(FLAG_max_inlined_nodes, kUnlimitedMaxInlinedNodes)) {
6248  TraceInline(target, caller, "target AST is too large [late]");
6249  return false;
6250  }
6251  AstProperties::Flags* flags(function->flags());
6252  if (flags->Contains(kDontInline) || flags->Contains(kDontOptimize)) {
6253  TraceInline(target, caller, "target contains unsupported syntax [late]");
6254  return false;
6255  }
6256 
6257  // If the function uses the arguments object check that inlining of functions
6258  // with arguments object is enabled and the arguments-variable is
6259  // stack allocated.
6260  if (function->scope()->arguments() != NULL) {
6261  if (!FLAG_inline_arguments) {
6262  TraceInline(target, caller, "target uses arguments object");
6263  return false;
6264  }
6265 
6266  if (!function->scope()->arguments()->IsStackAllocated()) {
6267  TraceInline(target,
6268  caller,
6269  "target uses non-stackallocated arguments object");
6270  return false;
6271  }
6272  }
6273 
6274  // All declarations must be inlineable.
6275  ZoneList<Declaration*>* decls = target_info.scope()->declarations();
6276  int decl_count = decls->length();
6277  for (int i = 0; i < decl_count; ++i) {
6278  if (!decls->at(i)->IsInlineable()) {
6279  TraceInline(target, caller, "target has non-trivial declaration");
6280  return false;
6281  }
6282  }
6283 
6284  // Generate the deoptimization data for the unoptimized version of
6285  // the target function if we don't already have it.
6286  if (!target_shared->has_deoptimization_support()) {
6287  // Note that we compile here using the same AST that we will use for
6288  // generating the optimized inline code.
6289  target_info.EnableDeoptimizationSupport();
6290  if (!FullCodeGenerator::MakeCode(&target_info)) {
6291  TraceInline(target, caller, "could not generate deoptimization info");
6292  return false;
6293  }
6294  if (target_shared->scope_info() == ScopeInfo::Empty()) {
6295  // The scope info might not have been set if a lazily compiled
6296  // function is inlined before being called for the first time.
6297  Handle<ScopeInfo> target_scope_info =
6298  ScopeInfo::Create(target_info.scope(), zone());
6299  target_shared->set_scope_info(*target_scope_info);
6300  }
6301  target_shared->EnableDeoptimizationSupport(*target_info.code());
6302  Compiler::RecordFunctionCompilation(Logger::FUNCTION_TAG,
6303  &target_info,
6304  target_shared);
6305  }
6306 
6307  // ----------------------------------------------------------------
6308  // After this point, we've made a decision to inline this function (so
6309  // TryInline should always return true).
6310 
6311  // Save the pending call context and type feedback oracle. Set up new ones
6312  // for the inlined function.
6313  ASSERT(target_shared->has_deoptimization_support());
6314  TypeFeedbackOracle target_oracle(
6315  Handle<Code>(target_shared->code()),
6316  Handle<Context>(target->context()->global_context()),
6317  isolate(),
6318  zone());
6319  // The function state is new-allocated because we need to delete it
6320  // in two different places.
6321  FunctionState* target_state = new FunctionState(
6322  this, &target_info, &target_oracle, return_handling);
6323 
6324  HConstant* undefined = graph()->GetConstantUndefined();
6325  HEnvironment* inner_env =
6326  environment()->CopyForInlining(target,
6327  arguments->length(),
6328  function,
6329  undefined,
6330  call_kind,
6332 #ifdef V8_TARGET_ARCH_IA32
6333  // IA32 only, overwrite the caller's context in the deoptimization
6334  // environment with the correct one.
6335  //
6336  // TODO(kmillikin): implement the same inlining on other platforms so we
6337  // can remove the unsightly ifdefs in this function.
6338  HConstant* context =
6339  new(zone()) HConstant(Handle<Context>(target->context()),
6340  Representation::Tagged());
6341  AddInstruction(context);
6342  inner_env->BindContext(context);
6343 #endif
6344 
6345  AddSimulate(return_id);
6346  current_block()->UpdateEnvironment(inner_env);
6347 
6348  ZoneList<HValue*>* arguments_values = NULL;
6349 
6350  // If the function uses arguments copy current arguments values
6351  // to use them for materialization.
6352  if (function->scope()->arguments() != NULL) {
6353  HEnvironment* arguments_env = inner_env->arguments_environment();
6354  int arguments_count = arguments_env->parameter_count();
6355  arguments_values = new(zone()) ZoneList<HValue*>(arguments_count, zone());
6356  for (int i = 0; i < arguments_count; i++) {
6357  arguments_values->Add(arguments_env->Lookup(i), zone());
6358  }
6359  }
6360 
6361  HEnterInlined* enter_inlined =
6362  new(zone()) HEnterInlined(target,
6363  arguments->length(),
6364  function,
6365  call_kind,
6367  function->scope()->arguments(),
6368  arguments_values);
6369  function_state()->set_entry(enter_inlined);
6370  AddInstruction(enter_inlined);
6371 
6372  // If the function uses arguments object create and bind one.
6373  if (function->scope()->arguments() != NULL) {
6374  ASSERT(function->scope()->arguments()->IsStackAllocated());
6375  inner_env->Bind(function->scope()->arguments(),
6376  graph()->GetArgumentsObject());
6377  }
6378 
6379 
6380  VisitDeclarations(target_info.scope()->declarations());
6381  VisitStatements(function->body());
6382  if (HasStackOverflow()) {
6383  // Bail out if the inline function did, as we cannot residualize a call
6384  // instead.
6385  TraceInline(target, caller, "inline graph construction failed");
6386  target_shared->DisableOptimization();
6387  inline_bailout_ = true;
6388  delete target_state;
6389  return true;
6390  }
6391 
6392  // Update inlined nodes count.
6393  inlined_count_ += nodes_added;
6394 
6395  TraceInline(target, caller, NULL);
6396 
6397  if (current_block() != NULL) {
6398  // Add default return value (i.e. undefined for normal calls or the newly
6399  // allocated receiver for construct calls) if control can fall off the
6400  // body. In a test context, undefined is false and any JSObject is true.
6401  if (call_context()->IsValue()) {
6402  ASSERT(function_return() != NULL);
6403  HValue* return_value = function_state()->is_construct()
6404  ? receiver
6405  : undefined;
6406  current_block()->AddLeaveInlined(return_value,
6407  function_return(),
6408  function_state());
6409  } else if (call_context()->IsEffect()) {
6410  ASSERT(function_return() != NULL);
6411  current_block()->Goto(function_return(), function_state());
6412  } else {
6413  ASSERT(call_context()->IsTest());
6414  ASSERT(inlined_test_context() != NULL);
6415  HBasicBlock* target = function_state()->is_construct()
6416  ? inlined_test_context()->if_true()
6417  : inlined_test_context()->if_false();
6418  current_block()->Goto(target, function_state());
6419  }
6420  }
6421 
6422  // Fix up the function exits.
6423  if (inlined_test_context() != NULL) {
6424  HBasicBlock* if_true = inlined_test_context()->if_true();
6425  HBasicBlock* if_false = inlined_test_context()->if_false();
6426 
6427  // Pop the return test context from the expression context stack.
6428  ASSERT(ast_context() == inlined_test_context());
6429  ClearInlinedTestContext();
6430  delete target_state;
6431 
6432  // Forward to the real test context.
6433  if (if_true->HasPredecessor()) {
6434  if_true->SetJoinId(ast_id);
6435  HBasicBlock* true_target = TestContext::cast(ast_context())->if_true();
6436  if_true->Goto(true_target, function_state());
6437  }
6438  if (if_false->HasPredecessor()) {
6439  if_false->SetJoinId(ast_id);
6440  HBasicBlock* false_target = TestContext::cast(ast_context())->if_false();
6441  if_false->Goto(false_target, function_state());
6442  }
6442  }
6443  set_current_block(NULL);
6444  return true;
6445 
6446  } else if (function_return()->HasPredecessor()) {
6447  function_return()->SetJoinId(ast_id);
6448  set_current_block(function_return());
6449  } else {
6450  set_current_block(NULL);
6451  }
6452  delete target_state;
6453  return true;
6454 }
6455 
6456 
6457 bool HGraphBuilder::TryInlineCall(Call* expr, bool drop_extra) {
6458  // The function call we are inlining is a method call if the call
6459  // is a property call.
6460  CallKind call_kind = (expr->expression()->AsProperty() == NULL)
6461  ? CALL_AS_FUNCTION
6462  : CALL_AS_METHOD;
6463 
6464  return TryInline(call_kind,
6465  expr->target(),
6466  expr->arguments(),
6467  NULL,
6468  expr->id(),
6469  expr->ReturnId(),
6470  drop_extra ? DROP_EXTRA_ON_RETURN : NORMAL_RETURN);
6471 }
6472 
6473 
6474 bool HGraphBuilder::TryInlineConstruct(CallNew* expr, HValue* receiver) {
6475  return TryInline(CALL_AS_FUNCTION,
6476  expr->target(),
6477  expr->arguments(),
6478  receiver,
6479  expr->id(),
6480  expr->ReturnId(),
6481  CONSTRUCT_CALL_RETURN);
6482 }
6483 
6484 
6485 bool HGraphBuilder::TryInlineBuiltinFunctionCall(Call* expr, bool drop_extra) {
6486  if (!expr->target()->shared()->HasBuiltinFunctionId()) return false;
6487  BuiltinFunctionId id = expr->target()->shared()->builtin_function_id();
6488  switch (id) {
6489  case kMathRound:
6490  case kMathAbs:
6491  case kMathSqrt:
6492  case kMathLog:
6493  case kMathSin:
6494  case kMathCos:
6495  case kMathTan:
6496  if (expr->arguments()->length() == 1) {
6497  HValue* argument = Pop();
6498  HValue* context = environment()->LookupContext();
6499  Drop(1); // Receiver.
6500  HUnaryMathOperation* op =
6501  new(zone()) HUnaryMathOperation(context, argument, id);
6502  op->set_position(expr->position());
6503  if (drop_extra) Drop(1); // Optionally drop the function.
6504  ast_context()->ReturnInstruction(op, expr->id());
6505  return true;
6506  }
6507  break;
6508  default:
6509  // Not supported for inlining yet.
6510  break;
6511  }
6512  return false;
6513 }
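// Illustrative note, not from the original source: this variant covers the
// supported Math builtins when they are called as plain function values with
// exactly one argument, e.g.
//
//   var sqrt = Math.sqrt;  sqrt(x);   // -> HUnaryMathOperation(..., kMathSqrt)
//
// Receiver-based calls such as Math.sqrt(x) go through
// TryInlineBuiltinMethodCall below, which also checks the receiver map.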
6514 
6515 
6516 bool HGraphBuilder::TryInlineBuiltinMethodCall(Call* expr,
6517  HValue* receiver,
6518  Handle<Map> receiver_map,
6519  CheckType check_type) {
6520  ASSERT(check_type != RECEIVER_MAP_CHECK || !receiver_map.is_null());
6521  // Try to inline calls like Math.* as operations in the calling function.
6522  if (!expr->target()->shared()->HasBuiltinFunctionId()) return false;
6523  BuiltinFunctionId id = expr->target()->shared()->builtin_function_id();
6524  int argument_count = expr->arguments()->length() + 1; // Plus receiver.
6525  switch (id) {
6526  case kStringCharCodeAt:
6527  case kStringCharAt:
6528  if (argument_count == 2 && check_type == STRING_CHECK) {
6529  HValue* index = Pop();
6530  HValue* string = Pop();
6531  HValue* context = environment()->LookupContext();
6532  ASSERT(!expr->holder().is_null());
6533  AddInstruction(new(zone()) HCheckPrototypeMaps(
6534  oracle()->GetPrototypeForPrimitiveCheck(STRING_CHECK),
6535  expr->holder()));
6536  HStringCharCodeAt* char_code =
6537  BuildStringCharCodeAt(context, string, index);
6538  if (id == kStringCharCodeAt) {
6539  ast_context()->ReturnInstruction(char_code, expr->id());
6540  return true;
6541  }
6542  AddInstruction(char_code);
6543  HStringCharFromCode* result =
6544  new(zone()) HStringCharFromCode(context, char_code);
6545  ast_context()->ReturnInstruction(result, expr->id());
6546  return true;
6547  }
6548  break;
6549  case kMathRound:
6550  case kMathFloor:
6551  case kMathAbs:
6552  case kMathSqrt:
6553  case kMathLog:
6554  case kMathSin:
6555  case kMathCos:
6556  case kMathTan:
6557  if (argument_count == 2 && check_type == RECEIVER_MAP_CHECK) {
6558  AddCheckConstantFunction(expr, receiver, receiver_map, true);
6559  HValue* argument = Pop();
6560  HValue* context = environment()->LookupContext();
6561  Drop(1); // Receiver.
6562  HUnaryMathOperation* op =
6563  new(zone()) HUnaryMathOperation(context, argument, id);
6564  op->set_position(expr->position());
6565  ast_context()->ReturnInstruction(op, expr->id());
6566  return true;
6567  }
6568  break;
6569  case kMathPow:
6570  if (argument_count == 3 && check_type == RECEIVER_MAP_CHECK) {
6571  AddCheckConstantFunction(expr, receiver, receiver_map, true);
6572  HValue* right = Pop();
6573  HValue* left = Pop();
6574  Pop(); // Pop receiver.
6575  HValue* context = environment()->LookupContext();
6576  HInstruction* result = NULL;
6577  // Use sqrt() if exponent is 0.5 or -0.5.
6578  if (right->IsConstant() && HConstant::cast(right)->HasDoubleValue()) {
6579  double exponent = HConstant::cast(right)->DoubleValue();
6580  if (exponent == 0.5) {
6581  result =
6582  new(zone()) HUnaryMathOperation(context, left, kMathPowHalf);
6583  } else if (exponent == -0.5) {
6584  HConstant* double_one =
6585  new(zone()) HConstant(Handle<Object>(Smi::FromInt(1)),
6586  Representation::Double());
6587  AddInstruction(double_one);
6588  HUnaryMathOperation* square_root =
6589  new(zone()) HUnaryMathOperation(context, left, kMathPowHalf);
6590  AddInstruction(square_root);
6591  // MathPowHalf doesn't have side effects so there's no need for
6592  // an environment simulation here.
6593  ASSERT(!square_root->HasObservableSideEffects());
6594  result = new(zone()) HDiv(context, double_one, square_root);
6595  } else if (exponent == 2.0) {
6596  result = new(zone()) HMul(context, left, left);
6597  }
6598  } else if (right->IsConstant() &&
6599  HConstant::cast(right)->HasInteger32Value() &&
6600  HConstant::cast(right)->Integer32Value() == 2) {
6601  result = new(zone()) HMul(context, left, left);
6602  }
6603 
6604  if (result == NULL) {
6605  result = new(zone()) HPower(left, right);
6606  }
6607  ast_context()->ReturnInstruction(result, expr->id());
6608  return true;
6609  }
6610  break;
6611  case kMathRandom:
6612  if (argument_count == 1 && check_type == RECEIVER_MAP_CHECK) {
6613  AddCheckConstantFunction(expr, receiver, receiver_map, true);
6614  Drop(1); // Receiver.
6615  HValue* context = environment()->LookupContext();
6616  HGlobalObject* global_object = new(zone()) HGlobalObject(context);
6617  AddInstruction(global_object);
6618  HRandom* result = new(zone()) HRandom(global_object);
6619  ast_context()->ReturnInstruction(result, expr->id());
6620  return true;
6621  }
6622  break;
6623  case kMathMax:
6624  case kMathMin:
6625  if (argument_count == 3 && check_type == RECEIVER_MAP_CHECK) {
6626  AddCheckConstantFunction(expr, receiver, receiver_map, true);
6627  HValue* right = Pop();
6628  HValue* left = Pop();
6629  Pop(); // Pop receiver.
6630 
6631  HValue* left_operand = left;
6632  HValue* right_operand = right;
6633 
6634  // If we do not have two integers, we convert to double for comparison.
6635  if (!left->representation().IsInteger32() ||
6636  !right->representation().IsInteger32()) {
6637  if (!left->representation().IsDouble()) {
6638  HChange* left_convert = new(zone()) HChange(
6639  left,
6640  Representation::Double(),
6641  false, // Do not truncate when converting to double.
6642  true); // Deoptimize for undefined.
6643  left_convert->SetFlag(HValue::kBailoutOnMinusZero);
6644  left_operand = AddInstruction(left_convert);
6645  }
6646  if (!right->representation().IsDouble()) {
6647  HChange* right_convert = new(zone()) HChange(
6648  right,
6649  Representation::Double(),
6650  false, // Do not truncate when converting to double.
6651  true); // Deoptimize for undefined.
6652  right_convert->SetFlag(HValue::kBailoutOnMinusZero);
6653  right_operand = AddInstruction(right_convert);
6654  }
6655  }
6656 
6657  ASSERT(left_operand->representation().Equals(
6658  right_operand->representation()));
6659  ASSERT(!left_operand->representation().IsTagged());
6660 
6661  Token::Value op = (id == kMathMin) ? Token::LT : Token::GT;
6662 
6663  HCompareIDAndBranch* compare =
6664  new(zone()) HCompareIDAndBranch(left_operand, right_operand, op);
6665  compare->SetInputRepresentation(left_operand->representation());
6666 
6667  HBasicBlock* return_left = graph()->CreateBasicBlock();
6668  HBasicBlock* return_right = graph()->CreateBasicBlock();
6669 
6670  compare->SetSuccessorAt(0, return_left);
6671  compare->SetSuccessorAt(1, return_right);
6672  current_block()->Finish(compare);
6673 
6674  set_current_block(return_left);
6675  Push(left);
6676  set_current_block(return_right);
6677  // The branch above always returns the right operand if either of
6678  // them is NaN, but the spec requires that max/min(NaN, X) = NaN.
6679  // We add another branch that checks if the left operand is NaN or not.
6680  if (left_operand->representation().IsDouble()) {
6681  // If left_operand != left_operand then it is NaN.
6682  HCompareIDAndBranch* compare_nan = new(zone()) HCompareIDAndBranch(
6683  left_operand, left_operand, Token::EQ);
6684  compare_nan->SetInputRepresentation(left_operand->representation());
6685  HBasicBlock* left_is_number = graph()->CreateBasicBlock();
6686  HBasicBlock* left_is_nan = graph()->CreateBasicBlock();
6687  compare_nan->SetSuccessorAt(0, left_is_number);
6688  compare_nan->SetSuccessorAt(1, left_is_nan);
6689  current_block()->Finish(compare_nan);
6690  set_current_block(left_is_nan);
6691  Push(left);
6692  set_current_block(left_is_number);
6693  Push(right);
6694  return_right = CreateJoin(left_is_number, left_is_nan, expr->id());
6695  } else {
6696  Push(right);
6697  }
6698 
6699  HBasicBlock* join = CreateJoin(return_left, return_right, expr->id());
6700  set_current_block(join);
6701  ast_context()->ReturnValue(Pop());
6702  return true;
6703  }
6704  break;
6705  default:
6706  // Not yet supported for inlining.
6707  break;
6708  }
6709  return false;
6710 }
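// Note on the arithmetic cases handled above: when the call is monomorphic,
// the builder replaces well-known Math calls with plain hydrogen arithmetic,
// for example
//   Math.pow(x, 0.5)  -> a single square-root operation (kMathPowHalf),
//   Math.pow(x, -0.5) -> 1 / sqrt(x),
//   Math.pow(x, 2)    -> x * x,
// falling back to the generic HPower instruction for other exponents.
// Math.random() becomes an HRandom on the global object, and Math.min/max
// are lowered to an explicit compare-and-branch with a separate NaN check.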
6711 
6712 
6713 bool HGraphBuilder::TryCallApply(Call* expr) {
6714  Expression* callee = expr->expression();
6715  Property* prop = callee->AsProperty();
6716  ASSERT(prop != NULL);
6717 
6718  if (!expr->IsMonomorphic() || expr->check_type() != RECEIVER_MAP_CHECK) {
6719  return false;
6720  }
6721  Handle<Map> function_map = expr->GetReceiverTypes()->first();
6722  if (function_map->instance_type() != JS_FUNCTION_TYPE ||
6723  !expr->target()->shared()->HasBuiltinFunctionId() ||
6724  expr->target()->shared()->builtin_function_id() != kFunctionApply) {
6725  return false;
6726  }
6727 
6728  if (info()->scope()->arguments() == NULL) return false;
6729 
6730  ZoneList<Expression*>* args = expr->arguments();
6731  if (args->length() != 2) return false;
6732 
6733  VariableProxy* arg_two = args->at(1)->AsVariableProxy();
6734  if (arg_two == NULL || !arg_two->var()->IsStackAllocated()) return false;
6735  HValue* arg_two_value = environment()->Lookup(arg_two->var());
6736  if (!arg_two_value->CheckFlag(HValue::kIsArguments)) return false;
6737 
6738  // Found pattern f.apply(receiver, arguments).
6739  VisitForValue(prop->obj());
6740  if (HasStackOverflow() || current_block() == NULL) return true;
6741  HValue* function = Top();
6742  AddCheckConstantFunction(expr, function, function_map, true);
6743  Drop(1);
6744 
6745  VisitForValue(args->at(0));
6746  if (HasStackOverflow() || current_block() == NULL) return true;
6747  HValue* receiver = Pop();
6748 
6749  if (function_state()->outer() == NULL) {
6750  HInstruction* elements = AddInstruction(
6751  new(zone()) HArgumentsElements(false));
6752  HInstruction* length =
6753  AddInstruction(new(zone()) HArgumentsLength(elements));
6754  HValue* wrapped_receiver =
6755  AddInstruction(new(zone()) HWrapReceiver(receiver, function));
6756  HInstruction* result =
6757  new(zone()) HApplyArguments(function,
6758  wrapped_receiver,
6759  length,
6760  elements);
6761  result->set_position(expr->position());
6762  ast_context()->ReturnInstruction(result, expr->id());
6763  return true;
6764  } else {
6765  // We are inside an inlined function and we know exactly what is inside
6766  // the arguments object.
6767  HValue* context = environment()->LookupContext();
6768 
6769  HValue* wrapped_receiver =
6770  AddInstruction(new(zone()) HWrapReceiver(receiver, function));
6771  PushAndAdd(new(zone()) HPushArgument(wrapped_receiver));
6772 
6773  HEnvironment* arguments_env = environment()->arguments_environment();
6774 
6775  int parameter_count = arguments_env->parameter_count();
6776  for (int i = 1; i < arguments_env->parameter_count(); i++) {
6777  PushAndAdd(new(zone()) HPushArgument(arguments_env->Lookup(i)));
6778  }
6779 
6780  HInvokeFunction* call = new(zone()) HInvokeFunction(
6781  context,
6782  function,
6783  parameter_count);
6784  Drop(parameter_count);
6785  call->set_position(expr->position());
6786  ast_context()->ReturnInstruction(call, expr->id());
6787  return true;
6788  }
6789 }
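// TryCallApply only matches the exact source pattern
//   f.apply(receiver, arguments)
// where the second argument is the enclosing function's own arguments
// object. In the outermost frame this is lowered to HApplyArguments; inside
// an inlined frame, where the argument values are known, each one is pushed
// explicitly and the call is emitted as an HInvokeFunction instead.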
6790 
6791 
6792 void HGraphBuilder::VisitCall(Call* expr) {
6793  ASSERT(!HasStackOverflow());
6794  ASSERT(current_block() != NULL);
6795  ASSERT(current_block()->HasPredecessor());
6796  Expression* callee = expr->expression();
6797  int argument_count = expr->arguments()->length() + 1; // Plus receiver.
6798  HInstruction* call = NULL;
6799 
6800  Property* prop = callee->AsProperty();
6801  if (prop != NULL) {
6802  if (!prop->key()->IsPropertyName()) {
6803  // Keyed function call.
6804  CHECK_ALIVE(VisitArgument(prop->obj()));
6805 
6806  CHECK_ALIVE(VisitForValue(prop->key()));
6807  // Push receiver and key like the non-optimized code generator expects them.
6808  HValue* key = Pop();
6809  HValue* receiver = Pop();
6810  Push(key);
6811  Push(receiver);
6812 
6813  CHECK_ALIVE(VisitArgumentList(expr->arguments()));
6814 
6815  HValue* context = environment()->LookupContext();
6816  call = new(zone()) HCallKeyed(context, key, argument_count);
6817  call->set_position(expr->position());
6818  Drop(argument_count + 1); // 1 is the key.
6819  return ast_context()->ReturnInstruction(call, expr->id());
6820  }
6821 
6822  // Named function call.
6823  expr->RecordTypeFeedback(oracle(), CALL_AS_METHOD);
6824 
6825  if (TryCallApply(expr)) return;
6826 
6827  CHECK_ALIVE(VisitForValue(prop->obj()));
6828  CHECK_ALIVE(VisitExpressions(expr->arguments()));
6829 
6830  Handle<String> name = prop->key()->AsLiteral()->AsPropertyName();
6831 
6832  SmallMapList* types = expr->GetReceiverTypes();
6833 
6834  HValue* receiver =
6835  environment()->ExpressionStackAt(expr->arguments()->length());
6836  if (expr->IsMonomorphic()) {
6837  Handle<Map> receiver_map = (types == NULL || types->is_empty())
6838  ? Handle<Map>::null()
6839  : types->first();
6840  if (TryInlineBuiltinMethodCall(expr,
6841  receiver,
6842  receiver_map,
6843  expr->check_type())) {
6844  if (FLAG_trace_inlining) {
6845  PrintF("Inlining builtin ");
6846  expr->target()->ShortPrint();
6847  PrintF("\n");
6848  }
6849  return;
6850  }
6851 
6852  if (CallStubCompiler::HasCustomCallGenerator(expr->target()) ||
6853  expr->check_type() != RECEIVER_MAP_CHECK) {
6854  // When the target has a custom call IC generator, use the IC,
6855  // because it is likely to generate better code. Also use the IC
6856  // when a primitive receiver check is required.
6857  HValue* context = environment()->LookupContext();
6858  call = PreProcessCall(
6859  new(zone()) HCallNamed(context, name, argument_count));
6860  } else {
6861  AddCheckConstantFunction(expr, receiver, receiver_map, true);
6862 
6863  if (TryInlineCall(expr)) return;
6864  call = PreProcessCall(
6865  new(zone()) HCallConstantFunction(expr->target(),
6866  argument_count));
6867  }
6868  } else if (types != NULL && types->length() > 1) {
6869  ASSERT(expr->check_type() == RECEIVER_MAP_CHECK);
6870  HandlePolymorphicCallNamed(expr, receiver, types, name);
6871  return;
6872 
6873  } else {
6874  HValue* context = environment()->LookupContext();
6875  call = PreProcessCall(
6876  new(zone()) HCallNamed(context, name, argument_count));
6877  }
6878 
6879  } else {
6880  expr->RecordTypeFeedback(oracle(), CALL_AS_FUNCTION);
6881  VariableProxy* proxy = expr->expression()->AsVariableProxy();
6882  bool global_call = proxy != NULL && proxy->var()->IsUnallocated();
6883 
6884  if (proxy != NULL && proxy->var()->is_possibly_eval()) {
6885  return Bailout("possible direct call to eval");
6886  }
6887 
6888  if (global_call) {
6889  Variable* var = proxy->var();
6890  bool known_global_function = false;
6891  // If there is a global property cell for the name at compile time and
6892  // the access check is not enabled, we assume that the function will not
6893  // change and generate optimized code for calling the function.
6894  LookupResult lookup(isolate());
6895  GlobalPropertyAccess type = LookupGlobalProperty(var, &lookup, false);
6896  if (type == kUseCell &&
6897  !info()->global_object()->IsAccessCheckNeeded()) {
6898  Handle<GlobalObject> global(info()->global_object());
6899  known_global_function = expr->ComputeGlobalTarget(global, &lookup);
6900  }
6901  if (known_global_function) {
6902  // Push the global object instead of the global receiver because
6903  // code generated by the full code generator expects it.
6904  HValue* context = environment()->LookupContext();
6905  HGlobalObject* global_object = new(zone()) HGlobalObject(context);
6906  PushAndAdd(global_object);
6907  CHECK_ALIVE(VisitExpressions(expr->arguments()));
6908 
6909  CHECK_ALIVE(VisitForValue(expr->expression()));
6910  HValue* function = Pop();
6911  AddInstruction(new(zone()) HCheckFunction(function, expr->target()));
6912 
6913  // Replace the global object with the global receiver.
6914  HGlobalReceiver* global_receiver =
6915  new(zone()) HGlobalReceiver(global_object);
6916  // Index of the receiver from the top of the expression stack.
6917  const int receiver_index = argument_count - 1;
6918  AddInstruction(global_receiver);
6919  ASSERT(environment()->ExpressionStackAt(receiver_index)->
6920  IsGlobalObject());
6921  environment()->SetExpressionStackAt(receiver_index, global_receiver);
6922 
6923  if (TryInlineBuiltinFunctionCall(expr, false)) { // Nothing to drop.
6924  if (FLAG_trace_inlining) {
6925  PrintF("Inlining builtin ");
6926  expr->target()->ShortPrint();
6927  PrintF("\n");
6928  }
6929  return;
6930  }
6931  if (TryInlineCall(expr)) return;
6932 
6933  if (expr->target().is_identical_to(info()->closure())) {
6934  graph()->MarkRecursive();
6935  }
6936 
6937  call = PreProcessCall(new(zone()) HCallKnownGlobal(expr->target(),
6938  argument_count));
6939  } else {
6940  HValue* context = environment()->LookupContext();
6941  HGlobalObject* receiver = new(zone()) HGlobalObject(context);
6942  AddInstruction(receiver);
6943  PushAndAdd(new(zone()) HPushArgument(receiver));
6944  CHECK_ALIVE(VisitArgumentList(expr->arguments()));
6945 
6946  call = new(zone()) HCallGlobal(context, var->name(), argument_count);
6947  Drop(argument_count);
6948  }
6949 
6950  } else if (expr->IsMonomorphic()) {
6951  // The function is on the stack in the unoptimized code during
6952  // evaluation of the arguments.
6953  CHECK_ALIVE(VisitForValue(expr->expression()));
6954  HValue* function = Top();
6955  HValue* context = environment()->LookupContext();
6956  HGlobalObject* global = new(zone()) HGlobalObject(context);
6957  AddInstruction(global);
6958  HGlobalReceiver* receiver = new(zone()) HGlobalReceiver(global);
6959  PushAndAdd(receiver);
6960  CHECK_ALIVE(VisitExpressions(expr->arguments()));
6961  AddInstruction(new(zone()) HCheckFunction(function, expr->target()));
6962 
6963  if (TryInlineBuiltinFunctionCall(expr, true)) { // Drop the function.
6964  if (FLAG_trace_inlining) {
6965  PrintF("Inlining builtin ");
6966  expr->target()->ShortPrint();
6967  PrintF("\n");
6968  }
6969  return;
6970  }
6971 
6972  if (TryInlineCall(expr, true)) { // Drop function from environment.
6973  return;
6974  } else {
6975  call = PreProcessCall(
6976  new(zone()) HInvokeFunction(context,
6977  function,
6978  expr->target(),
6979  argument_count));
6980  Drop(1); // The function.
6981  }
6982 
6983  } else {
6984  CHECK_ALIVE(VisitForValue(expr->expression()));
6985  HValue* function = Top();
6986  HValue* context = environment()->LookupContext();
6987  HGlobalObject* global_object = new(zone()) HGlobalObject(context);
6988  AddInstruction(global_object);
6989  HGlobalReceiver* receiver = new(zone()) HGlobalReceiver(global_object);
6990  AddInstruction(receiver);
6991  PushAndAdd(new(zone()) HPushArgument(receiver));
6992  CHECK_ALIVE(VisitArgumentList(expr->arguments()));
6993 
6994  call = new(zone()) HCallFunction(context, function, argument_count);
6995  Drop(argument_count + 1);
6996  }
6997  }
6998 
6999  call->set_position(expr->position());
7000  return ast_context()->ReturnInstruction(call, expr->id());
7001 }
7002 
7003 
7004 // Checks whether allocation using the given constructor can be inlined.
7005 static bool IsAllocationInlineable(Handle<JSFunction> constructor) {
7006  return constructor->has_initial_map() &&
7007  constructor->initial_map()->instance_type() == JS_OBJECT_TYPE &&
7008  constructor->initial_map()->instance_size() < HAllocateObject::kMaxSize;
7009 }
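// Inlined allocation is therefore limited to constructors whose initial map
// is already known, that produce plain JS_OBJECT_TYPE instances, and whose
// instance size stays below HAllocateObject::kMaxSize.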
7010 
7011 
7012 void HGraphBuilder::VisitCallNew(CallNew* expr) {
7013  ASSERT(!HasStackOverflow());
7014  ASSERT(current_block() != NULL);
7015  ASSERT(current_block()->HasPredecessor());
7016  expr->RecordTypeFeedback(oracle());
7017  int argument_count = expr->arguments()->length() + 1; // Plus constructor.
7018  HValue* context = environment()->LookupContext();
7019 
7020  if (FLAG_inline_construct &&
7021  expr->IsMonomorphic() &&
7022  IsAllocationInlineable(expr->target())) {
7023  // The constructor function is on the stack in the unoptimized code
7024  // during evaluation of the arguments.
7025  CHECK_ALIVE(VisitForValue(expr->expression()));
7026  HValue* function = Top();
7027  CHECK_ALIVE(VisitExpressions(expr->arguments()));
7028  Handle<JSFunction> constructor = expr->target();
7029  HValue* check = AddInstruction(
7030  new(zone()) HCheckFunction(function, constructor));
7031 
7032  // Force completion of inobject slack tracking before generating
7033  // allocation code to finalize instance size.
7034  if (constructor->shared()->IsInobjectSlackTrackingInProgress()) {
7035  constructor->shared()->CompleteInobjectSlackTracking();
7036  }
7037 
7038  // Replace the constructor function with a newly allocated receiver.
7039  HInstruction* receiver = new(zone()) HAllocateObject(context, constructor);
7040  // Index of the receiver from the top of the expression stack.
7041  const int receiver_index = argument_count - 1;
7042  AddInstruction(receiver);
7043  ASSERT(environment()->ExpressionStackAt(receiver_index) == function);
7044  environment()->SetExpressionStackAt(receiver_index, receiver);
7045 
7046  if (TryInlineConstruct(expr, receiver)) return;
7047 
7048  // TODO(mstarzinger): For now we remove the previous HAllocateObject and
7049  // add HPushArgument for the arguments in case inlining failed. What we
7050  // actually should do is emit HInvokeFunction on the constructor instead
7051  // of using HCallNew as a fallback.
7052  receiver->DeleteAndReplaceWith(NULL);
7053  check->DeleteAndReplaceWith(NULL);
7054  environment()->SetExpressionStackAt(receiver_index, function);
7055  HInstruction* call = PreProcessCall(
7056  new(zone()) HCallNew(context, function, argument_count));
7057  call->set_position(expr->position());
7058  return ast_context()->ReturnInstruction(call, expr->id());
7059  } else {
7060  // The constructor function is both an operand to the instruction and an
7061  // argument to the construct call.
7062  HValue* constructor = NULL;
7063  CHECK_ALIVE(constructor = VisitArgument(expr->expression()));
7064  CHECK_ALIVE(VisitArgumentList(expr->arguments()));
7065  HInstruction* call =
7066  new(zone()) HCallNew(context, constructor, argument_count);
7067  Drop(argument_count);
7068  call->set_position(expr->position());
7069  return ast_context()->ReturnInstruction(call, expr->id());
7070  }
7071 }
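// Summary of the construct-call lowering above: with FLAG_inline_construct
// and a monomorphic, inlineable target, the constructor is pinned with
// HCheckFunction, inobject slack tracking is completed so the instance size
// is final, the receiver is allocated eagerly with HAllocateObject, and the
// constructor body is inlined if possible. If inlining fails, or the fast
// path does not apply, the allocation is discarded and a generic HCallNew
// is emitted instead.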
7072 
7073 
7074 // Support for generating inlined runtime functions.
7075 
7076 // Lookup table for generators for runtime calls that are generated inline.
7077 // Elements of the table are member pointers to functions of HGraphBuilder.
7078 #define INLINE_FUNCTION_GENERATOR_ADDRESS(Name, argc, ressize) \
7079  &HGraphBuilder::Generate##Name,
7080 
7081 const HGraphBuilder::InlineFunctionGenerator
7082  HGraphBuilder::kInlineFunctionGenerators[] = {
7083  INLINE_FUNCTION_LIST(INLINE_FUNCTION_GENERATOR_ADDRESS)
7084  INLINE_RUNTIME_FUNCTION_LIST(INLINE_FUNCTION_GENERATOR_ADDRESS)
7085 };
7086 #undef INLINE_FUNCTION_GENERATOR_ADDRESS
7087 
7088 
7089 void HGraphBuilder::VisitCallRuntime(CallRuntime* expr) {
7090  ASSERT(!HasStackOverflow());
7091  ASSERT(current_block() != NULL);
7092  ASSERT(current_block()->HasPredecessor());
7093  if (expr->is_jsruntime()) {
7094  return Bailout("call to a JavaScript runtime function");
7095  }
7096 
7097  const Runtime::Function* function = expr->function();
7098  ASSERT(function != NULL);
7099  if (function->intrinsic_type == Runtime::INLINE) {
7100  ASSERT(expr->name()->length() > 0);
7101  ASSERT(expr->name()->Get(0) == '_');
7102  // Call to an inline function.
7103  int lookup_index = static_cast<int>(function->function_id) -
7104  static_cast<int>(Runtime::kFirstInlineFunction);
7105  ASSERT(lookup_index >= 0);
7106  ASSERT(static_cast<size_t>(lookup_index) <
7107  ARRAY_SIZE(kInlineFunctionGenerators));
7108  InlineFunctionGenerator generator = kInlineFunctionGenerators[lookup_index];
7109 
7110  // Call the inline code generator using the pointer-to-member.
7111  (this->*generator)(expr);
7112  } else {
7113  ASSERT(function->intrinsic_type == Runtime::RUNTIME);
7114  CHECK_ALIVE(VisitArgumentList(expr->arguments()));
7115 
7116  HValue* context = environment()->LookupContext();
7117  Handle<String> name = expr->name();
7118  int argument_count = expr->arguments()->length();
7119  HCallRuntime* call =
7120  new(zone()) HCallRuntime(context, name, function, argument_count);
7121  call->set_position(RelocInfo::kNoPosition);
7122  Drop(argument_count);
7123  return ast_context()->ReturnInstruction(call, expr->id());
7124  }
7125 }
7126 
7127 
7128 void HGraphBuilder::VisitUnaryOperation(UnaryOperation* expr) {
7129  ASSERT(!HasStackOverflow());
7130  ASSERT(current_block() != NULL);
7131  ASSERT(current_block()->HasPredecessor());
7132  switch (expr->op()) {
7133  case Token::DELETE: return VisitDelete(expr);
7134  case Token::VOID: return VisitVoid(expr);
7135  case Token::TYPEOF: return VisitTypeof(expr);
7136  case Token::ADD: return VisitAdd(expr);
7137  case Token::SUB: return VisitSub(expr);
7138  case Token::BIT_NOT: return VisitBitNot(expr);
7139  case Token::NOT: return VisitNot(expr);
7140  default: UNREACHABLE();
7141  }
7142 }
7143 
7144 void HGraphBuilder::VisitDelete(UnaryOperation* expr) {
7145  Property* prop = expr->expression()->AsProperty();
7146  VariableProxy* proxy = expr->expression()->AsVariableProxy();
7147  if (prop != NULL) {
7148  CHECK_ALIVE(VisitForValue(prop->obj()));
7149  CHECK_ALIVE(VisitForValue(prop->key()));
7150  HValue* key = Pop();
7151  HValue* obj = Pop();
7152  HValue* context = environment()->LookupContext();
7153  HDeleteProperty* instr = new(zone()) HDeleteProperty(context, obj, key);
7154  return ast_context()->ReturnInstruction(instr, expr->id());
7155  } else if (proxy != NULL) {
7156  Variable* var = proxy->var();
7157  if (var->IsUnallocated()) {
7158  Bailout("delete with global variable");
7159  } else if (var->IsStackAllocated() || var->IsContextSlot()) {
7160  // Result of deleting non-global variables is false. 'this' is not
7161  // really a variable, though we implement it as one. The
7162  // subexpression does not have side effects.
7163  HValue* value = var->is_this()
7164  ? graph()->GetConstantTrue()
7165  : graph()->GetConstantFalse();
7166  return ast_context()->ReturnValue(value);
7167  } else {
7168  Bailout("delete with non-global variable");
7169  }
7170  } else {
7171  // Result of deleting non-property, non-variable reference is true.
7172  // Evaluate the subexpression for side effects.
7173  CHECK_ALIVE(VisitForEffect(expr->expression()));
7174  return ast_context()->ReturnValue(graph()->GetConstantTrue());
7175  }
7176 }
7177 
7178 
7179 void HGraphBuilder::VisitVoid(UnaryOperation* expr) {
7180  CHECK_ALIVE(VisitForEffect(expr->expression()));
7181  return ast_context()->ReturnValue(graph()->GetConstantUndefined());
7182 }
7183 
7184 
7185 void HGraphBuilder::VisitTypeof(UnaryOperation* expr) {
7186  CHECK_ALIVE(VisitForTypeOf(expr->expression()));
7187  HValue* value = Pop();
7188  HValue* context = environment()->LookupContext();
7189  HInstruction* instr = new(zone()) HTypeof(context, value);
7190  return ast_context()->ReturnInstruction(instr, expr->id());
7191 }
7192 
7193 
7194 void HGraphBuilder::VisitAdd(UnaryOperation* expr) {
7195  CHECK_ALIVE(VisitForValue(expr->expression()));
7196  HValue* value = Pop();
7197  HValue* context = environment()->LookupContext();
7198  HInstruction* instr =
7199  new(zone()) HMul(context, value, graph_->GetConstant1());
7200  return ast_context()->ReturnInstruction(instr, expr->id());
7201 }
7202 
7203 
7204 void HGraphBuilder::VisitSub(UnaryOperation* expr) {
7205  CHECK_ALIVE(VisitForValue(expr->expression()));
7206  HValue* value = Pop();
7207  HValue* context = environment()->LookupContext();
7208  HInstruction* instr =
7209  new(zone()) HMul(context, value, graph_->GetConstantMinus1());
7210  TypeInfo info = oracle()->UnaryType(expr);
7211  if (info.IsUninitialized()) {
7212  AddInstruction(new(zone()) HSoftDeoptimize);
7213  current_block()->MarkAsDeoptimizing();
7214  info = TypeInfo::Unknown();
7215  }
7216  Representation rep = ToRepresentation(info);
7217  TraceRepresentation(expr->op(), info, instr, rep);
7218  instr->AssumeRepresentation(rep);
7219  return ast_context()->ReturnInstruction(instr, expr->id());
7220 }
7221 
7222 
7223 void HGraphBuilder::VisitBitNot(UnaryOperation* expr) {
7224  CHECK_ALIVE(VisitForValue(expr->expression()));
7225  HValue* value = Pop();
7226  TypeInfo info = oracle()->UnaryType(expr);
7227  if (info.IsUninitialized()) {
7228  AddInstruction(new(zone()) HSoftDeoptimize);
7229  current_block()->MarkAsDeoptimizing();
7230  }
7231  HInstruction* instr = new(zone()) HBitNot(value);
7232  return ast_context()->ReturnInstruction(instr, expr->id());
7233 }
7234 
7235 
7236 void HGraphBuilder::VisitNot(UnaryOperation* expr) {
7237  if (ast_context()->IsTest()) {
7238  TestContext* context = TestContext::cast(ast_context());
7239  VisitForControl(expr->expression(),
7240  context->if_false(),
7241  context->if_true());
7242  return;
7243  }
7244 
7245  if (ast_context()->IsEffect()) {
7246  VisitForEffect(expr->expression());
7247  return;
7248  }
7249 
7250  ASSERT(ast_context()->IsValue());
7251  HBasicBlock* materialize_false = graph()->CreateBasicBlock();
7252  HBasicBlock* materialize_true = graph()->CreateBasicBlock();
7253  CHECK_BAILOUT(VisitForControl(expr->expression(),
7254  materialize_false,
7255  materialize_true));
7256 
7257  if (materialize_false->HasPredecessor()) {
7258  materialize_false->SetJoinId(expr->MaterializeFalseId());
7259  set_current_block(materialize_false);
7260  Push(graph()->GetConstantFalse());
7261  } else {
7262  materialize_false = NULL;
7263  }
7264 
7265  if (materialize_true->HasPredecessor()) {
7266  materialize_true->SetJoinId(expr->MaterializeTrueId());
7267  set_current_block(materialize_true);
7268  Push(graph()->GetConstantTrue());
7269  } else {
7270  materialize_true = NULL;
7271  }
7272 
7273  HBasicBlock* join =
7274  CreateJoin(materialize_false, materialize_true, expr->id());
7275  set_current_block(join);
7276  if (join != NULL) return ast_context()->ReturnValue(Pop());
7277 }
7278 
7279 
7280 HInstruction* HGraphBuilder::BuildIncrement(bool returns_original_input,
7281  CountOperation* expr) {
7282  // The input to the count operation is on top of the expression stack.
7283  TypeInfo info = oracle()->IncrementType(expr);
7284  Representation rep = ToRepresentation(info);
7285  if (rep.IsTagged()) {
7286  rep = Representation::Integer32();
7287  }
7288 
7289  if (returns_original_input) {
7290  // We need an explicit HValue representing ToNumber(input). The
7291  // actual HChange instruction we need is (sometimes) added in a later
7292  // phase, so it is not available now to be used as an input to HAdd and
7293  // as the return value.
7294  HInstruction* number_input = new(zone()) HForceRepresentation(Pop(), rep);
7295  AddInstruction(number_input);
7296  Push(number_input);
7297  }
7298 
7299  // The addition has no side effects, so we do not need
7300  // to simulate the expression stack after this instruction.
7301  // Any later failures deopt to the load of the input or earlier.
7302  HConstant* delta = (expr->op() == Token::INC)
7303  ? graph_->GetConstant1()
7304  : graph_->GetConstantMinus1();
7305  HValue* context = environment()->LookupContext();
7306  HInstruction* instr = new(zone()) HAdd(context, Top(), delta);
7307  TraceRepresentation(expr->op(), info, instr, rep);
7308  instr->AssumeRepresentation(rep);
7309  AddInstruction(instr);
7310  return instr;
7311 }
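// BuildIncrement is shared by the prefix and postfix forms; only the caller
// decides which value the whole expression yields:
//   ++x / --x : the result of the HAdd above is the expression value,
//   x++ / x-- : the ToNumber()'d original input (kept on the stack via
//               HForceRepresentation) is the expression value.
// The delta is the cached constant +1 or -1 chosen from the token kind.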
7312 
7313 
7314 void HGraphBuilder::VisitCountOperation(CountOperation* expr) {
7315  ASSERT(!HasStackOverflow());
7316  ASSERT(current_block() != NULL);
7317  ASSERT(current_block()->HasPredecessor());
7318  Expression* target = expr->expression();
7319  VariableProxy* proxy = target->AsVariableProxy();
7320  Property* prop = target->AsProperty();
7321  if (proxy == NULL && prop == NULL) {
7322  return Bailout("invalid lhs in count operation");
7323  }
7324 
7325  // Match the full code generator stack by simulating an extra stack
7326  // element for postfix operations in a non-effect context. The return
7327  // value is ToNumber(input).
7328  bool returns_original_input =
7329  expr->is_postfix() && !ast_context()->IsEffect();
7330  HValue* input = NULL; // ToNumber(original_input).
7331  HValue* after = NULL; // The result after incrementing or decrementing.
7332 
7333  if (proxy != NULL) {
7334  Variable* var = proxy->var();
7335  if (var->mode() == CONST) {
7336  return Bailout("unsupported count operation with const");
7337  }
7338  // Argument of the count operation is a variable, not a property.
7339  ASSERT(prop == NULL);
7340  CHECK_ALIVE(VisitForValue(target));
7341 
7342  after = BuildIncrement(returns_original_input, expr);
7343  input = returns_original_input ? Top() : Pop();
7344  Push(after);
7345 
7346  switch (var->location()) {
7347  case Variable::UNALLOCATED:
7348  HandleGlobalVariableAssignment(var,
7349  after,
7350  expr->position(),
7351  expr->AssignmentId());
7352  break;
7353 
7354  case Variable::PARAMETER:
7355  case Variable::LOCAL:
7356  Bind(var, after);
7357  break;
7358 
7359  case Variable::CONTEXT: {
7360  // Bail out if we try to mutate a parameter value in a function
7361  // using the arguments object. We do not (yet) correctly handle the
7362  // arguments property of the function.
7363  if (info()->scope()->arguments() != NULL) {
7364  // Parameters will rewrite to context slots. We have no direct
7365  // way to detect that the variable is a parameter so we use a
7366  // linear search of the parameter list.
7367  int count = info()->scope()->num_parameters();
7368  for (int i = 0; i < count; ++i) {
7369  if (var == info()->scope()->parameter(i)) {
7370  return Bailout("assignment to parameter in arguments object");
7371  }
7372  }
7373  }
7374 
7375  HValue* context = BuildContextChainWalk(var);
7376  HStoreContextSlot::Mode mode =
7377  (var->mode() == LET || var->mode() == CONST_HARMONY)
7378  ? HStoreContextSlot::kCheckDeoptimize : HStoreContextSlot::kNoCheck;
7379  HStoreContextSlot* instr =
7380  new(zone()) HStoreContextSlot(context, var->index(), mode, after);
7381  AddInstruction(instr);
7382  if (instr->HasObservableSideEffects()) {
7383  AddSimulate(expr->AssignmentId());
7384  }
7385  break;
7386  }
7387 
7388  case Variable::LOOKUP:
7389  return Bailout("lookup variable in count operation");
7390  }
7391 
7392  } else {
7393  // Argument of the count operation is a property.
7394  ASSERT(prop != NULL);
7395  prop->RecordTypeFeedback(oracle(), zone());
7396 
7397  if (prop->key()->IsPropertyName()) {
7398  // Named property.
7399  if (returns_original_input) Push(graph_->GetConstantUndefined());
7400 
7401  CHECK_ALIVE(VisitForValue(prop->obj()));
7402  HValue* obj = Top();
7403 
7404  HInstruction* load = NULL;
7405  if (prop->IsMonomorphic()) {
7406  Handle<String> name = prop->key()->AsLiteral()->AsPropertyName();
7407  Handle<Map> map = prop->GetReceiverTypes()->first();
7408  load = BuildLoadNamed(obj, prop, map, name);
7409  } else {
7410  load = BuildLoadNamedGeneric(obj, prop);
7411  }
7412  PushAndAdd(load);
7413  if (load->HasObservableSideEffects()) AddSimulate(expr->CountId());
7414 
7415  after = BuildIncrement(returns_original_input, expr);
7416  input = Pop();
7417 
7418  HInstruction* store;
7419  CHECK_ALIVE(store = BuildStoreNamed(obj, after, prop));
7420  AddInstruction(store);
7421 
7422  // Overwrite the receiver in the bailout environment with the result
7423  // of the operation, and the placeholder with the original value if
7424  // necessary.
7425  environment()->SetExpressionStackAt(0, after);
7426  if (returns_original_input) environment()->SetExpressionStackAt(1, input);
7427  if (store->HasObservableSideEffects()) AddSimulate(expr->AssignmentId());
7428 
7429  } else {
7430  // Keyed property.
7431  if (returns_original_input) Push(graph_->GetConstantUndefined());
7432 
7433  CHECK_ALIVE(VisitForValue(prop->obj()));
7434  CHECK_ALIVE(VisitForValue(prop->key()));
7435  HValue* obj = environment()->ExpressionStackAt(1);
7436  HValue* key = environment()->ExpressionStackAt(0);
7437 
7438  bool has_side_effects = false;
7439  HValue* load = HandleKeyedElementAccess(
7440  obj, key, NULL, prop, expr->CountId(), RelocInfo::kNoPosition,
7441  false, // is_store
7442  &has_side_effects);
7443  Push(load);
7444  if (has_side_effects) AddSimulate(expr->CountId());
7445 
7446  after = BuildIncrement(returns_original_input, expr);
7447  input = Pop();
7448 
7449  expr->RecordTypeFeedback(oracle(), zone());
7450  HandleKeyedElementAccess(obj, key, after, expr, expr->AssignmentId(),
7451  RelocInfo::kNoPosition,
7452  true, // is_store
7453  &has_side_effects);
7454 
7455  // Drop the key from the bailout environment. Overwrite the receiver
7456  // with the result of the operation, and the placeholder with the
7457  // original value if necessary.
7458  Drop(1);
7459  environment()->SetExpressionStackAt(0, after);
7460  if (returns_original_input) environment()->SetExpressionStackAt(1, input);
7461  ASSERT(has_side_effects); // Stores always have side effects.
7462  AddSimulate(expr->AssignmentId());
7463  }
7464  }
7465 
7466  Drop(returns_original_input ? 2 : 1);
7467  return ast_context()->ReturnValue(expr->is_postfix() ? input : after);
7468 }
7469 
7470 
7471 HStringCharCodeAt* HGraphBuilder::BuildStringCharCodeAt(HValue* context,
7472  HValue* string,
7473  HValue* index) {
7474  AddInstruction(new(zone()) HCheckNonSmi(string));
7475  AddInstruction(HCheckInstanceType::NewIsString(string, zone()));
7476  HStringLength* length = new(zone()) HStringLength(string);
7477  AddInstruction(length);
7478  HInstruction* checked_index =
7479  AddInstruction(new(zone()) HBoundsCheck(index, length));
7480  return new(zone()) HStringCharCodeAt(context, string, checked_index);
7481 }
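// The chain built above guards a raw character load: the receiver is checked
// to be a non-smi string, its length is read with HStringLength, the index
// is bounds-checked against that length, and only then is the
// HStringCharCodeAt instruction emitted.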
7482 
7483 
7484 HInstruction* HGraphBuilder::BuildBinaryOperation(BinaryOperation* expr,
7485  HValue* left,
7486  HValue* right) {
7487  HValue* context = environment()->LookupContext();
7488  TypeInfo info = oracle()->BinaryType(expr);
7489  if (info.IsUninitialized()) {
7490  AddInstruction(new(zone()) HSoftDeoptimize);
7491  current_block()->MarkAsDeoptimizing();
7492  info = TypeInfo::Unknown();
7493  }
7494  HInstruction* instr = NULL;
7495  switch (expr->op()) {
7496  case Token::ADD:
7497  if (info.IsString()) {
7498  AddInstruction(new(zone()) HCheckNonSmi(left));
7499  AddInstruction(HCheckInstanceType::NewIsString(left, zone()));
7500  AddInstruction(new(zone()) HCheckNonSmi(right));
7501  AddInstruction(HCheckInstanceType::NewIsString(right, zone()));
7502  instr = new(zone()) HStringAdd(context, left, right);
7503  } else {
7504  instr = HAdd::NewHAdd(zone(), context, left, right);
7505  }
7506  break;
7507  case Token::SUB:
7508  instr = HSub::NewHSub(zone(), context, left, right);
7509  break;
7510  case Token::MUL:
7511  instr = HMul::NewHMul(zone(), context, left, right);
7512  break;
7513  case Token::MOD:
7514  instr = HMod::NewHMod(zone(), context, left, right);
7515  break;
7516  case Token::DIV:
7517  instr = HDiv::NewHDiv(zone(), context, left, right);
7518  break;
7519  case Token::BIT_XOR:
7520  case Token::BIT_AND:
7521  case Token::BIT_OR:
7522  instr = HBitwise::NewHBitwise(zone(), expr->op(), context, left, right);
7523  break;
7524  case Token::SAR:
7525  instr = HSar::NewHSar(zone(), context, left, right);
7526  break;
7527  case Token::SHR:
7528  instr = HShr::NewHShr(zone(), context, left, right);
7529  break;
7530  case Token::SHL:
7531  instr = HShl::NewHShl(zone(), context, left, right);
7532  break;
7533  default:
7534  UNREACHABLE();
7535  }
7536 
7537  // If we hit an uninitialized binary op stub we will get type info
7538  // for a smi operation. If one of the operands is a constant string
7539  // do not generate code assuming it is a smi operation.
7540  if (info.IsSmi() &&
7541  ((left->IsConstant() && HConstant::cast(left)->HasStringValue()) ||
7542  (right->IsConstant() && HConstant::cast(right)->HasStringValue()))) {
7543  return instr;
7544  }
7545  Representation rep = ToRepresentation(info);
7546  // We only generate either int32 or generic tagged bitwise operations.
7547  if (instr->IsBitwiseBinaryOperation()) {
7548  HBitwiseBinaryOperation::cast(instr)->
7549  InitializeObservedInputRepresentation(rep);
7550  if (rep.IsDouble()) rep = Representation::Integer32();
7551  }
7552  TraceRepresentation(expr->op(), info, instr, rep);
7553  instr->AssumeRepresentation(rep);
7554  return instr;
7555 }
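// BuildBinaryOperation maps each JavaScript operator to its hydrogen
// instruction (HAdd, HSub, HMul, HMod, HDiv, HBitwise, HSar, HShr, HShl).
// Two special cases stand out: '+' with string type feedback becomes an
// HStringAdd guarded by non-smi/string checks, and uninitialized type
// feedback inserts an HSoftDeoptimize so the operation is re-profiled in
// unoptimized code. The representation assumed for the result comes from
// ToRepresentation(); bitwise operations are restricted to either Integer32
// or generic Tagged.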
7556 
7557 
7558 // Check for the form (%_ClassOf(foo) === 'BarClass').
7559 static bool IsClassOfTest(CompareOperation* expr) {
7560  if (expr->op() != Token::EQ_STRICT) return false;
7561  CallRuntime* call = expr->left()->AsCallRuntime();
7562  if (call == NULL) return false;
7563  Literal* literal = expr->right()->AsLiteral();
7564  if (literal == NULL) return false;
7565  if (!literal->handle()->IsString()) return false;
7566  if (!call->name()->IsEqualTo(CStrVector("_ClassOf"))) return false;
7567  ASSERT(call->arguments()->length() == 1);
7568  return true;
7569 }
7570 
7571 
7572 void HGraphBuilder::VisitBinaryOperation(BinaryOperation* expr) {
7573  ASSERT(!HasStackOverflow());
7574  ASSERT(current_block() != NULL);
7575  ASSERT(current_block()->HasPredecessor());
7576  switch (expr->op()) {
7577  case Token::COMMA:
7578  return VisitComma(expr);
7579  case Token::OR:
7580  case Token::AND:
7581  return VisitLogicalExpression(expr);
7582  default:
7583  return VisitArithmeticExpression(expr);
7584  }
7585 }
7586 
7587 
7588 void HGraphBuilder::VisitComma(BinaryOperation* expr) {
7589  CHECK_ALIVE(VisitForEffect(expr->left()));
7590  // Visit the right subexpression in the same AST context as the entire
7591  // expression.
7592  Visit(expr->right());
7593 }
7594 
7595 
7596 void HGraphBuilder::VisitLogicalExpression(BinaryOperation* expr) {
7597  bool is_logical_and = expr->op() == Token::AND;
7598  if (ast_context()->IsTest()) {
7599  TestContext* context = TestContext::cast(ast_context());
7600  // Translate left subexpression.
7601  HBasicBlock* eval_right = graph()->CreateBasicBlock();
7602  if (is_logical_and) {
7603  CHECK_BAILOUT(VisitForControl(expr->left(),
7604  eval_right,
7605  context->if_false()));
7606  } else {
7607  CHECK_BAILOUT(VisitForControl(expr->left(),
7608  context->if_true(),
7609  eval_right));
7610  }
7611 
7612  // Translate right subexpression by visiting it in the same AST
7613  // context as the entire expression.
7614  if (eval_right->HasPredecessor()) {
7615  eval_right->SetJoinId(expr->RightId());
7616  set_current_block(eval_right);
7617  Visit(expr->right());
7618  }
7619 
7620  } else if (ast_context()->IsValue()) {
7621  CHECK_ALIVE(VisitForValue(expr->left()));
7622  ASSERT(current_block() != NULL);
7623 
7624  // We need an extra block to maintain edge-split form.
7625  HBasicBlock* empty_block = graph()->CreateBasicBlock();
7626  HBasicBlock* eval_right = graph()->CreateBasicBlock();
7627  unsigned test_id = expr->left()->test_id();
7628  ToBooleanStub::Types expected(oracle()->ToBooleanTypes(test_id));
7629  HBranch* test = is_logical_and
7630  ? new(zone()) HBranch(Top(), eval_right, empty_block, expected)
7631  : new(zone()) HBranch(Top(), empty_block, eval_right, expected);
7632  current_block()->Finish(test);
7633 
7634  set_current_block(eval_right);
7635  Drop(1); // Value of the left subexpression.
7636  CHECK_BAILOUT(VisitForValue(expr->right()));
7637 
7638  HBasicBlock* join_block =
7639  CreateJoin(empty_block, current_block(), expr->id());
7640  set_current_block(join_block);
7641  return ast_context()->ReturnValue(Pop());
7642 
7643  } else {
7644  ASSERT(ast_context()->IsEffect());
7645  // In an effect context, we don't need the value of the left subexpression,
7646  // only its control flow and side effects. We need an extra block to
7647  // maintain edge-split form.
7648  HBasicBlock* empty_block = graph()->CreateBasicBlock();
7649  HBasicBlock* right_block = graph()->CreateBasicBlock();
7650  if (is_logical_and) {
7651  CHECK_BAILOUT(VisitForControl(expr->left(), right_block, empty_block));
7652  } else {
7653  CHECK_BAILOUT(VisitForControl(expr->left(), empty_block, right_block));
7654  }
7655 
7656  // TODO(kmillikin): Find a way to fix this. It's ugly that there are
7657  // actually two empty blocks (one here and one inserted by
7658  // TestContext::BuildBranch), that they both have an HSimulate though the
7659  // second one is not a merge node, and that we really have no good AST ID to
7660  // put on that first HSimulate.
7661 
7662  if (empty_block->HasPredecessor()) {
7663  empty_block->SetJoinId(expr->id());
7664  } else {
7665  empty_block = NULL;
7666  }
7667 
7668  if (right_block->HasPredecessor()) {
7669  right_block->SetJoinId(expr->RightId());
7670  set_current_block(right_block);
7671  CHECK_BAILOUT(VisitForEffect(expr->right()));
7672  right_block = current_block();
7673  } else {
7674  right_block = NULL;
7675  }
7676 
7677  HBasicBlock* join_block =
7678  CreateJoin(empty_block, right_block, expr->id());
7679  set_current_block(join_block);
7680  // We did not materialize any value in the predecessor environments,
7681  // so there is no need to handle it here.
7682  }
7683 }
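// The three branches above mirror the possible AST contexts for && and ||:
// in a test context the subexpressions feed the caller's branch targets
// directly; in a value context, e.g.
//   var x = a || b;
// an extra empty block keeps the graph in edge-split form before the value
// of either operand is joined; and in an effect context only the control
// flow and side effects of the operands are kept.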
7684 
7685 
7686 void HGraphBuilder::VisitArithmeticExpression(BinaryOperation* expr) {
7687  CHECK_ALIVE(VisitForValue(expr->left()));
7688  CHECK_ALIVE(VisitForValue(expr->right()));
7689  HValue* right = Pop();
7690  HValue* left = Pop();
7691  HInstruction* instr = BuildBinaryOperation(expr, left, right);
7692  instr->set_position(expr->position());
7693  return ast_context()->ReturnInstruction(instr, expr->id());
7694 }
7695 
7696 
7697 void HGraphBuilder::TraceRepresentation(Token::Value op,
7698  TypeInfo info,
7699  HValue* value,
7700  Representation rep) {
7701  if (!FLAG_trace_representation) return;
7702  // TODO(svenpanne) Under which circumstances are we actually not flexible?
7703  // At first glance, this looks a bit weird...
7704  bool flexible = value->CheckFlag(HValue::kFlexibleRepresentation);
7705  PrintF("Operation %s has type info %s, %schange representation assumption "
7706  "for %s (ID %d) from %s to %s\n",
7707  Token::Name(op),
7708  info.ToString(),
7709  flexible ? "" : " DO NOT ",
7710  value->Mnemonic(),
7711  graph_->GetMaximumValueID(),
7712  value->representation().Mnemonic(),
7713  rep.Mnemonic());
7714 }
7715 
7716 
7717 Representation HGraphBuilder::ToRepresentation(TypeInfo info) {
7718  if (info.IsSmi()) return Representation::Integer32();
7719  if (info.IsInteger32()) return Representation::Integer32();
7720  if (info.IsDouble()) return Representation::Double();
7721  if (info.IsNumber()) return Representation::Double();
7722  return Representation::Tagged();
7723 }
7724 
7725 
7726 void HGraphBuilder::HandleLiteralCompareTypeof(CompareOperation* expr,
7727  HTypeof* typeof_expr,
7728  Handle<String> check) {
7729  // Note: The HTypeof itself is removed during canonicalization, if possible.
7730  HValue* value = typeof_expr->value();
7731  HTypeofIsAndBranch* instr = new(zone()) HTypeofIsAndBranch(value, check);
7732  instr->set_position(expr->position());
7733  return ast_context()->ReturnControl(instr, expr->id());
7734 }
7735 
7736 
7737 static bool MatchLiteralCompareNil(HValue* left,
7738  Token::Value op,
7739  HValue* right,
7740  Handle<Object> nil,
7741  HValue** expr) {
7742  if (left->IsConstant() &&
7743  HConstant::cast(left)->handle().is_identical_to(nil) &&
7744  Token::IsEqualityOp(op)) {
7745  *expr = right;
7746  return true;
7747  }
7748  return false;
7749 }
7750 
7751 
7752 static bool MatchLiteralCompareTypeof(HValue* left,
7753  Token::Value op,
7754  HValue* right,
7755  HTypeof** typeof_expr,
7756  Handle<String>* check) {
7757  if (left->IsTypeof() &&
7758  Token::IsEqualityOp(op) &&
7759  right->IsConstant() &&
7760  HConstant::cast(right)->HasStringValue()) {
7761  *typeof_expr = HTypeof::cast(left);
7762  *check = Handle<String>::cast(HConstant::cast(right)->handle());
7763  return true;
7764  }
7765  return false;
7766 }
7767 
7768 
7769 static bool IsLiteralCompareTypeof(HValue* left,
7770  Token::Value op,
7771  HValue* right,
7772  HTypeof** typeof_expr,
7773  Handle<String>* check) {
7774  return MatchLiteralCompareTypeof(left, op, right, typeof_expr, check) ||
7775  MatchLiteralCompareTypeof(right, op, left, typeof_expr, check);
7776 }
7777 
7778 
7779 static bool IsLiteralCompareNil(HValue* left,
7780  Token::Value op,
7781  HValue* right,
7782  Handle<Object> nil,
7783  HValue** expr) {
7784  return MatchLiteralCompareNil(left, op, right, nil, expr) ||
7785  MatchLiteralCompareNil(right, op, left, nil, expr);
7786 }
7787 
7788 
7789 static bool IsLiteralCompareBool(HValue* left,
7790  Token::Value op,
7791  HValue* right) {
7792  return op == Token::EQ_STRICT &&
7793  ((left->IsConstant() && HConstant::cast(left)->handle()->IsBoolean()) ||
7794  (right->IsConstant() && HConstant::cast(right)->handle()->IsBoolean()));
7795 }
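// Taken together, these matchers let VisitCompareOperation recognize literal
// comparisons with the literal on either side of the operator, e.g.
//   typeof x === 'function'    -> HTypeofIsAndBranch
//   x == null, x == undefined  -> HIsNilAndBranch
//   x === true                 -> HCompareObjectEqAndBranch
// instead of emitting a generic comparison.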
7796 
7797 
7798 void HGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
7799  ASSERT(!HasStackOverflow());
7800  ASSERT(current_block() != NULL);
7801  ASSERT(current_block()->HasPredecessor());
7802  if (IsClassOfTest(expr)) {
7803  CallRuntime* call = expr->left()->AsCallRuntime();
7804  ASSERT(call->arguments()->length() == 1);
7805  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
7806  HValue* value = Pop();
7807  Literal* literal = expr->right()->AsLiteral();
7808  Handle<String> rhs = Handle<String>::cast(literal->handle());
7809  HClassOfTestAndBranch* instr =
7810  new(zone()) HClassOfTestAndBranch(value, rhs);
7811  instr->set_position(expr->position());
7812  return ast_context()->ReturnControl(instr, expr->id());
7813  }
7814 
7815  TypeInfo type_info = oracle()->CompareType(expr);
7816  // Check if this expression was ever executed according to type feedback.
7817  // Note that for the special typeof/null/undefined cases we get unknown here.
7818  if (type_info.IsUninitialized()) {
7819  AddInstruction(new(zone()) HSoftDeoptimize);
7820  current_block()->MarkAsDeoptimizing();
7821  type_info = TypeInfo::Unknown();
7822  }
7823 
7824  CHECK_ALIVE(VisitForValue(expr->left()));
7825  CHECK_ALIVE(VisitForValue(expr->right()));
7826 
7827  HValue* context = environment()->LookupContext();
7828  HValue* right = Pop();
7829  HValue* left = Pop();
7830  Token::Value op = expr->op();
7831 
7832  HTypeof* typeof_expr = NULL;
7833  Handle<String> check;
7834  if (IsLiteralCompareTypeof(left, op, right, &typeof_expr, &check)) {
7835  return HandleLiteralCompareTypeof(expr, typeof_expr, check);
7836  }
7837  HValue* sub_expr = NULL;
7838  Factory* f = graph()->isolate()->factory();
7839  if (IsLiteralCompareNil(left, op, right, f->undefined_value(), &sub_expr)) {
7840  return HandleLiteralCompareNil(expr, sub_expr, kUndefinedValue);
7841  }
7842  if (IsLiteralCompareNil(left, op, right, f->null_value(), &sub_expr)) {
7843  return HandleLiteralCompareNil(expr, sub_expr, kNullValue);
7844  }
7845  if (IsLiteralCompareBool(left, op, right)) {
7846  HCompareObjectEqAndBranch* result =
7847  new(zone()) HCompareObjectEqAndBranch(left, right);
7848  result->set_position(expr->position());
7849  return ast_context()->ReturnControl(result, expr->id());
7850  }
7851 
7852  if (op == Token::INSTANCEOF) {
7853  // Check to see if the rhs of the instanceof is a global function not
7854  // residing in new space. If it is we assume that the function will stay the
7855  // same.
7856  Handle<JSFunction> target = Handle<JSFunction>::null();
7857  VariableProxy* proxy = expr->right()->AsVariableProxy();
7858  bool global_function = (proxy != NULL) && proxy->var()->IsUnallocated();
7859  if (global_function &&
7860  info()->has_global_object() &&
7861  !info()->global_object()->IsAccessCheckNeeded()) {
7862  Handle<String> name = proxy->name();
7863  Handle<GlobalObject> global(info()->global_object());
7864  LookupResult lookup(isolate());
7865  global->Lookup(*name, &lookup);
7866  if (lookup.IsFound() &&
7867  lookup.type() == NORMAL &&
7868  lookup.GetValue()->IsJSFunction()) {
7869  Handle<JSFunction> candidate(JSFunction::cast(lookup.GetValue()));
7870  // If the function is in new space we assume it's more likely to
7871  // change and thus prefer the general IC code.
7872  if (!isolate()->heap()->InNewSpace(*candidate)) {
7873  target = candidate;
7874  }
7875  }
7876  }
7877 
7878  // If the target is not null we have found a known global function that is
7879  // assumed to stay the same for this instanceof.
7880  if (target.is_null()) {
7881  HInstanceOf* result = new(zone()) HInstanceOf(context, left, right);
7882  result->set_position(expr->position());
7883  return ast_context()->ReturnInstruction(result, expr->id());
7884  } else {
7885  AddInstruction(new(zone()) HCheckFunction(right, target));
7886  HInstanceOfKnownGlobal* result =
7887  new(zone()) HInstanceOfKnownGlobal(context, left, target);
7888  result->set_position(expr->position());
7889  return ast_context()->ReturnInstruction(result, expr->id());
7890  }
7891  } else if (op == Token::IN) {
7892  HIn* result = new(zone()) HIn(context, left, right);
7893  result->set_position(expr->position());
7894  return ast_context()->ReturnInstruction(result, expr->id());
7895  } else if (type_info.IsNonPrimitive()) {
7896  switch (op) {
7897  case Token::EQ:
7898  case Token::EQ_STRICT: {
7899  // Can we get away with map check and not instance type check?
7900  Handle<Map> map = oracle()->GetCompareMap(expr);
7901  if (!map.is_null()) {
7902  AddInstruction(new(zone()) HCheckNonSmi(left));
7903  AddInstruction(HCheckMaps::NewWithTransitions(left, map, zone()));
7904  AddInstruction(new(zone()) HCheckNonSmi(right));
7905  AddInstruction(HCheckMaps::NewWithTransitions(right, map, zone()));
7906  HCompareObjectEqAndBranch* result =
7907  new(zone()) HCompareObjectEqAndBranch(left, right);
7908  result->set_position(expr->position());
7909  return ast_context()->ReturnControl(result, expr->id());
7910  } else {
7911  AddInstruction(new(zone()) HCheckNonSmi(left));
7912  AddInstruction(HCheckInstanceType::NewIsSpecObject(left, zone()));
7913  AddInstruction(new(zone()) HCheckNonSmi(right));
7914  AddInstruction(HCheckInstanceType::NewIsSpecObject(right, zone()));
7915  HCompareObjectEqAndBranch* result =
7916  new(zone()) HCompareObjectEqAndBranch(left, right);
7917  result->set_position(expr->position());
7918  return ast_context()->ReturnControl(result, expr->id());
7919  }
7920  }
7921  default:
7922  return Bailout("Unsupported non-primitive compare");
7923  }
7924  } else if (type_info.IsString() && oracle()->IsSymbolCompare(expr) &&
7925  (op == Token::EQ || op == Token::EQ_STRICT)) {
7926  AddInstruction(new(zone()) HCheckNonSmi(left));
7927  AddInstruction(HCheckInstanceType::NewIsSymbol(left, zone()));
7928  AddInstruction(new(zone()) HCheckNonSmi(right));
7929  AddInstruction(HCheckInstanceType::NewIsSymbol(right, zone()));
7930  HCompareObjectEqAndBranch* result =
7931  new(zone()) HCompareObjectEqAndBranch(left, right);
7932  result->set_position(expr->position());
7933  return ast_context()->ReturnControl(result, expr->id());
7934  } else {
7935  Representation r = ToRepresentation(type_info);
7936  if (r.IsTagged()) {
7937  HCompareGeneric* result =
7938  new(zone()) HCompareGeneric(context, left, right, op);
7939  result->set_position(expr->position());
7940  return ast_context()->ReturnInstruction(result, expr->id());
7941  } else {
7942  HCompareIDAndBranch* result =
7943  new(zone()) HCompareIDAndBranch(left, right, op);
7944  result->set_position(expr->position());
7945  result->SetInputRepresentation(r);
7946  return ast_context()->ReturnControl(result, expr->id());
7947  }
7948  }
7949 }
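// VisitCompareOperation thus dispatches in priority order: the class-of and
// literal typeof/nil/bool patterns above, then 'instanceof' (specialized to
// HInstanceOfKnownGlobal when the right-hand side is a known global function
// outside new space), then 'in', then known-map object equality, symbol
// equality, and finally either a tagged HCompareGeneric or a typed
// HCompareIDAndBranch depending on the recorded representation.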
7950 
7951 
7952 void HGraphBuilder::HandleLiteralCompareNil(CompareOperation* expr,
7953  HValue* value,
7954  NilValue nil) {
7955  ASSERT(!HasStackOverflow());
7956  ASSERT(current_block() != NULL);
7957  ASSERT(current_block()->HasPredecessor());
7958  EqualityKind kind =
7959  expr->op() == Token::EQ_STRICT ? kStrictEquality : kNonStrictEquality;
7960  HIsNilAndBranch* instr = new(zone()) HIsNilAndBranch(value, kind, nil);
7961  instr->set_position(expr->position());
7962  return ast_context()->ReturnControl(instr, expr->id());
7963 }
7964 
7965 
7966 void HGraphBuilder::VisitThisFunction(ThisFunction* expr) {
7967  ASSERT(!HasStackOverflow());
7968  ASSERT(current_block() != NULL);
7969  ASSERT(current_block()->HasPredecessor());
7970  HThisFunction* self = new(zone()) HThisFunction(
7971  function_state()->compilation_info()->closure());
7972  return ast_context()->ReturnInstruction(self, expr->id());
7973 }
7974 
7975 
7976 void HGraphBuilder::VisitDeclarations(ZoneList<Declaration*>* declarations) {
7977  ASSERT(globals_.is_empty());
7978  AstVisitor::VisitDeclarations(declarations);
7979  if (!globals_.is_empty()) {
7980  Handle<FixedArray> array =
7981  isolate()->factory()->NewFixedArray(globals_.length(), TENURED);
7982  for (int i = 0; i < globals_.length(); ++i) array->set(i, *globals_.at(i));
7983  int flags = DeclareGlobalsEvalFlag::encode(info()->is_eval()) |
7984  DeclareGlobalsNativeFlag::encode(info()->is_native()) |
7985  DeclareGlobalsLanguageMode::encode(info()->language_mode());
7986  HInstruction* result = new(zone()) HDeclareGlobals(
7987  environment()->LookupContext(), array, flags);
7988  AddInstruction(result);
7989  globals_.Clear();
7990  }
7991 }
7992 
7993 
7994 void HGraphBuilder::VisitVariableDeclaration(VariableDeclaration* declaration) {
7995  VariableProxy* proxy = declaration->proxy();
7996  VariableMode mode = declaration->mode();
7997  Variable* variable = proxy->var();
7998  bool hole_init = mode == CONST || mode == CONST_HARMONY || mode == LET;
7999  switch (variable->location()) {
8000  case Variable::UNALLOCATED:
8001  globals_.Add(variable->name(), zone());
8002  globals_.Add(variable->binding_needs_init()
8003  ? isolate()->factory()->the_hole_value()
8004  : isolate()->factory()->undefined_value(), zone());
8005  return;
8006  case Variable::PARAMETER:
8007  case Variable::LOCAL:
8008  if (hole_init) {
8009  HValue* value = graph()->GetConstantHole();
8010  environment()->Bind(variable, value);
8011  }
8012  break;
8013  case Variable::CONTEXT:
8014  if (hole_init) {
8015  HValue* value = graph()->GetConstantHole();
8016  HValue* context = environment()->LookupContext();
8017  HStoreContextSlot* store = new(zone()) HStoreContextSlot(
8018  context, variable->index(), HStoreContextSlot::kNoCheck, value);
8019  AddInstruction(store);
8020  if (store->HasObservableSideEffects()) AddSimulate(proxy->id());
8021  }
8022  break;
8023  case Variable::LOOKUP:
8024  return Bailout("unsupported lookup slot in declaration");
8025  }
8026 }
8027 
8028 
8029 void HGraphBuilder::VisitFunctionDeclaration(FunctionDeclaration* declaration) {
8030  VariableProxy* proxy = declaration->proxy();
8031  Variable* variable = proxy->var();
8032  switch (variable->location()) {
8033  case Variable::UNALLOCATED: {
8034  globals_.Add(variable->name(), zone());
8035  Handle<SharedFunctionInfo> function =
8036  Compiler::BuildFunctionInfo(declaration->fun(), info()->script());
8037  // Check for stack-overflow exception.
8038  if (function.is_null()) return SetStackOverflow();
8039  globals_.Add(function, zone());
8040  return;
8041  }
8042  case Variable::PARAMETER:
8043  case Variable::LOCAL: {
8044  CHECK_ALIVE(VisitForValue(declaration->fun()));
8045  HValue* value = Pop();
8046  environment()->Bind(variable, value);
8047  break;
8048  }
8049  case Variable::CONTEXT: {
8050  CHECK_ALIVE(VisitForValue(declaration->fun()));
8051  HValue* value = Pop();
8052  HValue* context = environment()->LookupContext();
8053  HStoreContextSlot* store = new(zone()) HStoreContextSlot(
8054  context, variable->index(), HStoreContextSlot::kNoCheck, value);
8055  AddInstruction(store);
8056  if (store->HasObservableSideEffects()) AddSimulate(proxy->id());
8057  break;
8058  }
8059  case Variable::LOOKUP:
8060  return Bailout("unsupported lookup slot in declaration");
8061  }
8062 }
8063 
8064 
8065 void HGraphBuilder::VisitModuleDeclaration(ModuleDeclaration* declaration) {
8066  UNREACHABLE();
8067 }
8068 
8069 
8070 void HGraphBuilder::VisitImportDeclaration(ImportDeclaration* declaration) {
8071  UNREACHABLE();
8072 }
8073 
8074 
8075 void HGraphBuilder::VisitExportDeclaration(ExportDeclaration* declaration) {
8076  UNREACHABLE();
8077 }
8078 
8079 
8080 void HGraphBuilder::VisitModuleLiteral(ModuleLiteral* module) {
8081  UNREACHABLE();
8082 }
8083 
8084 
8085 void HGraphBuilder::VisitModuleVariable(ModuleVariable* module) {
8086  UNREACHABLE();
8087 }
8088 
8089 
8090 void HGraphBuilder::VisitModulePath(ModulePath* module) {
8091  UNREACHABLE();
8092 }
8093 
8094 
8095 void HGraphBuilder::VisitModuleUrl(ModuleUrl* module) {
8096  UNREACHABLE();
8097 }
8098 
8099 
8100 // Generators for inline runtime functions.
8101 // Support for types.
8102 void HGraphBuilder::GenerateIsSmi(CallRuntime* call) {
8103  ASSERT(call->arguments()->length() == 1);
8104  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
8105  HValue* value = Pop();
8106  HIsSmiAndBranch* result = new(zone()) HIsSmiAndBranch(value);
8107  return ast_context()->ReturnControl(result, call->id());
8108 }
8109 
8110 
8111 void HGraphBuilder::GenerateIsSpecObject(CallRuntime* call) {
8112  ASSERT(call->arguments()->length() == 1);
8113  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
8114  HValue* value = Pop();
8115  HHasInstanceTypeAndBranch* result =
8116  new(zone()) HHasInstanceTypeAndBranch(value,
8117  FIRST_SPEC_OBJECT_TYPE,
8118  LAST_SPEC_OBJECT_TYPE);
8119  return ast_context()->ReturnControl(result, call->id());
8120 }
8121 
8122 
8123 void HGraphBuilder::GenerateIsFunction(CallRuntime* call) {
8124  ASSERT(call->arguments()->length() == 1);
8125  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
8126  HValue* value = Pop();
8127  HHasInstanceTypeAndBranch* result =
8128  new(zone()) HHasInstanceTypeAndBranch(value, JS_FUNCTION_TYPE);
8129  return ast_context()->ReturnControl(result, call->id());
8130 }
8131 
8132 
8133 void HGraphBuilder::GenerateHasCachedArrayIndex(CallRuntime* call) {
8134  ASSERT(call->arguments()->length() == 1);
8135  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
8136  HValue* value = Pop();
8137  HHasCachedArrayIndexAndBranch* result =
8138  new(zone()) HHasCachedArrayIndexAndBranch(value);
8139  return ast_context()->ReturnControl(result, call->id());
8140 }
8141 
8142 
8143 void HGraphBuilder::GenerateIsArray(CallRuntime* call) {
8144  ASSERT(call->arguments()->length() == 1);
8145  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
8146  HValue* value = Pop();
8147  HHasInstanceTypeAndBranch* result =
8148  new(zone()) HHasInstanceTypeAndBranch(value, JS_ARRAY_TYPE);
8149  return ast_context()->ReturnControl(result, call->id());
8150 }
8151 
8152 
8153 void HGraphBuilder::GenerateIsRegExp(CallRuntime* call) {
8154  ASSERT(call->arguments()->length() == 1);
8155  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
8156  HValue* value = Pop();
8157  HHasInstanceTypeAndBranch* result =
8158  new(zone()) HHasInstanceTypeAndBranch(value, JS_REGEXP_TYPE);
8159  return ast_context()->ReturnControl(result, call->id());
8160 }
8161 
8162 
8163 void HGraphBuilder::GenerateIsObject(CallRuntime* call) {
8164  ASSERT(call->arguments()->length() == 1);
8165  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
8166  HValue* value = Pop();
8167  HIsObjectAndBranch* result = new(zone()) HIsObjectAndBranch(value);
8168  return ast_context()->ReturnControl(result, call->id());
8169 }
8170 
8171 
8172 void HGraphBuilder::GenerateIsNonNegativeSmi(CallRuntime* call) {
8173  return Bailout("inlined runtime function: IsNonNegativeSmi");
8174 }
8175 
8176 
8177 void HGraphBuilder::GenerateIsUndetectableObject(CallRuntime* call) {
8178  ASSERT(call->arguments()->length() == 1);
8179  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
8180  HValue* value = Pop();
8181  HIsUndetectableAndBranch* result =
8182  new(zone()) HIsUndetectableAndBranch(value);
8183  return ast_context()->ReturnControl(result, call->id());
8184 }
8185 
8186 
8187 void HGraphBuilder::GenerateIsStringWrapperSafeForDefaultValueOf(
8188  CallRuntime* call) {
8189  return Bailout(
8190  "inlined runtime function: IsStringWrapperSafeForDefaultValueOf");
8191 }
8192 
8193 
8194 // Support for construct call checks.
8195 void HGraphBuilder::GenerateIsConstructCall(CallRuntime* call) {
8196  ASSERT(call->arguments()->length() == 0);
8197  if (function_state()->outer() != NULL) {
8198  // We are generating graph for inlined function.
8199  HValue* value = function_state()->is_construct()
8200  ? graph()->GetConstantTrue()
8201  : graph()->GetConstantFalse();
8202  return ast_context()->ReturnValue(value);
8203  } else {
8204  return ast_context()->ReturnControl(new(zone()) HIsConstructCallAndBranch,
8205  call->id());
8206  }
8207 }
8208 
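// Illustrative sketch, not part of the original hydrogen.cc listing: what
// GenerateIsConstructCall above does with %_IsConstructCall(). When the
// surrounding function has been inlined, whether it was invoked with 'new'
// is already known at graph-building time, so the intrinsic folds to a
// constant:
//
//   inlined as 'new f(...)'    -> graph()->GetConstantTrue()
//   inlined as plain 'f(...)'  -> graph()->GetConstantFalse()
//
// Only the outermost, non-inlined case emits an actual runtime check via
// HIsConstructCallAndBranch.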
8209 
8210 // Support for arguments.length and arguments[?].
8211 void HGraphBuilder::GenerateArgumentsLength(CallRuntime* call) {
8212  // Our implementation of arguments (based on this stack frame or an
8213  // adapter below it) does not work for inlined functions. This runtime
8214  // function is blacklisted by AstNode::IsInlineable.
8215  ASSERT(function_state()->outer() == NULL);
8216  ASSERT(call->arguments()->length() == 0);
8217  HInstruction* elements = AddInstruction(
8218  new(zone()) HArgumentsElements(false));
8219  HArgumentsLength* result = new(zone()) HArgumentsLength(elements);
8220  return ast_context()->ReturnInstruction(result, call->id());
8221 }
8222 
8223 
8224 void HGraphBuilder::GenerateArguments(CallRuntime* call) {
8225  // Our implementation of arguments (based on this stack frame or an
8226  // adapter below it) does not work for inlined functions. This runtime
8227  // function is blacklisted by AstNode::IsInlineable.
8228  ASSERT(function_state()->outer() == NULL);
8229  ASSERT(call->arguments()->length() == 1);
8230  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
8231  HValue* index = Pop();
8232  HInstruction* elements = AddInstruction(
8233  new(zone()) HArgumentsElements(false));
8234  HInstruction* length = AddInstruction(new(zone()) HArgumentsLength(elements));
8235  HAccessArgumentsAt* result =
8236  new(zone()) HAccessArgumentsAt(elements, length, index);
8237  return ast_context()->ReturnInstruction(result, call->id());
8238 }
8239 
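// Illustrative sketch, not part of the original hydrogen.cc listing: the two
// generators above lower %_ArgumentsLength() and %_Arguments(index) into a
// short instruction sequence that reads the caller's physical frame (or the
// arguments adaptor frame below it) instead of materializing an arguments
// object:
//
//   elements = HArgumentsElements(false)
//   length   = HArgumentsLength(elements)
//   value    = HAccessArgumentsAt(elements, length, index)
//
// Because these instructions inspect the real stack frame, they are only
// correct when the function runs on its own frame, which is why
// AstNode::IsInlineable blacklists them for inlined functions.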
8240 
8241 // Support for accessing the class and value fields of an object.
8242 void HGraphBuilder::GenerateClassOf(CallRuntime* call) {
8243  // The special form detected by IsClassOfTest is handled before we get here
8244  // and does not cause a bailout.
8245  return Bailout("inlined runtime function: ClassOf");
8246 }
8247 
8248 
8249 void HGraphBuilder::GenerateValueOf(CallRuntime* call) {
8250  ASSERT(call->arguments()->length() == 1);
8251  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
8252  HValue* value = Pop();
8253  HValueOf* result = new(zone()) HValueOf(value);
8254  return ast_context()->ReturnInstruction(result, call->id());
8255 }
8256 
8257 
8258 void HGraphBuilder::GenerateDateField(CallRuntime* call) {
8259  ASSERT(call->arguments()->length() == 2);
8260  ASSERT_NE(NULL, call->arguments()->at(1)->AsLiteral());
8261  Smi* index = Smi::cast(*(call->arguments()->at(1)->AsLiteral()->handle()));
8262  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
8263  HValue* date = Pop();
8264  HDateField* result = new(zone()) HDateField(date, index);
8265  return ast_context()->ReturnInstruction(result, call->id());
8266 }
8267 
8268 
8269 void HGraphBuilder::GenerateSetValueOf(CallRuntime* call) {
8270  ASSERT(call->arguments()->length() == 2);
8271  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
8272  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
8273  HValue* value = Pop();
8274  HValue* object = Pop();
8275  // Check if object is not a smi.
8276  HIsSmiAndBranch* smicheck = new(zone()) HIsSmiAndBranch(object);
8277  HBasicBlock* if_smi = graph()->CreateBasicBlock();
8278  HBasicBlock* if_heap_object = graph()->CreateBasicBlock();
8279  HBasicBlock* join = graph()->CreateBasicBlock();
8280  smicheck->SetSuccessorAt(0, if_smi);
8281  smicheck->SetSuccessorAt(1, if_heap_object);
8282  current_block()->Finish(smicheck);
8283  if_smi->Goto(join);
8284 
8285  // Check if object is a JSValue.
8286  set_current_block(if_heap_object);
8287  HHasInstanceTypeAndBranch* typecheck =
8288  new(zone()) HHasInstanceTypeAndBranch(object, JS_VALUE_TYPE);
8289  HBasicBlock* if_js_value = graph()->CreateBasicBlock();
8290  HBasicBlock* not_js_value = graph()->CreateBasicBlock();
8291  typecheck->SetSuccessorAt(0, if_js_value);
8292  typecheck->SetSuccessorAt(1, not_js_value);
8293  current_block()->Finish(typecheck);
8294  not_js_value->Goto(join);
8295 
8296  // Create in-object property store to kValueOffset.
8297  set_current_block(if_js_value);
8298  Handle<String> name = isolate()->factory()->undefined_symbol();
8299  AddInstruction(new(zone()) HStoreNamedField(object,
8300  name,
8301  value,
8302  true, // in-object store.
8303  JSValue::kValueOffset));
8304  if_js_value->Goto(join);
8305  join->SetJoinId(call->id());
8306  set_current_block(join);
8307  return ast_context()->ReturnValue(value);
8308 }
8309 
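// Illustrative sketch, not part of the original hydrogen.cc listing: the
// control flow built by GenerateSetValueOf above is a two-level diamond.
//
//                 current_block
//                   |  HIsSmiAndBranch(object)
//            +------+---------------+
//         if_smi             if_heap_object
//            |                  |  HHasInstanceTypeAndBranch(object, JS_VALUE_TYPE)
//            |            +-----+------------+
//            |       if_js_value       not_js_value
//            |            |  HStoreNamedField(object, ..., kValueOffset)
//            +------------+-------------------+
//                        join
//
// Smis and non-JSValue receivers simply skip the store; in every case the
// intrinsic's result is 'value' itself.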
8310 
8311 // Fast support for charCodeAt(n).
8312 void HGraphBuilder::GenerateStringCharCodeAt(CallRuntime* call) {
8313  ASSERT(call->arguments()->length() == 2);
8314  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
8315  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
8316  HValue* index = Pop();
8317  HValue* string = Pop();
8318  HValue* context = environment()->LookupContext();
8319  HStringCharCodeAt* result = BuildStringCharCodeAt(context, string, index);
8320  return ast_context()->ReturnInstruction(result, call->id());
8321 }
8322 
8323 
8324 // Fast support for string.charAt(n) and string[n].
8325 void HGraphBuilder::GenerateStringCharFromCode(CallRuntime* call) {
8326  ASSERT(call->arguments()->length() == 1);
8327  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
8328  HValue* char_code = Pop();
8329  HValue* context = environment()->LookupContext();
8330  HStringCharFromCode* result =
8331  new(zone()) HStringCharFromCode(context, char_code);
8332  return ast_context()->ReturnInstruction(result, call->id());
8333 }
8334 
8335 
8336 // Fast support for string.charAt(n) and string[n].
8337 void HGraphBuilder::GenerateStringCharAt(CallRuntime* call) {
8338  ASSERT(call->arguments()->length() == 2);
8339  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
8340  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
8341  HValue* index = Pop();
8342  HValue* string = Pop();
8343  HValue* context = environment()->LookupContext();
8344  HStringCharCodeAt* char_code = BuildStringCharCodeAt(context, string, index);
8345  AddInstruction(char_code);
8346  HStringCharFromCode* result =
8347  new(zone()) HStringCharFromCode(context, char_code);
8348  return ast_context()->ReturnInstruction(result, call->id());
8349 }
8350 
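// Illustrative sketch, not part of the original hydrogen.cc listing:
// GenerateStringCharAt above is the composition of the two primitives that
// precede it. BuildStringCharCodeAt fetches the code unit at 'index', and
// HStringCharFromCode turns that code unit back into a one-character string,
// i.e. charAt(string, index) == fromCharCode(charCodeAt(string, index)).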
8351 
8352 // Fast support for object equality testing.
8353 void HGraphBuilder::GenerateObjectEquals(CallRuntime* call) {
8354  ASSERT(call->arguments()->length() == 2);
8355  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
8356  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
8357  HValue* right = Pop();
8358  HValue* left = Pop();
8359  HCompareObjectEqAndBranch* result =
8360  new(zone()) HCompareObjectEqAndBranch(left, right);
8361  return ast_context()->ReturnControl(result, call->id());
8362 }
8363 
8364 
8365 void HGraphBuilder::GenerateLog(CallRuntime* call) {
8366  // %_Log is ignored in optimized code.
8367  return ast_context()->ReturnValue(graph()->GetConstantUndefined());
8368 }
8369 
8370 
8371 // Fast support for Math.random().
8372 void HGraphBuilder::GenerateRandomHeapNumber(CallRuntime* call) {
8373  HValue* context = environment()->LookupContext();
8374  HGlobalObject* global_object = new(zone()) HGlobalObject(context);
8375  AddInstruction(global_object);
8376  HRandom* result = new(zone()) HRandom(global_object);
8377  return ast_context()->ReturnInstruction(result, call->id());
8378 }
8379 
8380 
8381 // Fast support for StringAdd.
8382 void HGraphBuilder::GenerateStringAdd(CallRuntime* call) {
8383  ASSERT_EQ(2, call->arguments()->length());
8384  CHECK_ALIVE(VisitArgumentList(call->arguments()));
8385  HValue* context = environment()->LookupContext();
8386  HCallStub* result = new(zone()) HCallStub(context, CodeStub::StringAdd, 2);
8387  Drop(2);
8388  return ast_context()->ReturnInstruction(result, call->id());
8389 }
8390 
8391 
8392 // Fast support for SubString.
8393 void HGraphBuilder::GenerateSubString(CallRuntime* call) {
8394  ASSERT_EQ(3, call->arguments()->length());
8395  CHECK_ALIVE(VisitArgumentList(call->arguments()));
8396  HValue* context = environment()->LookupContext();
8397  HCallStub* result = new(zone()) HCallStub(context, CodeStub::SubString, 3);
8398  Drop(3);
8399  return ast_context()->ReturnInstruction(result, call->id());
8400 }
8401 
8402 
8403 // Fast support for StringCompare.
8404 void HGraphBuilder::GenerateStringCompare(CallRuntime* call) {
8405  ASSERT_EQ(2, call->arguments()->length());
8406  CHECK_ALIVE(VisitArgumentList(call->arguments()));
8407  HValue* context = environment()->LookupContext();
8408  HCallStub* result =
8409  new(zone()) HCallStub(context, CodeStub::StringCompare, 2);
8410  Drop(2);
8411  return ast_context()->ReturnInstruction(result, call->id());
8412 }
8413 
8414 
8415 // Support for direct calls from JavaScript to native RegExp code.
8416 void HGraphBuilder::GenerateRegExpExec(CallRuntime* call) {
8417  ASSERT_EQ(4, call->arguments()->length());
8418  CHECK_ALIVE(VisitArgumentList(call->arguments()));
8419  HValue* context = environment()->LookupContext();
8420  HCallStub* result = new(zone()) HCallStub(context, CodeStub::RegExpExec, 4);
8421  Drop(4);
8422  return ast_context()->ReturnInstruction(result, call->id());
8423 }
8424 
8425 
8426 // Construct a RegExp exec result with two in-object properties.
8427 void HGraphBuilder::GenerateRegExpConstructResult(CallRuntime* call) {
8428  ASSERT_EQ(3, call->arguments()->length());
8429  CHECK_ALIVE(VisitArgumentList(call->arguments()));
8430  HValue* context = environment()->LookupContext();
8431  HCallStub* result =
8432  new(zone()) HCallStub(context, CodeStub::RegExpConstructResult, 3);
8433  Drop(3);
8434  return ast_context()->ReturnInstruction(result, call->id());
8435 }
8436 
8437 
8438 // Support for fast native caches.
8439 void HGraphBuilder::GenerateGetFromCache(CallRuntime* call) {
8440  return Bailout("inlined runtime function: GetFromCache");
8441 }
8442 
8443 
8444 // Fast support for number to string.
8445 void HGraphBuilder::GenerateNumberToString(CallRuntime* call) {
8446  ASSERT_EQ(1, call->arguments()->length());
8447  CHECK_ALIVE(VisitArgumentList(call->arguments()));
8448  HValue* context = environment()->LookupContext();
8449  HCallStub* result =
8450  new(zone()) HCallStub(context, CodeStub::NumberToString, 1);
8451  Drop(1);
8452  return ast_context()->ReturnInstruction(result, call->id());
8453 }
8454 
8455 
8456 // Fast call for custom callbacks.
8457 void HGraphBuilder::GenerateCallFunction(CallRuntime* call) {
8458  // 1 ~ The function to call is not itself an argument to the call.
8459  int arg_count = call->arguments()->length() - 1;
8460  ASSERT(arg_count >= 1); // There's always at least a receiver.
8461 
8462  for (int i = 0; i < arg_count; ++i) {
8463  CHECK_ALIVE(VisitArgument(call->arguments()->at(i)));
8464  }
8465  CHECK_ALIVE(VisitForValue(call->arguments()->last()));
8466 
8467  HValue* function = Pop();
8468  HValue* context = environment()->LookupContext();
8469 
8470  // Branch for function proxies, or other non-functions.
8471  HHasInstanceTypeAndBranch* typecheck =
8472  new(zone()) HHasInstanceTypeAndBranch(function, JS_FUNCTION_TYPE);
8473  HBasicBlock* if_jsfunction = graph()->CreateBasicBlock();
8474  HBasicBlock* if_nonfunction = graph()->CreateBasicBlock();
8475  HBasicBlock* join = graph()->CreateBasicBlock();
8476  typecheck->SetSuccessorAt(0, if_jsfunction);
8477  typecheck->SetSuccessorAt(1, if_nonfunction);
8478  current_block()->Finish(typecheck);
8479 
8480  set_current_block(if_jsfunction);
8481  HInstruction* invoke_result = AddInstruction(
8482  new(zone()) HInvokeFunction(context, function, arg_count));
8483  Drop(arg_count);
8484  Push(invoke_result);
8485  if_jsfunction->Goto(join);
8486 
8487  set_current_block(if_nonfunction);
8488  HInstruction* call_result = AddInstruction(
8489  new(zone()) HCallFunction(context, function, arg_count));
8490  Drop(arg_count);
8491  Push(call_result);
8492  if_nonfunction->Goto(join);
8493 
8494  set_current_block(join);
8495  join->SetJoinId(call->id());
8496  return ast_context()->ReturnValue(Pop());
8497 }
8498 
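// Illustrative sketch, not part of the original hydrogen.cc listing:
// GenerateCallFunction above splits on the callee's type. Values that are
// genuine JSFunctions take the direct HInvokeFunction path; function proxies
// and other callables fall back to the generic HCallFunction stub. Both arms
// push their result before jumping to 'join', so the surrounding context
// always sees exactly one result value.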
8499 
8500 // Fast call to math functions.
8501 void HGraphBuilder::GenerateMathPow(CallRuntime* call) {
8502  ASSERT_EQ(2, call->arguments()->length());
8503  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
8504  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
8505  HValue* right = Pop();
8506  HValue* left = Pop();
8507  HPower* result = new(zone()) HPower(left, right);
8508  return ast_context()->ReturnInstruction(result, call->id());
8509 }
8510 
8511 
8512 void HGraphBuilder::GenerateMathSin(CallRuntime* call) {
8513  ASSERT_EQ(1, call->arguments()->length());
8514  CHECK_ALIVE(VisitArgumentList(call->arguments()));
8515  HValue* context = environment()->LookupContext();
8516  HCallStub* result =
8517  new(zone()) HCallStub(context, CodeStub::TranscendentalCache, 1);
8518  result->set_transcendental_type(TranscendentalCache::SIN);
8519  Drop(1);
8520  return ast_context()->ReturnInstruction(result, call->id());
8521 }
8522 
8523 
8524 void HGraphBuilder::GenerateMathCos(CallRuntime* call) {
8525  ASSERT_EQ(1, call->arguments()->length());
8526  CHECK_ALIVE(VisitArgumentList(call->arguments()));
8527  HValue* context = environment()->LookupContext();
8528  HCallStub* result =
8529  new(zone()) HCallStub(context, CodeStub::TranscendentalCache, 1);
8530  result->set_transcendental_type(TranscendentalCache::COS);
8531  Drop(1);
8532  return ast_context()->ReturnInstruction(result, call->id());
8533 }
8534 
8535 
8536 void HGraphBuilder::GenerateMathTan(CallRuntime* call) {
8537  ASSERT_EQ(1, call->arguments()->length());
8538  CHECK_ALIVE(VisitArgumentList(call->arguments()));
8539  HValue* context = environment()->LookupContext();
8540  HCallStub* result =
8541  new(zone()) HCallStub(context, CodeStub::TranscendentalCache, 1);
8542  result->set_transcendental_type(TranscendentalCache::TAN);
8543  Drop(1);
8544  return ast_context()->ReturnInstruction(result, call->id());
8545 }
8546 
8547 
8548 void HGraphBuilder::GenerateMathLog(CallRuntime* call) {
8549  ASSERT_EQ(1, call->arguments()->length());
8550  CHECK_ALIVE(VisitArgumentList(call->arguments()));
8551  HValue* context = environment()->LookupContext();
8552  HCallStub* result =
8553  new(zone()) HCallStub(context, CodeStub::TranscendentalCache, 1);
8554  result->set_transcendental_type(TranscendentalCache::LOG);
8555  Drop(1);
8556  return ast_context()->ReturnInstruction(result, call->id());
8557 }
8558 
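// Illustrative sketch, not part of the original hydrogen.cc listing: the
// MathSin/MathCos/MathTan/MathLog generators above all share one pattern.
// The single argument is pushed as a stub argument, a
// CodeStub::TranscendentalCache stub call is emitted, and
// set_transcendental_type() selects which cache (SIN, COS, TAN or LOG) the
// stub consults; only that enum value differs between the four functions.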
8559 
8560 void HGraphBuilder::GenerateMathSqrt(CallRuntime* call) {
8561  return Bailout("inlined runtime function: MathSqrt");
8562 }
8563 
8564 
8565 // Check whether two RegExps are equivalent
8566 void HGraphBuilder::GenerateIsRegExpEquivalent(CallRuntime* call) {
8567  return Bailout("inlined runtime function: IsRegExpEquivalent");
8568 }
8569 
8570 
8571 void HGraphBuilder::GenerateGetCachedArrayIndex(CallRuntime* call) {
8572  ASSERT(call->arguments()->length() == 1);
8573  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
8574  HValue* value = Pop();
8575  HGetCachedArrayIndex* result = new(zone()) HGetCachedArrayIndex(value);
8576  return ast_context()->ReturnInstruction(result, call->id());
8577 }
8578 
8579 
8580 void HGraphBuilder::GenerateFastAsciiArrayJoin(CallRuntime* call) {
8581  return Bailout("inlined runtime function: FastAsciiArrayJoin");
8582 }
8583 
8584 
8585 #undef CHECK_BAILOUT
8586 #undef CHECK_ALIVE
8587 
8588 
8589 HEnvironment::HEnvironment(HEnvironment* outer,
8590  Scope* scope,
8591  Handle<JSFunction> closure,
8592  Zone* zone)
8593  : closure_(closure),
8594  values_(0, zone),
8595  assigned_variables_(4, zone),
8596  frame_type_(JS_FUNCTION),
8597  parameter_count_(0),
8598  specials_count_(1),
8599  local_count_(0),
8600  outer_(outer),
8601  pop_count_(0),
8602  push_count_(0),
8603  ast_id_(AstNode::kNoNumber),
8604  zone_(zone) {
8605  Initialize(scope->num_parameters() + 1, scope->num_stack_slots(), 0);
8606 }
8607 
8608 
8609 HEnvironment::HEnvironment(const HEnvironment* other, Zone* zone)
8610  : values_(0, zone),
8611  assigned_variables_(0, zone),
8612  frame_type_(JS_FUNCTION),
8613  parameter_count_(0),
8614  specials_count_(1),
8615  local_count_(0),
8616  outer_(NULL),
8617  pop_count_(0),
8618  push_count_(0),
8619  ast_id_(other->ast_id()),
8620  zone_(zone) {
8621  Initialize(other);
8622 }
8623 
8624 
8625 HEnvironment::HEnvironment(HEnvironment* outer,
8626  Handle<JSFunction> closure,
8627  FrameType frame_type,
8628  int arguments,
8629  Zone* zone)
8630  : closure_(closure),
8631  values_(arguments, zone),
8632  assigned_variables_(0, zone),
8633  frame_type_(frame_type),
8634  parameter_count_(arguments),
8635  local_count_(0),
8636  outer_(outer),
8637  pop_count_(0),
8638  push_count_(0),
8639  ast_id_(AstNode::kNoNumber),
8640  zone_(zone) {
8641 }
8642 
8643 
8644 void HEnvironment::Initialize(int parameter_count,
8645  int local_count,
8646  int stack_height) {
8647  parameter_count_ = parameter_count;
8648  local_count_ = local_count;
8649 
8650  // Avoid reallocating the temporaries' backing store on the first Push.
8651  int total = parameter_count + specials_count_ + local_count + stack_height;
8652  values_.Initialize(total + 4, zone());
8653  for (int i = 0; i < total; ++i) values_.Add(NULL, zone());
8654 }
8655 
8656 
8657 void HEnvironment::Initialize(const HEnvironment* other) {
8658  closure_ = other->closure();
8659  values_.AddAll(other->values_, zone());
8660  assigned_variables_.AddAll(other->assigned_variables_, zone());
8661  frame_type_ = other->frame_type_;
8662  parameter_count_ = other->parameter_count_;
8663  local_count_ = other->local_count_;
8664  if (other->outer_ != NULL) outer_ = other->outer_->Copy(); // Deep copy.
8665  pop_count_ = other->pop_count_;
8666  push_count_ = other->push_count_;
8667  ast_id_ = other->ast_id_;
8668 }
8669 
8670 
8671 void HEnvironment::AddIncomingEdge(HBasicBlock* block, HEnvironment* other) {
8672  ASSERT(!block->IsLoopHeader());
8673  ASSERT(values_.length() == other->values_.length());
8674 
8675  int length = values_.length();
8676  for (int i = 0; i < length; ++i) {
8677  HValue* value = values_[i];
8678  if (value != NULL && value->IsPhi() && value->block() == block) {
8679  // There is already a phi for the i'th value.
8680  HPhi* phi = HPhi::cast(value);
8681  // Assert index is correct and that we haven't missed an incoming edge.
8682  ASSERT(phi->merged_index() == i);
8683  ASSERT(phi->OperandCount() == block->predecessors()->length());
8684  phi->AddInput(other->values_[i]);
8685  } else if (values_[i] != other->values_[i]) {
8686  // There is a fresh value on the incoming edge, a phi is needed.
8687  ASSERT(values_[i] != NULL && other->values_[i] != NULL);
8688  HPhi* phi = new(zone()) HPhi(i, zone());
8689  HValue* old_value = values_[i];
8690  for (int j = 0; j < block->predecessors()->length(); j++) {
8691  phi->AddInput(old_value);
8692  }
8693  phi->AddInput(other->values_[i]);
8694  this->values_[i] = phi;
8695  block->AddPhi(phi);
8696  }
8697  }
8698 }
8699 
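// Illustrative sketch, not part of the original hydrogen.cc listing: how
// AddIncomingEdge above merges a new predecessor's environment into the
// environment of a join block. For slot i there are three cases:
//
//   values_[i] is already a phi of this block  -> add other->values_[i] as
//                                                 one more phi input
//   values_[i] == other->values_[i]            -> nothing to do
//   the values differ                          -> create HPhi(i), feed it the
//                                                 old value once per already
//                                                 registered predecessor, then
//                                                 the new value, and install it
//                                                 in values_[i] and the block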
8700 
8701 void HEnvironment::Bind(int index, HValue* value) {
8702  ASSERT(value != NULL);
8703  if (!assigned_variables_.Contains(index)) {
8704  assigned_variables_.Add(index, zone());
8705  }
8706  values_[index] = value;
8707 }
8708 
8709 
8710 bool HEnvironment::HasExpressionAt(int index) const {
8711  return index >= parameter_count_ + specials_count_ + local_count_;
8712 }
8713 
8714 
8715 bool HEnvironment::ExpressionStackIsEmpty() const {
8716  ASSERT(length() >= first_expression_index());
8717  return length() == first_expression_index();
8718 }
8719 
8720 
8721 void HEnvironment::SetExpressionStackAt(int index_from_top, HValue* value) {
8722  int count = index_from_top + 1;
8723  int index = values_.length() - count;
8724  ASSERT(HasExpressionAt(index));
8725  // The push count must include at least the element in question or else
8726  // the new value will not be included in this environment's history.
8727  if (push_count_ < count) {
8728  // This is the same effect as popping then re-pushing 'count' elements.
8729  pop_count_ += (count - push_count_);
8730  push_count_ = count;
8731  }
8732  values_[index] = value;
8733 }
8734 
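// Illustrative sketch, not part of the original hydrogen.cc listing: an
// example of the history bookkeeping in SetExpressionStackAt above.
// Overwriting the slot two places from the top (index_from_top == 1, so
// count == 2) of an environment whose push_count_ is currently 1 is recorded
// as one extra pop and a push count of 2 (pop_count_ += 1, push_count_ = 2),
// which is equivalent to popping and re-pushing both elements and therefore
// keeps the new value in the environment's recorded history.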
8735 
8736 void HEnvironment::Drop(int count) {
8737  for (int i = 0; i < count; ++i) {
8738  Pop();
8739  }
8740 }
8741 
8742 
8743 HEnvironment* HEnvironment::Copy() const {
8744  return new(zone()) HEnvironment(this, zone());
8745 }
8746 
8747 
8748 HEnvironment* HEnvironment::CopyWithoutHistory() const {
8749  HEnvironment* result = Copy();
8750  result->ClearHistory();
8751  return result;
8752 }
8753 
8754 
8755 HEnvironment* HEnvironment::CopyAsLoopHeader(HBasicBlock* loop_header) const {
8756  HEnvironment* new_env = Copy();
8757  for (int i = 0; i < values_.length(); ++i) {
8758  HPhi* phi = new(zone()) HPhi(i, zone());
8759  phi->AddInput(values_[i]);
8760  new_env->values_[i] = phi;
8761  loop_header->AddPhi(phi);
8762  }
8763  new_env->ClearHistory();
8764  return new_env;
8765 }
8766 
8767 
8768 HEnvironment* HEnvironment::CreateStubEnvironment(HEnvironment* outer,
8769  Handle<JSFunction> target,
8770  FrameType frame_type,
8771  int arguments) const {
8772  HEnvironment* new_env =
8773  new(zone()) HEnvironment(outer, target, frame_type,
8774  arguments + 1, zone());
8775  for (int i = 0; i <= arguments; ++i) { // Include receiver.
8776  new_env->Push(ExpressionStackAt(arguments - i));
8777  }
8778  new_env->ClearHistory();
8779  return new_env;
8780 }
8781 
8782 
8783 HEnvironment* HEnvironment::CopyForInlining(
8784  Handle<JSFunction> target,
8785  int arguments,
8786  FunctionLiteral* function,
8787  HConstant* undefined,
8788  CallKind call_kind,
8789  bool is_construct) const {
8790  ASSERT(frame_type() == JS_FUNCTION);
8791 
8792  Zone* zone = closure()->GetIsolate()->zone();
8793 
8794  // Outer environment is a copy of this one without the arguments.
8795  int arity = function->scope()->num_parameters();
8796 
8797  HEnvironment* outer = Copy();
8798  outer->Drop(arguments + 1); // Including receiver.
8799  outer->ClearHistory();
8800 
8801  if (is_construct) {
8802  // Create artificial constructor stub environment. The receiver should
8803  // actually be the constructor function, but we pass the newly allocated
8804  // object instead; DoComputeConstructStubFrame() relies on that.
8805  outer = CreateStubEnvironment(outer, target, JS_CONSTRUCT, arguments);
8806  }
8807 
8808  if (arity != arguments) {
8809  // Create artificial arguments adaptation environment.
8810  outer = CreateStubEnvironment(outer, target, ARGUMENTS_ADAPTOR, arguments);
8811  }
8812 
8813  HEnvironment* inner =
8814  new(zone) HEnvironment(outer, function->scope(), target, zone);
8815  // Get the argument values from the original environment.
8816  for (int i = 0; i <= arity; ++i) { // Include receiver.
8817  HValue* push = (i <= arguments) ?
8818  ExpressionStackAt(arguments - i) : undefined;
8819  inner->SetValueAt(i, push);
8820  }
8821  // If the function we are inlining is a strict mode function or a
8822  // builtin function, pass undefined as the receiver for function
8823  // calls (instead of the global receiver).
8824  if ((target->shared()->native() || !function->is_classic_mode()) &&
8825  call_kind == CALL_AS_FUNCTION && !is_construct) {
8826  inner->SetValueAt(0, undefined);
8827  }
8828  inner->SetValueAt(arity + 1, LookupContext());
8829  for (int i = arity + 2; i < inner->length(); ++i) {
8830  inner->SetValueAt(i, undefined);
8831  }
8832 
8833  inner->set_ast_id(AstNode::kFunctionEntryId);
8834  return inner;
8835 }
8836 
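// Illustrative sketch, not part of the original hydrogen.cc listing: the
// shape of the inlined environment built by CopyForInlining above, for a
// call site passing 'arguments' values to a callee expecting 'arity'
// parameters:
//
//   slot 0              receiver (replaced by undefined for strict-mode or
//                       builtin callees called as plain functions)
//   slots 1 .. arity    parameters, padded with undefined if arity > arguments
//   slot arity + 1      the context, copied via LookupContext()
//   slots arity + 2 ..  locals, initialized to undefined
//
// The outer environment is this one with the receiver and arguments dropped,
// wrapped in a JS_CONSTRUCT stub frame for construct calls and in an
// ARGUMENTS_ADAPTOR frame when arity != arguments.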
8837 
8838 void HEnvironment::PrintTo(StringStream* stream) {
8839  for (int i = 0; i < length(); i++) {
8840  if (i == 0) stream->Add("parameters\n");
8841  if (i == parameter_count()) stream->Add("specials\n");
8842  if (i == parameter_count() + specials_count()) stream->Add("locals\n");
8843  if (i == parameter_count() + specials_count() + local_count()) {
8844  stream->Add("expressions\n");
8845  }
8846  HValue* val = values_.at(i);
8847  stream->Add("%d: ", i);
8848  if (val != NULL) {
8849  val->PrintNameTo(stream);
8850  } else {
8851  stream->Add("NULL");
8852  }
8853  stream->Add("\n");
8854  }
8855  PrintF("\n");
8856 }
8857 
8858 
8859 void HEnvironment::PrintToStd() {
8860  HeapStringAllocator string_allocator;
8861  StringStream trace(&string_allocator);
8862  PrintTo(&trace);
8863  PrintF("%s", *trace.ToCString());
8864 }
8865 
8866 
8867 void HTracer::TraceCompilation(FunctionLiteral* function) {
8868  Tag tag(this, "compilation");
8869  Handle<String> name = function->debug_name();
8870  PrintStringProperty("name", *name->ToCString());
8871  PrintStringProperty("method", *name->ToCString());
8872  PrintLongProperty("date", static_cast<int64_t>(OS::TimeCurrentMillis()));
8873 }
8874 
8875 
8876 void HTracer::TraceLithium(const char* name, LChunk* chunk) {
8877  Trace(name, chunk->graph(), chunk);
8878 }
8879 
8880 
8881 void HTracer::TraceHydrogen(const char* name, HGraph* graph) {
8882  Trace(name, graph, NULL);
8883 }
8884 
8885 
8886 void HTracer::Trace(const char* name, HGraph* graph, LChunk* chunk) {
8887  Tag tag(this, "cfg");
8888  PrintStringProperty("name", name);
8889  const ZoneList<HBasicBlock*>* blocks = graph->blocks();
8890  for (int i = 0; i < blocks->length(); i++) {
8891  HBasicBlock* current = blocks->at(i);
8892  Tag block_tag(this, "block");
8893  PrintBlockProperty("name", current->block_id());
8894  PrintIntProperty("from_bci", -1);
8895  PrintIntProperty("to_bci", -1);
8896 
8897  if (!current->predecessors()->is_empty()) {
8898  PrintIndent();
8899  trace_.Add("predecessors");
8900  for (int j = 0; j < current->predecessors()->length(); ++j) {
8901  trace_.Add(" \"B%d\"", current->predecessors()->at(j)->block_id());
8902  }
8903  trace_.Add("\n");
8904  } else {
8905  PrintEmptyProperty("predecessors");
8906  }
8907 
8908  if (current->end()->SuccessorCount() == 0) {
8909  PrintEmptyProperty("successors");
8910  } else {
8911  PrintIndent();
8912  trace_.Add("successors");
8913  for (HSuccessorIterator it(current->end()); !it.Done(); it.Advance()) {
8914  trace_.Add(" \"B%d\"", it.Current()->block_id());
8915  }
8916  trace_.Add("\n");
8917  }
8918 
8919  PrintEmptyProperty("xhandlers");
8920  const char* flags = current->IsLoopSuccessorDominator()
8921  ? "dom-loop-succ"
8922  : "";
8923  PrintStringProperty("flags", flags);
8924 
8925  if (current->dominator() != NULL) {
8926  PrintBlockProperty("dominator", current->dominator()->block_id());
8927  }
8928 
8929  PrintIntProperty("loop_depth", current->LoopNestingDepth());
8930 
8931  if (chunk != NULL) {
8932  int first_index = current->first_instruction_index();
8933  int last_index = current->last_instruction_index();
8934  PrintIntProperty(
8935  "first_lir_id",
8936  LifetimePosition::FromInstructionIndex(first_index).Value());
8937  PrintIntProperty(
8938  "last_lir_id",
8939  LifetimePosition::FromInstructionIndex(last_index).Value());
8940  }
8941 
8942  {
8943  Tag states_tag(this, "states");
8944  Tag locals_tag(this, "locals");
8945  int total = current->phis()->length();
8946  PrintIntProperty("size", current->phis()->length());
8947  PrintStringProperty("method", "None");
8948  for (int j = 0; j < total; ++j) {
8949  HPhi* phi = current->phis()->at(j);
8950  PrintIndent();
8951  trace_.Add("%d ", phi->merged_index());
8952  phi->PrintNameTo(&trace_);
8953  trace_.Add(" ");
8954  phi->PrintTo(&trace_);
8955  trace_.Add("\n");
8956  }
8957  }
8958 
8959  {
8960  Tag HIR_tag(this, "HIR");
8961  HInstruction* instruction = current->first();
8962  while (instruction != NULL) {
8963  int bci = 0;
8964  int uses = instruction->UseCount();
8965  PrintIndent();
8966  trace_.Add("%d %d ", bci, uses);
8967  instruction->PrintNameTo(&trace_);
8968  trace_.Add(" ");
8969  instruction->PrintTo(&trace_);
8970  trace_.Add(" <|@\n");
8971  instruction = instruction->next();
8972  }
8973  }
8974 
8975 
8976  if (chunk != NULL) {
8977  Tag LIR_tag(this, "LIR");
8978  int first_index = current->first_instruction_index();
8979  int last_index = current->last_instruction_index();
8980  if (first_index != -1 && last_index != -1) {
8981  const ZoneList<LInstruction*>* instructions = chunk->instructions();
8982  for (int i = first_index; i <= last_index; ++i) {
8983  LInstruction* linstr = instructions->at(i);
8984  if (linstr != NULL) {
8985  PrintIndent();
8986  trace_.Add("%d ",
8987  LifetimePosition::FromInstructionIndex(i).Value());
8988  linstr->PrintTo(&trace_);
8989  trace_.Add(" <|@\n");
8990  }
8991  }
8992  }
8993  }
8994  }
8995 }
8996 
8997 
8998 void HTracer::TraceLiveRanges(const char* name, LAllocator* allocator) {
8999  Tag tag(this, "intervals");
9000  PrintStringProperty("name", name);
9001 
9002  const Vector<LiveRange*>* fixed_d = allocator->fixed_double_live_ranges();
9003  for (int i = 0; i < fixed_d->length(); ++i) {
9004  TraceLiveRange(fixed_d->at(i), "fixed", allocator->zone());
9005  }
9006 
9007  const Vector<LiveRange*>* fixed = allocator->fixed_live_ranges();
9008  for (int i = 0; i < fixed->length(); ++i) {
9009  TraceLiveRange(fixed->at(i), "fixed", allocator->zone());
9010  }
9011 
9012  const ZoneList<LiveRange*>* live_ranges = allocator->live_ranges();
9013  for (int i = 0; i < live_ranges->length(); ++i) {
9014  TraceLiveRange(live_ranges->at(i), "object", allocator->zone());
9015  }
9016 }
9017 
9018 
9019 void HTracer::TraceLiveRange(LiveRange* range, const char* type,
9020  Zone* zone) {
9021  if (range != NULL && !range->IsEmpty()) {
9022  PrintIndent();
9023  trace_.Add("%d %s", range->id(), type);
9024  if (range->HasRegisterAssigned()) {
9025  LOperand* op = range->CreateAssignedOperand(zone);
9026  int assigned_reg = op->index();
9027  if (op->IsDoubleRegister()) {
9028  trace_.Add(" \"%s\"",
9029  DoubleRegister::AllocationIndexToString(assigned_reg));
9030  } else {
9031  ASSERT(op->IsRegister());
9032  trace_.Add(" \"%s\"", Register::AllocationIndexToString(assigned_reg));
9033  }
9034  } else if (range->IsSpilled()) {
9035  LOperand* op = range->TopLevel()->GetSpillOperand();
9036  if (op->IsDoubleStackSlot()) {
9037  trace_.Add(" \"double_stack:%d\"", op->index());
9038  } else {
9039  ASSERT(op->IsStackSlot());
9040  trace_.Add(" \"stack:%d\"", op->index());
9041  }
9042  }
9043  int parent_index = -1;
9044  if (range->IsChild()) {
9045  parent_index = range->parent()->id();
9046  } else {
9047  parent_index = range->id();
9048  }
9049  LOperand* op = range->FirstHint();
9050  int hint_index = -1;
9051  if (op != NULL && op->IsUnallocated()) {
9052  hint_index = LUnallocated::cast(op)->virtual_register();
9053  }
9054  trace_.Add(" %d %d", parent_index, hint_index);
9055  UseInterval* cur_interval = range->first_interval();
9056  while (cur_interval != NULL && range->Covers(cur_interval->start())) {
9057  trace_.Add(" [%d, %d[",
9058  cur_interval->start().Value(),
9059  cur_interval->end().Value());
9060  cur_interval = cur_interval->next();
9061  }
9062 
9063  UsePosition* current_pos = range->first_pos();
9064  while (current_pos != NULL) {
9065  if (current_pos->RegisterIsBeneficial() || FLAG_trace_all_uses) {
9066  trace_.Add(" %d M", current_pos->pos().Value());
9067  }
9068  current_pos = current_pos->next();
9069  }
9070 
9071  trace_.Add(" \"\"\n");
9072  }
9073 }
9074 
9075 
9076 void HTracer::FlushToFile() {
9077  AppendChars(filename_, *trace_.ToCString(), trace_.length(), false);
9078  trace_.Reset();
9079 }
9080 
9081 
9082 void HStatistics::Initialize(CompilationInfo* info) {
9083  source_size_ += info->shared_info()->SourceSize();
9084 }
9085 
9086 
9087 void HStatistics::Print() {
9088  PrintF("Timing results:\n");
9089  int64_t sum = 0;
9090  for (int i = 0; i < timing_.length(); ++i) {
9091  sum += timing_[i];
9092  }
9093 
9094  for (int i = 0; i < names_.length(); ++i) {
9095  PrintF("%30s", names_[i]);
9096  double ms = static_cast<double>(timing_[i]) / 1000;
9097  double percent = static_cast<double>(timing_[i]) * 100 / sum;
9098  PrintF(" - %7.3f ms / %4.1f %% ", ms, percent);
9099 
9100  unsigned size = sizes_[i];
9101  double size_percent = static_cast<double>(size) * 100 / total_size_;
9102  PrintF(" %8u bytes / %4.1f %%\n", size, size_percent);
9103  }
9104  double source_size_in_kb = static_cast<double>(source_size_) / 1024;
9105  double normalized_time = source_size_in_kb > 0
9106  ? (static_cast<double>(sum) / 1000) / source_size_in_kb
9107  : 0;
9108  double normalized_bytes = source_size_in_kb > 0
9109  ? total_size_ / source_size_in_kb
9110  : 0;
9111  PrintF("%30s - %7.3f ms %7.3f bytes\n", "Sum",
9112  normalized_time, normalized_bytes);
9113  PrintF("---------------------------------------------------------------\n");
9114  PrintF("%30s - %7.3f ms (%.1f times slower than full code gen)\n",
9115  "Total",
9116  static_cast<double>(total_) / 1000,
9117  static_cast<double>(total_) / full_code_gen_);
9118 }
9119 
9120 
9121 void HStatistics::SaveTiming(const char* name, int64_t ticks, unsigned size) {
9122  if (name == HPhase::kFullCodeGen) {
9123  full_code_gen_ += ticks;
9124  } else if (name == HPhase::kTotal) {
9125  total_ += ticks;
9126  } else {
9127  total_size_ += size;
9128  for (int i = 0; i < names_.length(); ++i) {
9129  if (names_[i] == name) {
9130  timing_[i] += ticks;
9131  sizes_[i] += size;
9132  return;
9133  }
9134  }
9135  names_.Add(name);
9136  timing_.Add(ticks);
9137  sizes_.Add(size);
9138  }
9139 }
9140 
9141 
9142 const char* const HPhase::kFullCodeGen = "Full code generator";
9143 const char* const HPhase::kTotal = "Total";
9144 
9145 
9146 void HPhase::Begin(const char* name,
9147  HGraph* graph,
9148  LChunk* chunk,
9149  LAllocator* allocator) {
9150  name_ = name;
9151  graph_ = graph;
9152  chunk_ = chunk;
9153  allocator_ = allocator;
9154  if (allocator != NULL && chunk_ == NULL) {
9155  chunk_ = allocator->chunk();
9156  }
9157  if (FLAG_hydrogen_stats) start_ = OS::Ticks();
9158  start_allocation_size_ = Zone::allocation_size_;
9159 }
9160 
9161 
9162 void HPhase::End() const {
9163  if (FLAG_hydrogen_stats) {
9164  int64_t end = OS::Ticks();
9165  unsigned size = Zone::allocation_size_ - start_allocation_size_;
9166  HStatistics::Instance()->SaveTiming(name_, end - start_, size);
9167  }
9168 
9169  // Produce trace output if the flag is set and the first letter of the
9170  // phase name occurs in the command line parameter FLAG_trace_phase.
9171  if (FLAG_trace_hydrogen &&
9172  OS::StrChr(const_cast<char*>(FLAG_trace_phase), name_[0]) != NULL) {
9173  if (graph_ != NULL) HTracer::Instance()->TraceHydrogen(name_, graph_);
9174  if (chunk_ != NULL) HTracer::Instance()->TraceLithium(name_, chunk_);
9175  if (allocator_ != NULL) {
9176  HTracer::Instance()->TraceLiveRanges(name_, allocator_);
9177  }
9178  }
9179 
9180 #ifdef DEBUG
9181  if (graph_ != NULL) graph_->Verify(false); // No full verify.
9182  if (allocator_ != NULL) allocator_->Verify();
9183 #endif
9184 }
9185 
9186 } } // namespace v8::internal
Definition: flags.cc:1349
HBasicBlock * block() const
static Handle< T > cast(Handle< S > that)
Definition: handles.h:81
static Representation Integer32()
void Insert(BoundsCheckKey *key, BoundsCheckBbData *data, Zone *zone)
Definition: hydrogen.cc:3154
static bool Analyze(CompilationInfo *info)
Definition: scopes.cc:263
HConstant * GetConstant1()
Definition: hydrogen.cc:582
bool HasIllegalRedeclaration() const
Definition: scopes.h:214
Expression * condition() const
Definition: hydrogen.h:695
void SetArgumentsObject(HArgumentsObject *object)
Definition: hydrogen.h:300
void Push(HValue *value)
Definition: hydrogen.h:863
static HCheckInstanceType * NewIsString(HValue *value, Zone *zone)
void Finish(HControlInstruction *last)
Definition: hydrogen.cc:158
HInstruction * first() const
Definition: hydrogen.h:65
static TypeInfo Unknown()
Definition: type-info.h:59
HEnvironment * CopyForInlining(Handle< JSFunction > target, int arguments, FunctionLiteral *function, HConstant *undefined, CallKind call_kind, bool is_construct) const
Definition: hydrogen.cc:8783
HValueMap(Zone *zone)
Definition: hydrogen.h:1201
Flag flags[]
Definition: flags.cc:1467
static const int kNoNumber
Definition: ast.h:197
int int32_t
Definition: unicode.cc:47
#define DECLARE_FLAG(type)
HEnvironment * arguments_environment()
Definition: hydrogen.h:420
Location location() const
Definition: variables.h:153
HValue * Lookup(Variable *variable) const
Definition: hydrogen.h:460
void AssumeRepresentation(Representation r)
void MarkAsLoopSuccessorDominator()
Definition: hydrogen.h:157
void Bind(Variable *variable, HValue *value)
Definition: hydrogen.h:450
virtual void ReturnValue(HValue *value)
Definition: hydrogen.cc:2616
HGraphBuilder * owner() const
Definition: hydrogen.h:625
void FinishExitWithDeoptimization(HDeoptimize::UseEnvironment has_uses)
Definition: hydrogen.h:132
EqualityKind
Definition: v8.h:145
static LifetimePosition FromInstructionIndex(int index)
HBasicBlock * dominator() const
Definition: hydrogen.h:82
void SetExpressionStackAt(int index_from_top, HValue *value)
Definition: hydrogen.cc:8721
FrameType frame_type() const
Definition: hydrogen.h:430
bool CheckArgumentsPhiUses()
Definition: hydrogen.cc:928
Zone * zone() const
Definition: hydrogen.h:535
HRangeAnalysis(HGraph *graph)
Definition: hydrogen.cc:993
static GVNFlagSet ConvertChangesToDependsFlags(GVNFlagSet flags)
static GvnBasicBlockState * CreateEntry(Zone *zone, HBasicBlock *entry_block, HValueMap *entry_map)
Definition: hydrogen.cc:1846
int PredecessorIndexOf(HBasicBlock *predecessor) const
Definition: hydrogen.cc:382
static HCheckMaps * NewWithTransitions(HValue *object, Handle< Map > map, Zone *zone)
List< Handle< Map > > MapHandleList
Definition: list.h:193
#define ASSERT(condition)
Definition: checks.h:270
BoundsCheckBbData * NextInBasicBlock() const
Definition: hydrogen.cc:3016
void SaveTiming(const char *name, int64_t ticks, unsigned size)
Definition: hydrogen.cc:9121
void InsertRepresentationChanges()
Definition: hydrogen.cc:2417
HBasicBlock * current_block() const
Definition: hydrogen.h:850
HConstant * GetConstantTrue()
Definition: hydrogen.cc:592
virtual void ReturnInstruction(HInstruction *instr, int ast_id)
Definition: hydrogen.cc:2671
static HInstruction * NewHMod(Zone *zone, HValue *context, HValue *left, HValue *right)
FunctionState * outer()
Definition: hydrogen.h:737
void AddSimulate(int ast_id)
Definition: hydrogen.h:128
const ZoneList< HBasicBlock * > * predecessors() const
Definition: hydrogen.h:71
HBoundsCheck * Check() const
Definition: hydrogen.cc:3015
HEnvironment * Copy() const
Definition: hydrogen.cc:8743
static bool IsCompileTimeValue(Expression *expression)
Definition: parser.cc:3902
void RegisterBackEdge(HBasicBlock *block)
Definition: hydrogen.cc:407
static HInstruction * NewHMul(Zone *zone, HValue *context, HValue *left, HValue *right)
static SharedFunctionInfo * cast(Object *obj)
HControlInstruction * end() const
Definition: hydrogen.h:69
bool HasPredecessor() const
Definition: hydrogen.h:72
static Representation Double()
HEnvironment * CopyAsLoopHeader(HBasicBlock *block) const
Definition: hydrogen.cc:8755
virtual void ReturnValue(HValue *value)
Definition: hydrogen.cc:2601
FunctionState * function_state() const
Definition: hydrogen.h:874
int GetMaximumValueID() const
Definition: hydrogen.h:304
bool Equals(const Representation &other)
bool ContainsAnyOf(const EnumSet &set) const
Definition: utils.h:960
int parameter_count() const
Definition: hydrogen.h:431
Factory * factory()
Definition: isolate.h:977
bool IsFastElementsKind(ElementsKind kind)
Representation representation() const
ZoneList< HUnknownOSRValue * > * osr_values()
Definition: hydrogen.h:331
virtual void ReturnInstruction(HInstruction *instr, int ast_id)
Definition: hydrogen.cc:2640
static const int kFunctionEntryId
Definition: ast.h:198
Handle< String > name() const
Definition: variables.h:96
virtual void ReturnControl(HControlInstruction *instr, int ast_id)
Definition: hydrogen.cc:2651
static HStatistics * Instance()
Definition: hydrogen.h:1288
TypeFeedbackOracle * oracle() const
Definition: hydrogen.h:872
void AssignCommonDominator(HBasicBlock *other)
Definition: hydrogen.cc:299
static Smi * cast(Object *object)
HEnvironment * environment() const
Definition: hydrogen.h:852
HEnvironment * last_environment() const
Definition: hydrogen.h:83
static const char * AllocationIndexToString(int index)
void DehoistSimpleArrayIndexComputations()
Definition: hydrogen.cc:3293
static Handle< ScopeInfo > Create(Scope *scope, Zone *zone)
Definition: scopeinfo.cc:41
const ZoneList< HValue * > * values() const
Definition: hydrogen.h:426
static bool Parse(CompilationInfo *info, int flags)
Definition: parser.cc:6026
int last_instruction_index() const
Definition: hydrogen.h:90
void TraceLithium(const char *name, LChunk *chunk)
Definition: hydrogen.cc:8876
Handle< String > SubString(Handle< String > str, int start, int end, PretenureFlag pretenure)
Definition: handles.cc:326
static HCheckInstanceType * NewIsJSArray(HValue *value, Zone *zone)
AstContext(HGraphBuilder *owner, Expression::Context kind)
Definition: hydrogen.cc:2567
static void PrintCode(Handle< Code > code, CompilationInfo *info)
Definition: codegen.cc:115
void Add(Vector< const char > format, Vector< FmtElm > elms)
HConstant * GetConstantFalse()
Definition: hydrogen.cc:597
bool Covers(LifetimePosition position)
LOperand * CreateAssignedOperand(Zone *zone)
static HCheckInstanceType * NewIsSpecObject(HValue *value, Zone *zone)
void Initialize(CompilationInfo *info)
Definition: hydrogen.cc:9082
#define IN
BoundsCheckKey * Key() const
Definition: hydrogen.cc:3011
void RemovePhi(HPhi *phi)
Definition: hydrogen.cc:96
int virtual_register() const
Definition: lithium.h:182
void VisitDeclarations(ZoneList< Declaration * > *declarations)
Definition: hydrogen.cc:7976
bool IsEmpty() const
Definition: data-flow.h:176
Variable * var() const
Definition: ast.h:1484
#define UNREACHABLE()
Definition: checks.h:50
bool ExpressionStackIsEmpty() const
Definition: hydrogen.cc:8715
VariableProxy * proxy() const
Definition: ast.h:452
bool Equals(HValue *other)
STATIC_ASSERT((FixedDoubleArray::kHeaderSize & kDoubleAlignmentMask) == 0)
static const int kMaxVirtualRegisters
Definition: lithium.h:156
void set_osr_loop_entry(HBasicBlock *entry)
Definition: hydrogen.h:327
Zone * zone() const
Definition: hydrogen.h:250
BoundsCheckBbData ** LookupOrInsert(BoundsCheckKey *key, Zone *zone)
Definition: hydrogen.cc:3149
void set_arguments_elements(HArgumentsElements *arguments_elements)
Definition: hydrogen.h:743
HBasicBlock * GetLastBackEdge() const
Definition: hydrogen.cc:413
int32_t LowerOffset() const
Definition: hydrogen.cc:3012
const ZoneList< HBasicBlock * > * dominated_blocks() const
Definition: hydrogen.h:73
EnumSet< GVNFlag > GVNFlagSet
int first_expression_index() const
Definition: hydrogen.h:446
bool IsFastPackedElementsKind(ElementsKind kind)
HConstant * GetConstantUndefined() const
Definition: hydrogen.h:288
int num_stack_slots() const
Definition: scopes.h:365
void AddIncomingEdge(HBasicBlock *block, HEnvironment *other)
Definition: hydrogen.cc:8671
HBasicBlock(HGraph *graph)
Definition: hydrogen.cc:55
static HUnaryOperation * cast(HValue *value)
void set_ast_id(int id)
Definition: hydrogen.h:439
bool HasRegisterAssigned() const
#define TRACE_GVN_1(msg, a1)
Definition: hydrogen.cc:1165
static bool IsValidElementsTransition(ElementsKind from_kind, ElementsKind to_kind)
Definition: objects.cc:9924
NilValue
Definition: v8.h:141
static bool IsValid(intptr_t value)
Definition: objects-inl.h:1051
virtual void ReturnControl(HControlInstruction *instr, int ast_id)=0
void PostProcessLoopHeader(IterationStatement *stmt)
Definition: hydrogen.cc:244
const int kPointerSize
Definition: globals.h:234
bool HasEnvironment() const
Definition: hydrogen.h:110
void set_current_block(HBasicBlock *block)
Definition: hydrogen.h:851
T * NewArray(size_t size)
Definition: allocation.h:83
int32_t UpperOffset() const
Definition: hydrogen.cc:3013
void UpdateEnvironment(HEnvironment *env)
Definition: hydrogen.h:111
BreakAndContinueScope * break_scope() const
Definition: hydrogen.h:847
static Value NegateCompareOp(Value op)
Definition: token.h:226
#define GVN_TRACKED_FLAG_LIST(V)
HArgumentsElements * arguments_elements()
Definition: hydrogen.h:742
ZoneList< HValue * > * arguments_values()
void EliminateUnreachablePhis()
Definition: hydrogen.cc:884
void set_undefined_constant(HConstant *constant)
Definition: hydrogen.h:285
static HTracer * Instance()
Definition: hydrogen.h:1359
virtual HValue * OperandAt(int index)
static HInstruction * NewHSub(Zone *zone, HValue *context, HValue *left, HValue *right)
GvnBasicBlockState * next_in_dominator_tree_traversal(Zone *zone, HBasicBlock **dominator)
Definition: hydrogen.cc:1857
void AddInput(HValue *value)
UseInterval * first_interval() const
Entry * Lookup(void *key, uint32_t hash, bool insert, ZoneAllocationPolicy allocator=ZoneAllocationPolicy())
static unsigned allocation_size_
Definition: zone.h:89
HValue * ExpressionStackAt(int index_from_top) const
Definition: hydrogen.h:497
HEnvironment(HEnvironment *outer, Scope *scope, Handle< JSFunction > closure, Zone *zone)
Definition: hydrogen.cc:8589
static void VPrint(const char *format, va_list args)
SparseSet(Zone *zone, int capacity)
Definition: hydrogen.cc:1444
static TestContext * cast(AstContext *context)
Definition: hydrogen.h:690
HGraphBuilder(CompilationInfo *info, TypeFeedbackOracle *oracle, Zone *zone)
Definition: hydrogen.cc:607
int index() const
Definition: variables.h:154
void PropagateDeoptimizingMark()
Definition: hydrogen.cc:837
int length() const
Definition: utils.h:383
HInstruction * next() const
HGlobalValueNumberer(HGraph *graph, CompilationInfo *info)
Definition: hydrogen.cc:1483
int local_count() const
Definition: hydrogen.h:433
static double TimeCurrentMillis()
HBasicBlock * osr_loop_entry()
Definition: hydrogen.h:323
static GVNFlag DependsOnFlagFromInt(int x)
int num_parameters() const
Definition: scopes.h:335
HConstant * GetConstantMinus1()
Definition: hydrogen.cc:587
Definition: v8.h:104
static int SizeFor(int length)
Definition: objects.h:2369
static HInstruction * NewHShr(Zone *zone, HValue *context, HValue *left, HValue *right)
static HInstruction * NewHBitwise(Zone *zone, Token::Value op, HValue *context, HValue *left, HValue *right)
static Value InvertCompareOp(Value op)
Definition: token.h:241
bool Contains(int n) const
Definition: hydrogen.cc:1455
HInstruction * last() const
Definition: hydrogen.h:66
static bool IsEqualityOp(Value op)
Definition: token.h:222
SmartArrayPointer< char > GetGVNFlagsString(GVNFlagSet flags)
Definition: hydrogen.cc:1589
#define BASE_EMBEDDED
Definition: allocation.h:68
void MarkDeoptimizeOnUndefined()
Definition: hydrogen.cc:2473
void ComputeMinusZeroChecks()
Definition: hydrogen.cc:2495
static ScopeInfo * Empty()
Definition: scopeinfo.cc:152
HStackCheckEliminator(HGraph *graph)
Definition: hydrogen.cc:1401
LOperand * FirstHint() const
BoundsCheckBbData * FatherInDominatorTree() const
Definition: hydrogen.cc:3017
T * NewArray(int length)
Definition: zone-inl.h:72
void RecordDeletedPhi(int merge_index)
Definition: hydrogen.h:79
void DeleteAndReplaceWith(HValue *other)
Vector< const char > CStrVector(const char *data)
Definition: utils.h:525
static HInstruction * NewHDiv(Zone *zone, HValue *context, HValue *left, HValue *right)
void Drop(int count)
Definition: hydrogen.cc:8736
#define GVN_UNTRACKED_FLAG_LIST(V)
void AddEnvironmentValue(HValue *value, Zone *zone)
virtual intptr_t Hashcode()
HBasicBlock * CreateBasicBlock()
Definition: hydrogen.cc:735
bool IsInlineReturnTarget() const
Definition: hydrogen.h:148
static int SizeFor(int length)
Definition: objects.h:2288
bool OffsetIsCovered(int32_t offset) const
Definition: hydrogen.cc:3019
void BindContext(HValue *value)
Definition: hydrogen.h:456
void PrintNameTo(StringStream *stream)
static BoundsCheckKey * Create(Zone *zone, HBoundsCheck *check, int32_t *offset)
Definition: hydrogen.cc:2946
const int kElementsKindCount
Definition: elements-kind.h:76
void InsertAfter(HInstruction *previous)
static const int kHeaderSize
Definition: objects.h:2233
int specials_count() const
Definition: hydrogen.h:432
void SetValueAt(int index, HValue *value)
Definition: hydrogen.h:527
static int SNPrintF(Vector< char > str, const char *format,...)
HArgumentsObject * GetArgumentsObject() const
Definition: hydrogen.h:296
const ZoneList< HBasicBlock * > * blocks() const
Definition: hydrogen.h:252
virtual void SetSuccessorAt(int i, HBasicBlock *block)=0
int first_instruction_index() const
Definition: hydrogen.h:86
HBasicBlock * BasicBlock() const
Definition: hydrogen.cc:3014
#define CHECK_ALIVE(call)
Definition: hydrogen.cc:2729
UsePosition * first_pos() const
Handle< Code > Compile(CompilationInfo *info, Zone *zone)
Definition: hydrogen.cc:694
int LoopNestingDepth() const
Definition: hydrogen.cc:233
HGraph * graph() const
Definition: lithium-arm.h:2241
const ZoneList< HPhi * > * phi_list() const
Definition: hydrogen.h:253
HBasicBlock * parent_loop_header() const
Definition: hydrogen.h:112
bool IsLoopSuccessorDominator() const
Definition: hydrogen.h:154
void InitializeInferredTypes()
Definition: hydrogen.cc:2280
HEnvironment * DiscardInlined(bool drop_extra)
Definition: hydrogen.h:413
void SetOperandAt(int index, HValue *value)
HEnterInlined * entry()
Definition: hydrogen.h:739
void FinishExit(HControlInstruction *instruction)
Definition: hydrogen.cc:671
ElementsKind GetInitialFastElementsKind()
bool CheckFlag(Flag f) const
FunctionSorter(int index, int ticks, int ast_length, int src_length)
Definition: hydrogen.cc:5976
static Flags ComputeFlags(Kind kind, InlineCacheState ic_state=UNINITIALIZED, ExtraICState extra_ic_state=kNoExtraICState, PropertyType type=NORMAL, int argc=-1, InlineCacheHolderFlag holder=OWN_MAP)
Definition: objects-inl.h:3312
unsigned test_id() const
Definition: ast.h:359
T ToIntegral() const
Definition: utils.h:969
int AppendChars(const char *filename, const char *str, int size, bool verbose)
Definition: v8utils.cc:205
static Handle< Object > CreateArrayLiteralBoilerplate(Isolate *isolate, Handle< FixedArray > literals, Handle< FixedArray > elements)
Definition: runtime.cc:461
HValue * IndexBase() const
Definition: hydrogen.cc:2939
void InsertBefore(HInstruction *next)
HBasicBlock * CreateJoin(HBasicBlock *first, HBasicBlock *second, int join_id)
Definition: hydrogen.cc:626
void Goto(HBasicBlock *block, FunctionState *state=NULL)
Definition: hydrogen.cc:168
void EliminateRedundantPhis()
Definition: hydrogen.cc:850
bool binding_needs_init() const
Definition: variables.h:131
HLoopInformation * loop_information() const
Definition: hydrogen.h:70
bool IsStartBlock() const
Definition: hydrogen.h:98
static Handle< T > null()
Definition: handles.h:86
void CoverCheck(HBoundsCheck *new_check, int32_t new_offset)
Definition: hydrogen.cc:3044
flag
Definition: objects-inl.h:3682
virtual void SetIndexOffset(uint32_t index_offset)=0
static HInstruction * NewHAdd(Zone *zone, HValue *context, HValue *left, HValue *right)
bool IsLoopHeader() const
Definition: hydrogen.h:97
#define ASSERT_EQ(v1, v2)
Definition: checks.h:271
virtual void ReturnValue(HValue *value)
Definition: hydrogen.cc:2606
void SetBlock(HBasicBlock *block)
void PrintTo(StringStream *stream)
Definition: hydrogen.cc:8838
static Handle< SharedFunctionInfo > BuildFunctionInfo(FunctionLiteral *node, Handle< Script > script)
Definition: compiler.cc:708
void USE(T)
Definition: globals.h:303
void set_parent_loop_header(HBasicBlock *block)
Definition: hydrogen.h:114
#define TRACE_GVN_2(msg, a1, a2)
Definition: hydrogen.cc:1170
ContainedInLattice AddRange(ContainedInLattice containment, const int *ranges, int ranges_length, Interval new_range)
Definition: jsregexp.cc:111
#define ASSERT_NE(v1, v2)
Definition: checks.h:272
bool end_
HConstant * GetConstantHole()
Definition: hydrogen.cc:602
void AddPhi(HPhi *phi)
Definition: hydrogen.cc:89
CompilationInfo * compilation_info()
Definition: hydrogen.h:725
static const int kEnumCacheBridgeCacheIndex
Definition: objects.h:2616
const T & at(int index) const
Definition: utils.h:397
Zone * zone() const
Definition: hydrogen.h:1196
void Add(const T &element, AllocationPolicy allocator=AllocationPolicy())
HGraph(CompilationInfo *info, Zone *zone)
Definition: hydrogen.cc:677
bool CheckConstPhiUses()
Definition: hydrogen.cc:941
HBasicBlock * if_false() const
Definition: hydrogen.h:697
static const char * AllocationIndexToString(int index)
Definition: assembler-arm.h:87
static bool HasCustomCallGenerator(Handle< JSFunction > function)
Definition: stub-cache.cc:1428
void EliminateRedundantBoundsChecks()
Definition: hydrogen.cc:3238
static GVNFlag ChangesFlagFromInt(int x)
static const int kMaxInliningLevels
Definition: compiler.h:278
void TraceHydrogen(const char *name, HGraph *graph)
Definition: hydrogen.cc:8881
void Bailout(const char *reason)
Definition: hydrogen.cc:2736
void AddIndirectUsesTo(int *use_count)
const char * name_
Definition: flags.cc:1352
const ZoneList< HPhi * > * phis() const
Definition: hydrogen.h:64
void TraceCompilation(FunctionLiteral *function)
Definition: hydrogen.cc:8867
void AssignLoopSuccessorDominators()
Definition: hydrogen.cc:326
static const int kValueOffset
Definition: objects.h:6188
bool is_function_scope() const
Definition: scopes.h:276
int block_id() const
Definition: hydrogen.h:61
bool IsValue() const
Definition: hydrogen.h:598
void Push(HValue *value)
Definition: hydrogen.h:475
virtual void ReturnInstruction(HInstruction *instr, int ast_id)=0
unsigned id() const
Definition: ast.h:358
virtual void ReturnControl(HControlInstruction *instr, int ast_id)
Definition: hydrogen.cc:2686
static Representation Tagged()
Handle< JSFunction > closure() const
Definition: hydrogen.h:425
SmartArrayPointer< const char > ToCString() const
static HCheckInstanceType * NewIsSymbol(HValue *value, Zone *zone)
T Min(T a, T b)
Definition: utils.h:229
HValue * Length() const
Definition: hydrogen.cc:2940
#define INLINE_RUNTIME_FUNCTION_LIST(F)
Definition: runtime.h:531
void SetJoinId(int ast_id)
Definition: hydrogen.cc:207
virtual void SetKey(HValue *key)=0
VariableMode mode() const
Definition: ast.h:453
static Handle< Code > MakeCodeEpilogue(MacroAssembler *masm, Code::Flags flags, CompilationInfo *info)
Definition: codegen.cc:96
Zone * zone() const
Definition: hydrogen.h:399
void Add(E element)
Definition: utils.h:963
static void RecordFunctionCompilation(Logger::LogEventsAndTags tag, CompilationInfo *info, Handle< SharedFunctionInfo > shared)
Definition: compiler.cc:790
static HInstruction * NewHSar(Zone *zone, HValue *context, HValue *left, HValue *right)
static HInstruction * NewHShl(Zone *zone, HValue *context, HValue *left, HValue *right)
#define VOID
static HValue * cast(HValue *value)
void check(i::Vector< const char > string)
HSideEffectMap * dominators()
Definition: hydrogen.cc:1855
#define CHECK_BAILOUT(call)
Definition: hydrogen.cc:2722
static void MakeCodePrologue(CompilationInfo *info)
Definition: codegen.cc:61
#define ARRAY_SIZE(a)
Definition: globals.h:295
virtual void ReturnValue(HValue *value)=0
virtual void ReturnInstruction(HInstruction *instr, int ast_id)
Definition: hydrogen.cc:2621
void TraceLiveRanges(const char *name, LAllocator *allocator)
Definition: hydrogen.cc:8998
void set_osr_values(ZoneList< HUnknownOSRValue * > *values)
Definition: hydrogen.h:335
LOperand * GetSpillOperand() const
HBasicBlock * entry_block() const
Definition: hydrogen.h:254
static JSObject * cast(Object *obj)
FlagType type() const
Definition: flags.cc:1358
HInferRepresentation(HGraph *graph)
Definition: hydrogen.cc:2058
void AddInstruction(HInstruction *instr)
Definition: hydrogen.cc:106
ZoneList< Declaration * > * declarations()
Definition: scopes.h:341
static char * StrChr(char *str, int c)
void AddAll(const List< T, AllocationPolicy > &other, AllocationPolicy allocator=AllocationPolicy())
void TraceGVN(const char *msg,...)
Definition: hydrogen.cc:1156
HGraph * graph() const
Definition: hydrogen.h:846
HInstruction * AddInstruction(HInstruction *instr)
Definition: hydrogen.cc:3333
HBasicBlock * function_return()
Definition: hydrogen.h:730
bool IsFastDoubleElementsKind(ElementsKind kind)
const ZoneList< HBasicBlock * > * blocks() const
Definition: hydrogen.h:225
const ZoneList< LInstruction * > * instructions() const
Definition: lithium-arm.h:2242
HUseIterator uses() const
#define TRACE_GVN_5(msg, a1, a2, a3, a4, a5)
Definition: hydrogen.cc:1185
friend class FunctionState
Definition: hydrogen.h:1189
HEnvironment * CopyWithoutHistory() const
Definition: hydrogen.cc:8748
void AddSimulate(int ast_id)
Definition: hydrogen.cc:3340
static JSFunction * cast(Object *obj)