v8  3.14.5(node0.10.28)
V8 is Google's open source JavaScript engine
 All Data Structures Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
hydrogen.cc
Go to the documentation of this file.
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 #include "hydrogen.h"
30 
31 #include "codegen.h"
32 #include "full-codegen.h"
33 #include "hashmap.h"
34 #include "lithium-allocator.h"
35 #include "parser.h"
36 #include "scopeinfo.h"
37 #include "scopes.h"
38 #include "stub-cache.h"
39 
40 #if V8_TARGET_ARCH_IA32
42 #elif V8_TARGET_ARCH_X64
44 #elif V8_TARGET_ARCH_ARM
46 #elif V8_TARGET_ARCH_MIPS
48 #else
49 #error Unsupported target architecture.
50 #endif
51 
52 namespace v8 {
53 namespace internal {
54 
56  : block_id_(graph->GetNextBlockID()),
57  graph_(graph),
58  phis_(4, graph->zone()),
59  first_(NULL),
60  last_(NULL),
61  end_(NULL),
62  loop_information_(NULL),
63  predecessors_(2, graph->zone()),
64  dominator_(NULL),
65  dominated_blocks_(4, graph->zone()),
66  last_environment_(NULL),
67  argument_count_(-1),
68  first_instruction_index_(-1),
69  last_instruction_index_(-1),
70  deleted_phis_(4, graph->zone()),
71  parent_loop_header_(NULL),
72  is_inline_return_target_(false),
73  is_deoptimizing_(false),
74  dominates_loop_successors_(false) { }
75 
76 
78  ASSERT(!IsLoopHeader());
79  loop_information_ = new(zone()) HLoopInformation(this, zone());
80 }
81 
82 
85  loop_information_ = NULL;
86 }
87 
88 
90  ASSERT(!IsStartBlock());
91  phis_.Add(phi, zone());
92  phi->SetBlock(this);
93 }
94 
95 
97  ASSERT(phi->block() == this);
98  ASSERT(phis_.Contains(phi));
99  ASSERT(phi->HasNoUses() || !phi->is_live());
100  phi->Kill();
101  phis_.RemoveElement(phi);
102  phi->SetBlock(NULL);
103 }
104 
105 
107  ASSERT(!IsStartBlock() || !IsFinished());
108  ASSERT(!instr->IsLinked());
109  ASSERT(!IsFinished());
110  if (first_ == NULL) {
111  HBlockEntry* entry = new(zone()) HBlockEntry();
112  entry->InitializeAsFirst(this);
113  first_ = last_ = entry;
114  }
115  instr->InsertAfter(last_);
116 }
117 
118 
119 HDeoptimize* HBasicBlock::CreateDeoptimize(
120  HDeoptimize::UseEnvironment has_uses) {
122  if (has_uses == HDeoptimize::kNoUses)
123  return new(zone()) HDeoptimize(0, zone());
124 
125  HEnvironment* environment = last_environment();
126  HDeoptimize* instr = new(zone()) HDeoptimize(environment->length(), zone());
127  for (int i = 0; i < environment->length(); i++) {
128  HValue* val = environment->values()->at(i);
129  instr->AddEnvironmentValue(val, zone());
130  }
131 
132  return instr;
133 }
134 
135 
136 HSimulate* HBasicBlock::CreateSimulate(BailoutId ast_id) {
138  HEnvironment* environment = last_environment();
139  ASSERT(ast_id.IsNone() ||
140  environment->closure()->shared()->VerifyBailoutId(ast_id));
141 
142  int push_count = environment->push_count();
143  int pop_count = environment->pop_count();
144 
145  HSimulate* instr = new(zone()) HSimulate(ast_id, pop_count, zone());
146  for (int i = push_count - 1; i >= 0; --i) {
147  instr->AddPushedValue(environment->ExpressionStackAt(i));
148  }
149  for (int i = 0; i < environment->assigned_variables()->length(); ++i) {
150  int index = environment->assigned_variables()->at(i);
151  instr->AddAssignedValue(index, environment->Lookup(index));
152  }
153  environment->ClearHistory();
154  return instr;
155 }
156 
157 
159  ASSERT(!IsFinished());
160  AddInstruction(end);
161  end_ = end;
162  for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
163  it.Current()->RegisterPredecessor(this);
164  }
165 }
166 
167 
169  bool drop_extra = state != NULL &&
171 
172  if (block->IsInlineReturnTarget()) {
174  last_environment_ = last_environment()->DiscardInlined(drop_extra);
175  }
176 
178  HGoto* instr = new(zone()) HGoto(block);
179  Finish(instr);
180 }
181 
182 
184  FunctionState* state) {
185  HBasicBlock* target = state->function_return();
186  bool drop_extra = state->inlining_kind() == DROP_EXTRA_ON_RETURN;
187 
188  ASSERT(target->IsInlineReturnTarget());
189  ASSERT(return_value != NULL);
191  last_environment_ = last_environment()->DiscardInlined(drop_extra);
192  last_environment()->Push(return_value);
194  HGoto* instr = new(zone()) HGoto(target);
195  Finish(instr);
196 }
197 
198 
201  ASSERT(first() == NULL);
202  UpdateEnvironment(env);
203 }
204 
205 
207  int length = predecessors_.length();
208  ASSERT(length > 0);
209  for (int i = 0; i < length; i++) {
210  HBasicBlock* predecessor = predecessors_[i];
211  ASSERT(predecessor->end()->IsGoto());
212  HSimulate* simulate = HSimulate::cast(predecessor->end()->previous());
213  // We only need to verify the ID once.
214  ASSERT(i != 0 ||
215  predecessor->last_environment()->closure()->shared()
216  ->VerifyBailoutId(ast_id));
217  simulate->set_ast_id(ast_id);
218  }
219 }
220 
221 
223  HBasicBlock* current = other->dominator();
224  while (current != NULL) {
225  if (current == this) return true;
226  current = current->dominator();
227  }
228  return false;
229 }
230 
231 
233  const HBasicBlock* current = this;
234  int result = (current->IsLoopHeader()) ? 1 : 0;
235  while (current->parent_loop_header() != NULL) {
236  current = current->parent_loop_header();
237  result++;
238  }
239  return result;
240 }
241 
242 
244  ASSERT(IsLoopHeader());
245 
246  SetJoinId(stmt->EntryId());
247  if (predecessors()->length() == 1) {
248  // This is a degenerated loop.
250  return;
251  }
252 
253  // Only the first entry into the loop is from outside the loop. All other
254  // entries must be back edges.
255  for (int i = 1; i < predecessors()->length(); ++i) {
257  }
258 }
259 
260 
261 void HBasicBlock::RegisterPredecessor(HBasicBlock* pred) {
262  if (HasPredecessor()) {
263  // Only loop header blocks can have a predecessor added after
264  // instructions have been added to the block (they have phis for all
265  // values in the environment, these phis may be eliminated later).
266  ASSERT(IsLoopHeader() || first_ == NULL);
267  HEnvironment* incoming_env = pred->last_environment();
268  if (IsLoopHeader()) {
269  ASSERT(phis()->length() == incoming_env->length());
270  for (int i = 0; i < phis_.length(); ++i) {
271  phis_[i]->AddInput(incoming_env->values()->at(i));
272  }
273  } else {
275  }
276  } else if (!HasEnvironment() && !IsFinished()) {
277  ASSERT(!IsLoopHeader());
279  }
280 
281  predecessors_.Add(pred, zone());
282 }
283 
284 
285 void HBasicBlock::AddDominatedBlock(HBasicBlock* block) {
286  ASSERT(!dominated_blocks_.Contains(block));
287  // Keep the list of dominated blocks sorted such that if there is two
288  // succeeding block in this list, the predecessor is before the successor.
289  int index = 0;
290  while (index < dominated_blocks_.length() &&
291  dominated_blocks_[index]->block_id() < block->block_id()) {
292  ++index;
293  }
294  dominated_blocks_.InsertAt(index, block, zone());
295 }
296 
297 
299  if (dominator_ == NULL) {
300  dominator_ = other;
301  other->AddDominatedBlock(this);
302  } else if (other->dominator() != NULL) {
303  HBasicBlock* first = dominator_;
304  HBasicBlock* second = other;
305 
306  while (first != second) {
307  if (first->block_id() > second->block_id()) {
308  first = first->dominator();
309  } else {
310  second = second->dominator();
311  }
312  ASSERT(first != NULL && second != NULL);
313  }
314 
315  if (dominator_ != first) {
316  ASSERT(dominator_->dominated_blocks_.Contains(this));
317  dominator_->dominated_blocks_.RemoveElement(this);
318  dominator_ = first;
319  first->AddDominatedBlock(this);
320  }
321  }
322 }
323 
324 
326  // Mark blocks that dominate all subsequent reachable blocks inside their
327  // loop. Exploit the fact that blocks are sorted in reverse post order. When
328  // the loop is visited in increasing block id order, if the number of
329  // non-loop-exiting successor edges at the dominator_candidate block doesn't
330  // exceed the number of previously encountered predecessor edges, there is no
331  // path from the loop header to any block with higher id that doesn't go
332  // through the dominator_candidate block. In this case, the
333  // dominator_candidate block is guaranteed to dominate all blocks reachable
334  // from it with higher ids.
336  int outstanding_successors = 1; // one edge from the pre-header
337  // Header always dominates everything.
339  for (int j = block_id(); j <= last->block_id(); ++j) {
340  HBasicBlock* dominator_candidate = graph_->blocks()->at(j);
341  for (HPredecessorIterator it(dominator_candidate); !it.Done();
342  it.Advance()) {
343  HBasicBlock* predecessor = it.Current();
344  // Don't count back edges.
345  if (predecessor->block_id() < dominator_candidate->block_id()) {
346  outstanding_successors--;
347  }
348  }
349 
350  // If more successors than predecessors have been seen in the loop up to
351  // now, it's not possible to guarantee that the current block dominates
352  // all of the blocks with higher IDs. In this case, assume conservatively
353  // that those paths through loop that don't go through the current block
354  // contain all of the loop's dependencies. Also be careful to record
355  // dominator information about the current loop that's being processed,
356  // and not nested loops, which will be processed when
357  // AssignLoopSuccessorDominators gets called on their header.
358  ASSERT(outstanding_successors >= 0);
359  HBasicBlock* parent_loop_header = dominator_candidate->parent_loop_header();
360  if (outstanding_successors == 0 &&
361  (parent_loop_header == this && !dominator_candidate->IsLoopHeader())) {
362  dominator_candidate->MarkAsLoopSuccessorDominator();
363  }
364  HControlInstruction* end = dominator_candidate->end();
365  for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
366  HBasicBlock* successor = it.Current();
367  // Only count successors that remain inside the loop and don't loop back
368  // to a loop header.
369  if (successor->block_id() > dominator_candidate->block_id() &&
370  successor->block_id() <= last->block_id()) {
371  // Backwards edges must land on loop headers.
372  ASSERT(successor->block_id() > dominator_candidate->block_id() ||
373  successor->IsLoopHeader());
374  outstanding_successors++;
375  }
376  }
377  }
378 }
379 
380 
382  for (int i = 0; i < predecessors_.length(); ++i) {
383  if (predecessors_[i] == predecessor) return i;
384  }
385  UNREACHABLE();
386  return -1;
387 }
388 
389 
390 #ifdef DEBUG
391 void HBasicBlock::Verify() {
392  // Check that every block is finished.
393  ASSERT(IsFinished());
394  ASSERT(block_id() >= 0);
395 
396  // Check that the incoming edges are in edge split form.
397  if (predecessors_.length() > 1) {
398  for (int i = 0; i < predecessors_.length(); ++i) {
399  ASSERT(predecessors_[i]->end()->SecondSuccessor() == NULL);
400  }
401  }
402 }
403 #endif
404 
405 
407  this->back_edges_.Add(block, block->zone());
408  AddBlock(block);
409 }
410 
411 
413  int max_id = -1;
414  HBasicBlock* result = NULL;
415  for (int i = 0; i < back_edges_.length(); ++i) {
416  HBasicBlock* cur = back_edges_[i];
417  if (cur->block_id() > max_id) {
418  max_id = cur->block_id();
419  result = cur;
420  }
421  }
422  return result;
423 }
424 
425 
426 void HLoopInformation::AddBlock(HBasicBlock* block) {
427  if (block == loop_header()) return;
428  if (block->parent_loop_header() == loop_header()) return;
429  if (block->parent_loop_header() != NULL) {
430  AddBlock(block->parent_loop_header());
431  } else {
433  blocks_.Add(block, block->zone());
434  for (int i = 0; i < block->predecessors()->length(); ++i) {
435  AddBlock(block->predecessors()->at(i));
436  }
437  }
438 }
439 
440 
441 #ifdef DEBUG
442 
443 // Checks reachability of the blocks in this graph and stores a bit in
444 // the BitVector "reachable()" for every block that can be reached
445 // from the start block of the graph. If "dont_visit" is non-null, the given
446 // block is treated as if it would not be part of the graph. "visited_count()"
447 // returns the number of reachable blocks.
448 class ReachabilityAnalyzer BASE_EMBEDDED {
449  public:
450  ReachabilityAnalyzer(HBasicBlock* entry_block,
451  int block_count,
452  HBasicBlock* dont_visit)
453  : visited_count_(0),
454  stack_(16, entry_block->zone()),
455  reachable_(block_count, entry_block->zone()),
456  dont_visit_(dont_visit) {
457  PushBlock(entry_block);
458  Analyze();
459  }
460 
461  int visited_count() const { return visited_count_; }
462  const BitVector* reachable() const { return &reachable_; }
463 
464  private:
465  void PushBlock(HBasicBlock* block) {
466  if (block != NULL && block != dont_visit_ &&
467  !reachable_.Contains(block->block_id())) {
468  reachable_.Add(block->block_id());
469  stack_.Add(block, block->zone());
470  visited_count_++;
471  }
472  }
473 
474  void Analyze() {
475  while (!stack_.is_empty()) {
476  HControlInstruction* end = stack_.RemoveLast()->end();
477  for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
478  PushBlock(it.Current());
479  }
480  }
481  }
482 
483  int visited_count_;
484  ZoneList<HBasicBlock*> stack_;
485  BitVector reachable_;
486  HBasicBlock* dont_visit_;
487 };
488 
489 
490 void HGraph::Verify(bool do_full_verify) const {
491  for (int i = 0; i < blocks_.length(); i++) {
492  HBasicBlock* block = blocks_.at(i);
493 
494  block->Verify();
495 
496  // Check that every block contains at least one node and that only the last
497  // node is a control instruction.
498  HInstruction* current = block->first();
499  ASSERT(current != NULL && current->IsBlockEntry());
500  while (current != NULL) {
501  ASSERT((current->next() == NULL) == current->IsControlInstruction());
502  ASSERT(current->block() == block);
503  current->Verify();
504  current = current->next();
505  }
506 
507  // Check that successors are correctly set.
508  HBasicBlock* first = block->end()->FirstSuccessor();
509  HBasicBlock* second = block->end()->SecondSuccessor();
510  ASSERT(second == NULL || first != NULL);
511 
512  // Check that the predecessor array is correct.
513  if (first != NULL) {
514  ASSERT(first->predecessors()->Contains(block));
515  if (second != NULL) {
516  ASSERT(second->predecessors()->Contains(block));
517  }
518  }
519 
520  // Check that phis have correct arguments.
521  for (int j = 0; j < block->phis()->length(); j++) {
522  HPhi* phi = block->phis()->at(j);
523  phi->Verify();
524  }
525 
526  // Check that all join blocks have predecessors that end with an
527  // unconditional goto and agree on their environment node id.
528  if (block->predecessors()->length() >= 2) {
529  BailoutId id =
530  block->predecessors()->first()->last_environment()->ast_id();
531  for (int k = 0; k < block->predecessors()->length(); k++) {
532  HBasicBlock* predecessor = block->predecessors()->at(k);
533  ASSERT(predecessor->end()->IsGoto());
534  ASSERT(predecessor->last_environment()->ast_id() == id);
535  }
536  }
537  }
538 
539  // Check special property of first block to have no predecessors.
540  ASSERT(blocks_.at(0)->predecessors()->is_empty());
541 
542  if (do_full_verify) {
543  // Check that the graph is fully connected.
544  ReachabilityAnalyzer analyzer(entry_block_, blocks_.length(), NULL);
545  ASSERT(analyzer.visited_count() == blocks_.length());
546 
547  // Check that entry block dominator is NULL.
548  ASSERT(entry_block_->dominator() == NULL);
549 
550  // Check dominators.
551  for (int i = 0; i < blocks_.length(); ++i) {
552  HBasicBlock* block = blocks_.at(i);
553  if (block->dominator() == NULL) {
554  // Only start block may have no dominator assigned to.
555  ASSERT(i == 0);
556  } else {
557  // Assert that block is unreachable if dominator must not be visited.
558  ReachabilityAnalyzer dominator_analyzer(entry_block_,
559  blocks_.length(),
560  block->dominator());
561  ASSERT(!dominator_analyzer.reachable()->Contains(block->block_id()));
562  }
563  }
564  }
565 }
566 
567 #endif
568 
569 
570 HConstant* HGraph::GetConstant(SetOncePointer<HConstant>* pointer,
571  Handle<Object> value) {
572  if (!pointer->is_set()) {
573  HConstant* constant = new(zone()) HConstant(value,
575  constant->InsertAfter(GetConstantUndefined());
576  pointer->set(constant);
577  }
578  return pointer->get();
579 }
580 
581 
582 HConstant* HGraph::GetConstantInt32(SetOncePointer<HConstant>* pointer,
583  int32_t value) {
584  if (!pointer->is_set()) {
585  HConstant* constant =
586  new(zone()) HConstant(value, Representation::Integer32());
587  constant->InsertAfter(GetConstantUndefined());
588  pointer->set(constant);
589  }
590  return pointer->get();
591 }
592 
593 
595  return GetConstantInt32(&constant_1_, 1);
596 }
597 
598 
600  return GetConstantInt32(&constant_minus1_, -1);
601 }
602 
603 
605  return GetConstant(&constant_true_, isolate()->factory()->true_value());
606 }
607 
608 
610  return GetConstant(&constant_false_, isolate()->factory()->false_value());
611 }
612 
613 
615  return GetConstant(&constant_hole_, isolate()->factory()->the_hole_value());
616 }
617 
618 
620  TypeFeedbackOracle* oracle)
621  : function_state_(NULL),
622  initial_function_state_(this, info, oracle, NORMAL_RETURN),
623  ast_context_(NULL),
624  break_scope_(NULL),
625  graph_(NULL),
626  current_block_(NULL),
627  inlined_count_(0),
628  globals_(10, info->zone()),
629  zone_(info->zone()),
630  inline_bailout_(false) {
631  // This is not initialized in the initializer list because the
632  // constructor for the initial state relies on function_state_ == NULL
633  // to know it's the initial state.
634  function_state_= &initial_function_state_;
635 }
636 
638  HBasicBlock* second,
639  BailoutId join_id) {
640  if (first == NULL) {
641  return second;
642  } else if (second == NULL) {
643  return first;
644  } else {
645  HBasicBlock* join_block = graph_->CreateBasicBlock();
646  first->Goto(join_block);
647  second->Goto(join_block);
648  join_block->SetJoinId(join_id);
649  return join_block;
650  }
651 }
652 
653 
654 HBasicBlock* HGraphBuilder::JoinContinue(IterationStatement* statement,
655  HBasicBlock* exit_block,
656  HBasicBlock* continue_block) {
657  if (continue_block != NULL) {
658  if (exit_block != NULL) exit_block->Goto(continue_block);
659  continue_block->SetJoinId(statement->ContinueId());
660  return continue_block;
661  }
662  return exit_block;
663 }
664 
665 
666 HBasicBlock* HGraphBuilder::CreateLoop(IterationStatement* statement,
667  HBasicBlock* loop_entry,
668  HBasicBlock* body_exit,
669  HBasicBlock* loop_successor,
670  HBasicBlock* break_block) {
671  if (body_exit != NULL) body_exit->Goto(loop_entry);
672  loop_entry->PostProcessLoopHeader(statement);
673  if (break_block != NULL) {
674  if (loop_successor != NULL) loop_successor->Goto(break_block);
675  break_block->SetJoinId(statement->ExitId());
676  return break_block;
677  }
678  return loop_successor;
679 }
680 
681 
683  Finish(instruction);
685 }
686 
687 
689  : isolate_(info->isolate()),
690  next_block_id_(0),
691  entry_block_(NULL),
692  blocks_(8, info->zone()),
693  values_(16, info->zone()),
694  phi_list_(NULL),
695  uint32_instructions_(NULL),
696  info_(info),
697  zone_(info->zone()),
698  is_recursive_(false),
699  use_optimistic_licm_(false),
700  type_change_checksum_(0) {
701  start_environment_ =
702  new(zone_) HEnvironment(NULL, info->scope(), info->closure(), zone_);
703  start_environment_->set_ast_id(BailoutId::FunctionEntry());
704  entry_block_ = CreateBasicBlock();
705  entry_block_->SetInitialEnvironment(start_environment_);
706 }
707 
708 
710  HBasicBlock* result = new(zone()) HBasicBlock(this);
711  blocks_.Add(result, zone());
712  return result;
713 }
714 
715 
717  if (!FLAG_use_canonicalizing) return;
718  HPhase phase("H_Canonicalize", this);
719  for (int i = 0; i < blocks()->length(); ++i) {
720  HInstruction* instr = blocks()->at(i)->first();
721  while (instr != NULL) {
722  HValue* value = instr->Canonicalize();
723  if (value != instr) instr->DeleteAndReplaceWith(value);
724  instr = instr->next();
725  }
726  }
727 }
728 
729 // Block ordering was implemented with two mutually recursive methods,
730 // HGraph::Postorder and HGraph::PostorderLoopBlocks.
731 // The recursion could lead to stack overflow so the algorithm has been
732 // implemented iteratively.
733 // At a high level the algorithm looks like this:
734 //
735 // Postorder(block, loop_header) : {
736 // if (block has already been visited or is of another loop) return;
737 // mark block as visited;
738 // if (block is a loop header) {
739 // VisitLoopMembers(block, loop_header);
740 // VisitSuccessorsOfLoopHeader(block);
741 // } else {
742 // VisitSuccessors(block)
743 // }
744 // put block in result list;
745 // }
746 //
747 // VisitLoopMembers(block, outer_loop_header) {
748 // foreach (block b in block loop members) {
749 // VisitSuccessorsOfLoopMember(b, outer_loop_header);
750 // if (b is loop header) VisitLoopMembers(b);
751 // }
752 // }
753 //
754 // VisitSuccessorsOfLoopMember(block, outer_loop_header) {
755 // foreach (block b in block successors) Postorder(b, outer_loop_header)
756 // }
757 //
758 // VisitSuccessorsOfLoopHeader(block) {
759 // foreach (block b in block successors) Postorder(b, block)
760 // }
761 //
762 // VisitSuccessors(block, loop_header) {
763 // foreach (block b in block successors) Postorder(b, loop_header)
764 // }
765 //
766 // The ordering is started calling Postorder(entry, NULL).
767 //
768 // Each instance of PostorderProcessor represents the "stack frame" of the
769 // recursion, and particularly keeps the state of the loop (iteration) of the
770 // "Visit..." function it represents.
771 // To recycle memory we keep all the frames in a double linked list but
772 // this means that we cannot use constructors to initialize the frames.
773 //
775  public:
776  // Back link (towards the stack bottom).
777  PostorderProcessor* parent() {return father_; }
778  // Forward link (towards the stack top).
779  PostorderProcessor* child() {return child_; }
780  HBasicBlock* block() { return block_; }
781  HLoopInformation* loop() { return loop_; }
782  HBasicBlock* loop_header() { return loop_header_; }
783 
785  HBasicBlock* block,
786  BitVector* visited) {
787  PostorderProcessor* result = new(zone) PostorderProcessor(NULL);
788  return result->SetupSuccessors(zone, block, NULL, visited);
789  }
790 
792  BitVector* visited,
793  ZoneList<HBasicBlock*>* order) {
794  PostorderProcessor* next =
795  PerformNonBacktrackingStep(zone, visited, order);
796  if (next != NULL) {
797  return next;
798  } else {
799  return Backtrack(zone, visited, order);
800  }
801  }
802 
803  private:
804  explicit PostorderProcessor(PostorderProcessor* father)
805  : father_(father), child_(NULL), successor_iterator(NULL) { }
806 
807  // Each enum value states the cycle whose state is kept by this instance.
808  enum LoopKind {
809  NONE,
810  SUCCESSORS,
811  SUCCESSORS_OF_LOOP_HEADER,
812  LOOP_MEMBERS,
813  SUCCESSORS_OF_LOOP_MEMBER
814  };
815 
816  // Each "Setup..." method is like a constructor for a cycle state.
817  PostorderProcessor* SetupSuccessors(Zone* zone,
818  HBasicBlock* block,
819  HBasicBlock* loop_header,
820  BitVector* visited) {
821  if (block == NULL || visited->Contains(block->block_id()) ||
822  block->parent_loop_header() != loop_header) {
823  kind_ = NONE;
824  block_ = NULL;
825  loop_ = NULL;
826  loop_header_ = NULL;
827  return this;
828  } else {
829  block_ = block;
830  loop_ = NULL;
831  visited->Add(block->block_id());
832 
833  if (block->IsLoopHeader()) {
834  kind_ = SUCCESSORS_OF_LOOP_HEADER;
835  loop_header_ = block;
836  InitializeSuccessors();
837  PostorderProcessor* result = Push(zone);
838  return result->SetupLoopMembers(zone, block, block->loop_information(),
839  loop_header);
840  } else {
841  ASSERT(block->IsFinished());
842  kind_ = SUCCESSORS;
843  loop_header_ = loop_header;
844  InitializeSuccessors();
845  return this;
846  }
847  }
848  }
849 
850  PostorderProcessor* SetupLoopMembers(Zone* zone,
851  HBasicBlock* block,
852  HLoopInformation* loop,
853  HBasicBlock* loop_header) {
854  kind_ = LOOP_MEMBERS;
855  block_ = block;
856  loop_ = loop;
857  loop_header_ = loop_header;
858  InitializeLoopMembers();
859  return this;
860  }
861 
862  PostorderProcessor* SetupSuccessorsOfLoopMember(
863  HBasicBlock* block,
864  HLoopInformation* loop,
865  HBasicBlock* loop_header) {
866  kind_ = SUCCESSORS_OF_LOOP_MEMBER;
867  block_ = block;
868  loop_ = loop;
869  loop_header_ = loop_header;
870  InitializeSuccessors();
871  return this;
872  }
873 
874  // This method "allocates" a new stack frame.
875  PostorderProcessor* Push(Zone* zone) {
876  if (child_ == NULL) {
877  child_ = new(zone) PostorderProcessor(this);
878  }
879  return child_;
880  }
881 
882  void ClosePostorder(ZoneList<HBasicBlock*>* order, Zone* zone) {
883  ASSERT(block_->end()->FirstSuccessor() == NULL ||
884  order->Contains(block_->end()->FirstSuccessor()) ||
885  block_->end()->FirstSuccessor()->IsLoopHeader());
886  ASSERT(block_->end()->SecondSuccessor() == NULL ||
887  order->Contains(block_->end()->SecondSuccessor()) ||
888  block_->end()->SecondSuccessor()->IsLoopHeader());
889  order->Add(block_, zone);
890  }
891 
892  // This method is the basic block to walk up the stack.
893  PostorderProcessor* Pop(Zone* zone,
894  BitVector* visited,
895  ZoneList<HBasicBlock*>* order) {
896  switch (kind_) {
897  case SUCCESSORS:
898  case SUCCESSORS_OF_LOOP_HEADER:
899  ClosePostorder(order, zone);
900  return father_;
901  case LOOP_MEMBERS:
902  return father_;
903  case SUCCESSORS_OF_LOOP_MEMBER:
904  if (block()->IsLoopHeader() && block() != loop_->loop_header()) {
905  // In this case we need to perform a LOOP_MEMBERS cycle so we
906  // initialize it and return this instead of father.
907  return SetupLoopMembers(zone, block(),
908  block()->loop_information(), loop_header_);
909  } else {
910  return father_;
911  }
912  case NONE:
913  return father_;
914  }
915  UNREACHABLE();
916  return NULL;
917  }
918 
919  // Walks up the stack.
920  PostorderProcessor* Backtrack(Zone* zone,
921  BitVector* visited,
922  ZoneList<HBasicBlock*>* order) {
923  PostorderProcessor* parent = Pop(zone, visited, order);
924  while (parent != NULL) {
925  PostorderProcessor* next =
926  parent->PerformNonBacktrackingStep(zone, visited, order);
927  if (next != NULL) {
928  return next;
929  } else {
930  parent = parent->Pop(zone, visited, order);
931  }
932  }
933  return NULL;
934  }
935 
936  PostorderProcessor* PerformNonBacktrackingStep(
937  Zone* zone,
938  BitVector* visited,
939  ZoneList<HBasicBlock*>* order) {
940  HBasicBlock* next_block;
941  switch (kind_) {
942  case SUCCESSORS:
943  next_block = AdvanceSuccessors();
944  if (next_block != NULL) {
945  PostorderProcessor* result = Push(zone);
946  return result->SetupSuccessors(zone, next_block,
947  loop_header_, visited);
948  }
949  break;
950  case SUCCESSORS_OF_LOOP_HEADER:
951  next_block = AdvanceSuccessors();
952  if (next_block != NULL) {
953  PostorderProcessor* result = Push(zone);
954  return result->SetupSuccessors(zone, next_block,
955  block(), visited);
956  }
957  break;
958  case LOOP_MEMBERS:
959  next_block = AdvanceLoopMembers();
960  if (next_block != NULL) {
961  PostorderProcessor* result = Push(zone);
962  return result->SetupSuccessorsOfLoopMember(next_block,
963  loop_, loop_header_);
964  }
965  break;
966  case SUCCESSORS_OF_LOOP_MEMBER:
967  next_block = AdvanceSuccessors();
968  if (next_block != NULL) {
969  PostorderProcessor* result = Push(zone);
970  return result->SetupSuccessors(zone, next_block,
971  loop_header_, visited);
972  }
973  break;
974  case NONE:
975  return NULL;
976  }
977  return NULL;
978  }
979 
980  // The following two methods implement a "foreach b in successors" cycle.
981  void InitializeSuccessors() {
982  loop_index = 0;
983  loop_length = 0;
984  successor_iterator = HSuccessorIterator(block_->end());
985  }
986 
987  HBasicBlock* AdvanceSuccessors() {
988  if (!successor_iterator.Done()) {
989  HBasicBlock* result = successor_iterator.Current();
990  successor_iterator.Advance();
991  return result;
992  }
993  return NULL;
994  }
995 
996  // The following two methods implement a "foreach b in loop members" cycle.
997  void InitializeLoopMembers() {
998  loop_index = 0;
999  loop_length = loop_->blocks()->length();
1000  }
1001 
1002  HBasicBlock* AdvanceLoopMembers() {
1003  if (loop_index < loop_length) {
1004  HBasicBlock* result = loop_->blocks()->at(loop_index);
1005  loop_index++;
1006  return result;
1007  } else {
1008  return NULL;
1009  }
1010  }
1011 
1012  LoopKind kind_;
1013  PostorderProcessor* father_;
1014  PostorderProcessor* child_;
1015  HLoopInformation* loop_;
1016  HBasicBlock* block_;
1017  HBasicBlock* loop_header_;
1018  int loop_index;
1019  int loop_length;
1020  HSuccessorIterator successor_iterator;
1021 };
1022 
1023 
1025  HPhase phase("H_Block ordering");
1026  BitVector visited(blocks_.length(), zone());
1027 
1028  ZoneList<HBasicBlock*> reverse_result(8, zone());
1029  HBasicBlock* start = blocks_[0];
1030  PostorderProcessor* postorder =
1031  PostorderProcessor::CreateEntryProcessor(zone(), start, &visited);
1032  while (postorder != NULL) {
1033  postorder = postorder->PerformStep(zone(), &visited, &reverse_result);
1034  }
1035  blocks_.Rewind(0);
1036  int index = 0;
1037  for (int i = reverse_result.length() - 1; i >= 0; --i) {
1038  HBasicBlock* b = reverse_result[i];
1039  blocks_.Add(b, zone());
1040  b->set_block_id(index++);
1041  }
1042 }
1043 
1044 
1046  HPhase phase("H_Assign dominators", this);
1047  for (int i = 0; i < blocks_.length(); ++i) {
1048  HBasicBlock* block = blocks_[i];
1049  if (block->IsLoopHeader()) {
1050  // Only the first predecessor of a loop header is from outside the loop.
1051  // All others are back edges, and thus cannot dominate the loop header.
1052  block->AssignCommonDominator(block->predecessors()->first());
1054  } else {
1055  for (int j = blocks_[i]->predecessors()->length() - 1; j >= 0; --j) {
1056  blocks_[i]->AssignCommonDominator(blocks_[i]->predecessors()->at(j));
1057  }
1058  }
1059  }
1060 }
1061 
1062 // Mark all blocks that are dominated by an unconditional soft deoptimize to
1063 // prevent code motion across those blocks.
1065  HPhase phase("H_Propagate deoptimizing mark", this);
1066  MarkAsDeoptimizingRecursively(entry_block());
1067 }
1068 
1069 void HGraph::MarkAsDeoptimizingRecursively(HBasicBlock* block) {
1070  for (int i = 0; i < block->dominated_blocks()->length(); ++i) {
1071  HBasicBlock* dominated = block->dominated_blocks()->at(i);
1072  if (block->IsDeoptimizing()) dominated->MarkAsDeoptimizing();
1073  MarkAsDeoptimizingRecursively(dominated);
1074  }
1075 }
1076 
1078  HPhase phase("H_Redundant phi elimination", this);
1079 
1080  // Worklist of phis that can potentially be eliminated. Initialized with
1081  // all phi nodes. When elimination of a phi node modifies another phi node
1082  // the modified phi node is added to the worklist.
1083  ZoneList<HPhi*> worklist(blocks_.length(), zone());
1084  for (int i = 0; i < blocks_.length(); ++i) {
1085  worklist.AddAll(*blocks_[i]->phis(), zone());
1086  }
1087 
1088  while (!worklist.is_empty()) {
1089  HPhi* phi = worklist.RemoveLast();
1090  HBasicBlock* block = phi->block();
1091 
1092  // Skip phi node if it was already replaced.
1093  if (block == NULL) continue;
1094 
1095  // Get replacement value if phi is redundant.
1096  HValue* replacement = phi->GetRedundantReplacement();
1097 
1098  if (replacement != NULL) {
1099  // Iterate through the uses and replace them all.
1100  for (HUseIterator it(phi->uses()); !it.Done(); it.Advance()) {
1101  HValue* value = it.value();
1102  value->SetOperandAt(it.index(), replacement);
1103  if (value->IsPhi()) worklist.Add(HPhi::cast(value), zone());
1104  }
1105  block->RemovePhi(phi);
1106  }
1107  }
1108 }
1109 
1110 
1112  HPhase phase("H_Unreachable phi elimination", this);
1113 
1114  // Initialize worklist.
1115  ZoneList<HPhi*> phi_list(blocks_.length(), zone());
1116  ZoneList<HPhi*> worklist(blocks_.length(), zone());
1117  for (int i = 0; i < blocks_.length(); ++i) {
1118  for (int j = 0; j < blocks_[i]->phis()->length(); j++) {
1119  HPhi* phi = blocks_[i]->phis()->at(j);
1120  phi_list.Add(phi, zone());
1121  // We can't eliminate phis in the receiver position in the environment
1122  // because in case of throwing an error we need this value to
1123  // construct a stack trace.
1124  if (phi->HasRealUses() || phi->IsReceiver()) {
1125  phi->set_is_live(true);
1126  worklist.Add(phi, zone());
1127  }
1128  }
1129  }
1130 
1131  // Iteratively mark live phis.
1132  while (!worklist.is_empty()) {
1133  HPhi* phi = worklist.RemoveLast();
1134  for (int i = 0; i < phi->OperandCount(); i++) {
1135  HValue* operand = phi->OperandAt(i);
1136  if (operand->IsPhi() && !HPhi::cast(operand)->is_live()) {
1137  HPhi::cast(operand)->set_is_live(true);
1138  worklist.Add(HPhi::cast(operand), zone());
1139  }
1140  }
1141  }
1142 
1143  // Remove unreachable phis.
1144  for (int i = 0; i < phi_list.length(); i++) {
1145  HPhi* phi = phi_list[i];
1146  if (!phi->is_live()) {
1147  HBasicBlock* block = phi->block();
1148  block->RemovePhi(phi);
1149  block->RecordDeletedPhi(phi->merged_index());
1150  }
1151  }
1152 }
1153 
1154 
1156  int block_count = blocks_.length();
1157  for (int i = 0; i < block_count; ++i) {
1158  for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
1159  HPhi* phi = blocks_[i]->phis()->at(j);
1160  // We don't support phi uses of arguments for now.
1161  if (phi->CheckFlag(HValue::kIsArguments)) return false;
1162  }
1163  }
1164  return true;
1165 }
1166 
1167 
1169  int block_count = blocks_.length();
1170  for (int i = 0; i < block_count; ++i) {
1171  for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
1172  HPhi* phi = blocks_[i]->phis()->at(j);
1173  // Check for the hole value (from an uninitialized const).
1174  for (int k = 0; k < phi->OperandCount(); k++) {
1175  if (phi->OperandAt(k) == GetConstantHole()) return false;
1176  }
1177  }
1178  }
1179  return true;
1180 }
1181 
1182 
1184  int block_count = blocks_.length();
1185  phi_list_ = new(zone()) ZoneList<HPhi*>(block_count, zone());
1186  for (int i = 0; i < block_count; ++i) {
1187  for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
1188  HPhi* phi = blocks_[i]->phis()->at(j);
1189  phi_list_->Add(phi, zone());
1190  }
1191  }
1192 }
1193 
1194 
1195 void HGraph::InferTypes(ZoneList<HValue*>* worklist) {
1196  BitVector in_worklist(GetMaximumValueID(), zone());
1197  for (int i = 0; i < worklist->length(); ++i) {
1198  ASSERT(!in_worklist.Contains(worklist->at(i)->id()));
1199  in_worklist.Add(worklist->at(i)->id());
1200  }
1201 
1202  while (!worklist->is_empty()) {
1203  HValue* current = worklist->RemoveLast();
1204  in_worklist.Remove(current->id());
1205  if (current->UpdateInferredType()) {
1206  for (HUseIterator it(current->uses()); !it.Done(); it.Advance()) {
1207  HValue* use = it.value();
1208  if (!in_worklist.Contains(use->id())) {
1209  in_worklist.Add(use->id());
1210  worklist->Add(use, zone());
1211  }
1212  }
1213  }
1214  }
1215 }
1216 
1217 
1218 class HRangeAnalysis BASE_EMBEDDED {
1219  public:
1220  explicit HRangeAnalysis(HGraph* graph) :
1221  graph_(graph), zone_(graph->zone()), changed_ranges_(16, zone_) { }
1222 
1223  void Analyze();
1224 
1225  private:
1226  void TraceRange(const char* msg, ...);
1227  void Analyze(HBasicBlock* block);
1228  void InferControlFlowRange(HCompareIDAndBranch* test, HBasicBlock* dest);
1229  void UpdateControlFlowRange(Token::Value op, HValue* value, HValue* other);
1230  void InferRange(HValue* value);
1231  void RollBackTo(int index);
1232  void AddRange(HValue* value, Range* range);
1233 
1234  HGraph* graph_;
1235  Zone* zone_;
1236  ZoneList<HValue*> changed_ranges_;
1237 };
1238 
1239 
1240 void HRangeAnalysis::TraceRange(const char* msg, ...) {
1241  if (FLAG_trace_range) {
1242  va_list arguments;
1243  va_start(arguments, msg);
1244  OS::VPrint(msg, arguments);
1245  va_end(arguments);
1246  }
1247 }
1248 
1249 
1250 void HRangeAnalysis::Analyze() {
1251  HPhase phase("H_Range analysis", graph_);
1252  Analyze(graph_->entry_block());
1253 }
1254 
1255 
1256 void HRangeAnalysis::Analyze(HBasicBlock* block) {
1257  TraceRange("Analyzing block B%d\n", block->block_id());
1258 
1259  int last_changed_range = changed_ranges_.length() - 1;
1260 
1261  // Infer range based on control flow.
1262  if (block->predecessors()->length() == 1) {
1263  HBasicBlock* pred = block->predecessors()->first();
1264  if (pred->end()->IsCompareIDAndBranch()) {
1265  InferControlFlowRange(HCompareIDAndBranch::cast(pred->end()), block);
1266  }
1267  }
1268 
1269  // Process phi instructions.
1270  for (int i = 0; i < block->phis()->length(); ++i) {
1271  HPhi* phi = block->phis()->at(i);
1272  InferRange(phi);
1273  }
1274 
1275  // Go through all instructions of the current block.
1276  HInstruction* instr = block->first();
1277  while (instr != block->end()) {
1278  InferRange(instr);
1279  instr = instr->next();
1280  }
1281 
1282  // Continue analysis in all dominated blocks.
1283  for (int i = 0; i < block->dominated_blocks()->length(); ++i) {
1284  Analyze(block->dominated_blocks()->at(i));
1285  }
1286 
1287  RollBackTo(last_changed_range);
1288 }
1289 
1290 
1291 void HRangeAnalysis::InferControlFlowRange(HCompareIDAndBranch* test,
1292  HBasicBlock* dest) {
1293  ASSERT((test->FirstSuccessor() == dest) == (test->SecondSuccessor() != dest));
1294  if (test->GetInputRepresentation().IsInteger32()) {
1295  Token::Value op = test->token();
1296  if (test->SecondSuccessor() == dest) {
1297  op = Token::NegateCompareOp(op);
1298  }
1299  Token::Value inverted_op = Token::InvertCompareOp(op);
1300  UpdateControlFlowRange(op, test->left(), test->right());
1301  UpdateControlFlowRange(inverted_op, test->right(), test->left());
1302  }
1303 }
1304 
1305 
1306 // We know that value [op] other. Use this information to update the range on
1307 // value.
1308 void HRangeAnalysis::UpdateControlFlowRange(Token::Value op,
1309  HValue* value,
1310  HValue* other) {
1311  Range temp_range;
1312  Range* range = other->range() != NULL ? other->range() : &temp_range;
1313  Range* new_range = NULL;
1314 
1315  TraceRange("Control flow range infer %d %s %d\n",
1316  value->id(),
1317  Token::Name(op),
1318  other->id());
1319 
1320  if (op == Token::EQ || op == Token::EQ_STRICT) {
1321  // The same range has to apply for value.
1322  new_range = range->Copy(zone_);
1323  } else if (op == Token::LT || op == Token::LTE) {
1324  new_range = range->CopyClearLower(zone_);
1325  if (op == Token::LT) {
1326  new_range->AddConstant(-1);
1327  }
1328  } else if (op == Token::GT || op == Token::GTE) {
1329  new_range = range->CopyClearUpper(zone_);
1330  if (op == Token::GT) {
1331  new_range->AddConstant(1);
1332  }
1333  }
1334 
1335  if (new_range != NULL && !new_range->IsMostGeneric()) {
1336  AddRange(value, new_range);
1337  }
1338 }
1339 
1340 
1341 void HRangeAnalysis::InferRange(HValue* value) {
1342  ASSERT(!value->HasRange());
1343  if (!value->representation().IsNone()) {
1344  value->ComputeInitialRange(zone_);
1345  Range* range = value->range();
1346  TraceRange("Initial inferred range of %d (%s) set to [%d,%d]\n",
1347  value->id(),
1348  value->Mnemonic(),
1349  range->lower(),
1350  range->upper());
1351  }
1352 }
1353 
1354 
1355 void HRangeAnalysis::RollBackTo(int index) {
1356  for (int i = index + 1; i < changed_ranges_.length(); ++i) {
1357  changed_ranges_[i]->RemoveLastAddedRange();
1358  }
1359  changed_ranges_.Rewind(index + 1);
1360 }
1361 
1362 
1363 void HRangeAnalysis::AddRange(HValue* value, Range* range) {
1364  Range* original_range = value->range();
1365  value->AddNewRange(range, zone_);
1366  changed_ranges_.Add(value, zone_);
1367  Range* new_range = value->range();
1368  TraceRange("Updated range of %d set to [%d,%d]\n",
1369  value->id(),
1370  new_range->lower(),
1371  new_range->upper());
1372  if (original_range != NULL) {
1373  TraceRange("Original range was [%d,%d]\n",
1374  original_range->lower(),
1375  original_range->upper());
1376  }
1377  TraceRange("New information was [%d,%d]\n",
1378  range->lower(),
1379  range->upper());
1380 }
1381 
1382 
1383 void TraceGVN(const char* msg, ...) {
1384  va_list arguments;
1385  va_start(arguments, msg);
1386  OS::VPrint(msg, arguments);
1387  va_end(arguments);
1388 }
1389 
1390 // Wrap TraceGVN in macros to avoid the expense of evaluating its arguments when
1391 // --trace-gvn is off.
1392 #define TRACE_GVN_1(msg, a1) \
1393  if (FLAG_trace_gvn) { \
1394  TraceGVN(msg, a1); \
1395  }
1396 
1397 #define TRACE_GVN_2(msg, a1, a2) \
1398  if (FLAG_trace_gvn) { \
1399  TraceGVN(msg, a1, a2); \
1400  }
1401 
1402 #define TRACE_GVN_3(msg, a1, a2, a3) \
1403  if (FLAG_trace_gvn) { \
1404  TraceGVN(msg, a1, a2, a3); \
1405  }
1406 
1407 #define TRACE_GVN_4(msg, a1, a2, a3, a4) \
1408  if (FLAG_trace_gvn) { \
1409  TraceGVN(msg, a1, a2, a3, a4); \
1410  }
1411 
1412 #define TRACE_GVN_5(msg, a1, a2, a3, a4, a5) \
1413  if (FLAG_trace_gvn) { \
1414  TraceGVN(msg, a1, a2, a3, a4, a5); \
1415  }
1416 
1417 
1418 HValueMap::HValueMap(Zone* zone, const HValueMap* other)
1419  : array_size_(other->array_size_),
1420  lists_size_(other->lists_size_),
1421  count_(other->count_),
1422  present_flags_(other->present_flags_),
1423  array_(zone->NewArray<HValueMapListElement>(other->array_size_)),
1424  lists_(zone->NewArray<HValueMapListElement>(other->lists_size_)),
1425  free_list_head_(other->free_list_head_) {
1426  memcpy(array_, other->array_, array_size_ * sizeof(HValueMapListElement));
1427  memcpy(lists_, other->lists_, lists_size_ * sizeof(HValueMapListElement));
1428 }
1429 
1430 
1431 void HValueMap::Kill(GVNFlagSet flags) {
1432  GVNFlagSet depends_flags = HValue::ConvertChangesToDependsFlags(flags);
1433  if (!present_flags_.ContainsAnyOf(depends_flags)) return;
1434  present_flags_.RemoveAll();
1435  for (int i = 0; i < array_size_; ++i) {
1436  HValue* value = array_[i].value;
1437  if (value != NULL) {
1438  // Clear list of collisions first, so we know if it becomes empty.
1439  int kept = kNil; // List of kept elements.
1440  int next;
1441  for (int current = array_[i].next; current != kNil; current = next) {
1442  next = lists_[current].next;
1443  HValue* value = lists_[current].value;
1444  if (value->gvn_flags().ContainsAnyOf(depends_flags)) {
1445  // Drop it.
1446  count_--;
1447  lists_[current].next = free_list_head_;
1448  free_list_head_ = current;
1449  } else {
1450  // Keep it.
1451  lists_[current].next = kept;
1452  kept = current;
1453  present_flags_.Add(value->gvn_flags());
1454  }
1455  }
1456  array_[i].next = kept;
1457 
1458  // Now possibly drop directly indexed element.
1459  value = array_[i].value;
1460  if (value->gvn_flags().ContainsAnyOf(depends_flags)) { // Drop it.
1461  count_--;
1462  int head = array_[i].next;
1463  if (head == kNil) {
1464  array_[i].value = NULL;
1465  } else {
1466  array_[i].value = lists_[head].value;
1467  array_[i].next = lists_[head].next;
1468  lists_[head].next = free_list_head_;
1469  free_list_head_ = head;
1470  }
1471  } else {
1472  present_flags_.Add(value->gvn_flags()); // Keep it.
1473  }
1474  }
1475  }
1476 }
1477 
1478 
1479 HValue* HValueMap::Lookup(HValue* value) const {
1480  uint32_t hash = static_cast<uint32_t>(value->Hashcode());
1481  uint32_t pos = Bound(hash);
1482  if (array_[pos].value != NULL) {
1483  if (array_[pos].value->Equals(value)) return array_[pos].value;
1484  int next = array_[pos].next;
1485  while (next != kNil) {
1486  if (lists_[next].value->Equals(value)) return lists_[next].value;
1487  next = lists_[next].next;
1488  }
1489  }
1490  return NULL;
1491 }
1492 
1493 
1494 void HValueMap::Resize(int new_size, Zone* zone) {
1495  ASSERT(new_size > count_);
1496  // Hashing the values into the new array has no more collisions than in the
1497  // old hash map, so we can use the existing lists_ array, if we are careful.
1498 
1499  // Make sure we have at least one free element.
1500  if (free_list_head_ == kNil) {
1501  ResizeLists(lists_size_ << 1, zone);
1502  }
1503 
1504  HValueMapListElement* new_array =
1505  zone->NewArray<HValueMapListElement>(new_size);
1506  memset(new_array, 0, sizeof(HValueMapListElement) * new_size);
1507 
1508  HValueMapListElement* old_array = array_;
1509  int old_size = array_size_;
1510 
1511  int old_count = count_;
1512  count_ = 0;
1513  // Do not modify present_flags_. It is currently correct.
1514  array_size_ = new_size;
1515  array_ = new_array;
1516 
1517  if (old_array != NULL) {
1518  // Iterate over all the elements in lists, rehashing them.
1519  for (int i = 0; i < old_size; ++i) {
1520  if (old_array[i].value != NULL) {
1521  int current = old_array[i].next;
1522  while (current != kNil) {
1523  Insert(lists_[current].value, zone);
1524  int next = lists_[current].next;
1525  lists_[current].next = free_list_head_;
1526  free_list_head_ = current;
1527  current = next;
1528  }
1529  // Rehash the directly stored value.
1530  Insert(old_array[i].value, zone);
1531  }
1532  }
1533  }
1534  USE(old_count);
1535  ASSERT(count_ == old_count);
1536 }
1537 
1538 
1539 void HValueMap::ResizeLists(int new_size, Zone* zone) {
1540  ASSERT(new_size > lists_size_);
1541 
1542  HValueMapListElement* new_lists =
1543  zone->NewArray<HValueMapListElement>(new_size);
1544  memset(new_lists, 0, sizeof(HValueMapListElement) * new_size);
1545 
1546  HValueMapListElement* old_lists = lists_;
1547  int old_size = lists_size_;
1548 
1549  lists_size_ = new_size;
1550  lists_ = new_lists;
1551 
1552  if (old_lists != NULL) {
1553  memcpy(lists_, old_lists, old_size * sizeof(HValueMapListElement));
1554  }
1555  for (int i = old_size; i < lists_size_; ++i) {
1556  lists_[i].next = free_list_head_;
1557  free_list_head_ = i;
1558  }
1559 }
1560 
1561 
1562 void HValueMap::Insert(HValue* value, Zone* zone) {
1563  ASSERT(value != NULL);
1564  // Resizing when half of the hashtable is filled up.
1565  if (count_ >= array_size_ >> 1) Resize(array_size_ << 1, zone);
1566  ASSERT(count_ < array_size_);
1567  count_++;
1568  uint32_t pos = Bound(static_cast<uint32_t>(value->Hashcode()));
1569  if (array_[pos].value == NULL) {
1570  array_[pos].value = value;
1571  array_[pos].next = kNil;
1572  } else {
1573  if (free_list_head_ == kNil) {
1574  ResizeLists(lists_size_ << 1, zone);
1575  }
1576  int new_element_pos = free_list_head_;
1577  ASSERT(new_element_pos != kNil);
1578  free_list_head_ = lists_[free_list_head_].next;
1579  lists_[new_element_pos].value = value;
1580  lists_[new_element_pos].next = array_[pos].next;
1581  ASSERT(array_[pos].next == kNil || lists_[array_[pos].next].value != NULL);
1582  array_[pos].next = new_element_pos;
1583  }
1584 }
1585 
1586 
1587 HSideEffectMap::HSideEffectMap() : count_(0) {
1588  memset(data_, 0, kNumberOfTrackedSideEffects * kPointerSize);
1589 }
1590 
1591 
1592 HSideEffectMap::HSideEffectMap(HSideEffectMap* other) : count_(other->count_) {
1593  *this = *other; // Calls operator=.
1594 }
1595 
1596 
1597 HSideEffectMap& HSideEffectMap::operator= (const HSideEffectMap& other) {
1598  if (this != &other) {
1599  memcpy(data_, other.data_, kNumberOfTrackedSideEffects * kPointerSize);
1600  }
1601  return *this;
1602 }
1603 
1604 void HSideEffectMap::Kill(GVNFlagSet flags) {
1605  for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
1606  GVNFlag changes_flag = HValue::ChangesFlagFromInt(i);
1607  if (flags.Contains(changes_flag)) {
1608  if (data_[i] != NULL) count_--;
1609  data_[i] = NULL;
1610  }
1611  }
1612 }
1613 
1614 
1615 void HSideEffectMap::Store(GVNFlagSet flags, HInstruction* instr) {
1616  for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
1617  GVNFlag changes_flag = HValue::ChangesFlagFromInt(i);
1618  if (flags.Contains(changes_flag)) {
1619  if (data_[i] == NULL) count_++;
1620  data_[i] = instr;
1621  }
1622  }
1623 }
1624 
1625 
1626 class HStackCheckEliminator BASE_EMBEDDED {
1627  public:
1628  explicit HStackCheckEliminator(HGraph* graph) : graph_(graph) { }
1629 
1630  void Process();
1631 
1632  private:
1633  HGraph* graph_;
1634 };
1635 
1636 
1637 void HStackCheckEliminator::Process() {
1638  // For each loop block walk the dominator tree from the backwards branch to
1639  // the loop header. If a call instruction is encountered the backwards branch
1640  // is dominated by a call and the stack check in the backwards branch can be
1641  // removed.
1642  for (int i = 0; i < graph_->blocks()->length(); i++) {
1643  HBasicBlock* block = graph_->blocks()->at(i);
1644  if (block->IsLoopHeader()) {
1645  HBasicBlock* back_edge = block->loop_information()->GetLastBackEdge();
1646  HBasicBlock* dominator = back_edge;
1647  while (true) {
1648  HInstruction* instr = dominator->first();
1649  while (instr != NULL) {
1650  if (instr->IsCall()) {
1651  block->loop_information()->stack_check()->Eliminate();
1652  break;
1653  }
1654  instr = instr->next();
1655  }
1656 
1657  // Done when the loop header is processed.
1658  if (dominator == block) break;
1659 
1660  // Move up the dominator tree.
1661  dominator = dominator->dominator();
1662  }
1663  }
1664  }
1665 }
1666 
1667 
1668 // Simple sparse set with O(1) add, contains, and clear.
1669 class SparseSet {
1670  public:
1671  SparseSet(Zone* zone, int capacity)
1672  : capacity_(capacity),
1673  length_(0),
1674  dense_(zone->NewArray<int>(capacity)),
1675  sparse_(zone->NewArray<int>(capacity)) {
1676 #ifndef NVALGRIND
1677  // Initialize the sparse array to make valgrind happy.
1678  memset(sparse_, 0, sizeof(sparse_[0]) * capacity);
1679 #endif
1680  }
1681 
1682  bool Contains(int n) const {
1683  ASSERT(0 <= n && n < capacity_);
1684  int d = sparse_[n];
1685  return 0 <= d && d < length_ && dense_[d] == n;
1686  }
1687 
1688  bool Add(int n) {
1689  if (Contains(n)) return false;
1690  dense_[length_] = n;
1691  sparse_[n] = length_;
1692  ++length_;
1693  return true;
1694  }
1695 
1696  void Clear() { length_ = 0; }
1697 
1698  private:
1699  int capacity_;
1700  int length_;
1701  int* dense_;
1702  int* sparse_;
1703 
1704  DISALLOW_COPY_AND_ASSIGN(SparseSet);
1705 };
1706 
1707 
1708 class HGlobalValueNumberer BASE_EMBEDDED {
1709  public:
1711  : graph_(graph),
1712  info_(info),
1713  removed_side_effects_(false),
1714  block_side_effects_(graph->blocks()->length(), graph->zone()),
1715  loop_side_effects_(graph->blocks()->length(), graph->zone()),
1716  visited_on_paths_(graph->zone(), graph->blocks()->length()) {
1717 #ifdef DEBUG
1718  ASSERT(info->isolate()->optimizing_compiler_thread()->IsOptimizerThread() ||
1719  !info->isolate()->heap()->IsAllocationAllowed());
1720 #endif
1721  block_side_effects_.AddBlock(GVNFlagSet(), graph_->blocks()->length(),
1722  graph_->zone());
1723  loop_side_effects_.AddBlock(GVNFlagSet(), graph_->blocks()->length(),
1724  graph_->zone());
1725  }
1726 
1727  // Returns true if values with side effects are removed.
1728  bool Analyze();
1729 
1730  private:
1731  GVNFlagSet CollectSideEffectsOnPathsToDominatedBlock(
1732  HBasicBlock* dominator,
1733  HBasicBlock* dominated);
1734  void AnalyzeGraph();
1735  void ComputeBlockSideEffects();
1736  void LoopInvariantCodeMotion();
1737  void ProcessLoopBlock(HBasicBlock* block,
1738  HBasicBlock* before_loop,
1739  GVNFlagSet loop_kills,
1740  GVNFlagSet* accumulated_first_time_depends,
1741  GVNFlagSet* accumulated_first_time_changes);
1742  bool AllowCodeMotion();
1743  bool ShouldMove(HInstruction* instr, HBasicBlock* loop_header);
1744 
1745  HGraph* graph() { return graph_; }
1746  CompilationInfo* info() { return info_; }
1747  Zone* zone() const { return graph_->zone(); }
1748 
1749  HGraph* graph_;
1750  CompilationInfo* info_;
1751  bool removed_side_effects_;
1752 
1753  // A map of block IDs to their side effects.
1754  ZoneList<GVNFlagSet> block_side_effects_;
1755 
1756  // A map of loop header block IDs to their loop's side effects.
1757  ZoneList<GVNFlagSet> loop_side_effects_;
1758 
1759  // Used when collecting side effects on paths from dominator to
1760  // dominated.
1761  SparseSet visited_on_paths_;
1762 };
1763 
1764 
1765 bool HGlobalValueNumberer::Analyze() {
1766  removed_side_effects_ = false;
1767  ComputeBlockSideEffects();
1768  if (FLAG_loop_invariant_code_motion) {
1769  LoopInvariantCodeMotion();
1770  }
1771  AnalyzeGraph();
1772  return removed_side_effects_;
1773 }
1774 
1775 
1776 void HGlobalValueNumberer::ComputeBlockSideEffects() {
1777  // The Analyze phase of GVN can be called multiple times. Clear loop side
1778  // effects before computing them to erase the contents from previous Analyze
1779  // passes.
1780  for (int i = 0; i < loop_side_effects_.length(); ++i) {
1781  loop_side_effects_[i].RemoveAll();
1782  }
1783  for (int i = graph_->blocks()->length() - 1; i >= 0; --i) {
1784  // Compute side effects for the block.
1785  HBasicBlock* block = graph_->blocks()->at(i);
1786  HInstruction* instr = block->first();
1787  int id = block->block_id();
1788  GVNFlagSet side_effects;
1789  while (instr != NULL) {
1790  side_effects.Add(instr->ChangesFlags());
1791  if (instr->IsSoftDeoptimize()) {
1792  block_side_effects_[id].RemoveAll();
1793  side_effects.RemoveAll();
1794  break;
1795  }
1796  instr = instr->next();
1797  }
1798  block_side_effects_[id].Add(side_effects);
1799 
1800  // Loop headers are part of their loop.
1801  if (block->IsLoopHeader()) {
1802  loop_side_effects_[id].Add(side_effects);
1803  }
1804 
1805  // Propagate loop side effects upwards.
1806  if (block->HasParentLoopHeader()) {
1807  int header_id = block->parent_loop_header()->block_id();
1808  loop_side_effects_[header_id].Add(block->IsLoopHeader()
1809  ? loop_side_effects_[id]
1810  : side_effects);
1811  }
1812  }
1813 }
1814 
1815 
1817  char underlying_buffer[kLastFlag * 128];
1818  Vector<char> buffer(underlying_buffer, sizeof(underlying_buffer));
1819 #if DEBUG
1820  int offset = 0;
1821  const char* separator = "";
1822  const char* comma = ", ";
1823  buffer[0] = 0;
1824  uint32_t set_depends_on = 0;
1825  uint32_t set_changes = 0;
1826  for (int bit = 0; bit < kLastFlag; ++bit) {
1827  if ((flags.ToIntegral() & (1 << bit)) != 0) {
1828  if (bit % 2 == 0) {
1829  set_changes++;
1830  } else {
1831  set_depends_on++;
1832  }
1833  }
1834  }
1835  bool positive_changes = set_changes < (kLastFlag / 2);
1836  bool positive_depends_on = set_depends_on < (kLastFlag / 2);
1837  if (set_changes > 0) {
1838  if (positive_changes) {
1839  offset += OS::SNPrintF(buffer + offset, "changes [");
1840  } else {
1841  offset += OS::SNPrintF(buffer + offset, "changes all except [");
1842  }
1843  for (int bit = 0; bit < kLastFlag; ++bit) {
1844  if (((flags.ToIntegral() & (1 << bit)) != 0) == positive_changes) {
1845  switch (static_cast<GVNFlag>(bit)) {
1846 #define DECLARE_FLAG(type) \
1847  case kChanges##type: \
1848  offset += OS::SNPrintF(buffer + offset, separator); \
1849  offset += OS::SNPrintF(buffer + offset, #type); \
1850  separator = comma; \
1851  break;
1854 #undef DECLARE_FLAG
1855  default:
1856  break;
1857  }
1858  }
1859  }
1860  offset += OS::SNPrintF(buffer + offset, "]");
1861  }
1862  if (set_depends_on > 0) {
1863  separator = "";
1864  if (set_changes > 0) {
1865  offset += OS::SNPrintF(buffer + offset, ", ");
1866  }
1867  if (positive_depends_on) {
1868  offset += OS::SNPrintF(buffer + offset, "depends on [");
1869  } else {
1870  offset += OS::SNPrintF(buffer + offset, "depends on all except [");
1871  }
1872  for (int bit = 0; bit < kLastFlag; ++bit) {
1873  if (((flags.ToIntegral() & (1 << bit)) != 0) == positive_depends_on) {
1874  switch (static_cast<GVNFlag>(bit)) {
1875 #define DECLARE_FLAG(type) \
1876  case kDependsOn##type: \
1877  offset += OS::SNPrintF(buffer + offset, separator); \
1878  offset += OS::SNPrintF(buffer + offset, #type); \
1879  separator = comma; \
1880  break;
1883 #undef DECLARE_FLAG
1884  default:
1885  break;
1886  }
1887  }
1888  }
1889  offset += OS::SNPrintF(buffer + offset, "]");
1890  }
1891 #else
1892  OS::SNPrintF(buffer, "0x%08X", flags.ToIntegral());
1893 #endif
1894  size_t string_len = strlen(underlying_buffer) + 1;
1895  ASSERT(string_len <= sizeof(underlying_buffer));
1896  char* result = new char[strlen(underlying_buffer) + 1];
1897  memcpy(result, underlying_buffer, string_len);
1898  return SmartArrayPointer<char>(result);
1899 }
1900 
1901 
1902 void HGlobalValueNumberer::LoopInvariantCodeMotion() {
1903  TRACE_GVN_1("Using optimistic loop invariant code motion: %s\n",
1904  graph_->use_optimistic_licm() ? "yes" : "no");
1905  for (int i = graph_->blocks()->length() - 1; i >= 0; --i) {
1906  HBasicBlock* block = graph_->blocks()->at(i);
1907  if (block->IsLoopHeader()) {
1908  GVNFlagSet side_effects = loop_side_effects_[block->block_id()];
1909  TRACE_GVN_2("Try loop invariant motion for block B%d %s\n",
1910  block->block_id(),
1911  *GetGVNFlagsString(side_effects));
1912 
1913  GVNFlagSet accumulated_first_time_depends;
1914  GVNFlagSet accumulated_first_time_changes;
1915  HBasicBlock* last = block->loop_information()->GetLastBackEdge();
1916  for (int j = block->block_id(); j <= last->block_id(); ++j) {
1917  ProcessLoopBlock(graph_->blocks()->at(j), block, side_effects,
1918  &accumulated_first_time_depends,
1919  &accumulated_first_time_changes);
1920  }
1921  }
1922  }
1923 }
1924 
1925 
1926 void HGlobalValueNumberer::ProcessLoopBlock(
1927  HBasicBlock* block,
1928  HBasicBlock* loop_header,
1929  GVNFlagSet loop_kills,
1930  GVNFlagSet* first_time_depends,
1931  GVNFlagSet* first_time_changes) {
1932  HBasicBlock* pre_header = loop_header->predecessors()->at(0);
1933  GVNFlagSet depends_flags = HValue::ConvertChangesToDependsFlags(loop_kills);
1934  TRACE_GVN_2("Loop invariant motion for B%d %s\n",
1935  block->block_id(),
1936  *GetGVNFlagsString(depends_flags));
1937  HInstruction* instr = block->first();
1938  while (instr != NULL) {
1939  HInstruction* next = instr->next();
1940  bool hoisted = false;
1941  if (instr->CheckFlag(HValue::kUseGVN)) {
1942  TRACE_GVN_4("Checking instruction %d (%s) %s. Loop %s\n",
1943  instr->id(),
1944  instr->Mnemonic(),
1945  *GetGVNFlagsString(instr->gvn_flags()),
1946  *GetGVNFlagsString(loop_kills));
1947  bool can_hoist = !instr->gvn_flags().ContainsAnyOf(depends_flags);
1948  if (can_hoist && !graph()->use_optimistic_licm()) {
1949  can_hoist = block->IsLoopSuccessorDominator();
1950  }
1951 
1952  if (can_hoist) {
1953  bool inputs_loop_invariant = true;
1954  for (int i = 0; i < instr->OperandCount(); ++i) {
1955  if (instr->OperandAt(i)->IsDefinedAfter(pre_header)) {
1956  inputs_loop_invariant = false;
1957  }
1958  }
1959 
1960  if (inputs_loop_invariant && ShouldMove(instr, loop_header)) {
1961  TRACE_GVN_1("Hoisting loop invariant instruction %d\n", instr->id());
1962  // Move the instruction out of the loop.
1963  instr->Unlink();
1964  instr->InsertBefore(pre_header->end());
1965  if (instr->HasSideEffects()) removed_side_effects_ = true;
1966  hoisted = true;
1967  }
1968  }
1969  }
1970  if (!hoisted) {
1971  // If an instruction is not hoisted, we have to account for its side
1972  // effects when hoisting later HTransitionElementsKind instructions.
1973  GVNFlagSet previous_depends = *first_time_depends;
1974  GVNFlagSet previous_changes = *first_time_changes;
1975  first_time_depends->Add(instr->DependsOnFlags());
1976  first_time_changes->Add(instr->ChangesFlags());
1977  if (!(previous_depends == *first_time_depends)) {
1978  TRACE_GVN_1("Updated first-time accumulated %s\n",
1979  *GetGVNFlagsString(*first_time_depends));
1980  }
1981  if (!(previous_changes == *first_time_changes)) {
1982  TRACE_GVN_1("Updated first-time accumulated %s\n",
1983  *GetGVNFlagsString(*first_time_changes));
1984  }
1985  }
1986  instr = next;
1987  }
1988 }
1989 
1990 
1991 bool HGlobalValueNumberer::AllowCodeMotion() {
1992  return info()->shared_info()->opt_count() + 1 < FLAG_max_opt_count;
1993 }
1994 
1995 
1996 bool HGlobalValueNumberer::ShouldMove(HInstruction* instr,
1997  HBasicBlock* loop_header) {
1998  // If we've disabled code motion or we're in a block that unconditionally
1999  // deoptimizes, don't move any instructions.
2000  return AllowCodeMotion() && !instr->block()->IsDeoptimizing();
2001 }
2002 
2003 
2004 GVNFlagSet HGlobalValueNumberer::CollectSideEffectsOnPathsToDominatedBlock(
2005  HBasicBlock* dominator, HBasicBlock* dominated) {
2006  GVNFlagSet side_effects;
2007  for (int i = 0; i < dominated->predecessors()->length(); ++i) {
2008  HBasicBlock* block = dominated->predecessors()->at(i);
2009  if (dominator->block_id() < block->block_id() &&
2010  block->block_id() < dominated->block_id() &&
2011  visited_on_paths_.Add(block->block_id())) {
2012  side_effects.Add(block_side_effects_[block->block_id()]);
2013  if (block->IsLoopHeader()) {
2014  side_effects.Add(loop_side_effects_[block->block_id()]);
2015  }
2016  side_effects.Add(CollectSideEffectsOnPathsToDominatedBlock(
2017  dominator, block));
2018  }
2019  }
2020  return side_effects;
2021 }
2022 
2023 
2024 // Each instance of this class is like a "stack frame" for the recursive
2025 // traversal of the dominator tree done during GVN (the stack is handled
2026 // as a double linked list).
2027 // We reuse frames when possible so the list length is limited by the depth
2028 // of the dominator tree but this forces us to initialize each frame calling
2029 // an explicit "Initialize" method instead of a using constructor.
2031  public:
2033  HBasicBlock* entry_block,
2034  HValueMap* entry_map) {
2035  return new(zone)
2036  GvnBasicBlockState(NULL, entry_block, entry_map, NULL, zone);
2037  }
2038 
2039  HBasicBlock* block() { return block_; }
2040  HValueMap* map() { return map_; }
2041  HSideEffectMap* dominators() { return &dominators_; }
2042 
2044  Zone* zone,
2045  HBasicBlock** dominator) {
2046  // This assignment needs to happen before calling next_dominated() because
2047  // that call can reuse "this" if we are at the last dominated block.
2048  *dominator = block();
2049  GvnBasicBlockState* result = next_dominated(zone);
2050  if (result == NULL) {
2051  GvnBasicBlockState* dominator_state = pop();
2052  if (dominator_state != NULL) {
2053  // This branch is guaranteed not to return NULL because pop() never
2054  // returns a state where "is_done() == true".
2055  *dominator = dominator_state->block();
2056  result = dominator_state->next_dominated(zone);
2057  } else {
2058  // Unnecessary (we are returning NULL) but done for cleanness.
2059  *dominator = NULL;
2060  }
2061  }
2062  return result;
2063  }
2064 
2065  private:
2066  void Initialize(HBasicBlock* block,
2067  HValueMap* map,
2068  HSideEffectMap* dominators,
2069  bool copy_map,
2070  Zone* zone) {
2071  block_ = block;
2072  map_ = copy_map ? map->Copy(zone) : map;
2073  dominated_index_ = -1;
2074  length_ = block->dominated_blocks()->length();
2075  if (dominators != NULL) {
2076  dominators_ = *dominators;
2077  }
2078  }
2079  bool is_done() { return dominated_index_ >= length_; }
2080 
2081  GvnBasicBlockState(GvnBasicBlockState* previous,
2082  HBasicBlock* block,
2083  HValueMap* map,
2084  HSideEffectMap* dominators,
2085  Zone* zone)
2086  : previous_(previous), next_(NULL) {
2087  Initialize(block, map, dominators, true, zone);
2088  }
2089 
2090  GvnBasicBlockState* next_dominated(Zone* zone) {
2091  dominated_index_++;
2092  if (dominated_index_ == length_ - 1) {
2093  // No need to copy the map for the last child in the dominator tree.
2094  Initialize(block_->dominated_blocks()->at(dominated_index_),
2095  map(),
2096  dominators(),
2097  false,
2098  zone);
2099  return this;
2100  } else if (dominated_index_ < length_) {
2101  return push(zone,
2102  block_->dominated_blocks()->at(dominated_index_),
2103  dominators());
2104  } else {
2105  return NULL;
2106  }
2107  }
2108 
2109  GvnBasicBlockState* push(Zone* zone,
2110  HBasicBlock* block,
2111  HSideEffectMap* dominators) {
2112  if (next_ == NULL) {
2113  next_ =
2114  new(zone) GvnBasicBlockState(this, block, map(), dominators, zone);
2115  } else {
2116  next_->Initialize(block, map(), dominators, true, zone);
2117  }
2118  return next_;
2119  }
2120  GvnBasicBlockState* pop() {
2121  GvnBasicBlockState* result = previous_;
2122  while (result != NULL && result->is_done()) {
2123  TRACE_GVN_2("Backtracking from block B%d to block b%d\n",
2124  block()->block_id(),
2125  previous_->block()->block_id())
2126  result = result->previous_;
2127  }
2128  return result;
2129  }
2130 
2131  GvnBasicBlockState* previous_;
2132  GvnBasicBlockState* next_;
2133  HBasicBlock* block_;
2134  HValueMap* map_;
2135  HSideEffectMap dominators_;
2136  int dominated_index_;
2137  int length_;
2138 };
2139 
2140 // This is a recursive traversal of the dominator tree but it has been turned
2141 // into a loop to avoid stack overflows.
2142 // The logical "stack frames" of the recursion are kept in a list of
2143 // GvnBasicBlockState instances.
2144 void HGlobalValueNumberer::AnalyzeGraph() {
2145  HBasicBlock* entry_block = graph_->entry_block();
2146  HValueMap* entry_map = new(zone()) HValueMap(zone());
2147  GvnBasicBlockState* current =
2148  GvnBasicBlockState::CreateEntry(zone(), entry_block, entry_map);
2149 
2150  while (current != NULL) {
2151  HBasicBlock* block = current->block();
2152  HValueMap* map = current->map();
2153  HSideEffectMap* dominators = current->dominators();
2154 
2155  TRACE_GVN_2("Analyzing block B%d%s\n",
2156  block->block_id(),
2157  block->IsLoopHeader() ? " (loop header)" : "");
2158 
2159  // If this is a loop header kill everything killed by the loop.
2160  if (block->IsLoopHeader()) {
2161  map->Kill(loop_side_effects_[block->block_id()]);
2162  }
2163 
2164  // Go through all instructions of the current block.
2165  HInstruction* instr = block->first();
2166  while (instr != NULL) {
2167  HInstruction* next = instr->next();
2168  GVNFlagSet flags = instr->ChangesFlags();
2169  if (!flags.IsEmpty()) {
2170  // Clear all instructions in the map that are affected by side effects.
2171  // Store instruction as the dominating one for tracked side effects.
2172  map->Kill(flags);
2173  dominators->Store(flags, instr);
2174  TRACE_GVN_2("Instruction %d %s\n", instr->id(),
2175  *GetGVNFlagsString(flags));
2176  }
2177  if (instr->CheckFlag(HValue::kUseGVN)) {
2178  ASSERT(!instr->HasObservableSideEffects());
2179  HValue* other = map->Lookup(instr);
2180  if (other != NULL) {
2181  ASSERT(instr->Equals(other) && other->Equals(instr));
2182  TRACE_GVN_4("Replacing value %d (%s) with value %d (%s)\n",
2183  instr->id(),
2184  instr->Mnemonic(),
2185  other->id(),
2186  other->Mnemonic());
2187  if (instr->HasSideEffects()) removed_side_effects_ = true;
2188  instr->DeleteAndReplaceWith(other);
2189  } else {
2190  map->Add(instr, zone());
2191  }
2192  }
2193  if (instr->CheckFlag(HValue::kTrackSideEffectDominators)) {
2194  for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
2195  HValue* other = dominators->at(i);
2196  GVNFlag changes_flag = HValue::ChangesFlagFromInt(i);
2197  GVNFlag depends_on_flag = HValue::DependsOnFlagFromInt(i);
2198  if (instr->DependsOnFlags().Contains(depends_on_flag) &&
2199  (other != NULL)) {
2200  TRACE_GVN_5("Side-effect #%d in %d (%s) is dominated by %d (%s)\n",
2201  i,
2202  instr->id(),
2203  instr->Mnemonic(),
2204  other->id(),
2205  other->Mnemonic());
2206  instr->SetSideEffectDominator(changes_flag, other);
2207  }
2208  }
2209  }
2210  instr = next;
2211  }
2212 
2213  HBasicBlock* dominator_block;
2214  GvnBasicBlockState* next =
2215  current->next_in_dominator_tree_traversal(zone(), &dominator_block);
2216 
2217  if (next != NULL) {
2218  HBasicBlock* dominated = next->block();
2219  HValueMap* successor_map = next->map();
2220  HSideEffectMap* successor_dominators = next->dominators();
2221 
2222  // Kill everything killed on any path between this block and the
2223  // dominated block. We don't have to traverse these paths if the
2224  // value map and the dominators list is already empty. If the range
2225  // of block ids (block_id, dominated_id) is empty there are no such
2226  // paths.
2227  if ((!successor_map->IsEmpty() || !successor_dominators->IsEmpty()) &&
2228  dominator_block->block_id() + 1 < dominated->block_id()) {
2229  visited_on_paths_.Clear();
2230  GVNFlagSet side_effects_on_all_paths =
2231  CollectSideEffectsOnPathsToDominatedBlock(dominator_block,
2232  dominated);
2233  successor_map->Kill(side_effects_on_all_paths);
2234  successor_dominators->Kill(side_effects_on_all_paths);
2235  }
2236  }
2237  current = next;
2238  }
2239 }
2240 
2241 
2242 class HInferRepresentation BASE_EMBEDDED {
2243  public:
2244  explicit HInferRepresentation(HGraph* graph)
2245  : graph_(graph),
2246  worklist_(8, graph->zone()),
2247  in_worklist_(graph->GetMaximumValueID(), graph->zone()) { }
2248 
2249  void Analyze();
2250 
2251  private:
2252  Representation TryChange(HValue* current);
2253  void AddToWorklist(HValue* current);
2254  void InferBasedOnInputs(HValue* current);
2255  void AddDependantsToWorklist(HValue* current);
2256  void InferBasedOnUses(HValue* current);
2257 
2258  Zone* zone() const { return graph_->zone(); }
2259 
2260  HGraph* graph_;
2261  ZoneList<HValue*> worklist_;
2262  BitVector in_worklist_;
2263 };
2264 
2265 
2266 void HInferRepresentation::AddToWorklist(HValue* current) {
2267  if (current->representation().IsSpecialization()) return;
2268  if (!current->CheckFlag(HValue::kFlexibleRepresentation)) return;
2269  if (in_worklist_.Contains(current->id())) return;
2270  worklist_.Add(current, zone());
2271  in_worklist_.Add(current->id());
2272 }
2273 
2274 
2275 // This method tries to specialize the representation type of the value
2276 // given as a parameter. The value is asked to infer its representation type
2277 // based on its inputs. If the inferred type is more specialized, then this
2278 // becomes the new representation type of the node.
2279 void HInferRepresentation::InferBasedOnInputs(HValue* current) {
2280  Representation r = current->representation();
2281  if (r.IsSpecialization()) return;
2282  ASSERT(current->CheckFlag(HValue::kFlexibleRepresentation));
2283  Representation inferred = current->InferredRepresentation();
2284  if (inferred.IsSpecialization()) {
2285  if (FLAG_trace_representation) {
2286  PrintF("Changing #%d representation %s -> %s based on inputs\n",
2287  current->id(),
2288  r.Mnemonic(),
2289  inferred.Mnemonic());
2290  }
2291  current->ChangeRepresentation(inferred);
2292  AddDependantsToWorklist(current);
2293  }
2294 }
2295 
2296 
2297 void HInferRepresentation::AddDependantsToWorklist(HValue* value) {
2298  for (HUseIterator it(value->uses()); !it.Done(); it.Advance()) {
2299  AddToWorklist(it.value());
2300  }
2301  for (int i = 0; i < value->OperandCount(); ++i) {
2302  AddToWorklist(value->OperandAt(i));
2303  }
2304 }
2305 
2306 
2307 // This method calculates whether specializing the representation of the value
2308 // given as the parameter has a benefit in terms of less necessary type
2309 // conversions. If there is a benefit, then the representation of the value is
2310 // specialized.
2311 void HInferRepresentation::InferBasedOnUses(HValue* value) {
2312  Representation r = value->representation();
2313  if (r.IsSpecialization() || value->HasNoUses()) return;
2314  ASSERT(value->CheckFlag(HValue::kFlexibleRepresentation));
2315  Representation new_rep = TryChange(value);
2316  if (!new_rep.IsNone()) {
2317  if (!value->representation().Equals(new_rep)) {
2318  if (FLAG_trace_representation) {
2319  PrintF("Changing #%d representation %s -> %s based on uses\n",
2320  value->id(),
2321  r.Mnemonic(),
2322  new_rep.Mnemonic());
2323  }
2324  value->ChangeRepresentation(new_rep);
2325  AddDependantsToWorklist(value);
2326  }
2327  }
2328 }
2329 
2330 
2331 Representation HInferRepresentation::TryChange(HValue* value) {
2332  // Array of use counts for each representation.
2333  int use_count[Representation::kNumRepresentations] = { 0 };
2334 
2335  for (HUseIterator it(value->uses()); !it.Done(); it.Advance()) {
2336  HValue* use = it.value();
2337  Representation rep = use->ObservedInputRepresentation(it.index());
2338  if (rep.IsNone()) continue;
2339  if (FLAG_trace_representation) {
2340  PrintF("%d %s is used by %d %s as %s\n",
2341  value->id(),
2342  value->Mnemonic(),
2343  use->id(),
2344  use->Mnemonic(),
2345  rep.Mnemonic());
2346  }
2347  if (use->IsPhi()) HPhi::cast(use)->AddIndirectUsesTo(&use_count[0]);
2348  use_count[rep.kind()] += use->LoopWeight();
2349  }
2350  int tagged_count = use_count[Representation::kTagged];
2351  int double_count = use_count[Representation::kDouble];
2352  int int32_count = use_count[Representation::kInteger32];
2353  int non_tagged_count = double_count + int32_count;
2354 
2355  // If a non-loop phi has tagged uses, don't convert it to untagged.
2356  if (value->IsPhi() && !value->block()->IsLoopHeader() && tagged_count > 0) {
2357  return Representation::None();
2358  }
2359 
2360  // Prefer unboxing over boxing, the latter is more expensive.
2361  if (tagged_count > non_tagged_count) return Representation::None();
2362 
2363  // Prefer Integer32 over Double, if possible.
2364  if (int32_count > 0 && value->IsConvertibleToInteger()) {
2365  return Representation::Integer32();
2366  }
2367 
2368  if (double_count > 0) return Representation::Double();
2369 
2370  return Representation::None();
2371 }
2372 
2373 
2374 void HInferRepresentation::Analyze() {
2375  HPhase phase("H_Infer representations", graph_);
2376 
2377  // (1) Initialize bit vectors and count real uses. Each phi gets a
2378  // bit-vector of length <number of phis>.
2379  const ZoneList<HPhi*>* phi_list = graph_->phi_list();
2380  int phi_count = phi_list->length();
2381  ZoneList<BitVector*> connected_phis(phi_count, graph_->zone());
2382  for (int i = 0; i < phi_count; ++i) {
2383  phi_list->at(i)->InitRealUses(i);
2384  BitVector* connected_set = new(zone()) BitVector(phi_count, graph_->zone());
2385  connected_set->Add(i);
2386  connected_phis.Add(connected_set, zone());
2387  }
2388 
2389  // (2) Do a fixed point iteration to find the set of connected phis. A
2390  // phi is connected to another phi if its value is used either directly or
2391  // indirectly through a transitive closure of the def-use relation.
2392  bool change = true;
2393  while (change) {
2394  change = false;
2395  // We normally have far more "forward edges" than "backward edges",
2396  // so we terminate faster when we walk backwards.
2397  for (int i = phi_count - 1; i >= 0; --i) {
2398  HPhi* phi = phi_list->at(i);
2399  for (HUseIterator it(phi->uses()); !it.Done(); it.Advance()) {
2400  HValue* use = it.value();
2401  if (use->IsPhi()) {
2402  int id = HPhi::cast(use)->phi_id();
2403  if (connected_phis[i]->UnionIsChanged(*connected_phis[id]))
2404  change = true;
2405  }
2406  }
2407  }
2408  }
2409 
2410  // (3a) Use the phi reachability information from step 2 to
2411  // push information about values which can't be converted to integer
2412  // without deoptimization through the phi use-def chains, avoiding
2413  // unnecessary deoptimizations later.
2414  for (int i = 0; i < phi_count; ++i) {
2415  HPhi* phi = phi_list->at(i);
2416  bool cti = phi->AllOperandsConvertibleToInteger();
2417  if (cti) continue;
2418 
2419  for (BitVector::Iterator it(connected_phis.at(i));
2420  !it.Done();
2421  it.Advance()) {
2422  HPhi* phi = phi_list->at(it.Current());
2423  phi->set_is_convertible_to_integer(false);
2424  phi->ResetInteger32Uses();
2425  }
2426  }
2427 
2428  // (3b) Use the phi reachability information from step 2 to
2429  // sum up the non-phi use counts of all connected phis.
2430  for (int i = 0; i < phi_count; ++i) {
2431  HPhi* phi = phi_list->at(i);
2432  for (BitVector::Iterator it(connected_phis.at(i));
2433  !it.Done();
2434  it.Advance()) {
2435  int index = it.Current();
2436  HPhi* it_use = phi_list->at(index);
2437  if (index != i) phi->AddNonPhiUsesFrom(it_use); // Don't count twice.
2438  }
2439  }
2440 
2441  // Initialize work list
2442  for (int i = 0; i < graph_->blocks()->length(); ++i) {
2443  HBasicBlock* block = graph_->blocks()->at(i);
2444  const ZoneList<HPhi*>* phis = block->phis();
2445  for (int j = 0; j < phis->length(); ++j) {
2446  AddToWorklist(phis->at(j));
2447  }
2448 
2449  HInstruction* current = block->first();
2450  while (current != NULL) {
2451  AddToWorklist(current);
2452  current = current->next();
2453  }
2454  }
2455 
2456  // Do a fixed point iteration, trying to improve representations
2457  while (!worklist_.is_empty()) {
2458  HValue* current = worklist_.RemoveLast();
2459  in_worklist_.Remove(current->id());
2460  InferBasedOnInputs(current);
2461  InferBasedOnUses(current);
2462  }
2463 }
2464 
2465 
2467  HPhase phase("H_Inferring types", this);
2468  InitializeInferredTypes(0, this->blocks_.length() - 1);
2469 }
2470 
2471 
2472 void HGraph::InitializeInferredTypes(int from_inclusive, int to_inclusive) {
2473  for (int i = from_inclusive; i <= to_inclusive; ++i) {
2474  HBasicBlock* block = blocks_[i];
2475 
2476  const ZoneList<HPhi*>* phis = block->phis();
2477  for (int j = 0; j < phis->length(); j++) {
2478  phis->at(j)->UpdateInferredType();
2479  }
2480 
2481  HInstruction* current = block->first();
2482  while (current != NULL) {
2483  current->UpdateInferredType();
2484  current = current->next();
2485  }
2486 
2487  if (block->IsLoopHeader()) {
2488  HBasicBlock* last_back_edge =
2489  block->loop_information()->GetLastBackEdge();
2490  InitializeInferredTypes(i + 1, last_back_edge->block_id());
2491  // Skip all blocks already processed by the recursive call.
2492  i = last_back_edge->block_id();
2493  // Update phis of the loop header now after the whole loop body is
2494  // guaranteed to be processed.
2495  ZoneList<HValue*> worklist(block->phis()->length(), zone());
2496  for (int j = 0; j < block->phis()->length(); ++j) {
2497  worklist.Add(block->phis()->at(j), zone());
2498  }
2499  InferTypes(&worklist);
2500  }
2501  }
2502 }
2503 
2504 
2505 void HGraph::PropagateMinusZeroChecks(HValue* value, BitVector* visited) {
2506  HValue* current = value;
2507  while (current != NULL) {
2508  if (visited->Contains(current->id())) return;
2509 
2510  // For phis, we must propagate the check to all of its inputs.
2511  if (current->IsPhi()) {
2512  visited->Add(current->id());
2513  HPhi* phi = HPhi::cast(current);
2514  for (int i = 0; i < phi->OperandCount(); ++i) {
2515  PropagateMinusZeroChecks(phi->OperandAt(i), visited);
2516  }
2517  break;
2518  }
2519 
2520  // For multiplication, division, and Math.min/max(), we must propagate
2521  // to the left and the right side.
2522  if (current->IsMul()) {
2523  HMul* mul = HMul::cast(current);
2524  mul->EnsureAndPropagateNotMinusZero(visited);
2525  PropagateMinusZeroChecks(mul->left(), visited);
2526  PropagateMinusZeroChecks(mul->right(), visited);
2527  } else if (current->IsDiv()) {
2528  HDiv* div = HDiv::cast(current);
2529  div->EnsureAndPropagateNotMinusZero(visited);
2530  PropagateMinusZeroChecks(div->left(), visited);
2531  PropagateMinusZeroChecks(div->right(), visited);
2532  } else if (current->IsMathMinMax()) {
2533  HMathMinMax* minmax = HMathMinMax::cast(current);
2534  visited->Add(minmax->id());
2535  PropagateMinusZeroChecks(minmax->left(), visited);
2536  PropagateMinusZeroChecks(minmax->right(), visited);
2537  }
2538 
2539  current = current->EnsureAndPropagateNotMinusZero(visited);
2540  }
2541 }
2542 
2543 
2544 void HGraph::InsertRepresentationChangeForUse(HValue* value,
2545  HValue* use_value,
2546  int use_index,
2547  Representation to) {
2548  // Insert the representation change right before its use. For phi-uses we
2549  // insert at the end of the corresponding predecessor.
2550  HInstruction* next = NULL;
2551  if (use_value->IsPhi()) {
2552  next = use_value->block()->predecessors()->at(use_index)->end();
2553  } else {
2554  next = HInstruction::cast(use_value);
2555  }
2556 
2557  // For constants we try to make the representation change at compile
2558  // time. When a representation change is not possible without loss of
2559  // information we treat constants like normal instructions and insert the
2560  // change instructions for them.
2561  HInstruction* new_value = NULL;
2562  bool is_truncating = use_value->CheckFlag(HValue::kTruncatingToInt32);
2563  bool deoptimize_on_undefined =
2564  use_value->CheckFlag(HValue::kDeoptimizeOnUndefined);
2565  if (value->IsConstant()) {
2566  HConstant* constant = HConstant::cast(value);
2567  // Try to create a new copy of the constant with the new representation.
2568  new_value = is_truncating
2569  ? constant->CopyToTruncatedInt32(zone())
2570  : constant->CopyToRepresentation(to, zone());
2571  }
2572 
2573  if (new_value == NULL) {
2574  new_value = new(zone()) HChange(value, to,
2575  is_truncating, deoptimize_on_undefined);
2576  }
2577 
2578  new_value->InsertBefore(next);
2579  use_value->SetOperandAt(use_index, new_value);
2580 }
2581 
2582 
2583 void HGraph::InsertRepresentationChangesForValue(HValue* value) {
2584  Representation r = value->representation();
2585  if (r.IsNone()) return;
2586  if (value->HasNoUses()) return;
2587 
2588  for (HUseIterator it(value->uses()); !it.Done(); it.Advance()) {
2589  HValue* use_value = it.value();
2590  int use_index = it.index();
2591  Representation req = use_value->RequiredInputRepresentation(use_index);
2592  if (req.IsNone() || req.Equals(r)) continue;
2593  InsertRepresentationChangeForUse(value, use_value, use_index, req);
2594  }
2595  if (value->HasNoUses()) {
2596  ASSERT(value->IsConstant());
2597  value->DeleteAndReplaceWith(NULL);
2598  }
2599 
2600  // The only purpose of a HForceRepresentation is to represent the value
2601  // after the (possible) HChange instruction. We make it disappear.
2602  if (value->IsForceRepresentation()) {
2603  value->DeleteAndReplaceWith(HForceRepresentation::cast(value)->value());
2604  }
2605 }
2606 
2607 
2609  HPhase phase("H_Representation changes", this);
2610 
2611  // Compute truncation flag for phis: Initially assume that all
2612  // int32-phis allow truncation and iteratively remove the ones that
2613  // are used in an operation that does not allow a truncating
2614  // conversion.
2615  // TODO(fschneider): Replace this with a worklist-based iteration.
2616  for (int i = 0; i < phi_list()->length(); i++) {
2617  HPhi* phi = phi_list()->at(i);
2618  if (phi->representation().IsInteger32()) {
2620  }
2621  }
2622  bool change = true;
2623  while (change) {
2624  change = false;
2625  for (int i = 0; i < phi_list()->length(); i++) {
2626  HPhi* phi = phi_list()->at(i);
2627  if (!phi->CheckFlag(HValue::kTruncatingToInt32)) continue;
2630  change = true;
2631  }
2632  }
2633  }
2634 
2635  for (int i = 0; i < blocks_.length(); ++i) {
2636  // Process phi instructions first.
2637  const ZoneList<HPhi*>* phis = blocks_[i]->phis();
2638  for (int j = 0; j < phis->length(); j++) {
2639  InsertRepresentationChangesForValue(phis->at(j));
2640  }
2641 
2642  // Process normal instructions.
2643  HInstruction* current = blocks_[i]->first();
2644  while (current != NULL) {
2645  InsertRepresentationChangesForValue(current);
2646  current = current->next();
2647  }
2648  }
2649 }
2650 
2651 
2652 void HGraph::RecursivelyMarkPhiDeoptimizeOnUndefined(HPhi* phi) {
2653  if (phi->CheckFlag(HValue::kDeoptimizeOnUndefined)) return;
2655  for (int i = 0; i < phi->OperandCount(); ++i) {
2656  HValue* input = phi->OperandAt(i);
2657  if (input->IsPhi()) {
2658  RecursivelyMarkPhiDeoptimizeOnUndefined(HPhi::cast(input));
2659  }
2660  }
2661 }
2662 
2663 
2665  HPhase phase("H_MarkDeoptimizeOnUndefined", this);
2666  // Compute DeoptimizeOnUndefined flag for phis.
2667  // Any phi that can reach a use with DeoptimizeOnUndefined set must
2668  // have DeoptimizeOnUndefined set. Currently only HCompareIDAndBranch, with
2669  // double input representation, has this flag set.
2670  // The flag is used by HChange tagged->double, which must deoptimize
2671  // if one of its uses has this flag set.
2672  for (int i = 0; i < phi_list()->length(); i++) {
2673  HPhi* phi = phi_list()->at(i);
2674  if (phi->representation().IsDouble()) {
2675  for (HUseIterator it(phi->uses()); !it.Done(); it.Advance()) {
2676  if (it.value()->CheckFlag(HValue::kDeoptimizeOnUndefined)) {
2677  RecursivelyMarkPhiDeoptimizeOnUndefined(phi);
2678  break;
2679  }
2680  }
2681  }
2682  }
2683 }
2684 
2685 
2686 // Discover instructions that can be marked with kUint32 flag allowing
2687 // them to produce full range uint32 values.
2688 class Uint32Analysis BASE_EMBEDDED {
2689  public:
2690  explicit Uint32Analysis(Zone* zone) : zone_(zone), phis_(4, zone) { }
2691 
2692  void Analyze(HInstruction* current);
2693 
2694  void UnmarkUnsafePhis();
2695 
2696  private:
2697  bool IsSafeUint32Use(HValue* val, HValue* use);
2698  bool Uint32UsesAreSafe(HValue* uint32val);
2699  bool CheckPhiOperands(HPhi* phi);
2700  void UnmarkPhi(HPhi* phi, ZoneList<HPhi*>* worklist);
2701 
2702  Zone* zone_;
2703  ZoneList<HPhi*> phis_;
2704 };
2705 
2706 
2707 bool Uint32Analysis::IsSafeUint32Use(HValue* val, HValue* use) {
2708  // Operations that operatate on bits are safe.
2709  if (use->IsBitwise() ||
2710  use->IsShl() ||
2711  use->IsSar() ||
2712  use->IsShr() ||
2713  use->IsBitNot()) {
2714  return true;
2715  } else if (use->IsChange() || use->IsSimulate()) {
2716  // Conversions and deoptimization have special support for unt32.
2717  return true;
2718  } else if (use->IsStoreKeyedSpecializedArrayElement()) {
2719  // Storing a value into an external integer array is a bit level operation.
2720  HStoreKeyedSpecializedArrayElement* store =
2722 
2723  if (store->value() == val) {
2724  // Clamping or a conversion to double should have beed inserted.
2725  ASSERT(store->elements_kind() != EXTERNAL_PIXEL_ELEMENTS);
2726  ASSERT(store->elements_kind() != EXTERNAL_FLOAT_ELEMENTS);
2727  ASSERT(store->elements_kind() != EXTERNAL_DOUBLE_ELEMENTS);
2728  return true;
2729  }
2730  }
2731 
2732  return false;
2733 }
2734 
2735 
2736 // Iterate over all uses and verify that they are uint32 safe: either don't
2737 // distinguish between int32 and uint32 due to their bitwise nature or
2738 // have special support for uint32 values.
2739 // Encountered phis are optimisitically treated as safe uint32 uses,
2740 // marked with kUint32 flag and collected in the phis_ list. A separate
2741 // path will be performed later by UnmarkUnsafePhis to clear kUint32 from
2742 // phis that are not actually uint32-safe (it requries fix point iteration).
2743 bool Uint32Analysis::Uint32UsesAreSafe(HValue* uint32val) {
2744  bool collect_phi_uses = false;
2745  for (HUseIterator it(uint32val->uses()); !it.Done(); it.Advance()) {
2746  HValue* use = it.value();
2747 
2748  if (use->IsPhi()) {
2749  if (!use->CheckFlag(HInstruction::kUint32)) {
2750  // There is a phi use of this value from a phis that is not yet
2751  // collected in phis_ array. Separate pass is required.
2752  collect_phi_uses = true;
2753  }
2754 
2755  // Optimistically treat phis as uint32 safe.
2756  continue;
2757  }
2758 
2759  if (!IsSafeUint32Use(uint32val, use)) {
2760  return false;
2761  }
2762  }
2763 
2764  if (collect_phi_uses) {
2765  for (HUseIterator it(uint32val->uses()); !it.Done(); it.Advance()) {
2766  HValue* use = it.value();
2767 
2768  // There is a phi use of this value from a phis that is not yet
2769  // collected in phis_ array. Separate pass is required.
2770  if (use->IsPhi() && !use->CheckFlag(HInstruction::kUint32)) {
2771  use->SetFlag(HInstruction::kUint32);
2772  phis_.Add(HPhi::cast(use), zone_);
2773  }
2774  }
2775  }
2776 
2777  return true;
2778 }
2779 
2780 
2781 // Analyze instruction and mark it with kUint32 if all its uses are uint32
2782 // safe.
2783 void Uint32Analysis::Analyze(HInstruction* current) {
2784  if (Uint32UsesAreSafe(current)) current->SetFlag(HInstruction::kUint32);
2785 }
2786 
2787 
2788 // Check if all operands to the given phi are marked with kUint32 flag.
2789 bool Uint32Analysis::CheckPhiOperands(HPhi* phi) {
2790  if (!phi->CheckFlag(HInstruction::kUint32)) {
2791  // This phi is not uint32 safe. No need to check operands.
2792  return false;
2793  }
2794 
2795  for (int j = 0; j < phi->OperandCount(); j++) {
2796  HValue* operand = phi->OperandAt(j);
2797  if (!operand->CheckFlag(HInstruction::kUint32)) {
2798  // Lazyly mark constants that fit into uint32 range with kUint32 flag.
2799  if (operand->IsConstant() &&
2800  HConstant::cast(operand)->IsUint32()) {
2801  operand->SetFlag(HInstruction::kUint32);
2802  continue;
2803  }
2804 
2805  // This phi is not safe, some operands are not uint32 values.
2806  return false;
2807  }
2808  }
2809 
2810  return true;
2811 }
2812 
2813 
2814 // Remove kUint32 flag from the phi itself and its operands. If any operand
2815 // was a phi marked with kUint32 place it into a worklist for
2816 // transitive clearing of kUint32 flag.
2817 void Uint32Analysis::UnmarkPhi(HPhi* phi, ZoneList<HPhi*>* worklist) {
2818  phi->ClearFlag(HInstruction::kUint32);
2819  for (int j = 0; j < phi->OperandCount(); j++) {
2820  HValue* operand = phi->OperandAt(j);
2821  if (operand->CheckFlag(HInstruction::kUint32)) {
2822  operand->ClearFlag(HInstruction::kUint32);
2823  if (operand->IsPhi()) {
2824  worklist->Add(HPhi::cast(operand), zone_);
2825  }
2826  }
2827  }
2828 }
2829 
2830 
2831 void Uint32Analysis::UnmarkUnsafePhis() {
2832  // No phis were collected. Nothing to do.
2833  if (phis_.length() == 0) return;
2834 
2835  // Worklist used to transitively clear kUint32 from phis that
2836  // are used as arguments to other phis.
2837  ZoneList<HPhi*> worklist(phis_.length(), zone_);
2838 
2839  // Phi can be used as a uint32 value if and only if
2840  // all its operands are uint32 values and all its
2841  // uses are uint32 safe.
2842 
2843  // Iterate over collected phis and unmark those that
2844  // are unsafe. When unmarking phi unmark its operands
2845  // and add it to the worklist if it is a phi as well.
2846  // Phis that are still marked as safe are shifted down
2847  // so that all safe phis form a prefix of the phis_ array.
2848  int phi_count = 0;
2849  for (int i = 0; i < phis_.length(); i++) {
2850  HPhi* phi = phis_[i];
2851 
2852  if (CheckPhiOperands(phi) && Uint32UsesAreSafe(phi)) {
2853  phis_[phi_count++] = phi;
2854  } else {
2855  UnmarkPhi(phi, &worklist);
2856  }
2857  }
2858 
2859  // Now phis array contains only those phis that have safe
2860  // non-phi uses. Start transitively clearing kUint32 flag
2861  // from phi operands of discovered non-safe phies until
2862  // only safe phies are left.
2863  while (!worklist.is_empty()) {
2864  while (!worklist.is_empty()) {
2865  HPhi* phi = worklist.RemoveLast();
2866  UnmarkPhi(phi, &worklist);
2867  }
2868 
2869  // Check if any operands to safe phies were unmarked
2870  // turning a safe phi into unsafe. The same value
2871  // can flow into several phis.
2872  int new_phi_count = 0;
2873  for (int i = 0; i < phi_count; i++) {
2874  HPhi* phi = phis_[i];
2875 
2876  if (CheckPhiOperands(phi)) {
2877  phis_[new_phi_count++] = phi;
2878  } else {
2879  UnmarkPhi(phi, &worklist);
2880  }
2881  }
2882  phi_count = new_phi_count;
2883  }
2884 }
2885 
2886 
2888  if (!FLAG_opt_safe_uint32_operations || uint32_instructions_ == NULL) {
2889  return;
2890  }
2891 
2892  Uint32Analysis analysis(zone());
2893  for (int i = 0; i < uint32_instructions_->length(); ++i) {
2894  HInstruction* current = uint32_instructions_->at(i);
2895  if (current->IsLinked() && current->representation().IsInteger32()) {
2896  analysis.Analyze(current);
2897  }
2898  }
2899 
2900  // Some phis might have been optimistically marked with kUint32 flag.
2901  // Remove this flag from those phis that are unsafe and propagate
2902  // this information transitively potentially clearing kUint32 flag
2903  // from some non-phi operations that are used as operands to unsafe phis.
2904  analysis.UnmarkUnsafePhis();
2905 }
2906 
2907 
2909  BitVector visited(GetMaximumValueID(), zone());
2910  for (int i = 0; i < blocks_.length(); ++i) {
2911  for (HInstruction* current = blocks_[i]->first();
2912  current != NULL;
2913  current = current->next()) {
2914  if (current->IsChange()) {
2915  HChange* change = HChange::cast(current);
2916  // Propagate flags for negative zero checks upwards from conversions
2917  // int32-to-tagged and int32-to-double.
2918  Representation from = change->value()->representation();
2919  ASSERT(from.Equals(change->from()));
2920  if (from.IsInteger32()) {
2921  ASSERT(change->to().IsTagged() || change->to().IsDouble());
2922  ASSERT(visited.IsEmpty());
2923  PropagateMinusZeroChecks(change->value(), &visited);
2924  visited.Clear();
2925  }
2926  }
2927  }
2928  }
2929 }
2930 
2931 
2932 // Implementation of utility class to encapsulate the translation state for
2933 // a (possibly inlined) function.
2935  CompilationInfo* info,
2936  TypeFeedbackOracle* oracle,
2937  InliningKind inlining_kind)
2938  : owner_(owner),
2939  compilation_info_(info),
2940  oracle_(oracle),
2941  call_context_(NULL),
2942  inlining_kind_(inlining_kind),
2943  function_return_(NULL),
2944  test_context_(NULL),
2945  entry_(NULL),
2946  arguments_elements_(NULL),
2947  outer_(owner->function_state()) {
2948  if (outer_ != NULL) {
2949  // State for an inline function.
2950  if (owner->ast_context()->IsTest()) {
2951  HBasicBlock* if_true = owner->graph()->CreateBasicBlock();
2952  HBasicBlock* if_false = owner->graph()->CreateBasicBlock();
2953  if_true->MarkAsInlineReturnTarget();
2954  if_false->MarkAsInlineReturnTarget();
2955  TestContext* outer_test_context = TestContext::cast(owner->ast_context());
2956  Expression* cond = outer_test_context->condition();
2957  TypeFeedbackOracle* outer_oracle = outer_test_context->oracle();
2958  // The AstContext constructor pushed on the context stack. This newed
2959  // instance is the reason that AstContext can't be BASE_EMBEDDED.
2960  test_context_ =
2961  new TestContext(owner, cond, outer_oracle, if_true, if_false);
2962  } else {
2963  function_return_ = owner->graph()->CreateBasicBlock();
2965  }
2966  // Set this after possibly allocating a new TestContext above.
2967  call_context_ = owner->ast_context();
2968  }
2969 
2970  // Push on the state stack.
2971  owner->set_function_state(this);
2972 }
2973 
2974 
2976  delete test_context_;
2977  owner_->set_function_state(outer_);
2978 }
2979 
2980 
2981 // Implementation of utility classes to represent an expression's context in
2982 // the AST.
2984  : owner_(owner),
2985  kind_(kind),
2986  outer_(owner->ast_context()),
2987  for_typeof_(false) {
2988  owner->set_ast_context(this); // Push.
2989 #ifdef DEBUG
2990  ASSERT(owner->environment()->frame_type() == JS_FUNCTION);
2991  original_length_ = owner->environment()->length();
2992 #endif
2993 }
2994 
2995 
2997  owner_->set_ast_context(outer_); // Pop.
2998 }
2999 
3000 
3002  ASSERT(owner()->HasStackOverflow() ||
3003  owner()->current_block() == NULL ||
3004  (owner()->environment()->length() == original_length_ &&
3005  owner()->environment()->frame_type() == JS_FUNCTION));
3006 }
3007 
3008 
3010  ASSERT(owner()->HasStackOverflow() ||
3011  owner()->current_block() == NULL ||
3012  (owner()->environment()->length() == original_length_ + 1 &&
3013  owner()->environment()->frame_type() == JS_FUNCTION));
3014 }
3015 
3016 
3018  // The value is simply ignored.
3019 }
3020 
3021 
3023  // The value is tracked in the bailout environment, and communicated
3024  // through the environment as the result of the expression.
3025  if (!arguments_allowed() && value->CheckFlag(HValue::kIsArguments)) {
3026  owner()->Bailout("bad value context for arguments value");
3027  }
3028  owner()->Push(value);
3029 }
3030 
3031 
3033  BuildBranch(value);
3034 }
3035 
3036 
3038  ASSERT(!instr->IsControlInstruction());
3039  owner()->AddInstruction(instr);
3040  if (instr->HasObservableSideEffects()) owner()->AddSimulate(ast_id);
3041 }
3042 
3043 
3045  BailoutId ast_id) {
3046  ASSERT(!instr->HasObservableSideEffects());
3047  HBasicBlock* empty_true = owner()->graph()->CreateBasicBlock();
3048  HBasicBlock* empty_false = owner()->graph()->CreateBasicBlock();
3049  instr->SetSuccessorAt(0, empty_true);
3050  instr->SetSuccessorAt(1, empty_false);
3051  owner()->current_block()->Finish(instr);
3052  HBasicBlock* join = owner()->CreateJoin(empty_true, empty_false, ast_id);
3053  owner()->set_current_block(join);
3054 }
3055 
3056 
3058  ASSERT(!instr->IsControlInstruction());
3059  if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
3060  return owner()->Bailout("bad value context for arguments object value");
3061  }
3062  owner()->AddInstruction(instr);
3063  owner()->Push(instr);
3064  if (instr->HasObservableSideEffects()) owner()->AddSimulate(ast_id);
3065 }
3066 
3067 
3069  ASSERT(!instr->HasObservableSideEffects());
3070  if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
3071  return owner()->Bailout("bad value context for arguments object value");
3072  }
3073  HBasicBlock* materialize_false = owner()->graph()->CreateBasicBlock();
3074  HBasicBlock* materialize_true = owner()->graph()->CreateBasicBlock();
3075  instr->SetSuccessorAt(0, materialize_true);
3076  instr->SetSuccessorAt(1, materialize_false);
3077  owner()->current_block()->Finish(instr);
3078  owner()->set_current_block(materialize_true);
3079  owner()->Push(owner()->graph()->GetConstantTrue());
3080  owner()->set_current_block(materialize_false);
3081  owner()->Push(owner()->graph()->GetConstantFalse());
3082  HBasicBlock* join =
3083  owner()->CreateJoin(materialize_true, materialize_false, ast_id);
3084  owner()->set_current_block(join);
3085 }
3086 
3087 
3089  ASSERT(!instr->IsControlInstruction());
3090  HGraphBuilder* builder = owner();
3091  builder->AddInstruction(instr);
3092  // We expect a simulate after every expression with side effects, though
3093  // this one isn't actually needed (and wouldn't work if it were targeted).
3094  if (instr->HasObservableSideEffects()) {
3095  builder->Push(instr);
3096  builder->AddSimulate(ast_id);
3097  builder->Pop();
3098  }
3099  BuildBranch(instr);
3100 }
3101 
3102 
3104  ASSERT(!instr->HasObservableSideEffects());
3105  HBasicBlock* empty_true = owner()->graph()->CreateBasicBlock();
3106  HBasicBlock* empty_false = owner()->graph()->CreateBasicBlock();
3107  instr->SetSuccessorAt(0, empty_true);
3108  instr->SetSuccessorAt(1, empty_false);
3109  owner()->current_block()->Finish(instr);
3110  empty_true->Goto(if_true(), owner()->function_state());
3111  empty_false->Goto(if_false(), owner()->function_state());
3113 }
3114 
3115 
3116 void TestContext::BuildBranch(HValue* value) {
3117  // We expect the graph to be in edge-split form: there is no edge that
3118  // connects a branch node to a join node. We conservatively ensure that
3119  // property by always adding an empty block on the outgoing edges of this
3120  // branch.
3121  HGraphBuilder* builder = owner();
3122  if (value != NULL && value->CheckFlag(HValue::kIsArguments)) {
3123  builder->Bailout("arguments object value in a test context");
3124  }
3125  HBasicBlock* empty_true = builder->graph()->CreateBasicBlock();
3126  HBasicBlock* empty_false = builder->graph()->CreateBasicBlock();
3127  TypeFeedbackId test_id = condition()->test_id();
3128  ToBooleanStub::Types expected(oracle()->ToBooleanTypes(test_id));
3129  HBranch* test = new(zone()) HBranch(value, empty_true, empty_false, expected);
3130  builder->current_block()->Finish(test);
3131 
3132  empty_true->Goto(if_true(), owner()->function_state());
3133  empty_false->Goto(if_false(), owner()->function_state());
3134  builder->set_current_block(NULL);
3135 }
3136 
3137 
3138 // HGraphBuilder infrastructure for bailing out and checking bailouts.
3139 #define CHECK_BAILOUT(call) \
3140  do { \
3141  call; \
3142  if (HasStackOverflow()) return; \
3143  } while (false)
3144 
3145 
3146 #define CHECK_ALIVE(call) \
3147  do { \
3148  call; \
3149  if (HasStackOverflow() || current_block() == NULL) return; \
3150  } while (false)
3151 
3152 
3153 void HGraphBuilder::Bailout(const char* reason) {
3154  info()->set_bailout_reason(reason);
3155  SetStackOverflow();
3156 }
3157 
3158 
3159 void HGraphBuilder::VisitForEffect(Expression* expr) {
3160  EffectContext for_effect(this);
3161  Visit(expr);
3162 }
3163 
3164 
3165 void HGraphBuilder::VisitForValue(Expression* expr, ArgumentsAllowedFlag flag) {
3166  ValueContext for_value(this, flag);
3167  Visit(expr);
3168 }
3169 
3170 
3171 void HGraphBuilder::VisitForTypeOf(Expression* expr) {
3172  ValueContext for_value(this, ARGUMENTS_NOT_ALLOWED);
3173  for_value.set_for_typeof(true);
3174  Visit(expr);
3175 }
3176 
3177 
3178 
3179 void HGraphBuilder::VisitForControl(Expression* expr,
3180  HBasicBlock* true_block,
3181  HBasicBlock* false_block) {
3182  TestContext for_test(this, expr, oracle(), true_block, false_block);
3183  Visit(expr);
3184 }
3185 
3186 
3187 void HGraphBuilder::VisitArgument(Expression* expr) {
3188  CHECK_ALIVE(VisitForValue(expr));
3189  Push(AddInstruction(new(zone()) HPushArgument(Pop())));
3190 }
3191 
3192 
3193 void HGraphBuilder::VisitArgumentList(ZoneList<Expression*>* arguments) {
3194  for (int i = 0; i < arguments->length(); i++) {
3195  CHECK_ALIVE(VisitArgument(arguments->at(i)));
3196  }
3197 }
3198 
3199 
3200 void HGraphBuilder::VisitExpressions(ZoneList<Expression*>* exprs) {
3201  for (int i = 0; i < exprs->length(); ++i) {
3202  CHECK_ALIVE(VisitForValue(exprs->at(i)));
3203  }
3204 }
3205 
3206 
3208  graph_ = new(zone()) HGraph(info());
3209  if (FLAG_hydrogen_stats) HStatistics::Instance()->Initialize(info());
3210 
3211  {
3212  HPhase phase("H_Block building");
3213  current_block_ = graph()->entry_block();
3214 
3215  Scope* scope = info()->scope();
3216  if (scope->HasIllegalRedeclaration()) {
3217  Bailout("function with illegal redeclaration");
3218  return NULL;
3219  }
3220  if (scope->calls_eval()) {
3221  Bailout("function calls eval");
3222  return NULL;
3223  }
3224  SetUpScope(scope);
3225 
3226  // Add an edge to the body entry. This is warty: the graph's start
3227  // environment will be used by the Lithium translation as the initial
3228  // environment on graph entry, but it has now been mutated by the
3229  // Hydrogen translation of the instructions in the start block. This
3230  // environment uses values which have not been defined yet. These
3231  // Hydrogen instructions will then be replayed by the Lithium
3232  // translation, so they cannot have an environment effect. The edge to
3233  // the body's entry block (along with some special logic for the start
3234  // block in HInstruction::InsertAfter) seals the start block from
3235  // getting unwanted instructions inserted.
3236  //
3237  // TODO(kmillikin): Fix this. Stop mutating the initial environment.
3238  // Make the Hydrogen instructions in the initial block into Hydrogen
3239  // values (but not instructions), present in the initial environment and
3240  // not replayed by the Lithium translation.
3241  HEnvironment* initial_env = environment()->CopyWithoutHistory();
3242  HBasicBlock* body_entry = CreateBasicBlock(initial_env);
3243  current_block()->Goto(body_entry);
3244  body_entry->SetJoinId(BailoutId::FunctionEntry());
3245  set_current_block(body_entry);
3246 
3247  // Handle implicit declaration of the function name in named function
3248  // expressions before other declarations.
3249  if (scope->is_function_scope() && scope->function() != NULL) {
3250  VisitVariableDeclaration(scope->function());
3251  }
3252  VisitDeclarations(scope->declarations());
3254 
3255  HValue* context = environment()->LookupContext();
3257  new(zone()) HStackCheck(context, HStackCheck::kFunctionEntry));
3258 
3259  VisitStatements(info()->function()->body());
3260  if (HasStackOverflow()) return NULL;
3261 
3262  if (current_block() != NULL) {
3263  HReturn* instr = new(zone()) HReturn(graph()->GetConstantUndefined());
3264  current_block()->FinishExit(instr);
3266  }
3267 
3268  // If the checksum of the number of type info changes is the same as the
3269  // last time this function was compiled, then this recompile is likely not
3270  // due to missing/inadequate type feedback, but rather too aggressive
3271  // optimization. Disable optimistic LICM in that case.
3272  Handle<Code> unoptimized_code(info()->shared_info()->code());
3273  ASSERT(unoptimized_code->kind() == Code::FUNCTION);
3274  Handle<Object> maybe_type_info(unoptimized_code->type_feedback_info());
3275  Handle<TypeFeedbackInfo> type_info(
3276  Handle<TypeFeedbackInfo>::cast(maybe_type_info));
3277  int checksum = type_info->own_type_change_checksum();
3278  int composite_checksum = graph()->update_type_change_checksum(checksum);
3280  !type_info->matches_inlined_type_change_checksum(composite_checksum));
3281  type_info->set_inlined_type_change_checksum(composite_checksum);
3282  }
3283 
3284  return graph();
3285 }
3286 
3288  *bailout_reason = SmartArrayPointer<char>();
3289  OrderBlocks();
3290  AssignDominators();
3291 
3292 #ifdef DEBUG
3293  // Do a full verify after building the graph and computing dominators.
3294  Verify(true);
3295 #endif
3296 
3298  if (!CheckConstPhiUses()) {
3299  *bailout_reason = SmartArrayPointer<char>(StrDup(
3300  "Unsupported phi use of const variable"));
3301  return false;
3302  }
3304  if (!CheckArgumentsPhiUses()) {
3305  *bailout_reason = SmartArrayPointer<char>(StrDup(
3306  "Unsupported phi use of arguments"));
3307  return false;
3308  }
3309  if (FLAG_eliminate_dead_phis) EliminateUnreachablePhis();
3310  CollectPhis();
3311 
3312  if (has_osr_loop_entry()) {
3313  const ZoneList<HPhi*>* phis = osr_loop_entry()->phis();
3314  for (int j = 0; j < phis->length(); j++) {
3315  HPhi* phi = phis->at(j);
3316  osr_values()->at(phi->merged_index())->set_incoming_value(phi);
3317  }
3318  }
3319 
3320  HInferRepresentation rep(this);
3321  rep.Analyze();
3322 
3325 
3327 
3328  // Must be performed before canonicalization to ensure that Canonicalize
3329  // will not remove semantically meaningful ToInt32 operations e.g. BIT_OR with
3330  // zero.
3332 
3333  Canonicalize();
3334 
3335  // Perform common subexpression elimination and loop-invariant code motion.
3336  if (FLAG_use_gvn) {
3337  HPhase phase("H_Global value numbering", this);
3338  HGlobalValueNumberer gvn(this, info());
3339  bool removed_side_effects = gvn.Analyze();
3340  // Trigger a second analysis pass to further eliminate duplicate values that
3341  // could only be discovered by removing side-effect-generating instructions
3342  // during the first pass.
3343  if (FLAG_smi_only_arrays && removed_side_effects) {
3344  removed_side_effects = gvn.Analyze();
3345  ASSERT(!removed_side_effects);
3346  }
3347  }
3348 
3349  if (FLAG_use_range) {
3350  HRangeAnalysis rangeAnalysis(this);
3351  rangeAnalysis.Analyze();
3352  }
3354 
3355  // Eliminate redundant stack checks on backwards branches.
3356  HStackCheckEliminator sce(this);
3357  sce.Process();
3358 
3361  if (FLAG_dead_code_elimination) DeadCodeElimination();
3362 
3363  return true;
3364 }
3365 
3366 
3367 // We try to "factor up" HBoundsCheck instructions towards the root of the
3368 // dominator tree.
3369 // For now we handle checks where the index is like "exp + int32value".
3370 // If in the dominator tree we check "exp + v1" and later (dominated)
3371 // "exp + v2", if v2 <= v1 we can safely remove the second check, and if
3372 // v2 > v1 we can use v2 in the 1st check and again remove the second.
3373 // To do so we keep a dictionary of all checks where the key if the pair
3374 // "exp, length".
3375 // The class BoundsCheckKey represents this key.
3376 class BoundsCheckKey : public ZoneObject {
3377  public:
3378  HValue* IndexBase() const { return index_base_; }
3379  HValue* Length() const { return length_; }
3380 
3381  uint32_t Hash() {
3382  return static_cast<uint32_t>(index_base_->Hashcode() ^ length_->Hashcode());
3383  }
3384 
3385  static BoundsCheckKey* Create(Zone* zone,
3387  int32_t* offset) {
3388  if (!check->index()->representation().IsInteger32()) return NULL;
3389 
3390  HValue* index_base = NULL;
3391  HConstant* constant = NULL;
3392  bool is_sub = false;
3393 
3394  if (check->index()->IsAdd()) {
3395  HAdd* index = HAdd::cast(check->index());
3396  if (index->left()->IsConstant()) {
3397  constant = HConstant::cast(index->left());
3398  index_base = index->right();
3399  } else if (index->right()->IsConstant()) {
3400  constant = HConstant::cast(index->right());
3401  index_base = index->left();
3402  }
3403  } else if (check->index()->IsSub()) {
3404  HSub* index = HSub::cast(check->index());
3405  is_sub = true;
3406  if (index->left()->IsConstant()) {
3407  constant = HConstant::cast(index->left());
3408  index_base = index->right();
3409  } else if (index->right()->IsConstant()) {
3410  constant = HConstant::cast(index->right());
3411  index_base = index->left();
3412  }
3413  }
3414 
3415  if (constant != NULL && constant->HasInteger32Value()) {
3416  *offset = is_sub ? - constant->Integer32Value()
3417  : constant->Integer32Value();
3418  } else {
3419  *offset = 0;
3420  index_base = check->index();
3421  }
3422 
3423  return new(zone) BoundsCheckKey(index_base, check->length());
3424  }
3425 
3426  private:
3427  BoundsCheckKey(HValue* index_base, HValue* length)
3428  : index_base_(index_base),
3429  length_(length) { }
3430 
3431  HValue* index_base_;
3432  HValue* length_;
3433 };
3434 
3435 
3436 // Data about each HBoundsCheck that can be eliminated or moved.
3437 // It is the "value" in the dictionary indexed by "base-index, length"
3438 // (the key is BoundsCheckKey).
3439 // We scan the code with a dominator tree traversal.
3440 // Traversing the dominator tree we keep a stack (implemented as a singly
3441 // linked list) of "data" for each basic block that contains a relevant check
3442 // with the same key (the dictionary holds the head of the list).
3443 // We also keep all the "data" created for a given basic block in a list, and
3444 // use it to "clean up" the dictionary when backtracking in the dominator tree
3445 // traversal.
3446 // Doing this each dictionary entry always directly points to the check that
3447 // is dominating the code being examined now.
3448 // We also track the current "offset" of the index expression and use it to
3449 // decide if any check is already "covered" (so it can be removed) or not.
3451  public:
3452  BoundsCheckKey* Key() const { return key_; }
3453  int32_t LowerOffset() const { return lower_offset_; }
3454  int32_t UpperOffset() const { return upper_offset_; }
3455  HBasicBlock* BasicBlock() const { return basic_block_; }
3456  HBoundsCheck* LowerCheck() const { return lower_check_; }
3457  HBoundsCheck* UpperCheck() const { return upper_check_; }
3458  BoundsCheckBbData* NextInBasicBlock() const { return next_in_bb_; }
3459  BoundsCheckBbData* FatherInDominatorTree() const { return father_in_dt_; }
3460 
3461  bool OffsetIsCovered(int32_t offset) const {
3462  return offset >= LowerOffset() && offset <= UpperOffset();
3463  }
3464 
3465  bool HasSingleCheck() { return lower_check_ == upper_check_; }
3466 
3467  // The goal of this method is to modify either upper_offset_ or
3468  // lower_offset_ so that also new_offset is covered (the covered
3469  // range grows).
3470  //
3471  // The precondition is that new_check follows UpperCheck() and
3472  // LowerCheck() in the same basic block, and that new_offset is not
3473  // covered (otherwise we could simply remove new_check).
3474  //
3475  // If HasSingleCheck() is true then new_check is added as "second check"
3476  // (either upper or lower; note that HasSingleCheck() becomes false).
3477  // Otherwise one of the current checks is modified so that it also covers
3478  // new_offset, and new_check is removed.
3479  void CoverCheck(HBoundsCheck* new_check,
3480  int32_t new_offset) {
3481  ASSERT(new_check->index()->representation().IsInteger32());
3482  bool keep_new_check = false;
3483 
3484  if (new_offset > upper_offset_) {
3485  upper_offset_ = new_offset;
3486  if (HasSingleCheck()) {
3487  keep_new_check = true;
3488  upper_check_ = new_check;
3489  } else {
3490  BuildOffsetAdd(upper_check_,
3491  &added_upper_index_,
3492  &added_upper_offset_,
3493  Key()->IndexBase(),
3494  new_check->index()->representation(),
3495  new_offset);
3496  upper_check_->SetOperandAt(0, added_upper_index_);
3497  }
3498  } else if (new_offset < lower_offset_) {
3499  lower_offset_ = new_offset;
3500  if (HasSingleCheck()) {
3501  keep_new_check = true;
3502  lower_check_ = new_check;
3503  } else {
3504  BuildOffsetAdd(lower_check_,
3505  &added_lower_index_,
3506  &added_lower_offset_,
3507  Key()->IndexBase(),
3508  new_check->index()->representation(),
3509  new_offset);
3510  lower_check_->SetOperandAt(0, added_lower_index_);
3511  }
3512  } else {
3513  ASSERT(false);
3514  }
3515 
3516  if (!keep_new_check) {
3517  new_check->DeleteAndReplaceWith(NULL);
3518  }
3519  }
3520 
3522  RemoveZeroAdd(&added_lower_index_, &added_lower_offset_);
3523  RemoveZeroAdd(&added_upper_index_, &added_upper_offset_);
3524  }
3525 
3527  int32_t lower_offset,
3528  int32_t upper_offset,
3529  HBasicBlock* bb,
3530  HBoundsCheck* lower_check,
3531  HBoundsCheck* upper_check,
3532  BoundsCheckBbData* next_in_bb,
3533  BoundsCheckBbData* father_in_dt)
3534  : key_(key),
3535  lower_offset_(lower_offset),
3536  upper_offset_(upper_offset),
3537  basic_block_(bb),
3538  lower_check_(lower_check),
3539  upper_check_(upper_check),
3540  added_lower_index_(NULL),
3541  added_lower_offset_(NULL),
3542  added_upper_index_(NULL),
3543  added_upper_offset_(NULL),
3544  next_in_bb_(next_in_bb),
3545  father_in_dt_(father_in_dt) { }
3546 
3547  private:
3548  BoundsCheckKey* key_;
3549  int32_t lower_offset_;
3550  int32_t upper_offset_;
3551  HBasicBlock* basic_block_;
3552  HBoundsCheck* lower_check_;
3553  HBoundsCheck* upper_check_;
3554  HAdd* added_lower_index_;
3555  HConstant* added_lower_offset_;
3556  HAdd* added_upper_index_;
3557  HConstant* added_upper_offset_;
3558  BoundsCheckBbData* next_in_bb_;
3559  BoundsCheckBbData* father_in_dt_;
3560 
3561  void BuildOffsetAdd(HBoundsCheck* check,
3562  HAdd** add,
3563  HConstant** constant,
3564  HValue* original_value,
3565  Representation representation,
3566  int32_t new_offset) {
3567  HConstant* new_constant = new(BasicBlock()->zone())
3568  HConstant(new_offset, Representation::Integer32());
3569  if (*add == NULL) {
3570  new_constant->InsertBefore(check);
3571  // Because of the bounds checks elimination algorithm, the index is always
3572  // an HAdd or an HSub here, so we can safely cast to an HBinaryOperation.
3573  HValue* context = HBinaryOperation::cast(check->index())->context();
3574  *add = new(BasicBlock()->zone()) HAdd(context,
3575  original_value,
3576  new_constant);
3577  (*add)->AssumeRepresentation(representation);
3578  (*add)->InsertBefore(check);
3579  } else {
3580  new_constant->InsertBefore(*add);
3581  (*constant)->DeleteAndReplaceWith(new_constant);
3582  }
3583  *constant = new_constant;
3584  }
3585 
3586  void RemoveZeroAdd(HAdd** add, HConstant** constant) {
3587  if (*add != NULL && (*constant)->Integer32Value() == 0) {
3588  (*add)->DeleteAndReplaceWith((*add)->left());
3589  (*constant)->DeleteAndReplaceWith(NULL);
3590  }
3591  }
3592 };
3593 
3594 
3595 static bool BoundsCheckKeyMatch(void* key1, void* key2) {
3596  BoundsCheckKey* k1 = static_cast<BoundsCheckKey*>(key1);
3597  BoundsCheckKey* k2 = static_cast<BoundsCheckKey*>(key2);
3598  return k1->IndexBase() == k2->IndexBase() && k1->Length() == k2->Length();
3599 }
3600 
3601 
3603  public:
3605  return reinterpret_cast<BoundsCheckBbData**>(
3606  &(Lookup(key, key->Hash(), true, ZoneAllocationPolicy(zone))->value));
3607  }
3608 
3609  void Insert(BoundsCheckKey* key, BoundsCheckBbData* data, Zone* zone) {
3610  Lookup(key, key->Hash(), true, ZoneAllocationPolicy(zone))->value = data;
3611  }
3612 
3613  void Delete(BoundsCheckKey* key) {
3614  Remove(key, key->Hash());
3615  }
3616 
3617  explicit BoundsCheckTable(Zone* zone)
3618  : ZoneHashMap(BoundsCheckKeyMatch, ZoneHashMap::kDefaultHashMapCapacity,
3619  ZoneAllocationPolicy(zone)) { }
3620 };
3621 
3622 
3623 // Eliminates checks in bb and recursively in the dominated blocks.
3624 // Also replace the results of check instructions with the original value, if
3625 // the result is used. This is safe now, since we don't do code motion after
3626 // this point. It enables better register allocation since the value produced
3627 // by check instructions is really a copy of the original value.
3628 void HGraph::EliminateRedundantBoundsChecks(HBasicBlock* bb,
3629  BoundsCheckTable* table) {
3630  BoundsCheckBbData* bb_data_list = NULL;
3631 
3632  for (HInstruction* i = bb->first(); i != NULL; i = i->next()) {
3633  if (!i->IsBoundsCheck()) continue;
3634 
3635  HBoundsCheck* check = HBoundsCheck::cast(i);
3636  check->ReplaceAllUsesWith(check->index());
3637 
3638  if (!FLAG_array_bounds_checks_elimination) continue;
3639 
3640  int32_t offset;
3641  BoundsCheckKey* key =
3642  BoundsCheckKey::Create(zone(), check, &offset);
3643  if (key == NULL) continue;
3644  BoundsCheckBbData** data_p = table->LookupOrInsert(key, zone());
3645  BoundsCheckBbData* data = *data_p;
3646  if (data == NULL) {
3647  bb_data_list = new(zone()) BoundsCheckBbData(key,
3648  offset,
3649  offset,
3650  bb,
3651  check,
3652  check,
3653  bb_data_list,
3654  NULL);
3655  *data_p = bb_data_list;
3656  } else if (data->OffsetIsCovered(offset)) {
3657  check->DeleteAndReplaceWith(NULL);
3658  } else if (data->BasicBlock() == bb) {
3659  data->CoverCheck(check, offset);
3660  } else {
3661  int32_t new_lower_offset = offset < data->LowerOffset()
3662  ? offset
3663  : data->LowerOffset();
3664  int32_t new_upper_offset = offset > data->UpperOffset()
3665  ? offset
3666  : data->UpperOffset();
3667  bb_data_list = new(zone()) BoundsCheckBbData(key,
3668  new_lower_offset,
3669  new_upper_offset,
3670  bb,
3671  data->LowerCheck(),
3672  data->UpperCheck(),
3673  bb_data_list,
3674  data);
3675  table->Insert(key, bb_data_list, zone());
3676  }
3677  }
3678 
3679  for (int i = 0; i < bb->dominated_blocks()->length(); ++i) {
3680  EliminateRedundantBoundsChecks(bb->dominated_blocks()->at(i), table);
3681  }
3682 
3683  for (BoundsCheckBbData* data = bb_data_list;
3684  data != NULL;
3685  data = data->NextInBasicBlock()) {
3686  data->RemoveZeroOperations();
3687  if (data->FatherInDominatorTree()) {
3688  table->Insert(data->Key(), data->FatherInDominatorTree(), zone());
3689  } else {
3690  table->Delete(data->Key());
3691  }
3692  }
3693 }
3694 
3695 
3697  HPhase phase("H_Eliminate bounds checks", this);
3698  BoundsCheckTable checks_table(zone());
3699  EliminateRedundantBoundsChecks(entry_block(), &checks_table);
3700 }
3701 
3702 
3703 static void DehoistArrayIndex(ArrayInstructionInterface* array_operation) {
3704  HValue* index = array_operation->GetKey();
3705  if (!index->representation().IsInteger32()) return;
3706 
3707  HConstant* constant;
3708  HValue* subexpression;
3709  int32_t sign;
3710  if (index->IsAdd()) {
3711  sign = 1;
3712  HAdd* add = HAdd::cast(index);
3713  if (add->left()->IsConstant()) {
3714  subexpression = add->right();
3715  constant = HConstant::cast(add->left());
3716  } else if (add->right()->IsConstant()) {
3717  subexpression = add->left();
3718  constant = HConstant::cast(add->right());
3719  } else {
3720  return;
3721  }
3722  } else if (index->IsSub()) {
3723  sign = -1;
3724  HSub* sub = HSub::cast(index);
3725  if (sub->left()->IsConstant()) {
3726  subexpression = sub->right();
3727  constant = HConstant::cast(sub->left());
3728  } else if (sub->right()->IsConstant()) {
3729  subexpression = sub->left();
3730  constant = HConstant::cast(sub->right());
3731  } return;
3732  } else {
3733  return;
3734  }
3735 
3736  if (!constant->HasInteger32Value()) return;
3737  int32_t value = constant->Integer32Value() * sign;
3738  // We limit offset values to 30 bits because we want to avoid the risk of
3739  // overflows when the offset is added to the object header size.
3740  if (value >= 1 << 30 || value < 0) return;
3741  array_operation->SetKey(subexpression);
3742  if (index->HasNoUses()) {
3743  index->DeleteAndReplaceWith(NULL);
3744  }
3745  ASSERT(value >= 0);
3746  array_operation->SetIndexOffset(static_cast<uint32_t>(value));
3747  array_operation->SetDehoisted(true);
3748 }
3749 
3750 
3752  if (!FLAG_array_index_dehoisting) return;
3753 
3754  HPhase phase("H_Dehoist index computations", this);
3755  for (int i = 0; i < blocks()->length(); ++i) {
3756  for (HInstruction* instr = blocks()->at(i)->first();
3757  instr != NULL;
3758  instr = instr->next()) {
3759  ArrayInstructionInterface* array_instruction = NULL;
3760  if (instr->IsLoadKeyedFastElement()) {
3762  array_instruction = static_cast<ArrayInstructionInterface*>(op);
3763  } else if (instr->IsLoadKeyedFastDoubleElement()) {
3766  array_instruction = static_cast<ArrayInstructionInterface*>(op);
3767  } else if (instr->IsLoadKeyedSpecializedArrayElement()) {
3770  array_instruction = static_cast<ArrayInstructionInterface*>(op);
3771  } else if (instr->IsStoreKeyedFastElement()) {
3773  array_instruction = static_cast<ArrayInstructionInterface*>(op);
3774  } else if (instr->IsStoreKeyedFastDoubleElement()) {
3777  array_instruction = static_cast<ArrayInstructionInterface*>(op);
3778  } else if (instr->IsStoreKeyedSpecializedArrayElement()) {
3781  array_instruction = static_cast<ArrayInstructionInterface*>(op);
3782  } else {
3783  continue;
3784  }
3785  DehoistArrayIndex(array_instruction);
3786  }
3787  }
3788 }
3789 
3790 
3792  HPhase phase("H_Dead code elimination", this);
3793  ZoneList<HInstruction*> worklist(blocks_.length(), zone());
3794  for (int i = 0; i < blocks()->length(); ++i) {
3795  for (HInstruction* instr = blocks()->at(i)->first();
3796  instr != NULL;
3797  instr = instr->next()) {
3798  if (instr->IsDead()) worklist.Add(instr, zone());
3799  }
3800  }
3801 
3802  while (!worklist.is_empty()) {
3803  HInstruction* instr = worklist.RemoveLast();
3804  if (FLAG_trace_dead_code_elimination) {
3805  HeapStringAllocator allocator;
3806  StringStream stream(&allocator);
3807  instr->PrintNameTo(&stream);
3808  stream.Add(" = ");
3809  instr->PrintTo(&stream);
3810  PrintF("[removing dead instruction %s]\n", *stream.ToCString());
3811  }
3812  instr->DeleteAndReplaceWith(NULL);
3813  for (int i = 0; i < instr->OperandCount(); ++i) {
3814  HValue* operand = instr->OperandAt(i);
3815  if (operand->IsDead()) worklist.Add(HInstruction::cast(operand), zone());
3816  }
3817  }
3818 }
3819 
3820 
3822  ASSERT(current_block() != NULL);
3823  current_block()->AddInstruction(instr);
3824  return instr;
3825 }
3826 
3827 
3829  ASSERT(current_block() != NULL);
3830  current_block()->AddSimulate(ast_id);
3831 }
3832 
3833 
3834 void HGraphBuilder::AddPhi(HPhi* instr) {
3835  ASSERT(current_block() != NULL);
3836  current_block()->AddPhi(instr);
3837 }
3838 
3839 
3840 void HGraphBuilder::PushAndAdd(HInstruction* instr) {
3841  Push(instr);
3842  AddInstruction(instr);
3843 }
3844 
3845 
3846 template <class Instruction>
3847 HInstruction* HGraphBuilder::PreProcessCall(Instruction* call) {
3848  int count = call->argument_count();
3849  ZoneList<HValue*> arguments(count, zone());
3850  for (int i = 0; i < count; ++i) {
3851  arguments.Add(Pop(), zone());
3852  }
3853 
3854  while (!arguments.is_empty()) {
3855  AddInstruction(new(zone()) HPushArgument(arguments.RemoveLast()));
3856  }
3857  return call;
3858 }
3859 
3860 
3861 void HGraphBuilder::SetUpScope(Scope* scope) {
3862  HConstant* undefined_constant = new(zone()) HConstant(
3863  isolate()->factory()->undefined_value(), Representation::Tagged());
3864  AddInstruction(undefined_constant);
3865  graph_->set_undefined_constant(undefined_constant);
3866 
3867  HArgumentsObject* object = new(zone()) HArgumentsObject;
3868  AddInstruction(object);
3869  graph()->SetArgumentsObject(object);
3870 
3871  // Set the initial values of parameters including "this". "This" has
3872  // parameter index 0.
3873  ASSERT_EQ(scope->num_parameters() + 1, environment()->parameter_count());
3874 
3875  for (int i = 0; i < environment()->parameter_count(); ++i) {
3876  HInstruction* parameter = AddInstruction(new(zone()) HParameter(i));
3877  environment()->Bind(i, parameter);
3878  }
3879 
3880  // First special is HContext.
3881  HInstruction* context = AddInstruction(new(zone()) HContext);
3882  environment()->BindContext(context);
3883 
3884  // Initialize specials and locals to undefined.
3885  for (int i = environment()->parameter_count() + 1;
3886  i < environment()->length();
3887  ++i) {
3888  environment()->Bind(i, undefined_constant);
3889  }
3890 
3891  // Handle the arguments and arguments shadow variables specially (they do
3892  // not have declarations).
3893  if (scope->arguments() != NULL) {
3894  if (!scope->arguments()->IsStackAllocated()) {
3895  return Bailout("context-allocated arguments");
3896  }
3897 
3898  environment()->Bind(scope->arguments(),
3899  graph()->GetArgumentsObject());
3900  }
3901 }
3902 
3903 
3904 void HGraphBuilder::VisitStatements(ZoneList<Statement*>* statements) {
3905  for (int i = 0; i < statements->length(); i++) {
3906  CHECK_ALIVE(Visit(statements->at(i)));
3907  }
3908 }
3909 
3910 
3911 HBasicBlock* HGraphBuilder::CreateBasicBlock(HEnvironment* env) {
3912  HBasicBlock* b = graph()->CreateBasicBlock();
3913  b->SetInitialEnvironment(env);
3914  return b;
3915 }
3916 
3917 
3918 HBasicBlock* HGraphBuilder::CreateLoopHeaderBlock() {
3919  HBasicBlock* header = graph()->CreateBasicBlock();
3920  HEnvironment* entry_env = environment()->CopyAsLoopHeader(header);
3921  header->SetInitialEnvironment(entry_env);
3922  header->AttachLoopInformation();
3923  return header;
3924 }
3925 
3926 
3927 void HGraphBuilder::VisitBlock(Block* stmt) {
3928  ASSERT(!HasStackOverflow());
3929  ASSERT(current_block() != NULL);
3930  ASSERT(current_block()->HasPredecessor());
3931  if (stmt->scope() != NULL) {
3932  return Bailout("ScopedBlock");
3933  }
3934  BreakAndContinueInfo break_info(stmt);
3935  { BreakAndContinueScope push(&break_info, this);
3936  CHECK_BAILOUT(VisitStatements(stmt->statements()));
3937  }
3938  HBasicBlock* break_block = break_info.break_block();
3939  if (break_block != NULL) {
3940  if (current_block() != NULL) current_block()->Goto(break_block);
3941  break_block->SetJoinId(stmt->ExitId());
3942  set_current_block(break_block);
3943  }
3944 }
3945 
3946 
3947 void HGraphBuilder::VisitExpressionStatement(ExpressionStatement* stmt) {
3948  ASSERT(!HasStackOverflow());
3949  ASSERT(current_block() != NULL);
3950  ASSERT(current_block()->HasPredecessor());
3951  VisitForEffect(stmt->expression());
3952 }
3953 
3954 
3955 void HGraphBuilder::VisitEmptyStatement(EmptyStatement* stmt) {
3956  ASSERT(!HasStackOverflow());
3957  ASSERT(current_block() != NULL);
3958  ASSERT(current_block()->HasPredecessor());
3959 }
3960 
3961 
3962 void HGraphBuilder::VisitIfStatement(IfStatement* stmt) {
3963  ASSERT(!HasStackOverflow());
3964  ASSERT(current_block() != NULL);
3965  ASSERT(current_block()->HasPredecessor());
3966  if (stmt->condition()->ToBooleanIsTrue()) {
3967  AddSimulate(stmt->ThenId());
3968  Visit(stmt->then_statement());
3969  } else if (stmt->condition()->ToBooleanIsFalse()) {
3970  AddSimulate(stmt->ElseId());
3971  Visit(stmt->else_statement());
3972  } else {
3973  HBasicBlock* cond_true = graph()->CreateBasicBlock();
3974  HBasicBlock* cond_false = graph()->CreateBasicBlock();
3975  CHECK_BAILOUT(VisitForControl(stmt->condition(), cond_true, cond_false));
3976 
3977  if (cond_true->HasPredecessor()) {
3978  cond_true->SetJoinId(stmt->ThenId());
3979  set_current_block(cond_true);
3980  CHECK_BAILOUT(Visit(stmt->then_statement()));
3981  cond_true = current_block();
3982  } else {
3983  cond_true = NULL;
3984  }
3985 
3986  if (cond_false->HasPredecessor()) {
3987  cond_false->SetJoinId(stmt->ElseId());
3988  set_current_block(cond_false);
3989  CHECK_BAILOUT(Visit(stmt->else_statement()));
3990  cond_false = current_block();
3991  } else {
3992  cond_false = NULL;
3993  }
3994 
3995  HBasicBlock* join = CreateJoin(cond_true, cond_false, stmt->IfId());
3996  set_current_block(join);
3997  }
3998 }
3999 
4000 
4001 HBasicBlock* HGraphBuilder::BreakAndContinueScope::Get(
4002  BreakableStatement* stmt,
4003  BreakType type,
4004  int* drop_extra) {
4005  *drop_extra = 0;
4006  BreakAndContinueScope* current = this;
4007  while (current != NULL && current->info()->target() != stmt) {
4008  *drop_extra += current->info()->drop_extra();
4009  current = current->next();
4010  }
4011  ASSERT(current != NULL); // Always found (unless stack is malformed).
4012 
4013  if (type == BREAK) {
4014  *drop_extra += current->info()->drop_extra();
4015  }
4016 
4017  HBasicBlock* block = NULL;
4018  switch (type) {
4019  case BREAK:
4020  block = current->info()->break_block();
4021  if (block == NULL) {
4022  block = current->owner()->graph()->CreateBasicBlock();
4023  current->info()->set_break_block(block);
4024  }
4025  break;
4026 
4027  case CONTINUE:
4028  block = current->info()->continue_block();
4029  if (block == NULL) {
4030  block = current->owner()->graph()->CreateBasicBlock();
4031  current->info()->set_continue_block(block);
4032  }
4033  break;
4034  }
4035 
4036  return block;
4037 }
4038 
4039 
4040 void HGraphBuilder::VisitContinueStatement(ContinueStatement* stmt) {
4041  ASSERT(!HasStackOverflow());
4042  ASSERT(current_block() != NULL);
4043  ASSERT(current_block()->HasPredecessor());
4044  int drop_extra = 0;
4045  HBasicBlock* continue_block = break_scope()->Get(stmt->target(),
4046  CONTINUE,
4047  &drop_extra);
4048  Drop(drop_extra);
4049  current_block()->Goto(continue_block);
4051 }
4052 
4053 
4054 void HGraphBuilder::VisitBreakStatement(BreakStatement* stmt) {
4055  ASSERT(!HasStackOverflow());
4056  ASSERT(current_block() != NULL);
4057  ASSERT(current_block()->HasPredecessor());
4058  int drop_extra = 0;
4059  HBasicBlock* break_block = break_scope()->Get(stmt->target(),
4060  BREAK,
4061  &drop_extra);
4062  Drop(drop_extra);
4063  current_block()->Goto(break_block);
4065 }
4066 
4067 
4068 void HGraphBuilder::VisitReturnStatement(ReturnStatement* stmt) {
4069  ASSERT(!HasStackOverflow());
4070  ASSERT(current_block() != NULL);
4071  ASSERT(current_block()->HasPredecessor());
4072  FunctionState* state = function_state();
4073  AstContext* context = call_context();
4074  if (context == NULL) {
4075  // Not an inlined return, so an actual one.
4076  CHECK_ALIVE(VisitForValue(stmt->expression()));
4077  HValue* result = environment()->Pop();
4078  current_block()->FinishExit(new(zone()) HReturn(result));
4079  } else if (state->inlining_kind() == CONSTRUCT_CALL_RETURN) {
4080  // Return from an inlined construct call. In a test context the return value
4081  // will always evaluate to true, in a value context the return value needs
4082  // to be a JSObject.
4083  if (context->IsTest()) {
4084  TestContext* test = TestContext::cast(context);
4085  CHECK_ALIVE(VisitForEffect(stmt->expression()));
4086  current_block()->Goto(test->if_true(), state);
4087  } else if (context->IsEffect()) {
4088  CHECK_ALIVE(VisitForEffect(stmt->expression()));
4089  current_block()->Goto(function_return(), state);
4090  } else {
4091  ASSERT(context->IsValue());
4092  CHECK_ALIVE(VisitForValue(stmt->expression()));
4093  HValue* return_value = Pop();
4094  HValue* receiver = environment()->arguments_environment()->Lookup(0);
4095  HHasInstanceTypeAndBranch* typecheck =
4096  new(zone()) HHasInstanceTypeAndBranch(return_value,
4099  HBasicBlock* if_spec_object = graph()->CreateBasicBlock();
4100  HBasicBlock* not_spec_object = graph()->CreateBasicBlock();
4101  typecheck->SetSuccessorAt(0, if_spec_object);
4102  typecheck->SetSuccessorAt(1, not_spec_object);
4103  current_block()->Finish(typecheck);
4104  if_spec_object->AddLeaveInlined(return_value, state);
4105  not_spec_object->AddLeaveInlined(receiver, state);
4106  }
4107  } else if (state->inlining_kind() == SETTER_CALL_RETURN) {
4108  // Return from an inlined setter call. The returned value is never used, the
4109  // value of an assignment is always the value of the RHS of the assignment.
4110  CHECK_ALIVE(VisitForEffect(stmt->expression()));
4111  if (context->IsTest()) {
4112  HValue* rhs = environment()->arguments_environment()->Lookup(1);
4113  context->ReturnValue(rhs);
4114  } else if (context->IsEffect()) {
4115  current_block()->Goto(function_return(), state);
4116  } else {
4117  ASSERT(context->IsValue());
4118  HValue* rhs = environment()->arguments_environment()->Lookup(1);
4119  current_block()->AddLeaveInlined(rhs, state);
4120  }
4121  } else {
4122  // Return from a normal inlined function. Visit the subexpression in the
4123  // expression context of the call.
4124  if (context->IsTest()) {
4125  TestContext* test = TestContext::cast(context);
4126  VisitForControl(stmt->expression(), test->if_true(), test->if_false());
4127  } else if (context->IsEffect()) {
4128  CHECK_ALIVE(VisitForEffect(stmt->expression()));
4129  current_block()->Goto(function_return(), state);
4130  } else {
4131  ASSERT(context->IsValue());
4132  CHECK_ALIVE(VisitForValue(stmt->expression()));
4133  current_block()->AddLeaveInlined(Pop(), state);
4134  }
4135  }
4137 }
4138 
4139 
4140 void HGraphBuilder::VisitWithStatement(WithStatement* stmt) {
4141  ASSERT(!HasStackOverflow());
4142  ASSERT(current_block() != NULL);
4143  ASSERT(current_block()->HasPredecessor());
4144  return Bailout("WithStatement");
4145 }
4146 
4147 
4148 void HGraphBuilder::VisitSwitchStatement(SwitchStatement* stmt) {
4149  ASSERT(!HasStackOverflow());
4150  ASSERT(current_block() != NULL);
4151  ASSERT(current_block()->HasPredecessor());
4152  // We only optimize switch statements with smi-literal smi comparisons,
4153  // with a bounded number of clauses.
4154  const int kCaseClauseLimit = 128;
4155  ZoneList<CaseClause*>* clauses = stmt->cases();
4156  int clause_count = clauses->length();
4157  if (clause_count > kCaseClauseLimit) {
4158  return Bailout("SwitchStatement: too many clauses");
4159  }
4160 
4161  HValue* context = environment()->LookupContext();
4162 
4163  CHECK_ALIVE(VisitForValue(stmt->tag()));
4164  AddSimulate(stmt->EntryId());
4165  HValue* tag_value = Pop();
4166  HBasicBlock* first_test_block = current_block();
4167 
4168  SwitchType switch_type = UNKNOWN_SWITCH;
4169 
4170  // 1. Extract clause type
4171  for (int i = 0; i < clause_count; ++i) {
4172  CaseClause* clause = clauses->at(i);
4173  if (clause->is_default()) continue;
4174 
4175  if (switch_type == UNKNOWN_SWITCH) {
4176  if (clause->label()->IsSmiLiteral()) {
4177  switch_type = SMI_SWITCH;
4178  } else if (clause->label()->IsStringLiteral()) {
4179  switch_type = STRING_SWITCH;
4180  } else {
4181  return Bailout("SwitchStatement: non-literal switch label");
4182  }
4183  } else if ((switch_type == STRING_SWITCH &&
4184  !clause->label()->IsStringLiteral()) ||
4185  (switch_type == SMI_SWITCH &&
4186  !clause->label()->IsSmiLiteral())) {
4187  return Bailout("SwitchStatemnt: mixed label types are not supported");
4188  }
4189  }
4190 
4191  HUnaryControlInstruction* string_check = NULL;
4192  HBasicBlock* not_string_block = NULL;
4193 
4194  // Test switch's tag value if all clauses are string literals
4195  if (switch_type == STRING_SWITCH) {
4196  string_check = new(zone()) HIsStringAndBranch(tag_value);
4197  first_test_block = graph()->CreateBasicBlock();
4198  not_string_block = graph()->CreateBasicBlock();
4199 
4200  string_check->SetSuccessorAt(0, first_test_block);
4201  string_check->SetSuccessorAt(1, not_string_block);
4202  current_block()->Finish(string_check);
4203 
4204  set_current_block(first_test_block);
4205  }
4206 
4207  // 2. Build all the tests, with dangling true branches
4208  BailoutId default_id = BailoutId::None();
4209  for (int i = 0; i < clause_count; ++i) {
4210  CaseClause* clause = clauses->at(i);
4211  if (clause->is_default()) {
4212  default_id = clause->EntryId();
4213  continue;
4214  }
4215  if (switch_type == SMI_SWITCH) {
4216  clause->RecordTypeFeedback(oracle());
4217  }
4218 
4219  // Generate a compare and branch.
4220  CHECK_ALIVE(VisitForValue(clause->label()));
4221  HValue* label_value = Pop();
4222 
4223  HBasicBlock* next_test_block = graph()->CreateBasicBlock();
4224  HBasicBlock* body_block = graph()->CreateBasicBlock();
4225 
4226  HControlInstruction* compare;
4227 
4228  if (switch_type == SMI_SWITCH) {
4229  if (!clause->IsSmiCompare()) {
4230  // Finish with deoptimize and add uses of enviroment values to
4231  // account for invisible uses.
4234  break;
4235  }
4236 
4237  HCompareIDAndBranch* compare_ =
4238  new(zone()) HCompareIDAndBranch(tag_value,
4239  label_value,
4240  Token::EQ_STRICT);
4241  compare_->SetInputRepresentation(Representation::Integer32());
4242  compare = compare_;
4243  } else {
4244  compare = new(zone()) HStringCompareAndBranch(context, tag_value,
4245  label_value,
4246  Token::EQ_STRICT);
4247  }
4248 
4249  compare->SetSuccessorAt(0, body_block);
4250  compare->SetSuccessorAt(1, next_test_block);
4251  current_block()->Finish(compare);
4252 
4253  set_current_block(next_test_block);
4254  }
4255 
4256  // Save the current block to use for the default or to join with the
4257  // exit. This block is NULL if we deoptimized.
4258  HBasicBlock* last_block = current_block();
4259 
4260  if (not_string_block != NULL) {
4261  BailoutId join_id = !default_id.IsNone() ? default_id : stmt->ExitId();
4262  last_block = CreateJoin(last_block, not_string_block, join_id);
4263  }
4264 
4265  // 3. Loop over the clauses and the linked list of tests in lockstep,
4266  // translating the clause bodies.
4267  HBasicBlock* curr_test_block = first_test_block;
4268  HBasicBlock* fall_through_block = NULL;
4269 
4270  BreakAndContinueInfo break_info(stmt);
4271  { BreakAndContinueScope push(&break_info, this);
4272  for (int i = 0; i < clause_count; ++i) {
4273  CaseClause* clause = clauses->at(i);
4274 
4275  // Identify the block where normal (non-fall-through) control flow
4276  // goes to.
4277  HBasicBlock* normal_block = NULL;
4278  if (clause->is_default()) {
4279  if (last_block != NULL) {
4280  normal_block = last_block;
4281  last_block = NULL; // Cleared to indicate we've handled it.
4282  }
4283  } else if (!curr_test_block->end()->IsDeoptimize()) {
4284  normal_block = curr_test_block->end()->FirstSuccessor();
4285  curr_test_block = curr_test_block->end()->SecondSuccessor();
4286  }
4287 
4288  // Identify a block to emit the body into.
4289  if (normal_block == NULL) {
4290  if (fall_through_block == NULL) {
4291  // (a) Unreachable.
4292  if (clause->is_default()) {
4293  continue; // Might still be reachable clause bodies.
4294  } else {
4295  break;
4296  }
4297  } else {
4298  // (b) Reachable only as fall through.
4299  set_current_block(fall_through_block);
4300  }
4301  } else if (fall_through_block == NULL) {
4302  // (c) Reachable only normally.
4303  set_current_block(normal_block);
4304  } else {
4305  // (d) Reachable both ways.
4306  HBasicBlock* join = CreateJoin(fall_through_block,
4307  normal_block,
4308  clause->EntryId());
4309  set_current_block(join);
4310  }
4311 
4312  CHECK_BAILOUT(VisitStatements(clause->statements()));
4313  fall_through_block = current_block();
4314  }
4315  }
4316 
4317  // Create an up-to-3-way join. Use the break block if it exists since
4318  // it's already a join block.
4319  HBasicBlock* break_block = break_info.break_block();
4320  if (break_block == NULL) {
4321  set_current_block(CreateJoin(fall_through_block,
4322  last_block,
4323  stmt->ExitId()));
4324  } else {
4325  if (fall_through_block != NULL) fall_through_block->Goto(break_block);
4326  if (last_block != NULL) last_block->Goto(break_block);
4327  break_block->SetJoinId(stmt->ExitId());
4328  set_current_block(break_block);
4329  }
4330 }
4331 
4332 
4333 bool HGraphBuilder::HasOsrEntryAt(IterationStatement* statement) {
4334  return statement->OsrEntryId() == info()->osr_ast_id();
4335 }
4336 
4337 
4338 bool HGraphBuilder::PreProcessOsrEntry(IterationStatement* statement) {
4339  if (!HasOsrEntryAt(statement)) return false;
4340 
4341  HBasicBlock* non_osr_entry = graph()->CreateBasicBlock();
4342  HBasicBlock* osr_entry = graph()->CreateBasicBlock();
4343  HValue* true_value = graph()->GetConstantTrue();
4344  HBranch* test = new(zone()) HBranch(true_value, non_osr_entry, osr_entry);
4345  current_block()->Finish(test);
4346 
4347  HBasicBlock* loop_predecessor = graph()->CreateBasicBlock();
4348  non_osr_entry->Goto(loop_predecessor);
4349 
4350  set_current_block(osr_entry);
4351  BailoutId osr_entry_id = statement->OsrEntryId();
4352  int first_expression_index = environment()->first_expression_index();
4353  int length = environment()->length();
4354  ZoneList<HUnknownOSRValue*>* osr_values =
4355  new(zone()) ZoneList<HUnknownOSRValue*>(length, zone());
4356 
4357  for (int i = 0; i < first_expression_index; ++i) {
4358  HUnknownOSRValue* osr_value = new(zone()) HUnknownOSRValue;
4359  AddInstruction(osr_value);
4360  environment()->Bind(i, osr_value);
4361  osr_values->Add(osr_value, zone());
4362  }
4363 
4364  if (first_expression_index != length) {
4365  environment()->Drop(length - first_expression_index);
4366  for (int i = first_expression_index; i < length; ++i) {
4367  HUnknownOSRValue* osr_value = new(zone()) HUnknownOSRValue;
4368  AddInstruction(osr_value);
4369  environment()->Push(osr_value);
4370  osr_values->Add(osr_value, zone());
4371  }
4372  }
4373 
4374  graph()->set_osr_values(osr_values);
4375 
4376  AddSimulate(osr_entry_id);
4377  AddInstruction(new(zone()) HOsrEntry(osr_entry_id));
4378  HContext* context = new(zone()) HContext;
4379  AddInstruction(context);
4380  environment()->BindContext(context);
4381  current_block()->Goto(loop_predecessor);
4382  loop_predecessor->SetJoinId(statement->EntryId());
4383  set_current_block(loop_predecessor);
4384  return true;
4385 }
4386 
4387 
4388 void HGraphBuilder::VisitLoopBody(IterationStatement* stmt,
4389  HBasicBlock* loop_entry,
4390  BreakAndContinueInfo* break_info) {
4391  BreakAndContinueScope push(break_info, this);
4392  AddSimulate(stmt->StackCheckId());
4393  HValue* context = environment()->LookupContext();
4394  HStackCheck* stack_check =
4395  new(zone()) HStackCheck(context, HStackCheck::kBackwardsBranch);
4396  AddInstruction(stack_check);
4397  ASSERT(loop_entry->IsLoopHeader());
4398  loop_entry->loop_information()->set_stack_check(stack_check);
4399  CHECK_BAILOUT(Visit(stmt->body()));
4400 }
4401 
4402 
4403 void HGraphBuilder::VisitDoWhileStatement(DoWhileStatement* stmt) {
4404  ASSERT(!HasStackOverflow());
4405  ASSERT(current_block() != NULL);
4406  ASSERT(current_block()->HasPredecessor());
4407  ASSERT(current_block() != NULL);
4408  bool osr_entry = PreProcessOsrEntry(stmt);
4409  HBasicBlock* loop_entry = CreateLoopHeaderBlock();
4410  current_block()->Goto(loop_entry);
4411  set_current_block(loop_entry);
4412  if (osr_entry) graph()->set_osr_loop_entry(loop_entry);
4413 
4414  BreakAndContinueInfo break_info(stmt);
4415  CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info));
4416  HBasicBlock* body_exit =
4417  JoinContinue(stmt, current_block(), break_info.continue_block());
4418  HBasicBlock* loop_successor = NULL;
4419  if (body_exit != NULL && !stmt->cond()->ToBooleanIsTrue()) {
4420  set_current_block(body_exit);
4421  // The block for a true condition, the actual predecessor block of the
4422  // back edge.
4423  body_exit = graph()->CreateBasicBlock();
4424  loop_successor = graph()->CreateBasicBlock();
4425  CHECK_BAILOUT(VisitForControl(stmt->cond(), body_exit, loop_successor));
4426  if (body_exit->HasPredecessor()) {
4427  body_exit->SetJoinId(stmt->BackEdgeId());
4428  } else {
4429  body_exit = NULL;
4430  }
4431  if (loop_successor->HasPredecessor()) {
4432  loop_successor->SetJoinId(stmt->ExitId());
4433  } else {
4434  loop_successor = NULL;
4435  }
4436  }
4437  HBasicBlock* loop_exit = CreateLoop(stmt,
4438  loop_entry,
4439  body_exit,
4440  loop_successor,
4441  break_info.break_block());
4442  set_current_block(loop_exit);
4443 }
4444 
4445 
4446 void HGraphBuilder::VisitWhileStatement(WhileStatement* stmt) {
4447  ASSERT(!HasStackOverflow());
4448  ASSERT(current_block() != NULL);
4449  ASSERT(current_block()->HasPredecessor());
4450  ASSERT(current_block() != NULL);
4451  bool osr_entry = PreProcessOsrEntry(stmt);
4452  HBasicBlock* loop_entry = CreateLoopHeaderBlock();
4453  current_block()->Goto(loop_entry);
4454  set_current_block(loop_entry);
4455  if (osr_entry) graph()->set_osr_loop_entry(loop_entry);
4456 
4457 
4458  // If the condition is constant true, do not generate a branch.
4459  HBasicBlock* loop_successor = NULL;
4460  if (!stmt->cond()->ToBooleanIsTrue()) {
4461  HBasicBlock* body_entry = graph()->CreateBasicBlock();
4462  loop_successor = graph()->CreateBasicBlock();
4463  CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor));
4464  if (body_entry->HasPredecessor()) {
4465  body_entry->SetJoinId(stmt->BodyId());
4466  set_current_block(body_entry);
4467  }
4468  if (loop_successor->HasPredecessor()) {
4469  loop_successor->SetJoinId(stmt->ExitId());
4470  } else {
4471  loop_successor = NULL;
4472  }
4473  }
4474 
4475  BreakAndContinueInfo break_info(stmt);
4476  if (current_block() != NULL) {
4477  CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info));
4478  }
4479  HBasicBlock* body_exit =
4480  JoinContinue(stmt, current_block(), break_info.continue_block());
4481  HBasicBlock* loop_exit = CreateLoop(stmt,
4482  loop_entry,
4483  body_exit,
4484  loop_successor,
4485  break_info.break_block());
4486  set_current_block(loop_exit);
4487 }
4488 
4489 
4490 void HGraphBuilder::VisitForStatement(ForStatement* stmt) {
4491  ASSERT(!HasStackOverflow());
4492  ASSERT(current_block() != NULL);
4493  ASSERT(current_block()->HasPredecessor());
4494  if (stmt->init() != NULL) {
4495  CHECK_ALIVE(Visit(stmt->init()));
4496  }
4497  ASSERT(current_block() != NULL);
4498  bool osr_entry = PreProcessOsrEntry(stmt);
4499  HBasicBlock* loop_entry = CreateLoopHeaderBlock();
4500  current_block()->Goto(loop_entry);
4501  set_current_block(loop_entry);
4502  if (osr_entry) graph()->set_osr_loop_entry(loop_entry);
4503 
4504  HBasicBlock* loop_successor = NULL;
4505  if (stmt->cond() != NULL) {
4506  HBasicBlock* body_entry = graph()->CreateBasicBlock();
4507  loop_successor = graph()->CreateBasicBlock();
4508  CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor));
4509  if (body_entry->HasPredecessor()) {
4510  body_entry->SetJoinId(stmt->BodyId());
4511  set_current_block(body_entry);
4512  }
4513  if (loop_successor->HasPredecessor()) {
4514  loop_successor->SetJoinId(stmt->ExitId());
4515  } else {
4516  loop_successor = NULL;
4517  }
4518  }
4519 
4520  BreakAndContinueInfo break_info(stmt);
4521  if (current_block() != NULL) {
4522  CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info));
4523  }
4524  HBasicBlock* body_exit =
4525  JoinContinue(stmt, current_block(), break_info.continue_block());
4526 
4527  if (stmt->next() != NULL && body_exit != NULL) {
4528  set_current_block(body_exit);
4529  CHECK_BAILOUT(Visit(stmt->next()));
4530  body_exit = current_block();
4531  }
4532 
4533  HBasicBlock* loop_exit = CreateLoop(stmt,
4534  loop_entry,
4535  body_exit,
4536  loop_successor,
4537  break_info.break_block());
4538  set_current_block(loop_exit);
4539 }
4540 
4541 
4542 void HGraphBuilder::VisitForInStatement(ForInStatement* stmt) {
4543  ASSERT(!HasStackOverflow());
4544  ASSERT(current_block() != NULL);
4545  ASSERT(current_block()->HasPredecessor());
4546 
4547  if (!FLAG_optimize_for_in) {
4548  return Bailout("ForInStatement optimization is disabled");
4549  }
4550 
4551  if (!oracle()->IsForInFastCase(stmt)) {
4552  return Bailout("ForInStatement is not fast case");
4553  }
4554 
4555  if (!stmt->each()->IsVariableProxy() ||
4556  !stmt->each()->AsVariableProxy()->var()->IsStackLocal()) {
4557  return Bailout("ForInStatement with non-local each variable");
4558  }
4559 
4560  Variable* each_var = stmt->each()->AsVariableProxy()->var();
4561 
4562  CHECK_ALIVE(VisitForValue(stmt->enumerable()));
4563  HValue* enumerable = Top(); // Leave enumerable at the top.
4564 
4565  HInstruction* map = AddInstruction(new(zone()) HForInPrepareMap(
4566  environment()->LookupContext(), enumerable));
4567  AddSimulate(stmt->PrepareId());
4568 
4569  HInstruction* array = AddInstruction(
4570  new(zone()) HForInCacheArray(
4571  enumerable,
4572  map,
4574 
4575  HInstruction* enum_length = AddInstruction(new(zone()) HMapEnumLength(map));
4576 
4577  HInstruction* start_index = AddInstruction(new(zone()) HConstant(
4578  Handle<Object>(Smi::FromInt(0)), Representation::Integer32()));
4579 
4580  Push(map);
4581  Push(array);
4582  Push(enum_length);
4583  Push(start_index);
4584 
4585  HInstruction* index_cache = AddInstruction(
4586  new(zone()) HForInCacheArray(
4587  enumerable,
4588  map,
4590  HForInCacheArray::cast(array)->set_index_cache(
4591  HForInCacheArray::cast(index_cache));
4592 
4593  bool osr_entry = PreProcessOsrEntry(stmt);
4594  HBasicBlock* loop_entry = CreateLoopHeaderBlock();
4595  current_block()->Goto(loop_entry);
4596  set_current_block(loop_entry);
4597  if (osr_entry) graph()->set_osr_loop_entry(loop_entry);
4598 
4599  HValue* index = environment()->ExpressionStackAt(0);
4600  HValue* limit = environment()->ExpressionStackAt(1);
4601 
4602  // Check that we still have more keys.
4603  HCompareIDAndBranch* compare_index =
4604  new(zone()) HCompareIDAndBranch(index, limit, Token::LT);
4605  compare_index->SetInputRepresentation(Representation::Integer32());
4606 
4607  HBasicBlock* loop_body = graph()->CreateBasicBlock();
4608  HBasicBlock* loop_successor = graph()->CreateBasicBlock();
4609 
4610  compare_index->SetSuccessorAt(0, loop_body);
4611  compare_index->SetSuccessorAt(1, loop_successor);
4612  current_block()->Finish(compare_index);
4613 
4614  set_current_block(loop_successor);
4615  Drop(5);
4616 
4617  set_current_block(loop_body);
4618 
4619  HValue* key = AddInstruction(
4620  new(zone()) HLoadKeyedFastElement(
4621  environment()->ExpressionStackAt(2), // Enum cache.
4622  environment()->ExpressionStackAt(0), // Iteration index.
4623  environment()->ExpressionStackAt(0)));
4624 
4625  // Check if the expected map still matches that of the enumerable.
4626  // If not just deoptimize.
4627  AddInstruction(new(zone()) HCheckMapValue(
4628  environment()->ExpressionStackAt(4),
4629  environment()->ExpressionStackAt(3)));
4630 
4631  Bind(each_var, key);
4632 
4633  BreakAndContinueInfo break_info(stmt, 5);
4634  CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info));
4635 
4636  HBasicBlock* body_exit =
4637  JoinContinue(stmt, current_block(), break_info.continue_block());
4638 
4639  if (body_exit != NULL) {
4640  set_current_block(body_exit);
4641 
4642  HValue* current_index = Pop();
4643  HInstruction* new_index = new(zone()) HAdd(environment()->LookupContext(),
4644  current_index,
4645  graph()->GetConstant1());
4646  new_index->AssumeRepresentation(Representation::Integer32());
4647  PushAndAdd(new_index);
4648  body_exit = current_block();
4649  }
4650 
4651  HBasicBlock* loop_exit = CreateLoop(stmt,
4652  loop_entry,
4653  body_exit,
4654  loop_successor,
4655  break_info.break_block());
4656 
4657  set_current_block(loop_exit);
4658 }
4659 
4660 
4661 void HGraphBuilder::VisitTryCatchStatement(TryCatchStatement* stmt) {
4662  ASSERT(!HasStackOverflow());
4663  ASSERT(current_block() != NULL);
4664  ASSERT(current_block()->HasPredecessor());
4665  return Bailout("TryCatchStatement");
4666 }
4667 
4668 
4669 void HGraphBuilder::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
4670  ASSERT(!HasStackOverflow());
4671  ASSERT(current_block() != NULL);
4672  ASSERT(current_block()->HasPredecessor());
4673  return Bailout("TryFinallyStatement");
4674 }
4675 
4676 
4677 void HGraphBuilder::VisitDebuggerStatement(DebuggerStatement* stmt) {
4678  ASSERT(!HasStackOverflow());
4679  ASSERT(current_block() != NULL);
4680  ASSERT(current_block()->HasPredecessor());
4681  return Bailout("DebuggerStatement");
4682 }
4683 
4684 
4685 static Handle<SharedFunctionInfo> SearchSharedFunctionInfo(
4686  Code* unoptimized_code, FunctionLiteral* expr) {
4687  int start_position = expr->start_position();
4688  RelocIterator it(unoptimized_code);
4689  for (;!it.done(); it.next()) {
4690  RelocInfo* rinfo = it.rinfo();
4691  if (rinfo->rmode() != RelocInfo::EMBEDDED_OBJECT) continue;
4692  Object* obj = rinfo->target_object();
4693  if (obj->IsSharedFunctionInfo()) {
4694  SharedFunctionInfo* shared = SharedFunctionInfo::cast(obj);
4695  if (shared->start_position() == start_position) {
4696  return Handle<SharedFunctionInfo>(shared);
4697  }
4698  }
4699  }
4700 
4701  return Handle<SharedFunctionInfo>();
4702 }
4703 
4704 
4705 void HGraphBuilder::VisitFunctionLiteral(FunctionLiteral* expr) {
4706  ASSERT(!HasStackOverflow());
4707  ASSERT(current_block() != NULL);
4708  ASSERT(current_block()->HasPredecessor());
4709  Handle<SharedFunctionInfo> shared_info =
4710  SearchSharedFunctionInfo(info()->shared_info()->code(),
4711  expr);
4712  if (shared_info.is_null()) {
4713  shared_info = Compiler::BuildFunctionInfo(expr, info()->script());
4714  }
4715  // We also have a stack overflow if the recursive compilation did.
4716  if (HasStackOverflow()) return;
4717  HValue* context = environment()->LookupContext();
4718  HFunctionLiteral* instr =
4719  new(zone()) HFunctionLiteral(context, shared_info, expr->pretenure());
4720  return ast_context()->ReturnInstruction(instr, expr->id());
4721 }
4722 
4723 
4724 void HGraphBuilder::VisitSharedFunctionInfoLiteral(
4725  SharedFunctionInfoLiteral* expr) {
4726  ASSERT(!HasStackOverflow());
4727  ASSERT(current_block() != NULL);
4728  ASSERT(current_block()->HasPredecessor());
4729  return Bailout("SharedFunctionInfoLiteral");
4730 }
4731 
4732 
4733 void HGraphBuilder::VisitConditional(Conditional* expr) {
4734  ASSERT(!HasStackOverflow());
4735  ASSERT(current_block() != NULL);
4736  ASSERT(current_block()->HasPredecessor());
4737  HBasicBlock* cond_true = graph()->CreateBasicBlock();
4738  HBasicBlock* cond_false = graph()->CreateBasicBlock();
4739  CHECK_BAILOUT(VisitForControl(expr->condition(), cond_true, cond_false));
4740 
4741  // Visit the true and false subexpressions in the same AST context as the
4742  // whole expression.
4743  if (cond_true->HasPredecessor()) {
4744  cond_true->SetJoinId(expr->ThenId());
4745  set_current_block(cond_true);
4746  CHECK_BAILOUT(Visit(expr->then_expression()));
4747  cond_true = current_block();
4748  } else {
4749  cond_true = NULL;
4750  }
4751 
4752  if (cond_false->HasPredecessor()) {
4753  cond_false->SetJoinId(expr->ElseId());
4754  set_current_block(cond_false);
4755  CHECK_BAILOUT(Visit(expr->else_expression()));
4756  cond_false = current_block();
4757  } else {
4758  cond_false = NULL;
4759  }
4760 
4761  if (!ast_context()->IsTest()) {
4762  HBasicBlock* join = CreateJoin(cond_true, cond_false, expr->id());
4763  set_current_block(join);
4764  if (join != NULL && !ast_context()->IsEffect()) {
4765  return ast_context()->ReturnValue(Pop());
4766  }
4767  }
4768 }
4769 
4770 
4771 HGraphBuilder::GlobalPropertyAccess HGraphBuilder::LookupGlobalProperty(
4772  Variable* var, LookupResult* lookup, bool is_store) {
4773  if (var->is_this() || !info()->has_global_object()) {
4774  return kUseGeneric;
4775  }
4776  Handle<GlobalObject> global(info()->global_object());
4777  global->Lookup(*var->name(), lookup);
4778  if (!lookup->IsNormal() ||
4779  (is_store && lookup->IsReadOnly()) ||
4780  lookup->holder() != *global) {
4781  return kUseGeneric;
4782  }
4783 
4784  return kUseCell;
4785 }
4786 
4787 
4788 HValue* HGraphBuilder::BuildContextChainWalk(Variable* var) {
4789  ASSERT(var->IsContextSlot());
4790  HValue* context = environment()->LookupContext();
4791  int length = info()->scope()->ContextChainLength(var->scope());
4792  while (length-- > 0) {
4793  HInstruction* context_instruction = new(zone()) HOuterContext(context);
4794  AddInstruction(context_instruction);
4795  context = context_instruction;
4796  }
4797  return context;
4798 }
4799 
4800 
4801 void HGraphBuilder::VisitVariableProxy(VariableProxy* expr) {
4802  ASSERT(!HasStackOverflow());
4803  ASSERT(current_block() != NULL);
4804  ASSERT(current_block()->HasPredecessor());
4805  Variable* variable = expr->var();
4806  switch (variable->location()) {
4807  case Variable::UNALLOCATED: {
4808  if (IsLexicalVariableMode(variable->mode())) {
4809  // TODO(rossberg): should this be an ASSERT?
4810  return Bailout("reference to global lexical variable");
4811  }
4812  // Handle known global constants like 'undefined' specially to avoid a
4813  // load from a global cell for them.
4814  Handle<Object> constant_value =
4815  isolate()->factory()->GlobalConstantFor(variable->name());
4816  if (!constant_value.is_null()) {
4817  HConstant* instr =
4818  new(zone()) HConstant(constant_value, Representation::Tagged());
4819  return ast_context()->ReturnInstruction(instr, expr->id());
4820  }
4821 
4822  LookupResult lookup(isolate());
4823  GlobalPropertyAccess type =
4824  LookupGlobalProperty(variable, &lookup, false);
4825 
4826  if (type == kUseCell &&
4827  info()->global_object()->IsAccessCheckNeeded()) {
4828  type = kUseGeneric;
4829  }
4830 
4831  if (type == kUseCell) {
4832  Handle<GlobalObject> global(info()->global_object());
4833  Handle<JSGlobalPropertyCell> cell(global->GetPropertyCell(&lookup));
4834  HLoadGlobalCell* instr =
4835  new(zone()) HLoadGlobalCell(cell, lookup.GetPropertyDetails());
4836  return ast_context()->ReturnInstruction(instr, expr->id());
4837  } else {
4838  HValue* context = environment()->LookupContext();
4839  HGlobalObject* global_object = new(zone()) HGlobalObject(context);
4840  AddInstruction(global_object);
4841  HLoadGlobalGeneric* instr =
4842  new(zone()) HLoadGlobalGeneric(context,
4843  global_object,
4844  variable->name(),
4845  ast_context()->is_for_typeof());
4846  instr->set_position(expr->position());
4847  return ast_context()->ReturnInstruction(instr, expr->id());
4848  }
4849  }
4850 
4851  case Variable::PARAMETER:
4852  case Variable::LOCAL: {
4853  HValue* value = environment()->Lookup(variable);
4854  if (value == graph()->GetConstantHole()) {
4855  ASSERT(IsDeclaredVariableMode(variable->mode()) &&
4856  variable->mode() != VAR);
4857  return Bailout("reference to uninitialized variable");
4858  }
4859  return ast_context()->ReturnValue(value);
4860  }
4861 
4862  case Variable::CONTEXT: {
4863  HValue* context = BuildContextChainWalk(variable);
4864  HLoadContextSlot* instr = new(zone()) HLoadContextSlot(context, variable);
4865  return ast_context()->ReturnInstruction(instr, expr->id());
4866  }
4867 
4868  case Variable::LOOKUP:
4869  return Bailout("reference to a variable which requires dynamic lookup");
4870  }
4871 }
4872 
4873 
4874 void HGraphBuilder::VisitLiteral(Literal* expr) {
4875  ASSERT(!HasStackOverflow());
4876  ASSERT(current_block() != NULL);
4877  ASSERT(current_block()->HasPredecessor());
4878  HConstant* instr =
4879  new(zone()) HConstant(expr->handle(), Representation::Tagged());
4880  return ast_context()->ReturnInstruction(instr, expr->id());
4881 }
4882 
4883 
4884 void HGraphBuilder::VisitRegExpLiteral(RegExpLiteral* expr) {
4885  ASSERT(!HasStackOverflow());
4886  ASSERT(current_block() != NULL);
4887  ASSERT(current_block()->HasPredecessor());
4888  Handle<JSFunction> closure = function_state()->compilation_info()->closure();
4889  Handle<FixedArray> literals(closure->literals());
4890  HValue* context = environment()->LookupContext();
4891 
4892  HRegExpLiteral* instr = new(zone()) HRegExpLiteral(context,
4893  literals,
4894  expr->pattern(),
4895  expr->flags(),
4896  expr->literal_index());
4897  return ast_context()->ReturnInstruction(instr, expr->id());
4898 }
4899 
4900 
4901 static void LookupInPrototypes(Handle<Map> map,
4902  Handle<String> name,
4903  LookupResult* lookup) {
4904  while (map->prototype()->IsJSObject()) {
4905  Handle<JSObject> holder(JSObject::cast(map->prototype()));
4906  if (!holder->HasFastProperties()) break;
4907  map = Handle<Map>(holder->map());
4908  map->LookupDescriptor(*holder, *name, lookup);
4909  if (lookup->IsFound()) return;
4910  }
4911  lookup->NotFound();
4912 }
4913 
4914 
4915 // Tries to find a JavaScript accessor of the given name in the prototype chain
4916 // starting at the given map. Return true iff there is one, including the
4917 // corresponding AccessorPair plus its holder (which could be null when the
4918 // accessor is found directly in the given map).
4919 static bool LookupAccessorPair(Handle<Map> map,
4920  Handle<String> name,
4921  Handle<AccessorPair>* accessors,
4922  Handle<JSObject>* holder) {
4923  LookupResult lookup(map->GetIsolate());
4924 
4925  // Check for a JavaScript accessor directly in the map.
4926  map->LookupDescriptor(NULL, *name, &lookup);
4927  if (lookup.IsPropertyCallbacks()) {
4928  Handle<Object> callback(lookup.GetValueFromMap(*map));
4929  if (!callback->IsAccessorPair()) return false;
4930  *accessors = Handle<AccessorPair>::cast(callback);
4931  *holder = Handle<JSObject>();
4932  return true;
4933  }
4934 
4935  // Everything else, e.g. a field, can't be an accessor call.
4936  if (lookup.IsFound()) return false;
4937 
4938  // Check for a JavaScript accessor somewhere in the proto chain.
4939  LookupInPrototypes(map, name, &lookup);
4940  if (lookup.IsPropertyCallbacks()) {
4941  Handle<Object> callback(lookup.GetValue());
4942  if (!callback->IsAccessorPair()) return false;
4943  *accessors = Handle<AccessorPair>::cast(callback);
4944  *holder = Handle<JSObject>(lookup.holder());
4945  return true;
4946  }
4947 
4948  // We haven't found a JavaScript accessor anywhere.
4949  return false;
4950 }
4951 
4952 
4953 static bool LookupGetter(Handle<Map> map,
4954  Handle<String> name,
4955  Handle<JSFunction>* getter,
4956  Handle<JSObject>* holder) {
4957  Handle<AccessorPair> accessors;
4958  if (LookupAccessorPair(map, name, &accessors, holder) &&
4959  accessors->getter()->IsJSFunction()) {
4960  *getter = Handle<JSFunction>(JSFunction::cast(accessors->getter()));
4961  return true;
4962  }
4963  return false;
4964 }
4965 
4966 
4967 static bool LookupSetter(Handle<Map> map,
4968  Handle<String> name,
4969  Handle<JSFunction>* setter,
4970  Handle<JSObject>* holder) {
4971  Handle<AccessorPair> accessors;
4972  if (LookupAccessorPair(map, name, &accessors, holder) &&
4973  accessors->setter()->IsJSFunction()) {
4974  *setter = Handle<JSFunction>(JSFunction::cast(accessors->setter()));
4975  return true;
4976  }
4977  return false;
4978 }
4979 
4980 
4981 // Determines whether the given array or object literal boilerplate satisfies
4982 // all limits to be considered for fast deep-copying and computes the total
4983 // size of all objects that are part of the graph.
4984 static bool IsFastLiteral(Handle<JSObject> boilerplate,
4985  int max_depth,
4986  int* max_properties,
4987  int* total_size) {
4988  ASSERT(max_depth >= 0 && *max_properties >= 0);
4989  if (max_depth == 0) return false;
4990 
4991  Handle<FixedArrayBase> elements(boilerplate->elements());
4992  if (elements->length() > 0 &&
4993  elements->map() != boilerplate->GetHeap()->fixed_cow_array_map()) {
4994  if (boilerplate->HasFastDoubleElements()) {
4995  *total_size += FixedDoubleArray::SizeFor(elements->length());
4996  } else if (boilerplate->HasFastObjectElements()) {
4997  Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements);
4998  int length = elements->length();
4999  for (int i = 0; i < length; i++) {
5000  if ((*max_properties)-- == 0) return false;
5001  Handle<Object> value(fast_elements->get(i));
5002  if (value->IsJSObject()) {
5003  Handle<JSObject> value_object = Handle<JSObject>::cast(value);
5004  if (!IsFastLiteral(value_object,
5005  max_depth - 1,
5006  max_properties,
5007  total_size)) {
5008  return false;
5009  }
5010  }
5011  }
5012  *total_size += FixedArray::SizeFor(length);
5013  } else {
5014  return false;
5015  }
5016  }
5017 
5018  Handle<FixedArray> properties(boilerplate->properties());
5019  if (properties->length() > 0) {
5020  return false;
5021  } else {
5022  int nof = boilerplate->map()->inobject_properties();
5023  for (int i = 0; i < nof; i++) {
5024  if ((*max_properties)-- == 0) return false;
5025  Handle<Object> value(boilerplate->InObjectPropertyAt(i));
5026  if (value->IsJSObject()) {
5027  Handle<JSObject> value_object = Handle<JSObject>::cast(value);
5028  if (!IsFastLiteral(value_object,
5029  max_depth - 1,
5030  max_properties,
5031  total_size)) {
5032  return false;
5033  }
5034  }
5035  }
5036  }
5037 
5038  *total_size += boilerplate->map()->instance_size();
5039  return true;
5040 }
5041 
5042 
5043 void HGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) {
5044  ASSERT(!HasStackOverflow());
5045  ASSERT(current_block() != NULL);
5046  ASSERT(current_block()->HasPredecessor());
5047  Handle<JSFunction> closure = function_state()->compilation_info()->closure();
5048  HValue* context = environment()->LookupContext();
5049  HInstruction* literal;
5050 
5051  // Check whether to use fast or slow deep-copying for boilerplate.
5052  int total_size = 0;
5053  int max_properties = HFastLiteral::kMaxLiteralProperties;
5054  Handle<Object> boilerplate(closure->literals()->get(expr->literal_index()));
5055  if (boilerplate->IsJSObject() &&
5056  IsFastLiteral(Handle<JSObject>::cast(boilerplate),
5058  &max_properties,
5059  &total_size)) {
5060  Handle<JSObject> boilerplate_object = Handle<JSObject>::cast(boilerplate);
5061  literal = new(zone()) HFastLiteral(context,
5062  boilerplate_object,
5063  total_size,
5064  expr->literal_index(),
5065  expr->depth());
5066  } else {
5067  literal = new(zone()) HObjectLiteral(context,
5068  expr->constant_properties(),
5069  expr->fast_elements(),
5070  expr->literal_index(),
5071  expr->depth(),
5072  expr->has_function());
5073  }
5074 
5075  // The object is expected in the bailout environment during computation
5076  // of the property values and is the value of the entire expression.
5077  PushAndAdd(literal);
5078 
5079  expr->CalculateEmitStore(zone());
5080 
5081  for (int i = 0; i < expr->properties()->length(); i++) {
5082  ObjectLiteral::Property* property = expr->properties()->at(i);
5083  if (property->IsCompileTimeValue()) continue;
5084 
5085  Literal* key = property->key();
5086  Expression* value = property->value();
5087 
5088  switch (property->kind()) {
5091  // Fall through.
5093  if (key->handle()->IsSymbol()) {
5094  if (property->emit_store()) {
5095  property->RecordTypeFeedback(oracle());
5096  CHECK_ALIVE(VisitForValue(value));
5097  HValue* value = Pop();
5098  Handle<Map> map = property->GetReceiverType();
5099  Handle<String> name = property->key()->AsPropertyName();
5100  HInstruction* store;
5101  if (map.is_null()) {
5102  // If we don't know the monomorphic type, do a generic store.
5103  CHECK_ALIVE(store = BuildStoreNamedGeneric(literal, name, value));
5104  } else {
5105 #if DEBUG
5106  Handle<JSFunction> setter;
5107  Handle<JSObject> holder;
5108  ASSERT(!LookupSetter(map, name, &setter, &holder));
5109 #endif
5110  CHECK_ALIVE(store = BuildStoreNamedMonomorphic(literal,
5111  name,
5112  value,
5113  map));
5114  }
5115  AddInstruction(store);
5116  if (store->HasObservableSideEffects()) AddSimulate(key->id());
5117  } else {
5118  CHECK_ALIVE(VisitForEffect(value));
5119  }
5120  break;
5121  }
5122  // Fall through.
5126  return Bailout("Object literal with complex property");
5127  default: UNREACHABLE();
5128  }
5129  }
5130 
5131  if (expr->has_function()) {
5132  // Return the result of the transformation to fast properties
5133  // instead of the original since this operation changes the map
5134  // of the object. This makes sure that the original object won't
5135  // be used by other optimized code before it is transformed
5136  // (e.g. because of code motion).
5137  HToFastProperties* result = new(zone()) HToFastProperties(Pop());
5138  AddInstruction(result);
5139  return ast_context()->ReturnValue(result);
5140  } else {
5141  return ast_context()->ReturnValue(Pop());
5142  }
5143 }
5144 
5145 
5146 void HGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
5147  ASSERT(!HasStackOverflow());
5148  ASSERT(current_block() != NULL);
5149  ASSERT(current_block()->HasPredecessor());
5150  ZoneList<Expression*>* subexprs = expr->values();
5151  int length = subexprs->length();
5152  HValue* context = environment()->LookupContext();
5153  HInstruction* literal;
5154 
5155  Handle<FixedArray> literals(environment()->closure()->literals());
5156  Handle<Object> raw_boilerplate(literals->get(expr->literal_index()));
5157 
5158  if (raw_boilerplate->IsUndefined()) {
5159  raw_boilerplate = Runtime::CreateArrayLiteralBoilerplate(
5160  isolate(), literals, expr->constant_elements());
5161  if (raw_boilerplate.is_null()) {
5162  return Bailout("array boilerplate creation failed");
5163  }
5164  literals->set(expr->literal_index(), *raw_boilerplate);
5165  if (JSObject::cast(*raw_boilerplate)->elements()->map() ==
5166  isolate()->heap()->fixed_cow_array_map()) {
5167  isolate()->counters()->cow_arrays_created_runtime()->Increment();
5168  }
5169  }
5170 
5171  Handle<JSObject> boilerplate = Handle<JSObject>::cast(raw_boilerplate);
5172  ElementsKind boilerplate_elements_kind =
5173  Handle<JSObject>::cast(boilerplate)->GetElementsKind();
5174 
5175  // Check whether to use fast or slow deep-copying for boilerplate.
5176  int total_size = 0;
5177  int max_properties = HFastLiteral::kMaxLiteralProperties;
5178  if (IsFastLiteral(boilerplate,
5180  &max_properties,
5181  &total_size)) {
5182  literal = new(zone()) HFastLiteral(context,
5183  boilerplate,
5184  total_size,
5185  expr->literal_index(),
5186  expr->depth());
5187  } else {
5188  literal = new(zone()) HArrayLiteral(context,
5189  boilerplate,
5190  length,
5191  expr->literal_index(),
5192  expr->depth());
5193  }
5194 
5195  // The array is expected in the bailout environment during computation
5196  // of the property values and is the value of the entire expression.
5197  PushAndAdd(literal);
5198 
5199  HLoadElements* elements = NULL;
5200 
5201  for (int i = 0; i < length; i++) {
5202  Expression* subexpr = subexprs->at(i);
5203  // If the subexpression is a literal or a simple materialized literal it
5204  // is already set in the cloned array.
5205  if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
5206 
5207  CHECK_ALIVE(VisitForValue(subexpr));
5208  HValue* value = Pop();
5209  if (!Smi::IsValid(i)) return Bailout("Non-smi key in array literal");
5210 
5211  // Pass in literal as dummy depedency, since the receiver always has
5212  // elements.
5213  elements = new(zone()) HLoadElements(literal, literal);
5214  AddInstruction(elements);
5215 
5216  HValue* key = AddInstruction(
5217  new(zone()) HConstant(Handle<Object>(Smi::FromInt(i)),
5219 
5220  switch (boilerplate_elements_kind) {
5221  case FAST_SMI_ELEMENTS:
5223  // Smi-only arrays need a smi check.
5224  AddInstruction(new(zone()) HCheckSmi(value));
5225  // Fall through.
5226  case FAST_ELEMENTS:
5227  case FAST_HOLEY_ELEMENTS:
5228  AddInstruction(new(zone()) HStoreKeyedFastElement(
5229  elements,
5230  key,
5231  value,
5232  boilerplate_elements_kind));
5233  break;
5234  case FAST_DOUBLE_ELEMENTS:
5236  AddInstruction(new(zone()) HStoreKeyedFastDoubleElement(elements,
5237  key,
5238  value));
5239  break;
5240  default:
5241  UNREACHABLE();
5242  break;
5243  }
5244 
5245  AddSimulate(expr->GetIdForElement(i));
5246  }
5247  return ast_context()->ReturnValue(Pop());
5248 }
5249 
5250 
5251 // Sets the lookup result and returns true if the load/store can be inlined.
5252 static bool ComputeLoadStoreField(Handle<Map> type,
5253  Handle<String> name,
5254  LookupResult* lookup,
5255  bool is_store) {
5256  // If we directly find a field, the access can be inlined.
5257  type->LookupDescriptor(NULL, *name, lookup);
5258  if (lookup->IsField()) return true;
5259 
5260  // For a load, we are out of luck if there is no such field.
5261  if (!is_store) return false;
5262 
5263  // 2nd chance: A store into a non-existent field can still be inlined if we
5264  // have a matching transition and some room left in the object.
5265  type->LookupTransition(NULL, *name, lookup);
5266  return lookup->IsTransitionToField(*type) &&
5267  (type->unused_property_fields() > 0);
5268 }
5269 
5270 
5271 static int ComputeLoadStoreFieldIndex(Handle<Map> type,
5272  Handle<String> name,
5273  LookupResult* lookup) {
5274  ASSERT(lookup->IsField() || lookup->IsTransitionToField(*type));
5275  if (lookup->IsField()) {
5276  return lookup->GetLocalFieldIndexFromMap(*type);
5277  } else {
5278  Map* transition = lookup->GetTransitionMapFromMap(*type);
5279  return transition->PropertyIndexFor(*name) - type->inobject_properties();
5280  }
5281 }
5282 
5283 
5284 HInstruction* HGraphBuilder::BuildStoreNamedField(HValue* object,
5285  Handle<String> name,
5286  HValue* value,
5287  Handle<Map> map,
5288  LookupResult* lookup,
5289  bool smi_and_map_check) {
5290  ASSERT(lookup->IsFound());
5291  if (smi_and_map_check) {
5292  AddInstruction(new(zone()) HCheckNonSmi(object));
5293  AddInstruction(HCheckMaps::NewWithTransitions(object, map, zone()));
5294  }
5295 
5296  // If the property does not exist yet, we have to check that it wasn't made
5297  // readonly or turned into a setter by some meanwhile modifications on the
5298  // prototype chain.
5299  if (!lookup->IsProperty() && map->prototype()->IsJSReceiver()) {
5300  Object* proto = map->prototype();
5301  // First check that the prototype chain isn't affected already.
5302  LookupResult proto_result(isolate());
5303  proto->Lookup(*name, &proto_result);
5304  if (proto_result.IsProperty()) {
5305  // If the inherited property could induce readonly-ness, bail out.
5306  if (proto_result.IsReadOnly() || !proto_result.IsCacheable()) {
5307  Bailout("improper object on prototype chain for store");
5308  return NULL;
5309  }
5310  // We only need to check up to the preexisting property.
5311  proto = proto_result.holder();
5312  } else {
5313  // Otherwise, find the top prototype.
5314  while (proto->GetPrototype()->IsJSObject()) proto = proto->GetPrototype();
5315  ASSERT(proto->GetPrototype()->IsNull());
5316  }
5317  ASSERT(proto->IsJSObject());
5318  AddInstruction(new(zone()) HCheckPrototypeMaps(
5319  Handle<JSObject>(JSObject::cast(map->prototype())),
5320  Handle<JSObject>(JSObject::cast(proto))));
5321  }
5322 
5323  int index = ComputeLoadStoreFieldIndex(map, name, lookup);
5324  bool is_in_object = index < 0;
5325  int offset = index * kPointerSize;
5326  if (index < 0) {
5327  // Negative property indices are in-object properties, indexed
5328  // from the end of the fixed part of the object.
5329  offset += map->instance_size();
5330  } else {
5331  offset += FixedArray::kHeaderSize;
5332  }
5333  HStoreNamedField* instr =
5334  new(zone()) HStoreNamedField(object, name, value, is_in_object, offset);
5335  if (lookup->IsTransitionToField(*map)) {
5336  Handle<Map> transition(lookup->GetTransitionMapFromMap(*map));
5337  instr->set_transition(transition);
5338  // TODO(fschneider): Record the new map type of the object in the IR to
5339  // enable elimination of redundant checks after the transition store.
5340  instr->SetGVNFlag(kChangesMaps);
5341  }
5342  return instr;
5343 }
5344 
5345 
5346 HInstruction* HGraphBuilder::BuildStoreNamedGeneric(HValue* object,
5347  Handle<String> name,
5348  HValue* value) {
5349  HValue* context = environment()->LookupContext();
5350  return new(zone()) HStoreNamedGeneric(
5351  context,
5352  object,
5353  name,
5354  value,
5355  function_strict_mode_flag());
5356 }
5357 
5358 
5359 HInstruction* HGraphBuilder::BuildCallSetter(HValue* object,
5360  HValue* value,
5361  Handle<Map> map,
5362  Handle<JSFunction> setter,
5363  Handle<JSObject> holder) {
5364  AddCheckConstantFunction(holder, object, map, true);
5365  AddInstruction(new(zone()) HPushArgument(object));
5366  AddInstruction(new(zone()) HPushArgument(value));
5367  return new(zone()) HCallConstantFunction(setter, 2);
5368 }
5369 
5370 
5371 HInstruction* HGraphBuilder::BuildStoreNamedMonomorphic(HValue* object,
5372  Handle<String> name,
5373  HValue* value,
5374  Handle<Map> map) {
5375  // Handle a store to a known field.
5376  LookupResult lookup(isolate());
5377  if (ComputeLoadStoreField(map, name, &lookup, true)) {
5378  // true = needs smi and map check.
5379  return BuildStoreNamedField(object, name, value, map, &lookup, true);
5380  }
5381 
5382  // No luck, do a generic store.
5383  return BuildStoreNamedGeneric(object, name, value);
5384 }
5385 
5386 
5387 void HGraphBuilder::HandlePolymorphicLoadNamedField(Property* expr,
5388  HValue* object,
5389  SmallMapList* types,
5390  Handle<String> name) {
5391  int count = 0;
5392  int previous_field_offset = 0;
5393  bool previous_field_is_in_object = false;
5394  bool is_monomorphic_field = true;
5395  Handle<Map> map;
5396  LookupResult lookup(isolate());
5397  for (int i = 0; i < types->length() && count < kMaxLoadPolymorphism; ++i) {
5398  map = types->at(i);
5399  if (ComputeLoadStoreField(map, name, &lookup, false)) {
5400  int index = ComputeLoadStoreFieldIndex(map, name, &lookup);
5401  bool is_in_object = index < 0;
5402  int offset = index * kPointerSize;
5403  if (index < 0) {
5404  // Negative property indices are in-object properties, indexed
5405  // from the end of the fixed part of the object.
5406  offset += map->instance_size();
5407  } else {
5408  offset += FixedArray::kHeaderSize;
5409  }
5410  if (count == 0) {
5411  previous_field_offset = offset;
5412  previous_field_is_in_object = is_in_object;
5413  } else if (is_monomorphic_field) {
5414  is_monomorphic_field = (offset == previous_field_offset) &&
5415  (is_in_object == previous_field_is_in_object);
5416  }
5417  ++count;
5418  }
5419  }
5420 
5421  // Use monomorphic load if property lookup results in the same field index
5422  // for all maps. Requires special map check on the set of all handled maps.
5423  AddInstruction(new(zone()) HCheckNonSmi(object));
5424  HInstruction* instr;
5425  if (count == types->length() && is_monomorphic_field) {
5426  AddInstruction(new(zone()) HCheckMaps(object, types, zone()));
5427  instr = BuildLoadNamedField(object, map, &lookup, false);
5428  } else {
5429  HValue* context = environment()->LookupContext();
5430  instr = new(zone()) HLoadNamedFieldPolymorphic(context,
5431  object,
5432  types,
5433  name,
5434  zone());
5435  }
5436 
5437  instr->set_position(expr->position());
5438  return ast_context()->ReturnInstruction(instr, expr->id());
5439 }
5440 
5441 
5442 void HGraphBuilder::HandlePolymorphicStoreNamedField(Assignment* expr,
5443  HValue* object,
5444  HValue* value,
5445  SmallMapList* types,
5446  Handle<String> name) {
5447  // TODO(ager): We should recognize when the prototype chains for different
5448  // maps are identical. In that case we can avoid repeatedly generating the
5449  // same prototype map checks.
5450  int count = 0;
5451  HBasicBlock* join = NULL;
5452  for (int i = 0; i < types->length() && count < kMaxStorePolymorphism; ++i) {
5453  Handle<Map> map = types->at(i);
5454  LookupResult lookup(isolate());
5455  if (ComputeLoadStoreField(map, name, &lookup, true)) {
5456  if (count == 0) {
5457  AddInstruction(new(zone()) HCheckNonSmi(object)); // Only needed once.
5458  join = graph()->CreateBasicBlock();
5459  }
5460  ++count;
5461  HBasicBlock* if_true = graph()->CreateBasicBlock();
5462  HBasicBlock* if_false = graph()->CreateBasicBlock();
5463  HCompareMap* compare =
5464  new(zone()) HCompareMap(object, map, if_true, if_false);
5465  current_block()->Finish(compare);
5466 
5467  set_current_block(if_true);
5468  HInstruction* instr;
5469  CHECK_ALIVE(instr =
5470  BuildStoreNamedField(object, name, value, map, &lookup, false));
5471  instr->set_position(expr->position());
5472  // Goto will add the HSimulate for the store.
5473  AddInstruction(instr);
5474  if (!ast_context()->IsEffect()) Push(value);
5475  current_block()->Goto(join);
5476 
5477  set_current_block(if_false);
5478  }
5479  }
5480 
5481  // Finish up. Unconditionally deoptimize if we've handled all the maps we
5482  // know about and do not want to handle ones we've never seen. Otherwise
5483  // use a generic IC.
5484  if (count == types->length() && FLAG_deoptimize_uncommon_cases) {
5486  } else {
5487  HInstruction* instr = BuildStoreNamedGeneric(object, name, value);
5488  instr->set_position(expr->position());
5489  AddInstruction(instr);
5490 
5491  if (join != NULL) {
5492  if (!ast_context()->IsEffect()) Push(value);
5493  current_block()->Goto(join);
5494  } else {
5495  // The HSimulate for the store should not see the stored value in
5496  // effect contexts (it is not materialized at expr->id() in the
5497  // unoptimized code).
5498  if (instr->HasObservableSideEffects()) {
5499  if (ast_context()->IsEffect()) {
5500  AddSimulate(expr->id());
5501  } else {
5502  Push(value);
5503  AddSimulate(expr->id());
5504  Drop(1);
5505  }
5506  }
5507  return ast_context()->ReturnValue(value);
5508  }
5509  }
5510 
5511  ASSERT(join != NULL);
5512  join->SetJoinId(expr->id());
5513  set_current_block(join);
5514  if (!ast_context()->IsEffect()) return ast_context()->ReturnValue(Pop());
5515 }
5516 
5517 
5518 void HGraphBuilder::HandlePropertyAssignment(Assignment* expr) {
5519  Property* prop = expr->target()->AsProperty();
5520  ASSERT(prop != NULL);
5521  expr->RecordTypeFeedback(oracle(), zone());
5522  CHECK_ALIVE(VisitForValue(prop->obj()));
5523 
5524  if (prop->key()->IsPropertyName()) {
5525  // Named store.
5526  CHECK_ALIVE(VisitForValue(expr->value()));
5527  HValue* value = environment()->ExpressionStackAt(0);
5528  HValue* object = environment()->ExpressionStackAt(1);
5529 
5530  Literal* key = prop->key()->AsLiteral();
5531  Handle<String> name = Handle<String>::cast(key->handle());
5532  ASSERT(!name.is_null());
5533 
5534  HInstruction* instr = NULL;
5535  SmallMapList* types = expr->GetReceiverTypes();
5536  bool monomorphic = expr->IsMonomorphic();
5537  Handle<Map> map;
5538  if (monomorphic) {
5539  map = types->first();
5540  if (map->is_dictionary_map()) monomorphic = false;
5541  }
5542  if (monomorphic) {
5543  Handle<JSFunction> setter;
5544  Handle<JSObject> holder;
5545  if (LookupSetter(map, name, &setter, &holder)) {
5546  AddCheckConstantFunction(holder, object, map, true);
5547  if (FLAG_inline_accessors && TryInlineSetter(setter, expr, value)) {
5548  return;
5549  }
5550  Drop(2);
5551  AddInstruction(new(zone()) HPushArgument(object));
5552  AddInstruction(new(zone()) HPushArgument(value));
5553  instr = new(zone()) HCallConstantFunction(setter, 2);
5554  } else {
5555  Drop(2);
5556  CHECK_ALIVE(instr = BuildStoreNamedMonomorphic(object,
5557  name,
5558  value,
5559  map));
5560  }
5561 
5562  } else if (types != NULL && types->length() > 1) {
5563  Drop(2);
5564  return HandlePolymorphicStoreNamedField(expr, object, value, types, name);
5565  } else {
5566  Drop(2);
5567  instr = BuildStoreNamedGeneric(object, name, value);
5568  }
5569 
5570  Push(value);
5571  instr->set_position(expr->position());
5572  AddInstruction(instr);
5573  if (instr->HasObservableSideEffects()) AddSimulate(expr->AssignmentId());
5574  return ast_context()->ReturnValue(Pop());
5575 
5576  } else {
5577  // Keyed store.
5578  CHECK_ALIVE(VisitForValue(prop->key()));
5579  CHECK_ALIVE(VisitForValue(expr->value()));
5580  HValue* value = Pop();
5581  HValue* key = Pop();
5582  HValue* object = Pop();
5583  bool has_side_effects = false;
5584  HandleKeyedElementAccess(object, key, value, expr, expr->AssignmentId(),
5585  expr->position(),
5586  true, // is_store
5587  &has_side_effects);
5588  Push(value);
5589  ASSERT(has_side_effects); // Stores always have side effects.
5590  AddSimulate(expr->AssignmentId());
5591  return ast_context()->ReturnValue(Pop());
5592  }
5593 }
5594 
5595 
5596 // Because not every expression has a position and there is not common
5597 // superclass of Assignment and CountOperation, we cannot just pass the
5598 // owning expression instead of position and ast_id separately.
5599 void HGraphBuilder::HandleGlobalVariableAssignment(Variable* var,
5600  HValue* value,
5601  int position,
5602  BailoutId ast_id) {
5603  LookupResult lookup(isolate());
5604  GlobalPropertyAccess type = LookupGlobalProperty(var, &lookup, true);
5605  if (type == kUseCell) {
5606  Handle<GlobalObject> global(info()->global_object());
5607  Handle<JSGlobalPropertyCell> cell(global->GetPropertyCell(&lookup));
5608  HInstruction* instr =
5609  new(zone()) HStoreGlobalCell(value, cell, lookup.GetPropertyDetails());
5610  instr->set_position(position);
5611  AddInstruction(instr);
5612  if (instr->HasObservableSideEffects()) AddSimulate(ast_id);
5613  } else {
5614  HValue* context = environment()->LookupContext();
5615  HGlobalObject* global_object = new(zone()) HGlobalObject(context);
5616  AddInstruction(global_object);
5617  HStoreGlobalGeneric* instr =
5618  new(zone()) HStoreGlobalGeneric(context,
5619  global_object,
5620  var->name(),
5621  value,
5622  function_strict_mode_flag());
5623  instr->set_position(position);
5624  AddInstruction(instr);
5625  ASSERT(instr->HasObservableSideEffects());
5626  if (instr->HasObservableSideEffects()) AddSimulate(ast_id);
5627  }
5628 }
5629 
5630 
5631 void HGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
5632  Expression* target = expr->target();
5633  VariableProxy* proxy = target->AsVariableProxy();
5634  Property* prop = target->AsProperty();
5635  ASSERT(proxy == NULL || prop == NULL);
5636 
5637  // We have a second position recorded in the FullCodeGenerator to have
5638  // type feedback for the binary operation.
5639  BinaryOperation* operation = expr->binary_operation();
5640 
5641  if (proxy != NULL) {
5642  Variable* var = proxy->var();
5643  if (var->mode() == LET) {
5644  return Bailout("unsupported let compound assignment");
5645  }
5646 
5647  CHECK_ALIVE(VisitForValue(operation));
5648 
5649  switch (var->location()) {
5650  case Variable::UNALLOCATED:
5651  HandleGlobalVariableAssignment(var,
5652  Top(),
5653  expr->position(),
5654  expr->AssignmentId());
5655  break;
5656 
5657  case Variable::PARAMETER:
5658  case Variable::LOCAL:
5659  if (var->mode() == CONST) {
5660  return Bailout("unsupported const compound assignment");
5661  }
5662  Bind(var, Top());
5663  break;
5664 
5665  case Variable::CONTEXT: {
5666  // Bail out if we try to mutate a parameter value in a function
5667  // using the arguments object. We do not (yet) correctly handle the
5668  // arguments property of the function.
5669  if (info()->scope()->arguments() != NULL) {
5670  // Parameters will be allocated to context slots. We have no
5671  // direct way to detect that the variable is a parameter so we do
5672  // a linear search of the parameter variables.
5673  int count = info()->scope()->num_parameters();
5674  for (int i = 0; i < count; ++i) {
5675  if (var == info()->scope()->parameter(i)) {
5676  Bailout(
5677  "assignment to parameter, function uses arguments object");
5678  }
5679  }
5680  }
5681 
5683 
5684  switch (var->mode()) {
5685  case LET:
5687  break;
5688  case CONST:
5689  return ast_context()->ReturnValue(Pop());
5690  case CONST_HARMONY:
5691  // This case is checked statically so no need to
5692  // perform checks here
5693  UNREACHABLE();
5694  default:
5696  }
5697 
5698  HValue* context = BuildContextChainWalk(var);
5699  HStoreContextSlot* instr =
5700  new(zone()) HStoreContextSlot(context, var->index(), mode, Top());
5701  AddInstruction(instr);
5702  if (instr->HasObservableSideEffects()) {
5703  AddSimulate(expr->AssignmentId());
5704  }
5705  break;
5706  }
5707 
5708  case Variable::LOOKUP:
5709  return Bailout("compound assignment to lookup slot");
5710  }
5711  return ast_context()->ReturnValue(Pop());
5712 
5713  } else if (prop != NULL) {
5714  prop->RecordTypeFeedback(oracle(), zone());
5715 
5716  if (prop->key()->IsPropertyName()) {
5717  // Named property.
5718  CHECK_ALIVE(VisitForValue(prop->obj()));
5719  HValue* object = Top();
5720 
5721  Handle<String> name = prop->key()->AsLiteral()->AsPropertyName();
5722  Handle<Map> map;
5723  HInstruction* load;
5724  bool monomorphic = prop->IsMonomorphic();
5725  if (monomorphic) {
5726  map = prop->GetReceiverTypes()->first();
5727  // We can't generate code for a monomorphic dict mode load so
5728  // just pretend it is not monomorphic.
5729  if (map->is_dictionary_map()) monomorphic = false;
5730  }
5731  if (monomorphic) {
5732  Handle<JSFunction> getter;
5733  Handle<JSObject> holder;
5734  if (LookupGetter(map, name, &getter, &holder)) {
5735  load = BuildCallGetter(object, map, getter, holder);
5736  } else {
5737  load = BuildLoadNamedMonomorphic(object, name, prop, map);
5738  }
5739  } else {
5740  load = BuildLoadNamedGeneric(object, name, prop);
5741  }
5742  PushAndAdd(load);
5743  if (load->HasObservableSideEffects()) AddSimulate(prop->LoadId());
5744 
5745  CHECK_ALIVE(VisitForValue(expr->value()));
5746  HValue* right = Pop();
5747  HValue* left = Pop();
5748 
5749  HInstruction* instr = BuildBinaryOperation(operation, left, right);
5750  PushAndAdd(instr);
5751  if (instr->HasObservableSideEffects()) AddSimulate(operation->id());
5752 
5753  HInstruction* store;
5754  if (!monomorphic) {
5755  // If we don't know the monomorphic type, do a generic store.
5756  CHECK_ALIVE(store = BuildStoreNamedGeneric(object, name, instr));
5757  } else {
5758  Handle<JSFunction> setter;
5759  Handle<JSObject> holder;
5760  if (LookupSetter(map, name, &setter, &holder)) {
5761  store = BuildCallSetter(object, instr, map, setter, holder);
5762  } else {
5763  CHECK_ALIVE(store = BuildStoreNamedMonomorphic(object,
5764  name,
5765  instr,
5766  map));
5767  }
5768  }
5769  AddInstruction(store);
5770  // Drop the simulated receiver and value. Return the value.
5771  Drop(2);
5772  Push(instr);
5773  if (store->HasObservableSideEffects()) AddSimulate(expr->AssignmentId());
5774  return ast_context()->ReturnValue(Pop());
5775 
5776  } else {
5777  // Keyed property.
5778  CHECK_ALIVE(VisitForValue(prop->obj()));
5779  CHECK_ALIVE(VisitForValue(prop->key()));
5780  HValue* obj = environment()->ExpressionStackAt(1);
5781  HValue* key = environment()->ExpressionStackAt(0);
5782 
5783  bool has_side_effects = false;
5784  HValue* load = HandleKeyedElementAccess(
5785  obj, key, NULL, prop, prop->LoadId(), RelocInfo::kNoPosition,
5786  false, // is_store
5787  &has_side_effects);
5788  Push(load);
5789  if (has_side_effects) AddSimulate(prop->LoadId());
5790 
5791 
5792  CHECK_ALIVE(VisitForValue(expr->value()));
5793  HValue* right = Pop();
5794  HValue* left = Pop();
5795 
5796  HInstruction* instr = BuildBinaryOperation(operation, left, right);
5797  PushAndAdd(instr);
5798  if (instr->HasObservableSideEffects()) AddSimulate(operation->id());
5799 
5800  expr->RecordTypeFeedback(oracle(), zone());
5801  HandleKeyedElementAccess(obj, key, instr, expr, expr->AssignmentId(),
5802  RelocInfo::kNoPosition,
5803  true, // is_store
5804  &has_side_effects);
5805 
5806  // Drop the simulated receiver, key, and value. Return the value.
5807  Drop(3);
5808  Push(instr);
5809  ASSERT(has_side_effects); // Stores always have side effects.
5810  AddSimulate(expr->AssignmentId());
5811  return ast_context()->ReturnValue(Pop());
5812  }
5813 
5814  } else {
5815  return Bailout("invalid lhs in compound assignment");
5816  }
5817 }
5818 
5819 
5820 void HGraphBuilder::VisitAssignment(Assignment* expr) {
5821  ASSERT(!HasStackOverflow());
5822  ASSERT(current_block() != NULL);
5823  ASSERT(current_block()->HasPredecessor());
5824  VariableProxy* proxy = expr->target()->AsVariableProxy();
5825  Property* prop = expr->target()->AsProperty();
5826  ASSERT(proxy == NULL || prop == NULL);
5827 
5828  if (expr->is_compound()) {
5829  HandleCompoundAssignment(expr);
5830  return;
5831  }
5832 
5833  if (prop != NULL) {
5834  HandlePropertyAssignment(expr);
5835  } else if (proxy != NULL) {
5836  Variable* var = proxy->var();
5837 
5838  if (var->mode() == CONST) {
5839  if (expr->op() != Token::INIT_CONST) {
5840  CHECK_ALIVE(VisitForValue(expr->value()));
5841  return ast_context()->ReturnValue(Pop());
5842  }
5843 
5844  if (var->IsStackAllocated()) {
5845  // We insert a use of the old value to detect unsupported uses of const
5846  // variables (e.g. initialization inside a loop).
5847  HValue* old_value = environment()->Lookup(var);
5848  AddInstruction(new(zone()) HUseConst(old_value));
5849  }
5850  } else if (var->mode() == CONST_HARMONY) {
5851  if (expr->op() != Token::INIT_CONST_HARMONY) {
5852  return Bailout("non-initializer assignment to const");
5853  }
5854  }
5855 
5856  if (proxy->IsArguments()) return Bailout("assignment to arguments");
5857 
5858  // Handle the assignment.
5859  switch (var->location()) {
5860  case Variable::UNALLOCATED:
5861  CHECK_ALIVE(VisitForValue(expr->value()));
5862  HandleGlobalVariableAssignment(var,
5863  Top(),
5864  expr->position(),
5865  expr->AssignmentId());
5866  return ast_context()->ReturnValue(Pop());
5867 
5868  case Variable::PARAMETER:
5869  case Variable::LOCAL: {
5870  // Perform an initialization check for let declared variables
5871  // or parameters.
5872  if (var->mode() == LET && expr->op() == Token::ASSIGN) {
5873  HValue* env_value = environment()->Lookup(var);
5874  if (env_value == graph()->GetConstantHole()) {
5875  return Bailout("assignment to let variable before initialization");
5876  }
5877  }
5878  // We do not allow the arguments object to occur in a context where it
5879  // may escape, but assignments to stack-allocated locals are
5880  // permitted.
5881  CHECK_ALIVE(VisitForValue(expr->value(), ARGUMENTS_ALLOWED));
5882  HValue* value = Pop();
5883  Bind(var, value);
5884  return ast_context()->ReturnValue(value);
5885  }
5886 
5887  case Variable::CONTEXT: {
5888  // Bail out if we try to mutate a parameter value in a function using
5889  // the arguments object. We do not (yet) correctly handle the
5890  // arguments property of the function.
5891  if (info()->scope()->arguments() != NULL) {
5892  // Parameters will rewrite to context slots. We have no direct way
5893  // to detect that the variable is a parameter.
5894  int count = info()->scope()->num_parameters();
5895  for (int i = 0; i < count; ++i) {
5896  if (var == info()->scope()->parameter(i)) {
5897  return Bailout("assignment to parameter in arguments object");
5898  }
5899  }
5900  }
5901 
5902  CHECK_ALIVE(VisitForValue(expr->value()));
5904  if (expr->op() == Token::ASSIGN) {
5905  switch (var->mode()) {
5906  case LET:
5908  break;
5909  case CONST:
5910  return ast_context()->ReturnValue(Pop());
5911  case CONST_HARMONY:
5912  // This case is checked statically so no need to
5913  // perform checks here
5914  UNREACHABLE();
5915  default:
5917  }
5918  } else if (expr->op() == Token::INIT_VAR ||
5919  expr->op() == Token::INIT_LET ||
5920  expr->op() == Token::INIT_CONST_HARMONY) {
5922  } else {
5923  ASSERT(expr->op() == Token::INIT_CONST);
5924 
5926  }
5927 
5928  HValue* context = BuildContextChainWalk(var);
5929  HStoreContextSlot* instr = new(zone()) HStoreContextSlot(
5930  context, var->index(), mode, Top());
5931  AddInstruction(instr);
5932  if (instr->HasObservableSideEffects()) {
5933  AddSimulate(expr->AssignmentId());
5934  }
5935  return ast_context()->ReturnValue(Pop());
5936  }
5937 
5938  case Variable::LOOKUP:
5939  return Bailout("assignment to LOOKUP variable");
5940  }
5941  } else {
5942  return Bailout("invalid left-hand side in assignment");
5943  }
5944 }
5945 
5946 
5947 void HGraphBuilder::VisitThrow(Throw* expr) {
5948  ASSERT(!HasStackOverflow());
5949  ASSERT(current_block() != NULL);
5950  ASSERT(current_block()->HasPredecessor());
5951  // We don't optimize functions with invalid left-hand sides in
5952  // assignments, count operations, or for-in. Consequently throw can
5953  // currently only occur in an effect context.
5954  ASSERT(ast_context()->IsEffect());
5955  CHECK_ALIVE(VisitForValue(expr->exception()));
5956 
5957  HValue* context = environment()->LookupContext();
5958  HValue* value = environment()->Pop();
5959  HThrow* instr = new(zone()) HThrow(context, value);
5960  instr->set_position(expr->position());
5961  AddInstruction(instr);
5962  AddSimulate(expr->id());
5963  current_block()->FinishExit(new(zone()) HAbnormalExit);
5965 }
5966 
5967 
5968 HLoadNamedField* HGraphBuilder::BuildLoadNamedField(HValue* object,
5969  Handle<Map> map,
5970  LookupResult* lookup,
5971  bool smi_and_map_check) {
5972  if (smi_and_map_check) {
5973  AddInstruction(new(zone()) HCheckNonSmi(object));
5974  AddInstruction(HCheckMaps::NewWithTransitions(object, map, zone()));
5975  }
5976 
5977  int index = lookup->GetLocalFieldIndexFromMap(*map);
5978  if (index < 0) {
5979  // Negative property indices are in-object properties, indexed
5980  // from the end of the fixed part of the object.
5981  int offset = (index * kPointerSize) + map->instance_size();
5982  return new(zone()) HLoadNamedField(object, true, offset);
5983  } else {
5984  // Non-negative property indices are in the properties array.
5985  int offset = (index * kPointerSize) + FixedArray::kHeaderSize;
5986  return new(zone()) HLoadNamedField(object, false, offset);
5987  }
5988 }
5989 
5990 
5991 HInstruction* HGraphBuilder::BuildLoadNamedGeneric(HValue* object,
5992  Handle<String> name,
5993  Property* expr) {
5994  if (expr->IsUninitialized() && !FLAG_always_opt) {
5995  AddInstruction(new(zone()) HSoftDeoptimize);
5997  }
5998  HValue* context = environment()->LookupContext();
5999  return new(zone()) HLoadNamedGeneric(context, object, name);
6000 }
6001 
6002 
6003 HInstruction* HGraphBuilder::BuildCallGetter(HValue* object,
6004  Handle<Map> map,
6005  Handle<JSFunction> getter,
6006  Handle<JSObject> holder) {
6007  AddCheckConstantFunction(holder, object, map, true);
6008  AddInstruction(new(zone()) HPushArgument(object));
6009  return new(zone()) HCallConstantFunction(getter, 1);
6010 }
6011 
6012 
6013 HInstruction* HGraphBuilder::BuildLoadNamedMonomorphic(HValue* object,
6014  Handle<String> name,
6015  Property* expr,
6016  Handle<Map> map) {
6017  // Handle a load from a known field.
6018  ASSERT(!map->is_dictionary_map());
6019  LookupResult lookup(isolate());
6020  map->LookupDescriptor(NULL, *name, &lookup);
6021  if (lookup.IsField()) {
6022  return BuildLoadNamedField(object, map, &lookup, true);
6023  }
6024 
6025  // Handle a load of a constant known function.
6026  if (lookup.IsConstantFunction()) {
6027  AddInstruction(new(zone()) HCheckNonSmi(object));
6028  AddInstruction(HCheckMaps::NewWithTransitions(object, map, zone()));
6029  Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*map));
6030  return new(zone()) HConstant(function, Representation::Tagged());
6031  }
6032 
6033  // No luck, do a generic load.
6034  return BuildLoadNamedGeneric(object, name, expr);
6035 }
6036 
6037 
6038 HInstruction* HGraphBuilder::BuildLoadKeyedGeneric(HValue* object,
6039  HValue* key) {
6040  HValue* context = environment()->LookupContext();
6041  return new(zone()) HLoadKeyedGeneric(context, object, key);
6042 }
6043 
6044 
6045 HInstruction* HGraphBuilder::BuildExternalArrayElementAccess(
6046  HValue* external_elements,
6047  HValue* checked_key,
6048  HValue* val,
6049  HValue* dependency,
6050  ElementsKind elements_kind,
6051  bool is_store) {
6052  if (is_store) {
6053  ASSERT(val != NULL);
6054  switch (elements_kind) {
6055  case EXTERNAL_PIXEL_ELEMENTS: {
6056  val = AddInstruction(new(zone()) HClampToUint8(val));
6057  break;
6058  }
6063  case EXTERNAL_INT_ELEMENTS:
6065  if (!val->representation().IsInteger32()) {
6066  val = AddInstruction(new(zone()) HChange(
6067  val,
6069  true, // Truncate to int32.
6070  false)); // Don't deoptimize undefined (irrelevant here).
6071  }
6072  break;
6073  }
6076  break;
6077  case FAST_SMI_ELEMENTS:
6078  case FAST_ELEMENTS:
6079  case FAST_DOUBLE_ELEMENTS:
6081  case FAST_HOLEY_ELEMENTS:
6083  case DICTIONARY_ELEMENTS:
6085  UNREACHABLE();
6086  break;
6087  }
6088  return new(zone()) HStoreKeyedSpecializedArrayElement(
6089  external_elements, checked_key, val, elements_kind);
6090  } else {
6091  ASSERT(val == NULL);
6092  HLoadKeyedSpecializedArrayElement* load =
6093  new(zone()) HLoadKeyedSpecializedArrayElement(
6094  external_elements, checked_key, dependency, elements_kind);
6095  if (FLAG_opt_safe_uint32_operations &&
6096  elements_kind == EXTERNAL_UNSIGNED_INT_ELEMENTS) {
6097  graph()->RecordUint32Instruction(load);
6098  }
6099  return load;
6100  }
6101 }
6102 
6103 
6104 HInstruction* HGraphBuilder::BuildFastElementAccess(HValue* elements,
6105  HValue* checked_key,
6106  HValue* val,
6107  HValue* load_dependency,
6108  ElementsKind elements_kind,
6109  bool is_store) {
6110  if (is_store) {
6111  ASSERT(val != NULL);
6112  switch (elements_kind) {
6113  case FAST_DOUBLE_ELEMENTS:
6115  return new(zone()) HStoreKeyedFastDoubleElement(
6116  elements, checked_key, val);
6117  case FAST_SMI_ELEMENTS:
6119  // Smi-only arrays need a smi check.
6120  AddInstruction(new(zone()) HCheckSmi(val));
6121  // Fall through.
6122  case FAST_ELEMENTS:
6123  case FAST_HOLEY_ELEMENTS:
6124  return new(zone()) HStoreKeyedFastElement(
6125  elements, checked_key, val, elements_kind);
6126  default:
6127  UNREACHABLE();
6128  return NULL;
6129  }
6130  }
6131  // It's an element load (!is_store).
6132  HoleCheckMode mode = IsFastPackedElementsKind(elements_kind) ?
6133  OMIT_HOLE_CHECK :
6135  if (IsFastDoubleElementsKind(elements_kind)) {
6136  return new(zone()) HLoadKeyedFastDoubleElement(elements, checked_key,
6137  load_dependency, mode);
6138  } else { // Smi or Object elements.
6139  return new(zone()) HLoadKeyedFastElement(elements, checked_key,
6140  load_dependency, elements_kind);
6141  }
6142 }
6143 
6144 
6145 HInstruction* HGraphBuilder::BuildMonomorphicElementAccess(HValue* object,
6146  HValue* key,
6147  HValue* val,
6148  HValue* dependency,
6149  Handle<Map> map,
6150  bool is_store) {
6151  HCheckMaps* mapcheck = new(zone()) HCheckMaps(object, map,
6152  zone(), dependency);
6153  AddInstruction(mapcheck);
6154  if (dependency) {
6155  mapcheck->ClearGVNFlag(kDependsOnElementsKind);
6156  }
6157  return BuildUncheckedMonomorphicElementAccess(object, key, val,
6158  mapcheck, map, is_store);
6159 }
6160 
6161 
6162 HInstruction* HGraphBuilder::BuildUncheckedMonomorphicElementAccess(
6163  HValue* object,
6164  HValue* key,
6165  HValue* val,
6166  HCheckMaps* mapcheck,
6167  Handle<Map> map,
6168  bool is_store) {
6169  // No GVNFlag is necessary for ElementsKind if there is an explicit dependency
6170  // on a HElementsTransition instruction. The flag can also be removed if the
6171  // map to check has FAST_HOLEY_ELEMENTS, since there can be no further
6172  // ElementsKind transitions. Finally, the dependency can be removed for stores
6173  // for FAST_ELEMENTS, since a transition to HOLEY elements won't change the
6174  // generated store code.
6175  if ((map->elements_kind() == FAST_HOLEY_ELEMENTS) ||
6176  (map->elements_kind() == FAST_ELEMENTS && is_store)) {
6177  mapcheck->ClearGVNFlag(kDependsOnElementsKind);
6178  }
6179  bool fast_smi_only_elements = map->has_fast_smi_elements();
6180  bool fast_elements = map->has_fast_object_elements();
6181  HInstruction* elements =
6182  AddInstruction(new(zone()) HLoadElements(object, mapcheck));
6183  if (is_store && (fast_elements || fast_smi_only_elements)) {
6184  HCheckMaps* check_cow_map = new(zone()) HCheckMaps(
6185  elements, isolate()->factory()->fixed_array_map(), zone());
6186  check_cow_map->ClearGVNFlag(kDependsOnElementsKind);
6187  AddInstruction(check_cow_map);
6188  }
6189  HInstruction* length = NULL;
6190  HInstruction* checked_key = NULL;
6191  if (map->has_external_array_elements()) {
6192  length = AddInstruction(new(zone()) HFixedArrayBaseLength(elements));
6193  checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length,
6194  ALLOW_SMI_KEY));
6195  HLoadExternalArrayPointer* external_elements =
6196  new(zone()) HLoadExternalArrayPointer(elements);
6197  AddInstruction(external_elements);
6198  return BuildExternalArrayElementAccess(
6199  external_elements, checked_key, val, mapcheck,
6200  map->elements_kind(), is_store);
6201  }
6202  ASSERT(fast_smi_only_elements ||
6203  fast_elements ||
6204  map->has_fast_double_elements());
6205  if (map->instance_type() == JS_ARRAY_TYPE) {
6206  length = AddInstruction(new(zone()) HJSArrayLength(object, mapcheck,
6207  HType::Smi()));
6208  } else {
6209  length = AddInstruction(new(zone()) HFixedArrayBaseLength(elements));
6210  }
6211  checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length,
6212  ALLOW_SMI_KEY));
6213  return BuildFastElementAccess(elements, checked_key, val, mapcheck,
6214  map->elements_kind(), is_store);
6215 }
6216 
6217 
6218 HInstruction* HGraphBuilder::TryBuildConsolidatedElementLoad(
6219  HValue* object,
6220  HValue* key,
6221  HValue* val,
6222  SmallMapList* maps) {
6223  // For polymorphic loads of similar elements kinds (i.e. all tagged or all
6224  // double), always use the "worst case" code without a transition. This is
6225  // much faster than transitioning the elements to the worst case, trading a
6226  // HTransitionElements for a HCheckMaps, and avoiding mutation of the array.
6227  bool has_double_maps = false;
6228  bool has_smi_or_object_maps = false;
6229  bool has_js_array_access = false;
6230  bool has_non_js_array_access = false;
6231  Handle<Map> most_general_consolidated_map;
6232  for (int i = 0; i < maps->length(); ++i) {
6233  Handle<Map> map = maps->at(i);
6234  // Don't allow mixing of JSArrays with JSObjects.
6235  if (map->instance_type() == JS_ARRAY_TYPE) {
6236  if (has_non_js_array_access) return NULL;
6237  has_js_array_access = true;
6238  } else if (has_js_array_access) {
6239  return NULL;
6240  } else {
6241  has_non_js_array_access = true;
6242  }
6243  // Don't allow mixed, incompatible elements kinds.
6244  if (map->has_fast_double_elements()) {
6245  if (has_smi_or_object_maps) return NULL;
6246  has_double_maps = true;
6247  } else if (map->has_fast_smi_or_object_elements()) {
6248  if (has_double_maps) return NULL;
6249  has_smi_or_object_maps = true;
6250  } else {
6251  return NULL;
6252  }
6253  // Remember the most general elements kind, the code for its load will
6254  // properly handle all of the more specific cases.
6255  if ((i == 0) || IsMoreGeneralElementsKindTransition(
6256  most_general_consolidated_map->elements_kind(),
6257  map->elements_kind())) {
6258  most_general_consolidated_map = map;
6259  }
6260  }
6261  if (!has_double_maps && !has_smi_or_object_maps) return NULL;
6262 
6263  HCheckMaps* check_maps = new(zone()) HCheckMaps(object, maps, zone());
6264  AddInstruction(check_maps);
6265  HInstruction* instr = BuildUncheckedMonomorphicElementAccess(
6266  object, key, val, check_maps, most_general_consolidated_map, false);
6267  return instr;
6268 }
6269 
6270 
6271 HValue* HGraphBuilder::HandlePolymorphicElementAccess(HValue* object,
6272  HValue* key,
6273  HValue* val,
6274  Expression* prop,
6275  BailoutId ast_id,
6276  int position,
6277  bool is_store,
6278  bool* has_side_effects) {
6279  *has_side_effects = false;
6280  AddInstruction(new(zone()) HCheckNonSmi(object));
6281  SmallMapList* maps = prop->GetReceiverTypes();
6282  bool todo_external_array = false;
6283 
6284  if (!is_store) {
6285  HInstruction* consolidated_load =
6286  TryBuildConsolidatedElementLoad(object, key, val, maps);
6287  if (consolidated_load != NULL) {
6288  AddInstruction(consolidated_load);
6289  *has_side_effects |= consolidated_load->HasObservableSideEffects();
6290  if (position != RelocInfo::kNoPosition) {
6291  consolidated_load->set_position(position);
6292  }
6293  return consolidated_load;
6294  }
6295  }
6296 
6297  static const int kNumElementTypes = kElementsKindCount;
6298  bool type_todo[kNumElementTypes];
6299  for (int i = 0; i < kNumElementTypes; ++i) {
6300  type_todo[i] = false;
6301  }
6302 
6303  // Elements_kind transition support.
6304  MapHandleList transition_target(maps->length());
6305  // Collect possible transition targets.
6306  MapHandleList possible_transitioned_maps(maps->length());
6307  for (int i = 0; i < maps->length(); ++i) {
6308  Handle<Map> map = maps->at(i);
6309  ElementsKind elements_kind = map->elements_kind();
6310  if (IsFastElementsKind(elements_kind) &&
6311  elements_kind != GetInitialFastElementsKind()) {
6312  possible_transitioned_maps.Add(map);
6313  }
6314  }
6315  // Get transition target for each map (NULL == no transition).
6316  for (int i = 0; i < maps->length(); ++i) {
6317  Handle<Map> map = maps->at(i);
6318  Handle<Map> transitioned_map =
6319  map->FindTransitionedMap(&possible_transitioned_maps);
6320  transition_target.Add(transitioned_map);
6321  }
6322 
6323  int num_untransitionable_maps = 0;
6324  Handle<Map> untransitionable_map;
6325  HTransitionElementsKind* transition = NULL;
6326  for (int i = 0; i < maps->length(); ++i) {
6327  Handle<Map> map = maps->at(i);
6328  ASSERT(map->IsMap());
6329  if (!transition_target.at(i).is_null()) {
6331  map->elements_kind(),
6332  transition_target.at(i)->elements_kind()));
6333  transition = new(zone()) HTransitionElementsKind(
6334  object, map, transition_target.at(i));
6335  AddInstruction(transition);
6336  } else {
6337  type_todo[map->elements_kind()] = true;
6338  if (IsExternalArrayElementsKind(map->elements_kind())) {
6339  todo_external_array = true;
6340  }
6341  num_untransitionable_maps++;
6342  untransitionable_map = map;
6343  }
6344  }
6345 
6346  // If only one map is left after transitioning, handle this case
6347  // monomorphically.
6348  if (num_untransitionable_maps == 1) {
6349  HInstruction* instr = NULL;
6350  if (untransitionable_map->has_slow_elements_kind()) {
6351  instr = AddInstruction(is_store ? BuildStoreKeyedGeneric(object, key, val)
6352  : BuildLoadKeyedGeneric(object, key));
6353  } else {
6354  instr = AddInstruction(BuildMonomorphicElementAccess(
6355  object, key, val, transition, untransitionable_map, is_store));
6356  }
6357  *has_side_effects |= instr->HasObservableSideEffects();
6358  if (position != RelocInfo::kNoPosition) instr->set_position(position);
6359  return is_store ? NULL : instr;
6360  }
6361 
6362  HInstruction* checkspec =
6364  HBasicBlock* join = graph()->CreateBasicBlock();
6365 
6366  HInstruction* elements_kind_instr =
6367  AddInstruction(new(zone()) HElementsKind(object));
6368  HCompareConstantEqAndBranch* elements_kind_branch = NULL;
6369  HInstruction* elements =
6370  AddInstruction(new(zone()) HLoadElements(object, checkspec));
6371  HLoadExternalArrayPointer* external_elements = NULL;
6372  HInstruction* checked_key = NULL;
6373 
6374  // Generated code assumes that FAST_* and DICTIONARY_ELEMENTS ElementsKinds
6375  // are handled before external arrays.
6380 
6381  for (ElementsKind elements_kind = FIRST_ELEMENTS_KIND;
6382  elements_kind <= LAST_ELEMENTS_KIND;
6383  elements_kind = ElementsKind(elements_kind + 1)) {
6384  // After having handled FAST_* and DICTIONARY_ELEMENTS, we need to add some
6385  // code that's executed for all external array cases.
6388  if (elements_kind == FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND
6389  && todo_external_array) {
6390  HInstruction* length =
6391  AddInstruction(new(zone()) HFixedArrayBaseLength(elements));
6392  checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length));
6393  external_elements = new(zone()) HLoadExternalArrayPointer(elements);
6394  AddInstruction(external_elements);
6395  }
6396  if (type_todo[elements_kind]) {
6397  HBasicBlock* if_true = graph()->CreateBasicBlock();
6398  HBasicBlock* if_false = graph()->CreateBasicBlock();
6399  elements_kind_branch = new(zone()) HCompareConstantEqAndBranch(
6400  elements_kind_instr, elements_kind, Token::EQ_STRICT);
6401  elements_kind_branch->SetSuccessorAt(0, if_true);
6402  elements_kind_branch->SetSuccessorAt(1, if_false);
6403  current_block()->Finish(elements_kind_branch);
6404 
6405  set_current_block(if_true);
6406  HInstruction* access;
6407  if (IsFastElementsKind(elements_kind)) {
6408  if (is_store && !IsFastDoubleElementsKind(elements_kind)) {
6409  AddInstruction(new(zone()) HCheckMaps(
6410  elements, isolate()->factory()->fixed_array_map(),
6411  zone(), elements_kind_branch));
6412  }
6413  // TODO(jkummerow): The need for these two blocks could be avoided
6414  // in one of two ways:
6415  // (1) Introduce ElementsKinds for JSArrays that are distinct from
6416  // those for fast objects.
6417  // (2) Put the common instructions into a third "join" block. This
6418  // requires additional AST IDs that we can deopt to from inside
6419  // that join block. They must be added to the Property class (when
6420  // it's a keyed property) and registered in the full codegen.
6421  HBasicBlock* if_jsarray = graph()->CreateBasicBlock();
6422  HBasicBlock* if_fastobject = graph()->CreateBasicBlock();
6423  HHasInstanceTypeAndBranch* typecheck =
6424  new(zone()) HHasInstanceTypeAndBranch(object, JS_ARRAY_TYPE);
6425  typecheck->SetSuccessorAt(0, if_jsarray);
6426  typecheck->SetSuccessorAt(1, if_fastobject);
6427  current_block()->Finish(typecheck);
6428 
6429  set_current_block(if_jsarray);
6430  HInstruction* length;
6431  length = AddInstruction(new(zone()) HJSArrayLength(object, typecheck,
6432  HType::Smi()));
6433  checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length,
6434  ALLOW_SMI_KEY));
6435  access = AddInstruction(BuildFastElementAccess(
6436  elements, checked_key, val, elements_kind_branch,
6437  elements_kind, is_store));
6438  if (!is_store) {
6439  Push(access);
6440  }
6441 
6442  *has_side_effects |= access->HasObservableSideEffects();
6443  if (position != -1) {
6444  access->set_position(position);
6445  }
6446  if_jsarray->Goto(join);
6447 
6448  set_current_block(if_fastobject);
6449  length = AddInstruction(new(zone()) HFixedArrayBaseLength(elements));
6450  checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length,
6451  ALLOW_SMI_KEY));
6452  access = AddInstruction(BuildFastElementAccess(
6453  elements, checked_key, val, elements_kind_branch,
6454  elements_kind, is_store));
6455  } else if (elements_kind == DICTIONARY_ELEMENTS) {
6456  if (is_store) {
6457  access = AddInstruction(BuildStoreKeyedGeneric(object, key, val));
6458  } else {
6459  access = AddInstruction(BuildLoadKeyedGeneric(object, key));
6460  }
6461  } else { // External array elements.
6462  access = AddInstruction(BuildExternalArrayElementAccess(
6463  external_elements, checked_key, val, elements_kind_branch,
6464  elements_kind, is_store));
6465  }
6466  *has_side_effects |= access->HasObservableSideEffects();
6467  if (position != RelocInfo::kNoPosition) access->set_position(position);
6468  if (!is_store) {
6469  Push(access);
6470  }
6471  current_block()->Goto(join);
6472  set_current_block(if_false);
6473  }
6474  }
6475 
6476  // Deopt if none of the cases matched.
6478  join->SetJoinId(ast_id);
6479  set_current_block(join);
6480  return is_store ? NULL : Pop();
6481 }
6482 
6483 
6484 HValue* HGraphBuilder::HandleKeyedElementAccess(HValue* obj,
6485  HValue* key,
6486  HValue* val,
6487  Expression* expr,
6488  BailoutId ast_id,
6489  int position,
6490  bool is_store,
6491  bool* has_side_effects) {
6492  ASSERT(!expr->IsPropertyName());
6493  HInstruction* instr = NULL;
6494  if (expr->IsMonomorphic()) {
6495  Handle<Map> map = expr->GetMonomorphicReceiverType();
6496  if (map->has_slow_elements_kind()) {
6497  instr = is_store ? BuildStoreKeyedGeneric(obj, key, val)
6498  : BuildLoadKeyedGeneric(obj, key);
6499  } else {
6500  AddInstruction(new(zone()) HCheckNonSmi(obj));
6501  instr = BuildMonomorphicElementAccess(obj, key, val, NULL, map, is_store);
6502  }
6503  } else if (expr->GetReceiverTypes() != NULL &&
6504  !expr->GetReceiverTypes()->is_empty()) {
6505  return HandlePolymorphicElementAccess(
6506  obj, key, val, expr, ast_id, position, is_store, has_side_effects);
6507  } else {
6508  if (is_store) {
6509  instr = BuildStoreKeyedGeneric(obj, key, val);
6510  } else {
6511  instr = BuildLoadKeyedGeneric(obj, key);
6512  }
6513  }
6514  if (position != RelocInfo::kNoPosition) instr->set_position(position);
6515  AddInstruction(instr);
6516  *has_side_effects = instr->HasObservableSideEffects();
6517  return instr;
6518 }
6519 
6520 
6521 HInstruction* HGraphBuilder::BuildStoreKeyedGeneric(HValue* object,
6522  HValue* key,
6523  HValue* value) {
6524  HValue* context = environment()->LookupContext();
6525  return new(zone()) HStoreKeyedGeneric(
6526  context,
6527  object,
6528  key,
6529  value,
6530  function_strict_mode_flag());
6531 }
6532 
6533 
6534 void HGraphBuilder::EnsureArgumentsArePushedForAccess() {
6535  // Outermost function already has arguments on the stack.
6536  if (function_state()->outer() == NULL) return;
6537 
6538  if (function_state()->arguments_pushed()) return;
6539 
6540  // Push arguments when entering inlined function.
6541  HEnterInlined* entry = function_state()->entry();
6542  entry->set_arguments_pushed();
6543 
6544  ZoneList<HValue*>* arguments_values = entry->arguments_values();
6545 
6546  HInstruction* insert_after = entry;
6547  for (int i = 0; i < arguments_values->length(); i++) {
6548  HValue* argument = arguments_values->at(i);
6549  HInstruction* push_argument = new(zone()) HPushArgument(argument);
6550  push_argument->InsertAfter(insert_after);
6551  insert_after = push_argument;
6552  }
6553 
6554  HArgumentsElements* arguments_elements =
6555  new(zone()) HArgumentsElements(true);
6556  arguments_elements->ClearFlag(HValue::kUseGVN);
6557  arguments_elements->InsertAfter(insert_after);
6558  function_state()->set_arguments_elements(arguments_elements);
6559 }
6560 
6561 
6562 bool HGraphBuilder::TryArgumentsAccess(Property* expr) {
6563  VariableProxy* proxy = expr->obj()->AsVariableProxy();
6564  if (proxy == NULL) return false;
6565  if (!proxy->var()->IsStackAllocated()) return false;
6566  if (!environment()->Lookup(proxy->var())->CheckFlag(HValue::kIsArguments)) {
6567  return false;
6568  }
6569 
6570  HInstruction* result = NULL;
6571  if (expr->key()->IsPropertyName()) {
6572  Handle<String> name = expr->key()->AsLiteral()->AsPropertyName();
6573  if (!name->IsEqualTo(CStrVector("length"))) return false;
6574 
6575  if (function_state()->outer() == NULL) {
6576  HInstruction* elements = AddInstruction(
6577  new(zone()) HArgumentsElements(false));
6578  result = new(zone()) HArgumentsLength(elements);
6579  } else {
6580  // Number of arguments without receiver.
6581  int argument_count = environment()->
6582  arguments_environment()->parameter_count() - 1;
6583  result = new(zone()) HConstant(
6584  Handle<Object>(Smi::FromInt(argument_count)),
6586  }
6587  } else {
6588  Push(graph()->GetArgumentsObject());
6589  VisitForValue(expr->key());
6590  if (HasStackOverflow() || current_block() == NULL) return true;
6591  HValue* key = Pop();
6592  Drop(1); // Arguments object.
6593  if (function_state()->outer() == NULL) {
6594  HInstruction* elements = AddInstruction(
6595  new(zone()) HArgumentsElements(false));
6596  HInstruction* length = AddInstruction(
6597  new(zone()) HArgumentsLength(elements));
6598  HInstruction* checked_key =
6599  AddInstruction(new(zone()) HBoundsCheck(key, length));
6600  result = new(zone()) HAccessArgumentsAt(elements, length, checked_key);
6601  } else {
6602  EnsureArgumentsArePushedForAccess();
6603 
6604  // Number of arguments without receiver.
6605  HInstruction* elements = function_state()->arguments_elements();
6606  int argument_count = environment()->
6607  arguments_environment()->parameter_count() - 1;
6608  HInstruction* length = AddInstruction(new(zone()) HConstant(
6609  Handle<Object>(Smi::FromInt(argument_count)),
6611  HInstruction* checked_key =
6612  AddInstruction(new(zone()) HBoundsCheck(key, length));
6613  result = new(zone()) HAccessArgumentsAt(elements, length, checked_key);
6614  }
6615  }
6616  ast_context()->ReturnInstruction(result, expr->id());
6617  return true;
6618 }
6619 
6620 
6621 void HGraphBuilder::VisitProperty(Property* expr) {
6622  ASSERT(!HasStackOverflow());
6623  ASSERT(current_block() != NULL);
6624  ASSERT(current_block()->HasPredecessor());
6625  expr->RecordTypeFeedback(oracle(), zone());
6626 
6627  if (TryArgumentsAccess(expr)) return;
6628 
6629  CHECK_ALIVE(VisitForValue(expr->obj()));
6630 
6631  HInstruction* instr = NULL;
6632  if (expr->AsProperty()->IsArrayLength()) {
6633  HValue* array = Pop();
6634  AddInstruction(new(zone()) HCheckNonSmi(array));
6635  HInstruction* mapcheck =
6637  instr = new(zone()) HJSArrayLength(array, mapcheck);
6638  } else if (expr->IsStringLength()) {
6639  HValue* string = Pop();
6640  AddInstruction(new(zone()) HCheckNonSmi(string));
6642  instr = new(zone()) HStringLength(string);
6643  } else if (expr->IsStringAccess()) {
6644  CHECK_ALIVE(VisitForValue(expr->key()));
6645  HValue* index = Pop();
6646  HValue* string = Pop();
6647  HValue* context = environment()->LookupContext();
6648  HStringCharCodeAt* char_code =
6649  BuildStringCharCodeAt(context, string, index);
6650  AddInstruction(char_code);
6651  instr = new(zone()) HStringCharFromCode(context, char_code);
6652 
6653  } else if (expr->IsFunctionPrototype()) {
6654  HValue* function = Pop();
6655  AddInstruction(new(zone()) HCheckNonSmi(function));
6656  instr = new(zone()) HLoadFunctionPrototype(function);
6657 
6658  } else if (expr->key()->IsPropertyName()) {
6659  Handle<String> name = expr->key()->AsLiteral()->AsPropertyName();
6660  SmallMapList* types = expr->GetReceiverTypes();
6661 
6662  bool monomorphic = expr->IsMonomorphic();
6663  Handle<Map> map;
6664  if (expr->IsMonomorphic()) {
6665  map = types->first();
6666  if (map->is_dictionary_map()) monomorphic = false;
6667  }
6668  if (monomorphic) {
6669  Handle<JSFunction> getter;
6670  Handle<JSObject> holder;
6671  if (LookupGetter(map, name, &getter, &holder)) {
6672  AddCheckConstantFunction(holder, Top(), map, true);
6673  if (FLAG_inline_accessors && TryInlineGetter(getter, expr)) return;
6674  AddInstruction(new(zone()) HPushArgument(Pop()));
6675  instr = new(zone()) HCallConstantFunction(getter, 1);
6676  } else {
6677  instr = BuildLoadNamedMonomorphic(Pop(), name, expr, map);
6678  }
6679  } else if (types != NULL && types->length() > 1) {
6680  return HandlePolymorphicLoadNamedField(expr, Pop(), types, name);
6681  } else {
6682  instr = BuildLoadNamedGeneric(Pop(), name, expr);
6683  }
6684 
6685  } else {
6686  CHECK_ALIVE(VisitForValue(expr->key()));
6687 
6688  HValue* key = Pop();
6689  HValue* obj = Pop();
6690 
6691  bool has_side_effects = false;
6692  HValue* load = HandleKeyedElementAccess(
6693  obj, key, NULL, expr, expr->id(), expr->position(),
6694  false, // is_store
6695  &has_side_effects);
6696  if (has_side_effects) {
6697  if (ast_context()->IsEffect()) {
6698  AddSimulate(expr->id());
6699  } else {
6700  Push(load);
6701  AddSimulate(expr->id());
6702  Drop(1);
6703  }
6704  }
6705  return ast_context()->ReturnValue(load);
6706  }
6707  instr->set_position(expr->position());
6708  return ast_context()->ReturnInstruction(instr, expr->id());
6709 }
6710 
6711 
6712 void HGraphBuilder::AddCheckConstantFunction(Handle<JSObject> holder,
6713  HValue* receiver,
6714  Handle<Map> receiver_map,
6715  bool smi_and_map_check) {
6716  // Constant functions have the nice property that the map will change if they
6717  // are overwritten. Therefore it is enough to check the map of the holder and
6718  // its prototypes.
6719  if (smi_and_map_check) {
6720  AddInstruction(new(zone()) HCheckNonSmi(receiver));
6721  AddInstruction(HCheckMaps::NewWithTransitions(receiver, receiver_map,
6722  zone()));
6723  }
6724  if (!holder.is_null()) {
6725  AddInstruction(new(zone()) HCheckPrototypeMaps(
6726  Handle<JSObject>(JSObject::cast(receiver_map->prototype())), holder));
6727  }
6728 }
6729 
6730 
6732  public:
6733  FunctionSorter() : index_(0), ticks_(0), ast_length_(0), src_length_(0) { }
6734  FunctionSorter(int index, int ticks, int ast_length, int src_length)
6735  : index_(index),
6736  ticks_(ticks),
6737  ast_length_(ast_length),
6738  src_length_(src_length) { }
6739 
6740  int index() const { return index_; }
6741  int ticks() const { return ticks_; }
6742  int ast_length() const { return ast_length_; }
6743  int src_length() const { return src_length_; }
6744 
6745  private:
6746  int index_;
6747  int ticks_;
6748  int ast_length_;
6749  int src_length_;
6750 };
6751 
6752 
6753 static int CompareHotness(void const* a, void const* b) {
6754  FunctionSorter const* function1 = reinterpret_cast<FunctionSorter const*>(a);
6755  FunctionSorter const* function2 = reinterpret_cast<FunctionSorter const*>(b);
6756  int diff = function1->ticks() - function2->ticks();
6757  if (diff != 0) return -diff;
6758  diff = function1->ast_length() - function2->ast_length();
6759  if (diff != 0) return diff;
6760  return function1->src_length() - function2->src_length();
6761 }
6762 
6763 
6764 void HGraphBuilder::HandlePolymorphicCallNamed(Call* expr,
6765  HValue* receiver,
6766  SmallMapList* types,
6767  Handle<String> name) {
6768  // TODO(ager): We should recognize when the prototype chains for different
6769  // maps are identical. In that case we can avoid repeatedly generating the
6770  // same prototype map checks.
6771  int argument_count = expr->arguments()->length() + 1; // Includes receiver.
6772  HBasicBlock* join = NULL;
6773  FunctionSorter order[kMaxCallPolymorphism];
6774  int ordered_functions = 0;
6775  for (int i = 0;
6776  i < types->length() && ordered_functions < kMaxCallPolymorphism;
6777  ++i) {
6778  Handle<Map> map = types->at(i);
6779  if (expr->ComputeTarget(map, name)) {
6780  order[ordered_functions++] =
6781  FunctionSorter(i,
6782  expr->target()->shared()->profiler_ticks(),
6783  InliningAstSize(expr->target()),
6784  expr->target()->shared()->SourceSize());
6785  }
6786  }
6787 
6788  qsort(reinterpret_cast<void*>(&order[0]),
6789  ordered_functions,
6790  sizeof(order[0]),
6791  &CompareHotness);
6792 
6793  for (int fn = 0; fn < ordered_functions; ++fn) {
6794  int i = order[fn].index();
6795  Handle<Map> map = types->at(i);
6796  if (fn == 0) {
6797  // Only needed once.
6798  AddInstruction(new(zone()) HCheckNonSmi(receiver));
6799  join = graph()->CreateBasicBlock();
6800  }
6801  HBasicBlock* if_true = graph()->CreateBasicBlock();
6802  HBasicBlock* if_false = graph()->CreateBasicBlock();
6803  HCompareMap* compare =
6804  new(zone()) HCompareMap(receiver, map, if_true, if_false);
6805  current_block()->Finish(compare);
6806 
6807  set_current_block(if_true);
6808  expr->ComputeTarget(map, name);
6809  AddCheckConstantFunction(expr->holder(), receiver, map, false);
6810  if (FLAG_trace_inlining && FLAG_polymorphic_inlining) {
6811  Handle<JSFunction> caller = info()->closure();
6812  SmartArrayPointer<char> caller_name =
6813  caller->shared()->DebugName()->ToCString();
6814  PrintF("Trying to inline the polymorphic call to %s from %s\n",
6815  *name->ToCString(),
6816  *caller_name);
6817  }
6818  if (FLAG_polymorphic_inlining && TryInlineCall(expr)) {
6819  // Trying to inline will signal that we should bailout from the
6820  // entire compilation by setting stack overflow on the visitor.
6821  if (HasStackOverflow()) return;
6822  } else {
6823  HCallConstantFunction* call =
6824  new(zone()) HCallConstantFunction(expr->target(), argument_count);
6825  call->set_position(expr->position());
6826  PreProcessCall(call);
6827  AddInstruction(call);
6828  if (!ast_context()->IsEffect()) Push(call);
6829  }
6830 
6831  if (current_block() != NULL) current_block()->Goto(join);
6832  set_current_block(if_false);
6833  }
6834 
6835  // Finish up. Unconditionally deoptimize if we've handled all the maps we
6836  // know about and do not want to handle ones we've never seen. Otherwise
6837  // use a generic IC.
6838  if (ordered_functions == types->length() && FLAG_deoptimize_uncommon_cases) {
6840  } else {
6841  HValue* context = environment()->LookupContext();
6842  HCallNamed* call = new(zone()) HCallNamed(context, name, argument_count);
6843  call->set_position(expr->position());
6844  PreProcessCall(call);
6845 
6846  if (join != NULL) {
6847  AddInstruction(call);
6848  if (!ast_context()->IsEffect()) Push(call);
6849  current_block()->Goto(join);
6850  } else {
6851  return ast_context()->ReturnInstruction(call, expr->id());
6852  }
6853  }
6854 
6855  // We assume that control flow is always live after an expression. So
6856  // even without predecessors to the join block, we set it as the exit
6857  // block and continue by adding instructions there.
6858  ASSERT(join != NULL);
6859  if (join->HasPredecessor()) {
6860  set_current_block(join);
6861  join->SetJoinId(expr->id());
6862  if (!ast_context()->IsEffect()) return ast_context()->ReturnValue(Pop());
6863  } else {
6865  }
6866 }
6867 
6868 
6869 void HGraphBuilder::TraceInline(Handle<JSFunction> target,
6870  Handle<JSFunction> caller,
6871  const char* reason) {
6872  if (FLAG_trace_inlining) {
6873  SmartArrayPointer<char> target_name =
6874  target->shared()->DebugName()->ToCString();
6875  SmartArrayPointer<char> caller_name =
6876  caller->shared()->DebugName()->ToCString();
6877  if (reason == NULL) {
6878  PrintF("Inlined %s called from %s.\n", *target_name, *caller_name);
6879  } else {
6880  PrintF("Did not inline %s called from %s (%s).\n",
6881  *target_name, *caller_name, reason);
6882  }
6883  }
6884 }
6885 
6886 
6887 static const int kNotInlinable = 1000000000;
6888 
6889 
6890 int HGraphBuilder::InliningAstSize(Handle<JSFunction> target) {
6891  if (!FLAG_use_inlining) return kNotInlinable;
6892 
6893  // Precondition: call is monomorphic and we have found a target with the
6894  // appropriate arity.
6895  Handle<JSFunction> caller = info()->closure();
6896  Handle<SharedFunctionInfo> target_shared(target->shared());
6897 
6898  // Do a quick check on source code length to avoid parsing large
6899  // inlining candidates.
6900  if (target_shared->SourceSize() >
6901  Min(FLAG_max_inlined_source_size, kUnlimitedMaxInlinedSourceSize)) {
6902  TraceInline(target, caller, "target text too big");
6903  return kNotInlinable;
6904  }
6905 
6906  // Target must be inlineable.
6907  if (!target->IsInlineable()) {
6908  TraceInline(target, caller, "target not inlineable");
6909  return kNotInlinable;
6910  }
6911  if (target_shared->dont_inline() || target_shared->dont_optimize()) {
6912  TraceInline(target, caller, "target contains unsupported syntax [early]");
6913  return kNotInlinable;
6914  }
6915 
6916  int nodes_added = target_shared->ast_node_count();
6917  return nodes_added;
6918 }
6919 
6920 
6921 bool HGraphBuilder::TryInline(CallKind call_kind,
6922  Handle<JSFunction> target,
6923  int arguments_count,
6924  HValue* implicit_return_value,
6925  BailoutId ast_id,
6926  BailoutId return_id,
6927  InliningKind inlining_kind) {
6928  int nodes_added = InliningAstSize(target);
6929  if (nodes_added == kNotInlinable) return false;
6930 
6931  Handle<JSFunction> caller = info()->closure();
6932 
6933  if (nodes_added > Min(FLAG_max_inlined_nodes, kUnlimitedMaxInlinedNodes)) {
6934  TraceInline(target, caller, "target AST is too large [early]");
6935  return false;
6936  }
6937 
6938  Handle<SharedFunctionInfo> target_shared(target->shared());
6939 
6940 #if !defined(V8_TARGET_ARCH_IA32)
6941  // Target must be able to use caller's context.
6942  CompilationInfo* outer_info = info();
6943  if (target->context() != outer_info->closure()->context() ||
6944  outer_info->scope()->contains_with() ||
6945  outer_info->scope()->num_heap_slots() > 0) {
6946  TraceInline(target, caller, "target requires context change");
6947  return false;
6948  }
6949 #endif
6950 
6951 
6952  // Don't inline deeper than kMaxInliningLevels calls.
6953  HEnvironment* env = environment();
6954  int current_level = 1;
6955  while (env->outer() != NULL) {
6956  if (current_level == Compiler::kMaxInliningLevels) {
6957  TraceInline(target, caller, "inline depth limit reached");
6958  return false;
6959  }
6960  if (env->outer()->frame_type() == JS_FUNCTION) {
6961  current_level++;
6962  }
6963  env = env->outer();
6964  }
6965 
6966  // Don't inline recursive functions.
6967  for (FunctionState* state = function_state();
6968  state != NULL;
6969  state = state->outer()) {
6970  if (state->compilation_info()->closure()->shared() == *target_shared) {
6971  TraceInline(target, caller, "target is recursive");
6972  return false;
6973  }
6974  }
6975 
6976  // We don't want to add more than a certain number of nodes from inlining.
6977  if (inlined_count_ > Min(FLAG_max_inlined_nodes_cumulative,
6978  kUnlimitedMaxInlinedNodesCumulative)) {
6979  TraceInline(target, caller, "cumulative AST node limit reached");
6980  return false;
6981  }
6982 
6983  // Parse and allocate variables.
6984  CompilationInfo target_info(target, zone());
6985  if (!ParserApi::Parse(&target_info, kNoParsingFlags) ||
6986  !Scope::Analyze(&target_info)) {
6987  if (target_info.isolate()->has_pending_exception()) {
6988  // Parse or scope error, never optimize this function.
6989  SetStackOverflow();
6990  target_shared->DisableOptimization("parse/scope error");
6991  }
6992  TraceInline(target, caller, "parse failure");
6993  return false;
6994  }
6995 
6996  if (target_info.scope()->num_heap_slots() > 0) {
6997  TraceInline(target, caller, "target has context-allocated variables");
6998  return false;
6999  }
7000  FunctionLiteral* function = target_info.function();
7001 
7002  // The following conditions must be checked again after re-parsing, because
7003  // earlier the information might not have been complete due to lazy parsing.
7004  nodes_added = function->ast_node_count();
7005  if (nodes_added > Min(FLAG_max_inlined_nodes, kUnlimitedMaxInlinedNodes)) {
7006  TraceInline(target, caller, "target AST is too large [late]");
7007  return false;
7008  }
7009  AstProperties::Flags* flags(function->flags());
7010  if (flags->Contains(kDontInline) || flags->Contains(kDontOptimize)) {
7011  TraceInline(target, caller, "target contains unsupported syntax [late]");
7012  return false;
7013  }
7014 
7015  // If the function uses the arguments object check that inlining of functions
7016  // with arguments object is enabled and the arguments-variable is
7017  // stack allocated.
7018  if (function->scope()->arguments() != NULL) {
7019  if (!FLAG_inline_arguments) {
7020  TraceInline(target, caller, "target uses arguments object");
7021  return false;
7022  }
7023 
7024  if (!function->scope()->arguments()->IsStackAllocated()) {
7025  TraceInline(target,
7026  caller,
7027  "target uses non-stackallocated arguments object");
7028  return false;
7029  }
7030  }
7031 
7032  // All declarations must be inlineable.
7033  ZoneList<Declaration*>* decls = target_info.scope()->declarations();
7034  int decl_count = decls->length();
7035  for (int i = 0; i < decl_count; ++i) {
7036  if (!decls->at(i)->IsInlineable()) {
7037  TraceInline(target, caller, "target has non-trivial declaration");
7038  return false;
7039  }
7040  }
7041 
7042  // Generate the deoptimization data for the unoptimized version of
7043  // the target function if we don't already have it.
7044  if (!target_shared->has_deoptimization_support()) {
7045  // Note that we compile here using the same AST that we will use for
7046  // generating the optimized inline code.
7047  target_info.EnableDeoptimizationSupport();
7048  if (!FullCodeGenerator::MakeCode(&target_info)) {
7049  TraceInline(target, caller, "could not generate deoptimization info");
7050  return false;
7051  }
7052  if (target_shared->scope_info() == ScopeInfo::Empty()) {
7053  // The scope info might not have been set if a lazily compiled
7054  // function is inlined before being called for the first time.
7055  Handle<ScopeInfo> target_scope_info =
7056  ScopeInfo::Create(target_info.scope(), zone());
7057  target_shared->set_scope_info(*target_scope_info);
7058  }
7059  target_shared->EnableDeoptimizationSupport(*target_info.code());
7060  Compiler::RecordFunctionCompilation(Logger::FUNCTION_TAG,
7061  &target_info,
7062  target_shared);
7063  }
7064 
7065  // ----------------------------------------------------------------
7066  // After this point, we've made a decision to inline this function (so
7067  // TryInline should always return true).
7068 
7069  // Save the pending call context and type feedback oracle. Set up new ones
7070  // for the inlined function.
7071  ASSERT(target_shared->has_deoptimization_support());
7072  Handle<Code> unoptimized_code(target_shared->code());
7073  TypeFeedbackOracle target_oracle(
7074  unoptimized_code,
7075  Handle<Context>(target->context()->native_context()),
7076  isolate(),
7077  zone());
7078  // The function state is new-allocated because we need to delete it
7079  // in two different places.
7080  FunctionState* target_state = new FunctionState(
7081  this, &target_info, &target_oracle, inlining_kind);
7082 
7083  HConstant* undefined = graph()->GetConstantUndefined();
7084  HEnvironment* inner_env =
7085  environment()->CopyForInlining(target,
7086  arguments_count,
7087  function,
7088  undefined,
7089  call_kind,
7090  function_state()->inlining_kind());
7091 #ifdef V8_TARGET_ARCH_IA32
7092  // IA32 only, overwrite the caller's context in the deoptimization
7093  // environment with the correct one.
7094  //
7095  // TODO(kmillikin): implement the same inlining on other platforms so we
7096  // can remove the unsightly ifdefs in this function.
7097  HConstant* context =
7098  new(zone()) HConstant(Handle<Context>(target->context()),
7100  AddInstruction(context);
7101  inner_env->BindContext(context);
7102 #endif
7103 
7104  AddSimulate(return_id);
7105  current_block()->UpdateEnvironment(inner_env);
7106 
7107  ZoneList<HValue*>* arguments_values = NULL;
7108 
7109  // If the function uses arguments copy current arguments values
7110  // to use them for materialization.
7111  if (function->scope()->arguments() != NULL) {
7112  HEnvironment* arguments_env = inner_env->arguments_environment();
7113  int arguments_count = arguments_env->parameter_count();
7114  arguments_values = new(zone()) ZoneList<HValue*>(arguments_count, zone());
7115  for (int i = 0; i < arguments_count; i++) {
7116  arguments_values->Add(arguments_env->Lookup(i), zone());
7117  }
7118  }
7119 
7120  HEnterInlined* enter_inlined =
7121  new(zone()) HEnterInlined(target,
7122  arguments_count,
7123  function,
7124  call_kind,
7126  function->scope()->arguments(),
7127  arguments_values);
7128  function_state()->set_entry(enter_inlined);
7129  AddInstruction(enter_inlined);
7130 
7131  // If the function uses arguments object create and bind one.
7132  if (function->scope()->arguments() != NULL) {
7133  ASSERT(function->scope()->arguments()->IsStackAllocated());
7134  inner_env->Bind(function->scope()->arguments(),
7135  graph()->GetArgumentsObject());
7136  }
7137 
7138 
7139  VisitDeclarations(target_info.scope()->declarations());
7140  VisitStatements(function->body());
7141  if (HasStackOverflow()) {
7142  // Bail out if the inline function did, as we cannot residualize a call
7143  // instead.
7144  TraceInline(target, caller, "inline graph construction failed");
7145  target_shared->DisableOptimization("inlining bailed out");
7146  inline_bailout_ = true;
7147  delete target_state;
7148  return true;
7149  }
7150 
7151  // Update inlined nodes count.
7152  inlined_count_ += nodes_added;
7153 
7154  ASSERT(unoptimized_code->kind() == Code::FUNCTION);
7155  Handle<Object> maybe_type_info(unoptimized_code->type_feedback_info());
7156  Handle<TypeFeedbackInfo> type_info(
7157  Handle<TypeFeedbackInfo>::cast(maybe_type_info));
7158  graph()->update_type_change_checksum(type_info->own_type_change_checksum());
7159 
7160  TraceInline(target, caller, NULL);
7161 
7162  if (current_block() != NULL) {
7163  FunctionState* state = function_state();
7164  if (state->inlining_kind() == CONSTRUCT_CALL_RETURN) {
7165  // Falling off the end of an inlined construct call. In a test context the
7166  // return value will always evaluate to true, in a value context the
7167  // return value is the newly allocated receiver.
7168  if (call_context()->IsTest()) {
7169  current_block()->Goto(inlined_test_context()->if_true(), state);
7170  } else if (call_context()->IsEffect()) {
7171  current_block()->Goto(function_return(), state);
7172  } else {
7173  ASSERT(call_context()->IsValue());
7174  current_block()->AddLeaveInlined(implicit_return_value, state);
7175  }
7176  } else if (state->inlining_kind() == SETTER_CALL_RETURN) {
7177  // Falling off the end of an inlined setter call. The returned value is
7178  // never used, the value of an assignment is always the value of the RHS
7179  // of the assignment.
7180  if (call_context()->IsTest()) {
7181  inlined_test_context()->ReturnValue(implicit_return_value);
7182  } else if (call_context()->IsEffect()) {
7183  current_block()->Goto(function_return(), state);
7184  } else {
7185  ASSERT(call_context()->IsValue());
7186  current_block()->AddLeaveInlined(implicit_return_value, state);
7187  }
7188  } else {
7189  // Falling off the end of a normal inlined function. This basically means
7190  // returning undefined.
7191  if (call_context()->IsTest()) {
7192  current_block()->Goto(inlined_test_context()->if_false(), state);
7193  } else if (call_context()->IsEffect()) {
7194  current_block()->Goto(function_return(), state);
7195  } else {
7196  ASSERT(call_context()->IsValue());
7197  current_block()->AddLeaveInlined(undefined, state);
7198  }
7199  }
7200  }
7201 
7202  // Fix up the function exits.
7203  if (inlined_test_context() != NULL) {
7204  HBasicBlock* if_true = inlined_test_context()->if_true();
7205  HBasicBlock* if_false = inlined_test_context()->if_false();
7206 
7207  // Pop the return test context from the expression context stack.
7208  ASSERT(ast_context() == inlined_test_context());
7209  ClearInlinedTestContext();
7210  delete target_state;
7211 
7212  // Forward to the real test context.
7213  if (if_true->HasPredecessor()) {
7214  if_true->SetJoinId(ast_id);
7215  HBasicBlock* true_target = TestContext::cast(ast_context())->if_true();
7216  if_true->Goto(true_target, function_state());
7217  }
7218  if (if_false->HasPredecessor()) {
7219  if_false->SetJoinId(ast_id);
7220  HBasicBlock* false_target = TestContext::cast(ast_context())->if_false();
7221  if_false->Goto(false_target, function_state());
7222  }
7224  return true;
7225 
7226  } else if (function_return()->HasPredecessor()) {
7227  function_return()->SetJoinId(ast_id);
7228  set_current_block(function_return());
7229  } else {
7231  }
7232  delete target_state;
7233  return true;
7234 }
7235 
7236 
7237 bool HGraphBuilder::TryInlineCall(Call* expr, bool drop_extra) {
7238  // The function call we are inlining is a method call if the call
7239  // is a property call.
7240  CallKind call_kind = (expr->expression()->AsProperty() == NULL)
7242  : CALL_AS_METHOD;
7243 
7244  return TryInline(call_kind,
7245  expr->target(),
7246  expr->arguments()->length(),
7247  NULL,
7248  expr->id(),
7249  expr->ReturnId(),
7250  drop_extra ? DROP_EXTRA_ON_RETURN : NORMAL_RETURN);
7251 }
7252 
7253 
7254 bool HGraphBuilder::TryInlineConstruct(CallNew* expr,
7255  HValue* implicit_return_value) {
7256  return TryInline(CALL_AS_FUNCTION,
7257  expr->target(),
7258  expr->arguments()->length(),
7259  implicit_return_value,
7260  expr->id(),
7261  expr->ReturnId(),
7263 }
7264 
7265 
7266 bool HGraphBuilder::TryInlineGetter(Handle<JSFunction> getter,
7267  Property* prop) {
7268  return TryInline(CALL_AS_METHOD,
7269  getter,
7270  0,
7271  NULL,
7272  prop->id(),
7273  prop->LoadId(),
7275 }
7276 
7277 
7278 bool HGraphBuilder::TryInlineSetter(Handle<JSFunction> setter,
7279  Assignment* assignment,
7280  HValue* implicit_return_value) {
7281  return TryInline(CALL_AS_METHOD,
7282  setter,
7283  1,
7284  implicit_return_value,
7285  assignment->id(),
7286  assignment->AssignmentId(),
7288 }
7289 
7290 
7291 bool HGraphBuilder::TryInlineBuiltinFunctionCall(Call* expr, bool drop_extra) {
7292  if (!expr->target()->shared()->HasBuiltinFunctionId()) return false;
7293  BuiltinFunctionId id = expr->target()->shared()->builtin_function_id();
7294  switch (id) {
7295  case kMathRound:
7296  case kMathAbs:
7297  case kMathSqrt:
7298  case kMathLog:
7299  case kMathSin:
7300  case kMathCos:
7301  case kMathTan:
7302  if (expr->arguments()->length() == 1) {
7303  HValue* argument = Pop();
7304  HValue* context = environment()->LookupContext();
7305  Drop(1); // Receiver.
7306  HUnaryMathOperation* op =
7307  new(zone()) HUnaryMathOperation(context, argument, id);
7308  op->set_position(expr->position());
7309  if (drop_extra) Drop(1); // Optionally drop the function.
7310  ast_context()->ReturnInstruction(op, expr->id());
7311  return true;
7312  }
7313  break;
7314  default:
7315  // Not supported for inlining yet.
7316  break;
7317  }
7318  return false;
7319 }
7320 
7321 
7322 bool HGraphBuilder::TryInlineBuiltinMethodCall(Call* expr,
7323  HValue* receiver,
7324  Handle<Map> receiver_map,
7325  CheckType check_type) {
7326  ASSERT(check_type != RECEIVER_MAP_CHECK || !receiver_map.is_null());
7327  // Try to inline calls like Math.* as operations in the calling function.
7328  if (!expr->target()->shared()->HasBuiltinFunctionId()) return false;
7329  BuiltinFunctionId id = expr->target()->shared()->builtin_function_id();
7330  int argument_count = expr->arguments()->length() + 1; // Plus receiver.
7331  switch (id) {
7332  case kStringCharCodeAt:
7333  case kStringCharAt:
7334  if (argument_count == 2 && check_type == STRING_CHECK) {
7335  HValue* index = Pop();
7336  HValue* string = Pop();
7337  HValue* context = environment()->LookupContext();
7338  ASSERT(!expr->holder().is_null());
7339  AddInstruction(new(zone()) HCheckPrototypeMaps(
7340  oracle()->GetPrototypeForPrimitiveCheck(STRING_CHECK),
7341  expr->holder()));
7342  HStringCharCodeAt* char_code =
7343  BuildStringCharCodeAt(context, string, index);
7344  if (id == kStringCharCodeAt) {
7345  ast_context()->ReturnInstruction(char_code, expr->id());
7346  return true;
7347  }
7348  AddInstruction(char_code);
7349  HStringCharFromCode* result =
7350  new(zone()) HStringCharFromCode(context, char_code);
7351  ast_context()->ReturnInstruction(result, expr->id());
7352  return true;
7353  }
7354  break;
7355  case kMathRound:
7356  case kMathFloor:
7357  case kMathAbs:
7358  case kMathSqrt:
7359  case kMathLog:
7360  case kMathSin:
7361  case kMathCos:
7362  case kMathTan:
7363  if (argument_count == 2 && check_type == RECEIVER_MAP_CHECK) {
7364  AddCheckConstantFunction(expr->holder(), receiver, receiver_map, true);
7365  HValue* argument = Pop();
7366  HValue* context = environment()->LookupContext();
7367  Drop(1); // Receiver.
7368  HUnaryMathOperation* op =
7369  new(zone()) HUnaryMathOperation(context, argument, id);
7370  op->set_position(expr->position());
7371  ast_context()->ReturnInstruction(op, expr->id());
7372  return true;
7373  }
7374  break;
7375  case kMathPow:
7376  if (argument_count == 3 && check_type == RECEIVER_MAP_CHECK) {
7377  AddCheckConstantFunction(expr->holder(), receiver, receiver_map, true);
7378  HValue* right = Pop();
7379  HValue* left = Pop();
7380  Pop(); // Pop receiver.
7381  HValue* context = environment()->LookupContext();
7382  HInstruction* result = NULL;
7383  // Use sqrt() if exponent is 0.5 or -0.5.
7384  if (right->IsConstant() && HConstant::cast(right)->HasDoubleValue()) {
7385  double exponent = HConstant::cast(right)->DoubleValue();
7386  if (exponent == 0.5) {
7387  result =
7388  new(zone()) HUnaryMathOperation(context, left, kMathPowHalf);
7389  } else if (exponent == -0.5) {
7390  HConstant* double_one =
7391  new(zone()) HConstant(Handle<Object>(Smi::FromInt(1)),
7393  AddInstruction(double_one);
7394  HUnaryMathOperation* square_root =
7395  new(zone()) HUnaryMathOperation(context, left, kMathPowHalf);
7396  AddInstruction(square_root);
7397  // MathPowHalf doesn't have side effects so there's no need for
7398  // an environment simulation here.
7399  ASSERT(!square_root->HasObservableSideEffects());
7400  result = new(zone()) HDiv(context, double_one, square_root);
7401  } else if (exponent == 2.0) {
7402  result = new(zone()) HMul(context, left, left);
7403  }
7404  } else if (right->IsConstant() &&
7405  HConstant::cast(right)->HasInteger32Value() &&
7406  HConstant::cast(right)->Integer32Value() == 2) {
7407  result = new(zone()) HMul(context, left, left);
7408  }
7409 
7410  if (result == NULL) {
7411  result = new(zone()) HPower(left, right);
7412  }
7413  ast_context()->ReturnInstruction(result, expr->id());
7414  return true;
7415  }
7416  break;
7417  case kMathRandom:
7418  if (argument_count == 1 && check_type == RECEIVER_MAP_CHECK) {
7419  AddCheckConstantFunction(expr->holder(), receiver, receiver_map, true);
7420  Drop(1); // Receiver.
7421  HValue* context = environment()->LookupContext();
7422  HGlobalObject* global_object = new(zone()) HGlobalObject(context);
7423  AddInstruction(global_object);
7424  HRandom* result = new(zone()) HRandom(global_object);
7425  ast_context()->ReturnInstruction(result, expr->id());
7426  return true;
7427  }
7428  break;
7429  case kMathMax:
7430  case kMathMin:
7431  if (argument_count == 3 && check_type == RECEIVER_MAP_CHECK) {
7432  AddCheckConstantFunction(expr->holder(), receiver, receiver_map, true);
7433  HValue* right = Pop();
7434  HValue* left = Pop();
7435  Drop(1); // Receiver.
7436  HValue* context = environment()->LookupContext();
7437  HMathMinMax::Operation op = (id == kMathMin) ? HMathMinMax::kMathMin
7439  HMathMinMax* result = new(zone()) HMathMinMax(context, left, right, op);
7440  ast_context()->ReturnInstruction(result, expr->id());
7441  return true;
7442  }
7443  break;
7444  default:
7445  // Not yet supported for inlining.
7446  break;
7447  }
7448  return false;
7449 }
7450 
7451 
7452 bool HGraphBuilder::TryCallApply(Call* expr) {
7453  Expression* callee = expr->expression();
7454  Property* prop = callee->AsProperty();
7455  ASSERT(prop != NULL);
7456 
7457  if (!expr->IsMonomorphic() || expr->check_type() != RECEIVER_MAP_CHECK) {
7458  return false;
7459  }
7460  Handle<Map> function_map = expr->GetReceiverTypes()->first();
7461  if (function_map->instance_type() != JS_FUNCTION_TYPE ||
7462  !expr->target()->shared()->HasBuiltinFunctionId() ||
7463  expr->target()->shared()->builtin_function_id() != kFunctionApply) {
7464  return false;
7465  }
7466 
7467  if (info()->scope()->arguments() == NULL) return false;
7468 
7469  ZoneList<Expression*>* args = expr->arguments();
7470  if (args->length() != 2) return false;
7471 
7472  VariableProxy* arg_two = args->at(1)->AsVariableProxy();
7473  if (arg_two == NULL || !arg_two->var()->IsStackAllocated()) return false;
7474  HValue* arg_two_value = environment()->Lookup(arg_two->var());
7475  if (!arg_two_value->CheckFlag(HValue::kIsArguments)) return false;
7476 
7477  // Found pattern f.apply(receiver, arguments).
7478  VisitForValue(prop->obj());
7479  if (HasStackOverflow() || current_block() == NULL) return true;
7480  HValue* function = Top();
7481  AddCheckConstantFunction(expr->holder(), function, function_map, true);
7482  Drop(1);
7483 
7484  VisitForValue(args->at(0));
7485  if (HasStackOverflow() || current_block() == NULL) return true;
7486  HValue* receiver = Pop();
7487 
7488  if (function_state()->outer() == NULL) {
7489  HInstruction* elements = AddInstruction(
7490  new(zone()) HArgumentsElements(false));
7491  HInstruction* length =
7492  AddInstruction(new(zone()) HArgumentsLength(elements));
7493  HValue* wrapped_receiver =
7494  AddInstruction(new(zone()) HWrapReceiver(receiver, function));
7495  HInstruction* result =
7496  new(zone()) HApplyArguments(function,
7497  wrapped_receiver,
7498  length,
7499  elements);
7500  result->set_position(expr->position());
7501  ast_context()->ReturnInstruction(result, expr->id());
7502  return true;
7503  } else {
7504  // We are inside inlined function and we know exactly what is inside
7505  // arguments object.
7506  HValue* context = environment()->LookupContext();
7507 
7508  HValue* wrapped_receiver =
7509  AddInstruction(new(zone()) HWrapReceiver(receiver, function));
7510  PushAndAdd(new(zone()) HPushArgument(wrapped_receiver));
7511 
7512  HEnvironment* arguments_env = environment()->arguments_environment();
7513 
7514  int parameter_count = arguments_env->parameter_count();
7515  for (int i = 1; i < arguments_env->parameter_count(); i++) {
7516  PushAndAdd(new(zone()) HPushArgument(arguments_env->Lookup(i)));
7517  }
7518 
7519  HInvokeFunction* call = new(zone()) HInvokeFunction(
7520  context,
7521  function,
7522  parameter_count);
7523  Drop(parameter_count);
7524  call->set_position(expr->position());
7525  ast_context()->ReturnInstruction(call, expr->id());
7526  return true;
7527  }
7528 }
7529 
7530 
7531 void HGraphBuilder::VisitCall(Call* expr) {
7532  ASSERT(!HasStackOverflow());
7533  ASSERT(current_block() != NULL);
7534  ASSERT(current_block()->HasPredecessor());
7535  Expression* callee = expr->expression();
7536  int argument_count = expr->arguments()->length() + 1; // Plus receiver.
7537  HInstruction* call = NULL;
7538 
7539  Property* prop = callee->AsProperty();
7540  if (prop != NULL) {
7541  if (!prop->key()->IsPropertyName()) {
7542  // Keyed function call.
7543  CHECK_ALIVE(VisitArgument(prop->obj()));
7544 
7545  CHECK_ALIVE(VisitForValue(prop->key()));
7546  // Push receiver and key like the non-optimized code generator expects it.
7547  HValue* key = Pop();
7548  HValue* receiver = Pop();
7549  Push(key);
7550  Push(receiver);
7551 
7552  CHECK_ALIVE(VisitArgumentList(expr->arguments()));
7553 
7554  HValue* context = environment()->LookupContext();
7555  call = new(zone()) HCallKeyed(context, key, argument_count);
7556  call->set_position(expr->position());
7557  Drop(argument_count + 1); // 1 is the key.
7558  return ast_context()->ReturnInstruction(call, expr->id());
7559  }
7560 
7561  // Named function call.
7562  expr->RecordTypeFeedback(oracle(), CALL_AS_METHOD);
7563 
7564  if (TryCallApply(expr)) return;
7565 
7566  CHECK_ALIVE(VisitForValue(prop->obj()));
7567  CHECK_ALIVE(VisitExpressions(expr->arguments()));
7568 
7569  Handle<String> name = prop->key()->AsLiteral()->AsPropertyName();
7570 
7571  SmallMapList* types = expr->GetReceiverTypes();
7572 
7573  HValue* receiver =
7574  environment()->ExpressionStackAt(expr->arguments()->length());
7575  if (expr->IsMonomorphic()) {
7576  Handle<Map> receiver_map = (types == NULL || types->is_empty())
7577  ? Handle<Map>::null()
7578  : types->first();
7579  if (TryInlineBuiltinMethodCall(expr,
7580  receiver,
7581  receiver_map,
7582  expr->check_type())) {
7583  if (FLAG_trace_inlining) {
7584  PrintF("Inlining builtin ");
7585  expr->target()->ShortPrint();
7586  PrintF("\n");
7587  }
7588  return;
7589  }
7590 
7591  if (CallStubCompiler::HasCustomCallGenerator(expr->target()) ||
7592  expr->check_type() != RECEIVER_MAP_CHECK) {
7593  // When the target has a custom call IC generator, use the IC,
7594  // because it is likely to generate better code. Also use the IC
7595  // when a primitive receiver check is required.
7596  HValue* context = environment()->LookupContext();
7597  call = PreProcessCall(
7598  new(zone()) HCallNamed(context, name, argument_count));
7599  } else {
7600  AddCheckConstantFunction(expr->holder(), receiver, receiver_map, true);
7601 
7602  if (TryInlineCall(expr)) return;
7603  call = PreProcessCall(
7604  new(zone()) HCallConstantFunction(expr->target(),
7605  argument_count));
7606  }
7607  } else if (types != NULL && types->length() > 1) {
7608  ASSERT(expr->check_type() == RECEIVER_MAP_CHECK);
7609  HandlePolymorphicCallNamed(expr, receiver, types, name);
7610  return;
7611 
7612  } else {
7613  HValue* context = environment()->LookupContext();
7614  call = PreProcessCall(
7615  new(zone()) HCallNamed(context, name, argument_count));
7616  }
7617 
7618  } else {
7619  expr->RecordTypeFeedback(oracle(), CALL_AS_FUNCTION);
7620  VariableProxy* proxy = expr->expression()->AsVariableProxy();
7621  bool global_call = proxy != NULL && proxy->var()->IsUnallocated();
7622 
7623  if (proxy != NULL && proxy->var()->is_possibly_eval()) {
7624  return Bailout("possible direct call to eval");
7625  }
7626 
7627  if (global_call) {
7628  Variable* var = proxy->var();
7629  bool known_global_function = false;
7630  // If there is a global property cell for the name at compile time and
7631  // access check is not enabled we assume that the function will not change
7632  // and generate optimized code for calling the function.
7633  LookupResult lookup(isolate());
7634  GlobalPropertyAccess type = LookupGlobalProperty(var, &lookup, false);
7635  if (type == kUseCell &&
7636  !info()->global_object()->IsAccessCheckNeeded()) {
7637  Handle<GlobalObject> global(info()->global_object());
7638  known_global_function = expr->ComputeGlobalTarget(global, &lookup);
7639  }
7640  if (known_global_function) {
7641  // Push the global object instead of the global receiver because
7642  // code generated by the full code generator expects it.
7643  HValue* context = environment()->LookupContext();
7644  HGlobalObject* global_object = new(zone()) HGlobalObject(context);
7645  PushAndAdd(global_object);
7646  CHECK_ALIVE(VisitExpressions(expr->arguments()));
7647 
7648  CHECK_ALIVE(VisitForValue(expr->expression()));
7649  HValue* function = Pop();
7650  AddInstruction(new(zone()) HCheckFunction(function, expr->target()));
7651 
7652  // Replace the global object with the global receiver.
7653  HGlobalReceiver* global_receiver =
7654  new(zone()) HGlobalReceiver(global_object);
7655  // Index of the receiver from the top of the expression stack.
7656  const int receiver_index = argument_count - 1;
7657  AddInstruction(global_receiver);
7658  ASSERT(environment()->ExpressionStackAt(receiver_index)->
7659  IsGlobalObject());
7660  environment()->SetExpressionStackAt(receiver_index, global_receiver);
7661 
7662  if (TryInlineBuiltinFunctionCall(expr, false)) { // Nothing to drop.
7663  if (FLAG_trace_inlining) {
7664  PrintF("Inlining builtin ");
7665  expr->target()->ShortPrint();
7666  PrintF("\n");
7667  }
7668  return;
7669  }
7670  if (TryInlineCall(expr)) return;
7671 
7672  if (expr->target().is_identical_to(info()->closure())) {
7673  graph()->MarkRecursive();
7674  }
7675 
7676  call = PreProcessCall(new(zone()) HCallKnownGlobal(expr->target(),
7677  argument_count));
7678  } else {
7679  HValue* context = environment()->LookupContext();
7680  HGlobalObject* receiver = new(zone()) HGlobalObject(context);
7681  AddInstruction(receiver);
7682  PushAndAdd(new(zone()) HPushArgument(receiver));
7683  CHECK_ALIVE(VisitArgumentList(expr->arguments()));
7684 
7685  call = new(zone()) HCallGlobal(context, var->name(), argument_count);
7686  Drop(argument_count);
7687  }
7688 
7689  } else if (expr->IsMonomorphic()) {
7690  // The function is on the stack in the unoptimized code during
7691  // evaluation of the arguments.
7692  CHECK_ALIVE(VisitForValue(expr->expression()));
7693  HValue* function = Top();
7694  HValue* context = environment()->LookupContext();
7695  HGlobalObject* global = new(zone()) HGlobalObject(context);
7696  AddInstruction(global);
7697  HGlobalReceiver* receiver = new(zone()) HGlobalReceiver(global);
7698  PushAndAdd(receiver);
7699  CHECK_ALIVE(VisitExpressions(expr->arguments()));
7700  AddInstruction(new(zone()) HCheckFunction(function, expr->target()));
7701 
7702  if (TryInlineBuiltinFunctionCall(expr, true)) { // Drop the function.
7703  if (FLAG_trace_inlining) {
7704  PrintF("Inlining builtin ");
7705  expr->target()->ShortPrint();
7706  PrintF("\n");
7707  }
7708  return;
7709  }
7710 
7711  if (TryInlineCall(expr, true)) { // Drop function from environment.
7712  return;
7713  } else {
7714  call = PreProcessCall(
7715  new(zone()) HInvokeFunction(context,
7716  function,
7717  expr->target(),
7718  argument_count));
7719  Drop(1); // The function.
7720  }
7721 
7722  } else {
7723  CHECK_ALIVE(VisitForValue(expr->expression()));
7724  HValue* function = Top();
7725  HValue* context = environment()->LookupContext();
7726  HGlobalObject* global_object = new(zone()) HGlobalObject(context);
7727  AddInstruction(global_object);
7728  HGlobalReceiver* receiver = new(zone()) HGlobalReceiver(global_object);
7729  AddInstruction(receiver);
7730  PushAndAdd(new(zone()) HPushArgument(receiver));
7731  CHECK_ALIVE(VisitArgumentList(expr->arguments()));
7732 
7733  call = new(zone()) HCallFunction(context, function, argument_count);
7734  Drop(argument_count + 1);
7735  }
7736  }
7737 
7738  call->set_position(expr->position());
7739  return ast_context()->ReturnInstruction(call, expr->id());
7740 }
7741 
7742 
7743 // Checks whether allocation using the given constructor can be inlined.
7744 static bool IsAllocationInlineable(Handle<JSFunction> constructor) {
7745  return constructor->has_initial_map() &&
7746  constructor->initial_map()->instance_type() == JS_OBJECT_TYPE &&
7747  constructor->initial_map()->instance_size() < HAllocateObject::kMaxSize;
7748 }
7749 
7750 
7751 void HGraphBuilder::VisitCallNew(CallNew* expr) {
7752  ASSERT(!HasStackOverflow());
7753  ASSERT(current_block() != NULL);
7754  ASSERT(current_block()->HasPredecessor());
7755  expr->RecordTypeFeedback(oracle());
7756  int argument_count = expr->arguments()->length() + 1; // Plus constructor.
7757  HValue* context = environment()->LookupContext();
7758 
7759  if (FLAG_inline_construct &&
7760  expr->IsMonomorphic() &&
7761  IsAllocationInlineable(expr->target())) {
7762  // The constructor function is on the stack in the unoptimized code
7763  // during evaluation of the arguments.
7764  CHECK_ALIVE(VisitForValue(expr->expression()));
7765  HValue* function = Top();
7766  CHECK_ALIVE(VisitExpressions(expr->arguments()));
7767  Handle<JSFunction> constructor = expr->target();
7768  HValue* check = AddInstruction(
7769  new(zone()) HCheckFunction(function, constructor));
7770 
7771  // Force completion of inobject slack tracking before generating
7772  // allocation code to finalize instance size.
7773  if (constructor->shared()->IsInobjectSlackTrackingInProgress()) {
7774  constructor->shared()->CompleteInobjectSlackTracking();
7775  }
7776 
7777  // Replace the constructor function with a newly allocated receiver.
7778  HInstruction* receiver = new(zone()) HAllocateObject(context, constructor);
7779  // Index of the receiver from the top of the expression stack.
7780  const int receiver_index = argument_count - 1;
7781  AddInstruction(receiver);
7782  ASSERT(environment()->ExpressionStackAt(receiver_index) == function);
7783  environment()->SetExpressionStackAt(receiver_index, receiver);
7784 
7785  if (TryInlineConstruct(expr, receiver)) return;
7786 
7787  // TODO(mstarzinger): For now we remove the previous HAllocateObject and
7788  // add HPushArgument for the arguments in case inlining failed. What we
7789  // actually should do is emit HInvokeFunction on the constructor instead
7790  // of using HCallNew as a fallback.
7791  receiver->DeleteAndReplaceWith(NULL);
7792  check->DeleteAndReplaceWith(NULL);
7793  environment()->SetExpressionStackAt(receiver_index, function);
7794  HInstruction* call = PreProcessCall(
7795  new(zone()) HCallNew(context, function, argument_count));
7796  call->set_position(expr->position());
7797  return ast_context()->ReturnInstruction(call, expr->id());
7798  } else {
7799  // The constructor function is both an operand to the instruction and an
7800  // argument to the construct call.
7801  CHECK_ALIVE(VisitArgument(expr->expression()));
7802  HValue* constructor = HPushArgument::cast(Top())->argument();
7803  CHECK_ALIVE(VisitArgumentList(expr->arguments()));
7804  HInstruction* call =
7805  new(zone()) HCallNew(context, constructor, argument_count);
7806  Drop(argument_count);
7807  call->set_position(expr->position());
7808  return ast_context()->ReturnInstruction(call, expr->id());
7809  }
7810 }
7811 
7812 
7813 // Support for generating inlined runtime functions.
7814 
7815 // Lookup table for generators for runtime calls that are generated inline.
7816 // Elements of the table are member pointers to functions of HGraphBuilder.
7817 #define INLINE_FUNCTION_GENERATOR_ADDRESS(Name, argc, ressize) \
7818  &HGraphBuilder::Generate##Name,
7819 
7820 const HGraphBuilder::InlineFunctionGenerator
7821  HGraphBuilder::kInlineFunctionGenerators[] = {
7824 };
7825 #undef INLINE_FUNCTION_GENERATOR_ADDRESS
7826 
7827 
7828 void HGraphBuilder::VisitCallRuntime(CallRuntime* expr) {
7829  ASSERT(!HasStackOverflow());
7830  ASSERT(current_block() != NULL);
7831  ASSERT(current_block()->HasPredecessor());
7832  if (expr->is_jsruntime()) {
7833  return Bailout("call to a JavaScript runtime function");
7834  }
7835 
7836  const Runtime::Function* function = expr->function();
7837  ASSERT(function != NULL);
7838  if (function->intrinsic_type == Runtime::INLINE) {
7839  ASSERT(expr->name()->length() > 0);
7840  ASSERT(expr->name()->Get(0) == '_');
7841  // Call to an inline function.
7842  int lookup_index = static_cast<int>(function->function_id) -
7843  static_cast<int>(Runtime::kFirstInlineFunction);
7844  ASSERT(lookup_index >= 0);
7845  ASSERT(static_cast<size_t>(lookup_index) <
7846  ARRAY_SIZE(kInlineFunctionGenerators));
7847  InlineFunctionGenerator generator = kInlineFunctionGenerators[lookup_index];
7848 
7849  // Call the inline code generator using the pointer-to-member.
7850  (this->*generator)(expr);
7851  } else {
7852  ASSERT(function->intrinsic_type == Runtime::RUNTIME);
7853  CHECK_ALIVE(VisitArgumentList(expr->arguments()));
7854 
7855  HValue* context = environment()->LookupContext();
7856  Handle<String> name = expr->name();
7857  int argument_count = expr->arguments()->length();
7858  HCallRuntime* call =
7859  new(zone()) HCallRuntime(context, name, function, argument_count);
7860  Drop(argument_count);
7861  return ast_context()->ReturnInstruction(call, expr->id());
7862  }
7863 }
7864 
7865 
7866 void HGraphBuilder::VisitUnaryOperation(UnaryOperation* expr) {
7867  ASSERT(!HasStackOverflow());
7868  ASSERT(current_block() != NULL);
7869  ASSERT(current_block()->HasPredecessor());
7870  switch (expr->op()) {
7871  case Token::DELETE: return VisitDelete(expr);
7872  case Token::VOID: return VisitVoid(expr);
7873  case Token::TYPEOF: return VisitTypeof(expr);
7874  case Token::ADD: return VisitAdd(expr);
7875  case Token::SUB: return VisitSub(expr);
7876  case Token::BIT_NOT: return VisitBitNot(expr);
7877  case Token::NOT: return VisitNot(expr);
7878  default: UNREACHABLE();
7879  }
7880 }
7881 
7882 void HGraphBuilder::VisitDelete(UnaryOperation* expr) {
7883  Property* prop = expr->expression()->AsProperty();
7884  VariableProxy* proxy = expr->expression()->AsVariableProxy();
7885  if (prop != NULL) {
7886  CHECK_ALIVE(VisitForValue(prop->obj()));
7887  CHECK_ALIVE(VisitForValue(prop->key()));
7888  HValue* key = Pop();
7889  HValue* obj = Pop();
7890  HValue* context = environment()->LookupContext();
7891  HDeleteProperty* instr = new(zone()) HDeleteProperty(context, obj, key);
7892  return ast_context()->ReturnInstruction(instr, expr->id());
7893  } else if (proxy != NULL) {
7894  Variable* var = proxy->var();
7895  if (var->IsUnallocated()) {
7896  Bailout("delete with global variable");
7897  } else if (var->IsStackAllocated() || var->IsContextSlot()) {
7898  // Result of deleting non-global variables is false. 'this' is not
7899  // really a variable, though we implement it as one. The
7900  // subexpression does not have side effects.
7901  HValue* value = var->is_this()
7902  ? graph()->GetConstantTrue()
7903  : graph()->GetConstantFalse();
7904  return ast_context()->ReturnValue(value);
7905  } else {
7906  Bailout("delete with non-global variable");
7907  }
7908  } else {
7909  // Result of deleting non-property, non-variable reference is true.
7910  // Evaluate the subexpression for side effects.
7911  CHECK_ALIVE(VisitForEffect(expr->expression()));
7912  return ast_context()->ReturnValue(graph()->GetConstantTrue());
7913  }
7914 }
7915 
7916 
7917 void HGraphBuilder::VisitVoid(UnaryOperation* expr) {
7918  CHECK_ALIVE(VisitForEffect(expr->expression()));
7919  return ast_context()->ReturnValue(graph()->GetConstantUndefined());
7920 }
7921 
7922 
7923 void HGraphBuilder::VisitTypeof(UnaryOperation* expr) {
7924  CHECK_ALIVE(VisitForTypeOf(expr->expression()));
7925  HValue* value = Pop();
7926  HValue* context = environment()->LookupContext();
7927  HInstruction* instr = new(zone()) HTypeof(context, value);
7928  return ast_context()->ReturnInstruction(instr, expr->id());
7929 }
7930 
7931 
7932 void HGraphBuilder::VisitAdd(UnaryOperation* expr) {
7933  CHECK_ALIVE(VisitForValue(expr->expression()));
7934  HValue* value = Pop();
7935  HValue* context = environment()->LookupContext();
7936  HInstruction* instr =
7937  new(zone()) HMul(context, value, graph_->GetConstant1());
7938  return ast_context()->ReturnInstruction(instr, expr->id());
7939 }
7940 
7941 
7942 void HGraphBuilder::VisitSub(UnaryOperation* expr) {
7943  CHECK_ALIVE(VisitForValue(expr->expression()));
7944  HValue* value = Pop();
7945  HValue* context = environment()->LookupContext();
7946  HInstruction* instr =
7947  new(zone()) HMul(context, value, graph_->GetConstantMinus1());
7948  TypeInfo info = oracle()->UnaryType(expr);
7949  if (info.IsUninitialized()) {
7950  AddInstruction(new(zone()) HSoftDeoptimize);
7952  info = TypeInfo::Unknown();
7953  }
7954  Representation rep = ToRepresentation(info);
7955  TraceRepresentation(expr->op(), info, instr, rep);
7956  instr->AssumeRepresentation(rep);
7957  return ast_context()->ReturnInstruction(instr, expr->id());
7958 }
7959 
7960 
7961 void HGraphBuilder::VisitBitNot(UnaryOperation* expr) {
7962  CHECK_ALIVE(VisitForValue(expr->expression()));
7963  HValue* value = Pop();
7964  TypeInfo info = oracle()->UnaryType(expr);
7965  if (info.IsUninitialized()) {
7966  AddInstruction(new(zone()) HSoftDeoptimize);
7968  }
7969  HInstruction* instr = new(zone()) HBitNot(value);
7970  return ast_context()->ReturnInstruction(instr, expr->id());
7971 }
7972 
7973 
7974 void HGraphBuilder::VisitNot(UnaryOperation* expr) {
7975  if (ast_context()->IsTest()) {
7976  TestContext* context = TestContext::cast(ast_context());
7977  VisitForControl(expr->expression(),
7978  context->if_false(),
7979  context->if_true());
7980  return;
7981  }
7982 
7983  if (ast_context()->IsEffect()) {
7984  VisitForEffect(expr->expression());
7985  return;
7986  }
7987 
7988  ASSERT(ast_context()->IsValue());
7989  HBasicBlock* materialize_false = graph()->CreateBasicBlock();
7990  HBasicBlock* materialize_true = graph()->CreateBasicBlock();
7991  CHECK_BAILOUT(VisitForControl(expr->expression(),
7992  materialize_false,
7993  materialize_true));
7994 
7995  if (materialize_false->HasPredecessor()) {
7996  materialize_false->SetJoinId(expr->MaterializeFalseId());
7997  set_current_block(materialize_false);
7998  Push(graph()->GetConstantFalse());
7999  } else {
8000  materialize_false = NULL;
8001  }
8002 
8003  if (materialize_true->HasPredecessor()) {
8004  materialize_true->SetJoinId(expr->MaterializeTrueId());
8005  set_current_block(materialize_true);
8006  Push(graph()->GetConstantTrue());
8007  } else {
8008  materialize_true = NULL;
8009  }
8010 
8011  HBasicBlock* join =
8012  CreateJoin(materialize_false, materialize_true, expr->id());
8013  set_current_block(join);
8014  if (join != NULL) return ast_context()->ReturnValue(Pop());
8015 }
8016 
8017 
8018 HInstruction* HGraphBuilder::BuildIncrement(bool returns_original_input,
8019  CountOperation* expr) {
8020  // The input to the count operation is on top of the expression stack.
8021  TypeInfo info = oracle()->IncrementType(expr);
8022  Representation rep = ToRepresentation(info);
8023  if (rep.IsTagged()) {
8024  rep = Representation::Integer32();
8025  }
8026 
8027  if (returns_original_input) {
8028  // We need an explicit HValue representing ToNumber(input). The
8029  // actual HChange instruction we need is (sometimes) added in a later
8030  // phase, so it is not available now to be used as an input to HAdd and
8031  // as the return value.
8032  HInstruction* number_input = new(zone()) HForceRepresentation(Pop(), rep);
8033  AddInstruction(number_input);
8034  Push(number_input);
8035  }
8036 
8037  // The addition has no side effects, so we do not need
8038  // to simulate the expression stack after this instruction.
8039  // Any later failures deopt to the load of the input or earlier.
8040  HConstant* delta = (expr->op() == Token::INC)
8041  ? graph_->GetConstant1()
8042  : graph_->GetConstantMinus1();
8043  HValue* context = environment()->LookupContext();
8044  HInstruction* instr = new(zone()) HAdd(context, Top(), delta);
8045  TraceRepresentation(expr->op(), info, instr, rep);
8046  instr->AssumeRepresentation(rep);
8047  AddInstruction(instr);
8048  return instr;
8049 }
8050 
8051 
8052 void HGraphBuilder::VisitCountOperation(CountOperation* expr) {
8053  ASSERT(!HasStackOverflow());
8054  ASSERT(current_block() != NULL);
8055  ASSERT(current_block()->HasPredecessor());
8056  Expression* target = expr->expression();
8057  VariableProxy* proxy = target->AsVariableProxy();
8058  Property* prop = target->AsProperty();
8059  if (proxy == NULL && prop == NULL) {
8060  return Bailout("invalid lhs in count operation");
8061  }
8062 
8063  // Match the full code generator stack by simulating an extra stack
8064  // element for postfix operations in a non-effect context. The return
8065  // value is ToNumber(input).
8066  bool returns_original_input =
8067  expr->is_postfix() && !ast_context()->IsEffect();
8068  HValue* input = NULL; // ToNumber(original_input).
8069  HValue* after = NULL; // The result after incrementing or decrementing.
8070 
8071  if (proxy != NULL) {
8072  Variable* var = proxy->var();
8073  if (var->mode() == CONST) {
8074  return Bailout("unsupported count operation with const");
8075  }
8076  // Argument of the count operation is a variable, not a property.
8077  ASSERT(prop == NULL);
8078  CHECK_ALIVE(VisitForValue(target));
8079 
8080  after = BuildIncrement(returns_original_input, expr);
8081  input = returns_original_input ? Top() : Pop();
8082  Push(after);
8083 
8084  switch (var->location()) {
8085  case Variable::UNALLOCATED:
8086  HandleGlobalVariableAssignment(var,
8087  after,
8088  expr->position(),
8089  expr->AssignmentId());
8090  break;
8091 
8092  case Variable::PARAMETER:
8093  case Variable::LOCAL:
8094  Bind(var, after);
8095  break;
8096 
8097  case Variable::CONTEXT: {
8098  // Bail out if we try to mutate a parameter value in a function
8099  // using the arguments object. We do not (yet) correctly handle the
8100  // arguments property of the function.
8101  if (info()->scope()->arguments() != NULL) {
8102  // Parameters will rewrite to context slots. We have no direct
8103  // way to detect that the variable is a parameter so we use a
8104  // linear search of the parameter list.
8105  int count = info()->scope()->num_parameters();
8106  for (int i = 0; i < count; ++i) {
8107  if (var == info()->scope()->parameter(i)) {
8108  return Bailout("assignment to parameter in arguments object");
8109  }
8110  }
8111  }
8112 
8113  HValue* context = BuildContextChainWalk(var);
8116  HStoreContextSlot* instr =
8117  new(zone()) HStoreContextSlot(context, var->index(), mode, after);
8118  AddInstruction(instr);
8119  if (instr->HasObservableSideEffects()) {
8120  AddSimulate(expr->AssignmentId());
8121  }
8122  break;
8123  }
8124 
8125  case Variable::LOOKUP:
8126  return Bailout("lookup variable in count operation");
8127  }
8128 
8129  } else {
8130  // Argument of the count operation is a property.
8131  ASSERT(prop != NULL);
8132  prop->RecordTypeFeedback(oracle(), zone());
8133 
8134  if (prop->key()->IsPropertyName()) {
8135  // Named property.
8136  if (returns_original_input) Push(graph_->GetConstantUndefined());
8137 
8138  CHECK_ALIVE(VisitForValue(prop->obj()));
8139  HValue* object = Top();
8140 
8141  Handle<String> name = prop->key()->AsLiteral()->AsPropertyName();
8142  Handle<Map> map;
8143  HInstruction* load;
8144  bool monomorphic = prop->IsMonomorphic();
8145  if (monomorphic) {
8146  map = prop->GetReceiverTypes()->first();
8147  if (map->is_dictionary_map()) monomorphic = false;
8148  }
8149  if (monomorphic) {
8150  Handle<JSFunction> getter;
8151  Handle<JSObject> holder;
8152  if (LookupGetter(map, name, &getter, &holder)) {
8153  load = BuildCallGetter(object, map, getter, holder);
8154  } else {
8155  load = BuildLoadNamedMonomorphic(object, name, prop, map);
8156  }
8157  } else {
8158  load = BuildLoadNamedGeneric(object, name, prop);
8159  }
8160  PushAndAdd(load);
8161  if (load->HasObservableSideEffects()) AddSimulate(prop->LoadId());
8162 
8163  after = BuildIncrement(returns_original_input, expr);
8164  input = Pop();
8165 
8166  HInstruction* store;
8167  if (!monomorphic) {
8168  // If we don't know the monomorphic type, do a generic store.
8169  CHECK_ALIVE(store = BuildStoreNamedGeneric(object, name, after));
8170  } else {
8171  Handle<JSFunction> setter;
8172  Handle<JSObject> holder;
8173  if (LookupSetter(map, name, &setter, &holder)) {
8174  store = BuildCallSetter(object, after, map, setter, holder);
8175  } else {
8176  CHECK_ALIVE(store = BuildStoreNamedMonomorphic(object,
8177  name,
8178  after,
8179  map));
8180  }
8181  }
8182  AddInstruction(store);
8183 
8184  // Overwrite the receiver in the bailout environment with the result
8185  // of the operation, and the placeholder with the original value if
8186  // necessary.
8187  environment()->SetExpressionStackAt(0, after);
8188  if (returns_original_input) environment()->SetExpressionStackAt(1, input);
8189  if (store->HasObservableSideEffects()) AddSimulate(expr->AssignmentId());
8190 
8191  } else {
8192  // Keyed property.
8193  if (returns_original_input) Push(graph_->GetConstantUndefined());
8194 
8195  CHECK_ALIVE(VisitForValue(prop->obj()));
8196  CHECK_ALIVE(VisitForValue(prop->key()));
8197  HValue* obj = environment()->ExpressionStackAt(1);
8198  HValue* key = environment()->ExpressionStackAt(0);
8199 
8200  bool has_side_effects = false;
8201  HValue* load = HandleKeyedElementAccess(
8202  obj, key, NULL, prop, prop->LoadId(), RelocInfo::kNoPosition,
8203  false, // is_store
8204  &has_side_effects);
8205  Push(load);
8206  if (has_side_effects) AddSimulate(prop->LoadId());
8207 
8208  after = BuildIncrement(returns_original_input, expr);
8209  input = Pop();
8210 
8211  expr->RecordTypeFeedback(oracle(), zone());
8212  HandleKeyedElementAccess(obj, key, after, expr, expr->AssignmentId(),
8213  RelocInfo::kNoPosition,
8214  true, // is_store
8215  &has_side_effects);
8216 
8217  // Drop the key from the bailout environment. Overwrite the receiver
8218  // with the result of the operation, and the placeholder with the
8219  // original value if necessary.
8220  Drop(1);
8221  environment()->SetExpressionStackAt(0, after);
8222  if (returns_original_input) environment()->SetExpressionStackAt(1, input);
8223  ASSERT(has_side_effects); // Stores always have side effects.
8224  AddSimulate(expr->AssignmentId());
8225  }
8226  }
8227 
8228  Drop(returns_original_input ? 2 : 1);
8229  return ast_context()->ReturnValue(expr->is_postfix() ? input : after);
8230 }
8231 
8232 
8233 HStringCharCodeAt* HGraphBuilder::BuildStringCharCodeAt(HValue* context,
8234  HValue* string,
8235  HValue* index) {
8236  AddInstruction(new(zone()) HCheckNonSmi(string));
8238  HStringLength* length = new(zone()) HStringLength(string);
8239  AddInstruction(length);
8240  HInstruction* checked_index =
8241  AddInstruction(new(zone()) HBoundsCheck(index, length));
8242  return new(zone()) HStringCharCodeAt(context, string, checked_index);
8243 }
8244 
8245 
8246 HInstruction* HGraphBuilder::BuildBinaryOperation(BinaryOperation* expr,
8247  HValue* left,
8248  HValue* right) {
8249  HValue* context = environment()->LookupContext();
8250  TypeInfo info = oracle()->BinaryType(expr);
8251  if (info.IsUninitialized()) {
8252  AddInstruction(new(zone()) HSoftDeoptimize);
8254  info = TypeInfo::Unknown();
8255  }
8256  HInstruction* instr = NULL;
8257  switch (expr->op()) {
8258  case Token::ADD:
8259  if (info.IsString()) {
8260  AddInstruction(new(zone()) HCheckNonSmi(left));
8262  AddInstruction(new(zone()) HCheckNonSmi(right));
8264  instr = new(zone()) HStringAdd(context, left, right);
8265  } else {
8266  instr = HAdd::NewHAdd(zone(), context, left, right);
8267  }
8268  break;
8269  case Token::SUB:
8270  instr = HSub::NewHSub(zone(), context, left, right);
8271  break;
8272  case Token::MUL:
8273  instr = HMul::NewHMul(zone(), context, left, right);
8274  break;
8275  case Token::MOD:
8276  instr = HMod::NewHMod(zone(), context, left, right);
8277  break;
8278  case Token::DIV:
8279  instr = HDiv::NewHDiv(zone(), context, left, right);
8280  break;
8281  case Token::BIT_XOR:
8282  case Token::BIT_AND:
8283  case Token::BIT_OR:
8284  instr = HBitwise::NewHBitwise(zone(), expr->op(), context, left, right);
8285  break;
8286  case Token::SAR:
8287  instr = HSar::NewHSar(zone(), context, left, right);
8288  break;
8289  case Token::SHR:
8290  instr = HShr::NewHShr(zone(), context, left, right);
8291  if (FLAG_opt_safe_uint32_operations && instr->IsShr()) {
8292  bool can_be_shift_by_zero = true;
8293  if (right->IsConstant()) {
8294  HConstant* right_const = HConstant::cast(right);
8295  if (right_const->HasInteger32Value() &&
8296  (right_const->Integer32Value() & 0x1f) != 0) {
8297  can_be_shift_by_zero = false;
8298  }
8299  }
8300 
8301  if (can_be_shift_by_zero) graph()->RecordUint32Instruction(instr);
8302  }
8303  break;
8304  case Token::SHL:
8305  instr = HShl::NewHShl(zone(), context, left, right);
8306  break;
8307  default:
8308  UNREACHABLE();
8309  }
8310 
8311  // If we hit an uninitialized binary op stub we will get type info
8312  // for a smi operation. If one of the operands is a constant string
8313  // do not generate code assuming it is a smi operation.
8314  if (info.IsSmi() &&
8315  ((left->IsConstant() && HConstant::cast(left)->handle()->IsString()) ||
8316  (right->IsConstant() && HConstant::cast(right)->handle()->IsString()))) {
8317  return instr;
8318  }
8319  Representation rep = ToRepresentation(info);
8320  // We only generate either int32 or generic tagged bitwise operations.
8321  if (instr->IsBitwiseBinaryOperation()) {
8323  InitializeObservedInputRepresentation(rep);
8324  if (rep.IsDouble()) rep = Representation::Integer32();
8325  }
8326  TraceRepresentation(expr->op(), info, instr, rep);
8327  instr->AssumeRepresentation(rep);
8328  return instr;
8329 }
8330 
8331 
8332 // Check for the form (%_ClassOf(foo) === 'BarClass').
8333 static bool IsClassOfTest(CompareOperation* expr) {
8334  if (expr->op() != Token::EQ_STRICT) return false;
8335  CallRuntime* call = expr->left()->AsCallRuntime();
8336  if (call == NULL) return false;
8337  Literal* literal = expr->right()->AsLiteral();
8338  if (literal == NULL) return false;
8339  if (!literal->handle()->IsString()) return false;
8340  if (!call->name()->IsEqualTo(CStrVector("_ClassOf"))) return false;
8341  ASSERT(call->arguments()->length() == 1);
8342  return true;
8343 }
8344 
8345 
8346 void HGraphBuilder::VisitBinaryOperation(BinaryOperation* expr) {
8347  ASSERT(!HasStackOverflow());
8348  ASSERT(current_block() != NULL);
8349  ASSERT(current_block()->HasPredecessor());
8350  switch (expr->op()) {
8351  case Token::COMMA:
8352  return VisitComma(expr);
8353  case Token::OR:
8354  case Token::AND:
8355  return VisitLogicalExpression(expr);
8356  default:
8357  return VisitArithmeticExpression(expr);
8358  }
8359 }
8360 
8361 
8362 void HGraphBuilder::VisitComma(BinaryOperation* expr) {
8363  CHECK_ALIVE(VisitForEffect(expr->left()));
8364  // Visit the right subexpression in the same AST context as the entire
8365  // expression.
8366  Visit(expr->right());
8367 }
8368 
8369 
8370 void HGraphBuilder::VisitLogicalExpression(BinaryOperation* expr) {
8371  bool is_logical_and = expr->op() == Token::AND;
8372  if (ast_context()->IsTest()) {
8373  TestContext* context = TestContext::cast(ast_context());
8374  // Translate left subexpression.
8375  HBasicBlock* eval_right = graph()->CreateBasicBlock();
8376  if (is_logical_and) {
8377  CHECK_BAILOUT(VisitForControl(expr->left(),
8378  eval_right,
8379  context->if_false()));
8380  } else {
8381  CHECK_BAILOUT(VisitForControl(expr->left(),
8382  context->if_true(),
8383  eval_right));
8384  }
8385 
8386  // Translate right subexpression by visiting it in the same AST
8387  // context as the entire expression.
8388  if (eval_right->HasPredecessor()) {
8389  eval_right->SetJoinId(expr->RightId());
8390  set_current_block(eval_right);
8391  Visit(expr->right());
8392  }
8393 
8394  } else if (ast_context()->IsValue()) {
8395  CHECK_ALIVE(VisitForValue(expr->left()));
8396  ASSERT(current_block() != NULL);
8397 
8398  // We need an extra block to maintain edge-split form.
8399  HBasicBlock* empty_block = graph()->CreateBasicBlock();
8400  HBasicBlock* eval_right = graph()->CreateBasicBlock();
8401  TypeFeedbackId test_id = expr->left()->test_id();
8402  ToBooleanStub::Types expected(oracle()->ToBooleanTypes(test_id));
8403  HBranch* test = is_logical_and
8404  ? new(zone()) HBranch(Top(), eval_right, empty_block, expected)
8405  : new(zone()) HBranch(Top(), empty_block, eval_right, expected);
8406  current_block()->Finish(test);
8407 
8408  set_current_block(eval_right);
8409  Drop(1); // Value of the left subexpression.
8410  CHECK_BAILOUT(VisitForValue(expr->right()));
8411 
8412  HBasicBlock* join_block =
8413  CreateJoin(empty_block, current_block(), expr->id());
8414  set_current_block(join_block);
8415  return ast_context()->ReturnValue(Pop());
8416 
8417  } else {
8418  ASSERT(ast_context()->IsEffect());
8419  // In an effect context, we don't need the value of the left subexpression,
8420  // only its control flow and side effects. We need an extra block to
8421  // maintain edge-split form.
8422  HBasicBlock* empty_block = graph()->CreateBasicBlock();
8423  HBasicBlock* right_block = graph()->CreateBasicBlock();
8424  if (is_logical_and) {
8425  CHECK_BAILOUT(VisitForControl(expr->left(), right_block, empty_block));
8426  } else {
8427  CHECK_BAILOUT(VisitForControl(expr->left(), empty_block, right_block));
8428  }
8429 
8430  // TODO(kmillikin): Find a way to fix this. It's ugly that there are
8431  // actually two empty blocks (one here and one inserted by
8432  // TestContext::BuildBranch, and that they both have an HSimulate though the
8433  // second one is not a merge node, and that we really have no good AST ID to
8434  // put on that first HSimulate.
8435 
8436  if (empty_block->HasPredecessor()) {
8437  empty_block->SetJoinId(expr->id());
8438  } else {
8439  empty_block = NULL;
8440  }
8441 
8442  if (right_block->HasPredecessor()) {
8443  right_block->SetJoinId(expr->RightId());
8444  set_current_block(right_block);
8445  CHECK_BAILOUT(VisitForEffect(expr->right()));
8446  right_block = current_block();
8447  } else {
8448  right_block = NULL;
8449  }
8450 
8451  HBasicBlock* join_block =
8452  CreateJoin(empty_block, right_block, expr->id());
8453  set_current_block(join_block);
8454  // We did not materialize any value in the predecessor environments,
8455  // so there is no need to handle it here.
8456  }
8457 }
8458 
8459 
8460 void HGraphBuilder::VisitArithmeticExpression(BinaryOperation* expr) {
8461  CHECK_ALIVE(VisitForValue(expr->left()));
8462  CHECK_ALIVE(VisitForValue(expr->right()));
8463  HValue* right = Pop();
8464  HValue* left = Pop();
8465  HInstruction* instr = BuildBinaryOperation(expr, left, right);
8466  instr->set_position(expr->position());
8467  return ast_context()->ReturnInstruction(instr, expr->id());
8468 }
8469 
8470 
8471 void HGraphBuilder::TraceRepresentation(Token::Value op,
8472  TypeInfo info,
8473  HValue* value,
8474  Representation rep) {
8475  if (!FLAG_trace_representation) return;
8476  // TODO(svenpanne) Under which circumstances are we actually not flexible?
8477  // At first glance, this looks a bit weird...
8478  bool flexible = value->CheckFlag(HValue::kFlexibleRepresentation);
8479  PrintF("Operation %s has type info %s, %schange representation assumption "
8480  "for %s (ID %d) from %s to %s\n",
8481  Token::Name(op),
8482  info.ToString(),
8483  flexible ? "" : " DO NOT ",
8484  value->Mnemonic(),
8485  graph_->GetMaximumValueID(),
8486  value->representation().Mnemonic(),
8487  rep.Mnemonic());
8488 }
8489 
8490 
8491 Representation HGraphBuilder::ToRepresentation(TypeInfo info) {
8492  if (info.IsSmi()) return Representation::Integer32();
8493  if (info.IsInteger32()) return Representation::Integer32();
8494  if (info.IsDouble()) return Representation::Double();
8495  if (info.IsNumber()) return Representation::Double();
8496  return Representation::Tagged();
8497 }
8498 
8499 
8500 void HGraphBuilder::HandleLiteralCompareTypeof(CompareOperation* expr,
8501  HTypeof* typeof_expr,
8502  Handle<String> check) {
8503  // Note: The HTypeof itself is removed during canonicalization, if possible.
8504  HValue* value = typeof_expr->value();
8505  HTypeofIsAndBranch* instr = new(zone()) HTypeofIsAndBranch(value, check);
8506  instr->set_position(expr->position());
8507  return ast_context()->ReturnControl(instr, expr->id());
8508 }
8509 
8510 
8511 static bool MatchLiteralCompareNil(HValue* left,
8512  Token::Value op,
8513  HValue* right,
8514  Handle<Object> nil,
8515  HValue** expr) {
8516  if (left->IsConstant() &&
8517  HConstant::cast(left)->handle().is_identical_to(nil) &&
8518  Token::IsEqualityOp(op)) {
8519  *expr = right;
8520  return true;
8521  }
8522  return false;
8523 }
8524 
8525 
8526 static bool MatchLiteralCompareTypeof(HValue* left,
8527  Token::Value op,
8528  HValue* right,
8529  HTypeof** typeof_expr,
8530  Handle<String>* check) {
8531  if (left->IsTypeof() &&
8532  Token::IsEqualityOp(op) &&
8533  right->IsConstant() &&
8534  HConstant::cast(right)->handle()->IsString()) {
8535  *typeof_expr = HTypeof::cast(left);
8536  *check = Handle<String>::cast(HConstant::cast(right)->handle());
8537  return true;
8538  }
8539  return false;
8540 }
8541 
8542 
8543 static bool IsLiteralCompareTypeof(HValue* left,
8544  Token::Value op,
8545  HValue* right,
8546  HTypeof** typeof_expr,
8547  Handle<String>* check) {
8548  return MatchLiteralCompareTypeof(left, op, right, typeof_expr, check) ||
8549  MatchLiteralCompareTypeof(right, op, left, typeof_expr, check);
8550 }
8551 
8552 
8553 static bool IsLiteralCompareNil(HValue* left,
8554  Token::Value op,
8555  HValue* right,
8556  Handle<Object> nil,
8557  HValue** expr) {
8558  return MatchLiteralCompareNil(left, op, right, nil, expr) ||
8559  MatchLiteralCompareNil(right, op, left, nil, expr);
8560 }
8561 
8562 
8563 static bool IsLiteralCompareBool(HValue* left,
8564  Token::Value op,
8565  HValue* right) {
8566  return op == Token::EQ_STRICT &&
8567  ((left->IsConstant() && HConstant::cast(left)->handle()->IsBoolean()) ||
8568  (right->IsConstant() && HConstant::cast(right)->handle()->IsBoolean()));
8569 }
8570 
8571 
8572 void HGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
8573  ASSERT(!HasStackOverflow());
8574  ASSERT(current_block() != NULL);
8575  ASSERT(current_block()->HasPredecessor());
8576  if (IsClassOfTest(expr)) {
8577  CallRuntime* call = expr->left()->AsCallRuntime();
8578  ASSERT(call->arguments()->length() == 1);
8579  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
8580  HValue* value = Pop();
8581  Literal* literal = expr->right()->AsLiteral();
8582  Handle<String> rhs = Handle<String>::cast(literal->handle());
8583  HClassOfTestAndBranch* instr =
8584  new(zone()) HClassOfTestAndBranch(value, rhs);
8585  instr->set_position(expr->position());
8586  return ast_context()->ReturnControl(instr, expr->id());
8587  }
8588 
8589  TypeInfo type_info = oracle()->CompareType(expr);
8590  // Check if this expression was ever executed according to type feedback.
8591  // Note that for the special typeof/null/undefined cases we get unknown here.
8592  if (type_info.IsUninitialized()) {
8593  AddInstruction(new(zone()) HSoftDeoptimize);
8595  type_info = TypeInfo::Unknown();
8596  }
8597 
8598  CHECK_ALIVE(VisitForValue(expr->left()));
8599  CHECK_ALIVE(VisitForValue(expr->right()));
8600 
8601  HValue* context = environment()->LookupContext();
8602  HValue* right = Pop();
8603  HValue* left = Pop();
8604  Token::Value op = expr->op();
8605 
8606  HTypeof* typeof_expr = NULL;
8607  Handle<String> check;
8608  if (IsLiteralCompareTypeof(left, op, right, &typeof_expr, &check)) {
8609  return HandleLiteralCompareTypeof(expr, typeof_expr, check);
8610  }
8611  HValue* sub_expr = NULL;
8612  Factory* f = graph()->isolate()->factory();
8613  if (IsLiteralCompareNil(left, op, right, f->undefined_value(), &sub_expr)) {
8614  return HandleLiteralCompareNil(expr, sub_expr, kUndefinedValue);
8615  }
8616  if (IsLiteralCompareNil(left, op, right, f->null_value(), &sub_expr)) {
8617  return HandleLiteralCompareNil(expr, sub_expr, kNullValue);
8618  }
8619  if (IsLiteralCompareBool(left, op, right)) {
8620  HCompareObjectEqAndBranch* result =
8621  new(zone()) HCompareObjectEqAndBranch(left, right);
8622  result->set_position(expr->position());
8623  return ast_context()->ReturnControl(result, expr->id());
8624  }
8625 
8626  if (op == Token::INSTANCEOF) {
8627  // Check to see if the rhs of the instanceof is a global function not
8628  // residing in new space. If it is we assume that the function will stay the
8629  // same.
8630  Handle<JSFunction> target = Handle<JSFunction>::null();
8631  VariableProxy* proxy = expr->right()->AsVariableProxy();
8632  bool global_function = (proxy != NULL) && proxy->var()->IsUnallocated();
8633  if (global_function &&
8634  info()->has_global_object() &&
8635  !info()->global_object()->IsAccessCheckNeeded()) {
8636  Handle<String> name = proxy->name();
8637  Handle<GlobalObject> global(info()->global_object());
8638  LookupResult lookup(isolate());
8639  global->Lookup(*name, &lookup);
8640  if (lookup.IsNormal() && lookup.GetValue()->IsJSFunction()) {
8641  Handle<JSFunction> candidate(JSFunction::cast(lookup.GetValue()));
8642  // If the function is in new space we assume it's more likely to
8643  // change and thus prefer the general IC code.
8644  if (!isolate()->heap()->InNewSpace(*candidate)) {
8645  target = candidate;
8646  }
8647  }
8648  }
8649 
8650  // If the target is not null we have found a known global function that is
8651  // assumed to stay the same for this instanceof.
8652  if (target.is_null()) {
8653  HInstanceOf* result = new(zone()) HInstanceOf(context, left, right);
8654  result->set_position(expr->position());
8655  return ast_context()->ReturnInstruction(result, expr->id());
8656  } else {
8657  AddInstruction(new(zone()) HCheckFunction(right, target));
8658  HInstanceOfKnownGlobal* result =
8659  new(zone()) HInstanceOfKnownGlobal(context, left, target);
8660  result->set_position(expr->position());
8661  return ast_context()->ReturnInstruction(result, expr->id());
8662  }
8663  } else if (op == Token::IN) {
8664  HIn* result = new(zone()) HIn(context, left, right);
8665  result->set_position(expr->position());
8666  return ast_context()->ReturnInstruction(result, expr->id());
8667  } else if (type_info.IsNonPrimitive()) {
8668  switch (op) {
8669  case Token::EQ:
8670  case Token::EQ_STRICT: {
8671  // Can we get away with map check and not instance type check?
8672  Handle<Map> map = oracle()->GetCompareMap(expr);
8673  if (!map.is_null()) {
8674  AddInstruction(new(zone()) HCheckNonSmi(left));
8675  AddInstruction(HCheckMaps::NewWithTransitions(left, map, zone()));
8676  AddInstruction(new(zone()) HCheckNonSmi(right));
8677  AddInstruction(HCheckMaps::NewWithTransitions(right, map, zone()));
8678  HCompareObjectEqAndBranch* result =
8679  new(zone()) HCompareObjectEqAndBranch(left, right);
8680  result->set_position(expr->position());
8681  return ast_context()->ReturnControl(result, expr->id());
8682  } else {
8683  AddInstruction(new(zone()) HCheckNonSmi(left));
8685  AddInstruction(new(zone()) HCheckNonSmi(right));
8687  HCompareObjectEqAndBranch* result =
8688  new(zone()) HCompareObjectEqAndBranch(left, right);
8689  result->set_position(expr->position());
8690  return ast_context()->ReturnControl(result, expr->id());
8691  }
8692  }
8693  default:
8694  return Bailout("Unsupported non-primitive compare");
8695  }
8696  } else if (type_info.IsString() && oracle()->IsSymbolCompare(expr) &&
8697  (op == Token::EQ || op == Token::EQ_STRICT)) {
8698  AddInstruction(new(zone()) HCheckNonSmi(left));
8700  AddInstruction(new(zone()) HCheckNonSmi(right));
8702  HCompareObjectEqAndBranch* result =
8703  new(zone()) HCompareObjectEqAndBranch(left, right);
8704  result->set_position(expr->position());
8705  return ast_context()->ReturnControl(result, expr->id());
8706  } else {
8707  Representation r = ToRepresentation(type_info);
8708  if (r.IsTagged()) {
8709  HCompareGeneric* result =
8710  new(zone()) HCompareGeneric(context, left, right, op);
8711  result->set_position(expr->position());
8712  return ast_context()->ReturnInstruction(result, expr->id());
8713  } else {
8714  HCompareIDAndBranch* result =
8715  new(zone()) HCompareIDAndBranch(left, right, op);
8716  result->set_position(expr->position());
8717  result->SetInputRepresentation(r);
8718  return ast_context()->ReturnControl(result, expr->id());
8719  }
8720  }
8721 }
8722 
8723 
8724 void HGraphBuilder::HandleLiteralCompareNil(CompareOperation* expr,
8725  HValue* value,
8726  NilValue nil) {
8727  ASSERT(!HasStackOverflow());
8728  ASSERT(current_block() != NULL);
8729  ASSERT(current_block()->HasPredecessor());
8730  EqualityKind kind =
8731  expr->op() == Token::EQ_STRICT ? kStrictEquality : kNonStrictEquality;
8732  HIsNilAndBranch* instr = new(zone()) HIsNilAndBranch(value, kind, nil);
8733  instr->set_position(expr->position());
8734  return ast_context()->ReturnControl(instr, expr->id());
8735 }
8736 
8737 
8738 HInstruction* HGraphBuilder::BuildThisFunction() {
8739  // If we share optimized code between different closures, the
8740  // this-function is not a constant, except inside an inlined body.
8741  if (function_state()->outer() != NULL) {
8742  return new(zone()) HConstant(
8745  } else {
8746  return new(zone()) HThisFunction;
8747  }
8748 }
8749 
8750 
8751 void HGraphBuilder::VisitThisFunction(ThisFunction* expr) {
8752  ASSERT(!HasStackOverflow());
8753  ASSERT(current_block() != NULL);
8754  ASSERT(current_block()->HasPredecessor());
8755  HInstruction* instr = BuildThisFunction();
8756  return ast_context()->ReturnInstruction(instr, expr->id());
8757 }
8758 
8759 
8761  ASSERT(globals_.is_empty());
8762  AstVisitor::VisitDeclarations(declarations);
8763  if (!globals_.is_empty()) {
8764  Handle<FixedArray> array =
8765  isolate()->factory()->NewFixedArray(globals_.length(), TENURED);
8766  for (int i = 0; i < globals_.length(); ++i) array->set(i, *globals_.at(i));
8767  int flags = DeclareGlobalsEvalFlag::encode(info()->is_eval()) |
8768  DeclareGlobalsNativeFlag::encode(info()->is_native()) |
8769  DeclareGlobalsLanguageMode::encode(info()->language_mode());
8770  HInstruction* result = new(zone()) HDeclareGlobals(
8771  environment()->LookupContext(), array, flags);
8772  AddInstruction(result);
8773  globals_.Clear();
8774  }
8775 }
8776 
8777 
8778 void HGraphBuilder::VisitVariableDeclaration(VariableDeclaration* declaration) {
8779  VariableProxy* proxy = declaration->proxy();
8780  VariableMode mode = declaration->mode();
8781  Variable* variable = proxy->var();
8782  bool hole_init = mode == CONST || mode == CONST_HARMONY || mode == LET;
8783  switch (variable->location()) {
8784  case Variable::UNALLOCATED:
8785  globals_.Add(variable->name(), zone());
8786  globals_.Add(variable->binding_needs_init()
8787  ? isolate()->factory()->the_hole_value()
8788  : isolate()->factory()->undefined_value(), zone());
8789  return;
8790  case Variable::PARAMETER:
8791  case Variable::LOCAL:
8792  if (hole_init) {
8793  HValue* value = graph()->GetConstantHole();
8794  environment()->Bind(variable, value);
8795  }
8796  break;
8797  case Variable::CONTEXT:
8798  if (hole_init) {
8799  HValue* value = graph()->GetConstantHole();
8800  HValue* context = environment()->LookupContext();
8801  HStoreContextSlot* store = new(zone()) HStoreContextSlot(
8802  context, variable->index(), HStoreContextSlot::kNoCheck, value);
8803  AddInstruction(store);
8804  if (store->HasObservableSideEffects()) AddSimulate(proxy->id());
8805  }
8806  break;
8807  case Variable::LOOKUP:
8808  return Bailout("unsupported lookup slot in declaration");
8809  }
8810 }
8811 
8812 
8813 void HGraphBuilder::VisitFunctionDeclaration(FunctionDeclaration* declaration) {
8814  VariableProxy* proxy = declaration->proxy();
8815  Variable* variable = proxy->var();
8816  switch (variable->location()) {
8817  case Variable::UNALLOCATED: {
8818  globals_.Add(variable->name(), zone());
8819  Handle<SharedFunctionInfo> function =
8820  Compiler::BuildFunctionInfo(declaration->fun(), info()->script());
8821  // Check for stack-overflow exception.
8822  if (function.is_null()) return SetStackOverflow();
8823  globals_.Add(function, zone());
8824  return;
8825  }
8826  case Variable::PARAMETER:
8827  case Variable::LOCAL: {
8828  CHECK_ALIVE(VisitForValue(declaration->fun()));
8829  HValue* value = Pop();
8830  environment()->Bind(variable, value);
8831  break;
8832  }
8833  case Variable::CONTEXT: {
8834  CHECK_ALIVE(VisitForValue(declaration->fun()));
8835  HValue* value = Pop();
8836  HValue* context = environment()->LookupContext();
8837  HStoreContextSlot* store = new(zone()) HStoreContextSlot(
8838  context, variable->index(), HStoreContextSlot::kNoCheck, value);
8839  AddInstruction(store);
8840  if (store->HasObservableSideEffects()) AddSimulate(proxy->id());
8841  break;
8842  }
8843  case Variable::LOOKUP:
8844  return Bailout("unsupported lookup slot in declaration");
8845  }
8846 }
8847 
8848 
8849 void HGraphBuilder::VisitModuleDeclaration(ModuleDeclaration* declaration) {
8850  UNREACHABLE();
8851 }
8852 
8853 
8854 void HGraphBuilder::VisitImportDeclaration(ImportDeclaration* declaration) {
8855  UNREACHABLE();
8856 }
8857 
8858 
8859 void HGraphBuilder::VisitExportDeclaration(ExportDeclaration* declaration) {
8860  UNREACHABLE();
8861 }
8862 
8863 
8864 void HGraphBuilder::VisitModuleLiteral(ModuleLiteral* module) {
8865  UNREACHABLE();
8866 }
8867 
8868 
8869 void HGraphBuilder::VisitModuleVariable(ModuleVariable* module) {
8870  UNREACHABLE();
8871 }
8872 
8873 
8874 void HGraphBuilder::VisitModulePath(ModulePath* module) {
8875  UNREACHABLE();
8876 }
8877 
8878 
8879 void HGraphBuilder::VisitModuleUrl(ModuleUrl* module) {
8880  UNREACHABLE();
8881 }
8882 
8883 
8884 // Generators for inline runtime functions.
8885 // Support for types.
8886 void HGraphBuilder::GenerateIsSmi(CallRuntime* call) {
8887  ASSERT(call->arguments()->length() == 1);
8888  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
8889  HValue* value = Pop();
8890  HIsSmiAndBranch* result = new(zone()) HIsSmiAndBranch(value);
8891  return ast_context()->ReturnControl(result, call->id());
8892 }
8893 
8894 
8895 void HGraphBuilder::GenerateIsSpecObject(CallRuntime* call) {
8896  ASSERT(call->arguments()->length() == 1);
8897  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
8898  HValue* value = Pop();
8899  HHasInstanceTypeAndBranch* result =
8900  new(zone()) HHasInstanceTypeAndBranch(value,
8903  return ast_context()->ReturnControl(result, call->id());
8904 }
8905 
8906 
8907 void HGraphBuilder::GenerateIsFunction(CallRuntime* call) {
8908  ASSERT(call->arguments()->length() == 1);
8909  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
8910  HValue* value = Pop();
8911  HHasInstanceTypeAndBranch* result =
8912  new(zone()) HHasInstanceTypeAndBranch(value, JS_FUNCTION_TYPE);
8913  return ast_context()->ReturnControl(result, call->id());
8914 }
8915 
8916 
8917 void HGraphBuilder::GenerateHasCachedArrayIndex(CallRuntime* call) {
8918  ASSERT(call->arguments()->length() == 1);
8919  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
8920  HValue* value = Pop();
8921  HHasCachedArrayIndexAndBranch* result =
8922  new(zone()) HHasCachedArrayIndexAndBranch(value);
8923  return ast_context()->ReturnControl(result, call->id());
8924 }
8925 
8926 
8927 void HGraphBuilder::GenerateIsArray(CallRuntime* call) {
8928  ASSERT(call->arguments()->length() == 1);
8929  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
8930  HValue* value = Pop();
8931  HHasInstanceTypeAndBranch* result =
8932  new(zone()) HHasInstanceTypeAndBranch(value, JS_ARRAY_TYPE);
8933  return ast_context()->ReturnControl(result, call->id());
8934 }
8935 
8936 
8937 void HGraphBuilder::GenerateIsRegExp(CallRuntime* call) {
8938  ASSERT(call->arguments()->length() == 1);
8939  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
8940  HValue* value = Pop();
8941  HHasInstanceTypeAndBranch* result =
8942  new(zone()) HHasInstanceTypeAndBranch(value, JS_REGEXP_TYPE);
8943  return ast_context()->ReturnControl(result, call->id());
8944 }
8945 
8946 
8947 void HGraphBuilder::GenerateIsObject(CallRuntime* call) {
8948  ASSERT(call->arguments()->length() == 1);
8949  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
8950  HValue* value = Pop();
8951  HIsObjectAndBranch* result = new(zone()) HIsObjectAndBranch(value);
8952  return ast_context()->ReturnControl(result, call->id());
8953 }
8954 
8955 
8956 void HGraphBuilder::GenerateIsNonNegativeSmi(CallRuntime* call) {
8957  return Bailout("inlined runtime function: IsNonNegativeSmi");
8958 }
8959 
8960 
8961 void HGraphBuilder::GenerateIsUndetectableObject(CallRuntime* call) {
8962  ASSERT(call->arguments()->length() == 1);
8963  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
8964  HValue* value = Pop();
8965  HIsUndetectableAndBranch* result =
8966  new(zone()) HIsUndetectableAndBranch(value);
8967  return ast_context()->ReturnControl(result, call->id());
8968 }
8969 
8970 
8971 void HGraphBuilder::GenerateIsStringWrapperSafeForDefaultValueOf(
8972  CallRuntime* call) {
8973  return Bailout(
8974  "inlined runtime function: IsStringWrapperSafeForDefaultValueOf");
8975 }
8976 
8977 
8978 // Support for construct call checks.
8979 void HGraphBuilder::GenerateIsConstructCall(CallRuntime* call) {
8980  ASSERT(call->arguments()->length() == 0);
8981  if (function_state()->outer() != NULL) {
8982  // We are generating graph for inlined function.
8983  HValue* value = function_state()->inlining_kind() == CONSTRUCT_CALL_RETURN
8984  ? graph()->GetConstantTrue()
8985  : graph()->GetConstantFalse();
8986  return ast_context()->ReturnValue(value);
8987  } else {
8988  return ast_context()->ReturnControl(new(zone()) HIsConstructCallAndBranch,
8989  call->id());
8990  }
8991 }
8992 
8993 
8994 // Support for arguments.length and arguments[?].
8995 void HGraphBuilder::GenerateArgumentsLength(CallRuntime* call) {
8996  // Our implementation of arguments (based on this stack frame or an
8997  // adapter below it) does not work for inlined functions. This runtime
8998  // function is blacklisted by AstNode::IsInlineable.
8999  ASSERT(function_state()->outer() == NULL);
9000  ASSERT(call->arguments()->length() == 0);
9001  HInstruction* elements = AddInstruction(
9002  new(zone()) HArgumentsElements(false));
9003  HArgumentsLength* result = new(zone()) HArgumentsLength(elements);
9004  return ast_context()->ReturnInstruction(result, call->id());
9005 }
9006 
9007 
9008 void HGraphBuilder::GenerateArguments(CallRuntime* call) {
9009  // Our implementation of arguments (based on this stack frame or an
9010  // adapter below it) does not work for inlined functions. This runtime
9011  // function is blacklisted by AstNode::IsInlineable.
9012  ASSERT(function_state()->outer() == NULL);
9013  ASSERT(call->arguments()->length() == 1);
9014  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
9015  HValue* index = Pop();
9016  HInstruction* elements = AddInstruction(
9017  new(zone()) HArgumentsElements(false));
9018  HInstruction* length = AddInstruction(new(zone()) HArgumentsLength(elements));
9019  HInstruction* checked_index =
9020  AddInstruction(new(zone()) HBoundsCheck(index, length));
9021  HAccessArgumentsAt* result =
9022  new(zone()) HAccessArgumentsAt(elements, length, checked_index);
9023  return ast_context()->ReturnInstruction(result, call->id());
9024 }
9025 
9026 
9027 // Support for accessing the class and value fields of an object.
9028 void HGraphBuilder::GenerateClassOf(CallRuntime* call) {
9029  // The special form detected by IsClassOfTest is detected before we get here
9030  // and does not cause a bailout.
9031  return Bailout("inlined runtime function: ClassOf");
9032 }
9033 
9034 
9035 void HGraphBuilder::GenerateValueOf(CallRuntime* call) {
9036  ASSERT(call->arguments()->length() == 1);
9037  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
9038  HValue* value = Pop();
9039  HValueOf* result = new(zone()) HValueOf(value);
9040  return ast_context()->ReturnInstruction(result, call->id());
9041 }
9042 
9043 
9044 void HGraphBuilder::GenerateDateField(CallRuntime* call) {
9045  ASSERT(call->arguments()->length() == 2);
9046  ASSERT_NE(NULL, call->arguments()->at(1)->AsLiteral());
9047  Smi* index = Smi::cast(*(call->arguments()->at(1)->AsLiteral()->handle()));
9048  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
9049  HValue* date = Pop();
9050  HDateField* result = new(zone()) HDateField(date, index);
9051  return ast_context()->ReturnInstruction(result, call->id());
9052 }
9053 
9054 
9055 void HGraphBuilder::GenerateSetValueOf(CallRuntime* call) {
9056  ASSERT(call->arguments()->length() == 2);
9057  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
9058  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
9059  HValue* value = Pop();
9060  HValue* object = Pop();
9061  // Check if object is a not a smi.
9062  HIsSmiAndBranch* smicheck = new(zone()) HIsSmiAndBranch(object);
9063  HBasicBlock* if_smi = graph()->CreateBasicBlock();
9064  HBasicBlock* if_heap_object = graph()->CreateBasicBlock();
9065  HBasicBlock* join = graph()->CreateBasicBlock();
9066  smicheck->SetSuccessorAt(0, if_smi);
9067  smicheck->SetSuccessorAt(1, if_heap_object);
9068  current_block()->Finish(smicheck);
9069  if_smi->Goto(join);
9070 
9071  // Check if object is a JSValue.
9072  set_current_block(if_heap_object);
9073  HHasInstanceTypeAndBranch* typecheck =
9074  new(zone()) HHasInstanceTypeAndBranch(object, JS_VALUE_TYPE);
9075  HBasicBlock* if_js_value = graph()->CreateBasicBlock();
9076  HBasicBlock* not_js_value = graph()->CreateBasicBlock();
9077  typecheck->SetSuccessorAt(0, if_js_value);
9078  typecheck->SetSuccessorAt(1, not_js_value);
9079  current_block()->Finish(typecheck);
9080  not_js_value->Goto(join);
9081 
9082  // Create in-object property store to kValueOffset.
9083  set_current_block(if_js_value);
9084  Handle<String> name = isolate()->factory()->undefined_symbol();
9085  AddInstruction(new(zone()) HStoreNamedField(object,
9086  name,
9087  value,
9088  true, // in-object store.
9090  if_js_value->Goto(join);
9091  join->SetJoinId(call->id());
9092  set_current_block(join);
9093  return ast_context()->ReturnValue(value);
9094 }
9095 
9096 
9097 // Fast support for charCodeAt(n).
9098 void HGraphBuilder::GenerateStringCharCodeAt(CallRuntime* call) {
9099  ASSERT(call->arguments()->length() == 2);
9100  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
9101  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
9102  HValue* index = Pop();
9103  HValue* string = Pop();
9104  HValue* context = environment()->LookupContext();
9105  HStringCharCodeAt* result = BuildStringCharCodeAt(context, string, index);
9106  return ast_context()->ReturnInstruction(result, call->id());
9107 }
9108 
9109 
9110 // Fast support for string.charAt(n) and string[n].
9111 void HGraphBuilder::GenerateStringCharFromCode(CallRuntime* call) {
9112  ASSERT(call->arguments()->length() == 1);
9113  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
9114  HValue* char_code = Pop();
9115  HValue* context = environment()->LookupContext();
9116  HStringCharFromCode* result =
9117  new(zone()) HStringCharFromCode(context, char_code);
9118  return ast_context()->ReturnInstruction(result, call->id());
9119 }
9120 
9121 
9122 // Fast support for string.charAt(n) and string[n].
9123 void HGraphBuilder::GenerateStringCharAt(CallRuntime* call) {
9124  ASSERT(call->arguments()->length() == 2);
9125  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
9126  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
9127  HValue* index = Pop();
9128  HValue* string = Pop();
9129  HValue* context = environment()->LookupContext();
9130  HStringCharCodeAt* char_code = BuildStringCharCodeAt(context, string, index);
9131  AddInstruction(char_code);
9132  HStringCharFromCode* result =
9133  new(zone()) HStringCharFromCode(context, char_code);
9134  return ast_context()->ReturnInstruction(result, call->id());
9135 }
9136 
9137 
9138 // Fast support for object equality testing.
9139 void HGraphBuilder::GenerateObjectEquals(CallRuntime* call) {
9140  ASSERT(call->arguments()->length() == 2);
9141  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
9142  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
9143  HValue* right = Pop();
9144  HValue* left = Pop();
9145  HCompareObjectEqAndBranch* result =
9146  new(zone()) HCompareObjectEqAndBranch(left, right);
9147  return ast_context()->ReturnControl(result, call->id());
9148 }
9149 
9150 
9151 void HGraphBuilder::GenerateLog(CallRuntime* call) {
9152  // %_Log is ignored in optimized code.
9153  return ast_context()->ReturnValue(graph()->GetConstantUndefined());
9154 }
9155 
9156 
9157 // Fast support for Math.random().
9158 void HGraphBuilder::GenerateRandomHeapNumber(CallRuntime* call) {
9159  HValue* context = environment()->LookupContext();
9160  HGlobalObject* global_object = new(zone()) HGlobalObject(context);
9161  AddInstruction(global_object);
9162  HRandom* result = new(zone()) HRandom(global_object);
9163  return ast_context()->ReturnInstruction(result, call->id());
9164 }
9165 
9166 
9167 // Fast support for StringAdd.
9168 void HGraphBuilder::GenerateStringAdd(CallRuntime* call) {
9169  ASSERT_EQ(2, call->arguments()->length());
9170  CHECK_ALIVE(VisitArgumentList(call->arguments()));
9171  HValue* context = environment()->LookupContext();
9172  HCallStub* result = new(zone()) HCallStub(context, CodeStub::StringAdd, 2);
9173  Drop(2);
9174  return ast_context()->ReturnInstruction(result, call->id());
9175 }
9176 
9177 
9178 // Fast support for SubString.
9179 void HGraphBuilder::GenerateSubString(CallRuntime* call) {
9180  ASSERT_EQ(3, call->arguments()->length());
9181  CHECK_ALIVE(VisitArgumentList(call->arguments()));
9182  HValue* context = environment()->LookupContext();
9183  HCallStub* result = new(zone()) HCallStub(context, CodeStub::SubString, 3);
9184  Drop(3);
9185  return ast_context()->ReturnInstruction(result, call->id());
9186 }
9187 
9188 
9189 // Fast support for StringCompare.
9190 void HGraphBuilder::GenerateStringCompare(CallRuntime* call) {
9191  ASSERT_EQ(2, call->arguments()->length());
9192  CHECK_ALIVE(VisitArgumentList(call->arguments()));
9193  HValue* context = environment()->LookupContext();
9194  HCallStub* result =
9195  new(zone()) HCallStub(context, CodeStub::StringCompare, 2);
9196  Drop(2);
9197  return ast_context()->ReturnInstruction(result, call->id());
9198 }
9199 
9200 
9201 // Support for direct calls from JavaScript to native RegExp code.
9202 void HGraphBuilder::GenerateRegExpExec(CallRuntime* call) {
9203  ASSERT_EQ(4, call->arguments()->length());
9204  CHECK_ALIVE(VisitArgumentList(call->arguments()));
9205  HValue* context = environment()->LookupContext();
9206  HCallStub* result = new(zone()) HCallStub(context, CodeStub::RegExpExec, 4);
9207  Drop(4);
9208  return ast_context()->ReturnInstruction(result, call->id());
9209 }
9210 
9211 
9212 // Construct a RegExp exec result with two in-object properties.
9213 void HGraphBuilder::GenerateRegExpConstructResult(CallRuntime* call) {
9214  ASSERT_EQ(3, call->arguments()->length());
9215  CHECK_ALIVE(VisitArgumentList(call->arguments()));
9216  HValue* context = environment()->LookupContext();
9217  HCallStub* result =
9218  new(zone()) HCallStub(context, CodeStub::RegExpConstructResult, 3);
9219  Drop(3);
9220  return ast_context()->ReturnInstruction(result, call->id());
9221 }
9222 
9223 
9224 // Support for fast native caches.
9225 void HGraphBuilder::GenerateGetFromCache(CallRuntime* call) {
9226  return Bailout("inlined runtime function: GetFromCache");
9227 }
9228 
9229 
9230 // Fast support for number to string.
9231 void HGraphBuilder::GenerateNumberToString(CallRuntime* call) {
9232  ASSERT_EQ(1, call->arguments()->length());
9233  CHECK_ALIVE(VisitArgumentList(call->arguments()));
9234  HValue* context = environment()->LookupContext();
9235  HCallStub* result =
9236  new(zone()) HCallStub(context, CodeStub::NumberToString, 1);
9237  Drop(1);
9238  return ast_context()->ReturnInstruction(result, call->id());
9239 }
9240 
9241 
9242 // Fast call for custom callbacks.
9243 void HGraphBuilder::GenerateCallFunction(CallRuntime* call) {
9244  // 1 ~ The function to call is not itself an argument to the call.
9245  int arg_count = call->arguments()->length() - 1;
9246  ASSERT(arg_count >= 1); // There's always at least a receiver.
9247 
9248  for (int i = 0; i < arg_count; ++i) {
9249  CHECK_ALIVE(VisitArgument(call->arguments()->at(i)));
9250  }
9251  CHECK_ALIVE(VisitForValue(call->arguments()->last()));
9252 
9253  HValue* function = Pop();
9254  HValue* context = environment()->LookupContext();
9255 
9256  // Branch for function proxies, or other non-functions.
9257  HHasInstanceTypeAndBranch* typecheck =
9258  new(zone()) HHasInstanceTypeAndBranch(function, JS_FUNCTION_TYPE);
9259  HBasicBlock* if_jsfunction = graph()->CreateBasicBlock();
9260  HBasicBlock* if_nonfunction = graph()->CreateBasicBlock();
9261  HBasicBlock* join = graph()->CreateBasicBlock();
9262  typecheck->SetSuccessorAt(0, if_jsfunction);
9263  typecheck->SetSuccessorAt(1, if_nonfunction);
9264  current_block()->Finish(typecheck);
9265 
9266  set_current_block(if_jsfunction);
9267  HInstruction* invoke_result = AddInstruction(
9268  new(zone()) HInvokeFunction(context, function, arg_count));
9269  Drop(arg_count);
9270  Push(invoke_result);
9271  if_jsfunction->Goto(join);
9272 
9273  set_current_block(if_nonfunction);
9274  HInstruction* call_result = AddInstruction(
9275  new(zone()) HCallFunction(context, function, arg_count));
9276  Drop(arg_count);
9277  Push(call_result);
9278  if_nonfunction->Goto(join);
9279 
9280  set_current_block(join);
9281  join->SetJoinId(call->id());
9282  return ast_context()->ReturnValue(Pop());
9283 }
9284 
9285 
9286 // Fast call to math functions.
9287 void HGraphBuilder::GenerateMathPow(CallRuntime* call) {
9288  ASSERT_EQ(2, call->arguments()->length());
9289  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
9290  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
9291  HValue* right = Pop();
9292  HValue* left = Pop();
9293  HPower* result = new(zone()) HPower(left, right);
9294  return ast_context()->ReturnInstruction(result, call->id());
9295 }
9296 
9297 
9298 void HGraphBuilder::GenerateMathSin(CallRuntime* call) {
9299  ASSERT_EQ(1, call->arguments()->length());
9300  CHECK_ALIVE(VisitArgumentList(call->arguments()));
9301  HValue* context = environment()->LookupContext();
9302  HCallStub* result =
9303  new(zone()) HCallStub(context, CodeStub::TranscendentalCache, 1);
9304  result->set_transcendental_type(TranscendentalCache::SIN);
9305  Drop(1);
9306  return ast_context()->ReturnInstruction(result, call->id());
9307 }
9308 
9309 
9310 void HGraphBuilder::GenerateMathCos(CallRuntime* call) {
9311  ASSERT_EQ(1, call->arguments()->length());
9312  CHECK_ALIVE(VisitArgumentList(call->arguments()));
9313  HValue* context = environment()->LookupContext();
9314  HCallStub* result =
9315  new(zone()) HCallStub(context, CodeStub::TranscendentalCache, 1);
9316  result->set_transcendental_type(TranscendentalCache::COS);
9317  Drop(1);
9318  return ast_context()->ReturnInstruction(result, call->id());
9319 }
9320 
9321 
9322 void HGraphBuilder::GenerateMathTan(CallRuntime* call) {
9323  ASSERT_EQ(1, call->arguments()->length());
9324  CHECK_ALIVE(VisitArgumentList(call->arguments()));
9325  HValue* context = environment()->LookupContext();
9326  HCallStub* result =
9327  new(zone()) HCallStub(context, CodeStub::TranscendentalCache, 1);
9328  result->set_transcendental_type(TranscendentalCache::TAN);
9329  Drop(1);
9330  return ast_context()->ReturnInstruction(result, call->id());
9331 }
9332 
9333 
9334 void HGraphBuilder::GenerateMathLog(CallRuntime* call) {
9335  ASSERT_EQ(1, call->arguments()->length());
9336  CHECK_ALIVE(VisitArgumentList(call->arguments()));
9337  HValue* context = environment()->LookupContext();
9338  HCallStub* result =
9339  new(zone()) HCallStub(context, CodeStub::TranscendentalCache, 1);
9340  result->set_transcendental_type(TranscendentalCache::LOG);
9341  Drop(1);
9342  return ast_context()->ReturnInstruction(result, call->id());
9343 }
9344 
9345 
9346 void HGraphBuilder::GenerateMathSqrt(CallRuntime* call) {
9347  return Bailout("inlined runtime function: MathSqrt");
9348 }
9349 
9350 
9351 // Check whether two RegExps are equivalent
9352 void HGraphBuilder::GenerateIsRegExpEquivalent(CallRuntime* call) {
9353  return Bailout("inlined runtime function: IsRegExpEquivalent");
9354 }
9355 
9356 
9357 void HGraphBuilder::GenerateGetCachedArrayIndex(CallRuntime* call) {
9358  ASSERT(call->arguments()->length() == 1);
9359  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
9360  HValue* value = Pop();
9361  HGetCachedArrayIndex* result = new(zone()) HGetCachedArrayIndex(value);
9362  return ast_context()->ReturnInstruction(result, call->id());
9363 }
9364 
9365 
9366 void HGraphBuilder::GenerateFastAsciiArrayJoin(CallRuntime* call) {
9367  return Bailout("inlined runtime function: FastAsciiArrayJoin");
9368 }
9369 
9370 
9371 #undef CHECK_BAILOUT
9372 #undef CHECK_ALIVE
9373 
9374 
9376  Scope* scope,
9377  Handle<JSFunction> closure,
9378  Zone* zone)
9379  : closure_(closure),
9380  values_(0, zone),
9381  assigned_variables_(4, zone),
9382  frame_type_(JS_FUNCTION),
9383  parameter_count_(0),
9384  specials_count_(1),
9385  local_count_(0),
9386  outer_(outer),
9387  entry_(NULL),
9388  pop_count_(0),
9389  push_count_(0),
9390  ast_id_(BailoutId::None()),
9391  zone_(zone) {
9392  Initialize(scope->num_parameters() + 1, scope->num_stack_slots(), 0);
9393 }
9394 
9395 
9396 HEnvironment::HEnvironment(const HEnvironment* other, Zone* zone)
9397  : values_(0, zone),
9398  assigned_variables_(0, zone),
9399  frame_type_(JS_FUNCTION),
9400  parameter_count_(0),
9401  specials_count_(1),
9402  local_count_(0),
9403  outer_(NULL),
9404  entry_(NULL),
9405  pop_count_(0),
9406  push_count_(0),
9407  ast_id_(other->ast_id()),
9408  zone_(zone) {
9409  Initialize(other);
9410 }
9411 
9412 
9413 HEnvironment::HEnvironment(HEnvironment* outer,
9414  Handle<JSFunction> closure,
9415  FrameType frame_type,
9416  int arguments,
9417  Zone* zone)
9418  : closure_(closure),
9419  values_(arguments, zone),
9420  assigned_variables_(0, zone),
9421  frame_type_(frame_type),
9422  parameter_count_(arguments),
9423  local_count_(0),
9424  outer_(outer),
9425  entry_(NULL),
9426  pop_count_(0),
9427  push_count_(0),
9428  ast_id_(BailoutId::None()),
9429  zone_(zone) {
9430 }
9431 
9432 
9433 void HEnvironment::Initialize(int parameter_count,
9434  int local_count,
9435  int stack_height) {
9436  parameter_count_ = parameter_count;
9437  local_count_ = local_count;
9438 
9439  // Avoid reallocating the temporaries' backing store on the first Push.
9440  int total = parameter_count + specials_count_ + local_count + stack_height;
9441  values_.Initialize(total + 4, zone());
9442  for (int i = 0; i < total; ++i) values_.Add(NULL, zone());
9443 }
9444 
9445 
9446 void HEnvironment::Initialize(const HEnvironment* other) {
9447  closure_ = other->closure();
9448  values_.AddAll(other->values_, zone());
9449  assigned_variables_.AddAll(other->assigned_variables_, zone());
9450  frame_type_ = other->frame_type_;
9451  parameter_count_ = other->parameter_count_;
9452  local_count_ = other->local_count_;
9453  if (other->outer_ != NULL) outer_ = other->outer_->Copy(); // Deep copy.
9454  entry_ = other->entry_;
9455  pop_count_ = other->pop_count_;
9456  push_count_ = other->push_count_;
9457  ast_id_ = other->ast_id_;
9458 }
9459 
9460 
9462  ASSERT(!block->IsLoopHeader());
9463  ASSERT(values_.length() == other->values_.length());
9464 
9465  int length = values_.length();
9466  for (int i = 0; i < length; ++i) {
9467  HValue* value = values_[i];
9468  if (value != NULL && value->IsPhi() && value->block() == block) {
9469  // There is already a phi for the i'th value.
9470  HPhi* phi = HPhi::cast(value);
9471  // Assert index is correct and that we haven't missed an incoming edge.
9472  ASSERT(phi->merged_index() == i);
9473  ASSERT(phi->OperandCount() == block->predecessors()->length());
9474  phi->AddInput(other->values_[i]);
9475  } else if (values_[i] != other->values_[i]) {
9476  // There is a fresh value on the incoming edge, a phi is needed.
9477  ASSERT(values_[i] != NULL && other->values_[i] != NULL);
9478  HPhi* phi = new(zone()) HPhi(i, zone());
9479  HValue* old_value = values_[i];
9480  for (int j = 0; j < block->predecessors()->length(); j++) {
9481  phi->AddInput(old_value);
9482  }
9483  phi->AddInput(other->values_[i]);
9484  this->values_[i] = phi;
9485  block->AddPhi(phi);
9486  }
9487  }
9488 }
9489 
9490 
9491 void HEnvironment::Bind(int index, HValue* value) {
9492  ASSERT(value != NULL);
9493  if (!assigned_variables_.Contains(index)) {
9494  assigned_variables_.Add(index, zone());
9495  }
9496  values_[index] = value;
9497 }
9498 
9499 
9500 bool HEnvironment::HasExpressionAt(int index) const {
9501  return index >= parameter_count_ + specials_count_ + local_count_;
9502 }
9503 
9504 
9507  return length() == first_expression_index();
9508 }
9509 
9510 
9511 void HEnvironment::SetExpressionStackAt(int index_from_top, HValue* value) {
9512  int count = index_from_top + 1;
9513  int index = values_.length() - count;
9514  ASSERT(HasExpressionAt(index));
9515  // The push count must include at least the element in question or else
9516  // the new value will not be included in this environment's history.
9517  if (push_count_ < count) {
9518  // This is the same effect as popping then re-pushing 'count' elements.
9519  pop_count_ += (count - push_count_);
9520  push_count_ = count;
9521  }
9522  values_[index] = value;
9523 }
9524 
9525 
9526 void HEnvironment::Drop(int count) {
9527  for (int i = 0; i < count; ++i) {
9528  Pop();
9529  }
9530 }
9531 
9532 
9534  return new(zone()) HEnvironment(this, zone());
9535 }
9536 
9537 
9539  HEnvironment* result = Copy();
9540  result->ClearHistory();
9541  return result;
9542 }
9543 
9544 
9546  HEnvironment* new_env = Copy();
9547  for (int i = 0; i < values_.length(); ++i) {
9548  HPhi* phi = new(zone()) HPhi(i, zone());
9549  phi->AddInput(values_[i]);
9550  new_env->values_[i] = phi;
9551  loop_header->AddPhi(phi);
9552  }
9553  new_env->ClearHistory();
9554  return new_env;
9555 }
9556 
9557 
9558 HEnvironment* HEnvironment::CreateStubEnvironment(HEnvironment* outer,
9559  Handle<JSFunction> target,
9560  FrameType frame_type,
9561  int arguments) const {
9562  HEnvironment* new_env =
9563  new(zone()) HEnvironment(outer, target, frame_type,
9564  arguments + 1, zone());
9565  for (int i = 0; i <= arguments; ++i) { // Include receiver.
9566  new_env->Push(ExpressionStackAt(arguments - i));
9567  }
9568  new_env->ClearHistory();
9569  return new_env;
9570 }
9571 
9572 
9574  Handle<JSFunction> target,
9575  int arguments,
9576  FunctionLiteral* function,
9577  HConstant* undefined,
9578  CallKind call_kind,
9579  InliningKind inlining_kind) const {
9581 
9582  // Outer environment is a copy of this one without the arguments.
9583  int arity = function->scope()->num_parameters();
9584 
9585  HEnvironment* outer = Copy();
9586  outer->Drop(arguments + 1); // Including receiver.
9587  outer->ClearHistory();
9588 
9589  if (inlining_kind == CONSTRUCT_CALL_RETURN) {
9590  // Create artificial constructor stub environment. The receiver should
9591  // actually be the constructor function, but we pass the newly allocated
9592  // object instead, DoComputeConstructStubFrame() relies on that.
9593  outer = CreateStubEnvironment(outer, target, JS_CONSTRUCT, arguments);
9594  } else if (inlining_kind == GETTER_CALL_RETURN) {
9595  // We need an additional StackFrame::INTERNAL frame for restoring the
9596  // correct context.
9597  outer = CreateStubEnvironment(outer, target, JS_GETTER, arguments);
9598  } else if (inlining_kind == SETTER_CALL_RETURN) {
9599  // We need an additional StackFrame::INTERNAL frame for temporarily saving
9600  // the argument of the setter, see StoreStubCompiler::CompileStoreViaSetter.
9601  outer = CreateStubEnvironment(outer, target, JS_SETTER, arguments);
9602  }
9603 
9604  if (arity != arguments) {
9605  // Create artificial arguments adaptation environment.
9606  outer = CreateStubEnvironment(outer, target, ARGUMENTS_ADAPTOR, arguments);
9607  }
9608 
9609  HEnvironment* inner =
9610  new(zone()) HEnvironment(outer, function->scope(), target, zone());
9611  // Get the argument values from the original environment.
9612  for (int i = 0; i <= arity; ++i) { // Include receiver.
9613  HValue* push = (i <= arguments) ?
9614  ExpressionStackAt(arguments - i) : undefined;
9615  inner->SetValueAt(i, push);
9616  }
9617  // If the function we are inlining is a strict mode function or a
9618  // builtin function, pass undefined as the receiver for function
9619  // calls (instead of the global receiver).
9620  if ((target->shared()->native() || !function->is_classic_mode()) &&
9621  call_kind == CALL_AS_FUNCTION && inlining_kind != CONSTRUCT_CALL_RETURN) {
9622  inner->SetValueAt(0, undefined);
9623  }
9624  inner->SetValueAt(arity + 1, LookupContext());
9625  for (int i = arity + 2; i < inner->length(); ++i) {
9626  inner->SetValueAt(i, undefined);
9627  }
9628 
9630  return inner;
9631 }
9632 
9633 
9635  for (int i = 0; i < length(); i++) {
9636  if (i == 0) stream->Add("parameters\n");
9637  if (i == parameter_count()) stream->Add("specials\n");
9638  if (i == parameter_count() + specials_count()) stream->Add("locals\n");
9639  if (i == parameter_count() + specials_count() + local_count()) {
9640  stream->Add("expressions\n");
9641  }
9642  HValue* val = values_.at(i);
9643  stream->Add("%d: ", i);
9644  if (val != NULL) {
9645  val->PrintNameTo(stream);
9646  } else {
9647  stream->Add("NULL");
9648  }
9649  stream->Add("\n");
9650  }
9651  PrintF("\n");
9652 }
9653 
9654 
9656  HeapStringAllocator string_allocator;
9657  StringStream trace(&string_allocator);
9658  PrintTo(&trace);
9659  PrintF("%s", *trace.ToCString());
9660 }
9661 
9662 
9664  Tag tag(this, "compilation");
9665  Handle<String> name = function->debug_name();
9666  PrintStringProperty("name", *name->ToCString());
9667  PrintStringProperty("method", *name->ToCString());
9668  PrintLongProperty("date", static_cast<int64_t>(OS::TimeCurrentMillis()));
9669 }
9670 
9671 
9672 void HTracer::TraceLithium(const char* name, LChunk* chunk) {
9673  Trace(name, chunk->graph(), chunk);
9674 }
9675 
9676 
9677 void HTracer::TraceHydrogen(const char* name, HGraph* graph) {
9678  Trace(name, graph, NULL);
9679 }
9680 
9681 
9682 void HTracer::Trace(const char* name, HGraph* graph, LChunk* chunk) {
9683  Tag tag(this, "cfg");
9684  PrintStringProperty("name", name);
9685  const ZoneList<HBasicBlock*>* blocks = graph->blocks();
9686  for (int i = 0; i < blocks->length(); i++) {
9687  HBasicBlock* current = blocks->at(i);
9688  Tag block_tag(this, "block");
9689  PrintBlockProperty("name", current->block_id());
9690  PrintIntProperty("from_bci", -1);
9691  PrintIntProperty("to_bci", -1);
9692 
9693  if (!current->predecessors()->is_empty()) {
9694  PrintIndent();
9695  trace_.Add("predecessors");
9696  for (int j = 0; j < current->predecessors()->length(); ++j) {
9697  trace_.Add(" \"B%d\"", current->predecessors()->at(j)->block_id());
9698  }
9699  trace_.Add("\n");
9700  } else {
9701  PrintEmptyProperty("predecessors");
9702  }
9703 
9704  if (current->end()->SuccessorCount() == 0) {
9705  PrintEmptyProperty("successors");
9706  } else {
9707  PrintIndent();
9708  trace_.Add("successors");
9709  for (HSuccessorIterator it(current->end()); !it.Done(); it.Advance()) {
9710  trace_.Add(" \"B%d\"", it.Current()->block_id());
9711  }
9712  trace_.Add("\n");
9713  }
9714 
9715  PrintEmptyProperty("xhandlers");
9716  const char* flags = current->IsLoopSuccessorDominator()
9717  ? "dom-loop-succ"
9718  : "";
9719  PrintStringProperty("flags", flags);
9720 
9721  if (current->dominator() != NULL) {
9722  PrintBlockProperty("dominator", current->dominator()->block_id());
9723  }
9724 
9725  PrintIntProperty("loop_depth", current->LoopNestingDepth());
9726 
9727  if (chunk != NULL) {
9728  int first_index = current->first_instruction_index();
9729  int last_index = current->last_instruction_index();
9730  PrintIntProperty(
9731  "first_lir_id",
9732  LifetimePosition::FromInstructionIndex(first_index).Value());
9733  PrintIntProperty(
9734  "last_lir_id",
9735  LifetimePosition::FromInstructionIndex(last_index).Value());
9736  }
9737 
9738  {
9739  Tag states_tag(this, "states");
9740  Tag locals_tag(this, "locals");
9741  int total = current->phis()->length();
9742  PrintIntProperty("size", current->phis()->length());
9743  PrintStringProperty("method", "None");
9744  for (int j = 0; j < total; ++j) {
9745  HPhi* phi = current->phis()->at(j);
9746  PrintIndent();
9747  trace_.Add("%d ", phi->merged_index());
9748  phi->PrintNameTo(&trace_);
9749  trace_.Add(" ");
9750  phi->PrintTo(&trace_);
9751  trace_.Add("\n");
9752  }
9753  }
9754 
9755  {
9756  Tag HIR_tag(this, "HIR");
9757  HInstruction* instruction = current->first();
9758  while (instruction != NULL) {
9759  int bci = 0;
9760  int uses = instruction->UseCount();
9761  PrintIndent();
9762  trace_.Add("%d %d ", bci, uses);
9763  instruction->PrintNameTo(&trace_);
9764  trace_.Add(" ");
9765  instruction->PrintTo(&trace_);
9766  trace_.Add(" <|@\n");
9767  instruction = instruction->next();
9768  }
9769  }
9770 
9771 
9772  if (chunk != NULL) {
9773  Tag LIR_tag(this, "LIR");
9774  int first_index = current->first_instruction_index();
9775  int last_index = current->last_instruction_index();
9776  if (first_index != -1 && last_index != -1) {
9777  const ZoneList<LInstruction*>* instructions = chunk->instructions();
9778  for (int i = first_index; i <= last_index; ++i) {
9779  LInstruction* linstr = instructions->at(i);
9780  if (linstr != NULL) {
9781  PrintIndent();
9782  trace_.Add("%d ",
9784  linstr->PrintTo(&trace_);
9785  trace_.Add(" <|@\n");
9786  }
9787  }
9788  }
9789  }
9790  }
9791 }
9792 
9793 
9794 void HTracer::TraceLiveRanges(const char* name, LAllocator* allocator) {
9795  Tag tag(this, "intervals");
9796  PrintStringProperty("name", name);
9797 
9798  const Vector<LiveRange*>* fixed_d = allocator->fixed_double_live_ranges();
9799  for (int i = 0; i < fixed_d->length(); ++i) {
9800  TraceLiveRange(fixed_d->at(i), "fixed", allocator->zone());
9801  }
9802 
9803  const Vector<LiveRange*>* fixed = allocator->fixed_live_ranges();
9804  for (int i = 0; i < fixed->length(); ++i) {
9805  TraceLiveRange(fixed->at(i), "fixed", allocator->zone());
9806  }
9807 
9808  const ZoneList<LiveRange*>* live_ranges = allocator->live_ranges();
9809  for (int i = 0; i < live_ranges->length(); ++i) {
9810  TraceLiveRange(live_ranges->at(i), "object", allocator->zone());
9811  }
9812 }
9813 
9814 
9815 void HTracer::TraceLiveRange(LiveRange* range, const char* type,
9816  Zone* zone) {
9817  if (range != NULL && !range->IsEmpty()) {
9818  PrintIndent();
9819  trace_.Add("%d %s", range->id(), type);
9820  if (range->HasRegisterAssigned()) {
9821  LOperand* op = range->CreateAssignedOperand(zone);
9822  int assigned_reg = op->index();
9823  if (op->IsDoubleRegister()) {
9824  trace_.Add(" \"%s\"",
9826  } else {
9827  ASSERT(op->IsRegister());
9828  trace_.Add(" \"%s\"", Register::AllocationIndexToString(assigned_reg));
9829  }
9830  } else if (range->IsSpilled()) {
9831  LOperand* op = range->TopLevel()->GetSpillOperand();
9832  if (op->IsDoubleStackSlot()) {
9833  trace_.Add(" \"double_stack:%d\"", op->index());
9834  } else {
9835  ASSERT(op->IsStackSlot());
9836  trace_.Add(" \"stack:%d\"", op->index());
9837  }
9838  }
9839  int parent_index = -1;
9840  if (range->IsChild()) {
9841  parent_index = range->parent()->id();
9842  } else {
9843  parent_index = range->id();
9844  }
9845  LOperand* op = range->FirstHint();
9846  int hint_index = -1;
9847  if (op != NULL && op->IsUnallocated()) {
9848  hint_index = LUnallocated::cast(op)->virtual_register();
9849  }
9850  trace_.Add(" %d %d", parent_index, hint_index);
9851  UseInterval* cur_interval = range->first_interval();
9852  while (cur_interval != NULL && range->Covers(cur_interval->start())) {
9853  trace_.Add(" [%d, %d[",
9854  cur_interval->start().Value(),
9855  cur_interval->end().Value());
9856  cur_interval = cur_interval->next();
9857  }
9858 
9859  UsePosition* current_pos = range->first_pos();
9860  while (current_pos != NULL) {
9861  if (current_pos->RegisterIsBeneficial() || FLAG_trace_all_uses) {
9862  trace_.Add(" %d M", current_pos->pos().Value());
9863  }
9864  current_pos = current_pos->next();
9865  }
9866 
9867  trace_.Add(" \"\"\n");
9868  }
9869 }
9870 
9871 
9872 void HTracer::FlushToFile() {
9873  AppendChars(filename_, *trace_.ToCString(), trace_.length(), false);
9874  trace_.Reset();
9875 }
9876 
9877 
9879  source_size_ += info->shared_info()->SourceSize();
9880 }
9881 
9882 
9884  PrintF("Timing results:\n");
9885  int64_t sum = 0;
9886  for (int i = 0; i < timing_.length(); ++i) {
9887  sum += timing_[i];
9888  }
9889 
9890  for (int i = 0; i < names_.length(); ++i) {
9891  PrintF("%30s", names_[i]);
9892  double ms = static_cast<double>(timing_[i]) / 1000;
9893  double percent = static_cast<double>(timing_[i]) * 100 / sum;
9894  PrintF(" - %7.3f ms / %4.1f %% ", ms, percent);
9895 
9896  unsigned size = sizes_[i];
9897  double size_percent = static_cast<double>(size) * 100 / total_size_;
9898  PrintF(" %8u bytes / %4.1f %%\n", size, size_percent);
9899  }
9900  double source_size_in_kb = static_cast<double>(source_size_) / 1024;
9901  double normalized_time = source_size_in_kb > 0
9902  ? (static_cast<double>(sum) / 1000) / source_size_in_kb
9903  : 0;
9904  double normalized_bytes = source_size_in_kb > 0
9905  ? total_size_ / source_size_in_kb
9906  : 0;
9907  PrintF("%30s - %7.3f ms %7.3f bytes\n", "Sum",
9908  normalized_time, normalized_bytes);
9909  PrintF("---------------------------------------------------------------\n");
9910  PrintF("%30s - %7.3f ms (%.1f times slower than full code gen)\n",
9911  "Total",
9912  static_cast<double>(total_) / 1000,
9913  static_cast<double>(total_) / full_code_gen_);
9914 }
9915 
9916 
9917 void HStatistics::SaveTiming(const char* name, int64_t ticks, unsigned size) {
9918  if (name == HPhase::kFullCodeGen) {
9919  full_code_gen_ += ticks;
9920  } else if (name == HPhase::kTotal) {
9921  total_ += ticks;
9922  } else {
9923  total_size_ += size;
9924  for (int i = 0; i < names_.length(); ++i) {
9925  if (names_[i] == name) {
9926  timing_[i] += ticks;
9927  sizes_[i] += size;
9928  return;
9929  }
9930  }
9931  names_.Add(name);
9932  timing_.Add(ticks);
9933  sizes_.Add(size);
9934  }
9935 }
9936 
9937 
9938 const char* const HPhase::kFullCodeGen = "Full code generator";
9939 const char* const HPhase::kTotal = "Total";
9940 
9941 
9942 void HPhase::Begin(const char* name,
9943  HGraph* graph,
9944  LChunk* chunk,
9945  LAllocator* allocator) {
9946  name_ = name;
9947  graph_ = graph;
9948  chunk_ = chunk;
9949  allocator_ = allocator;
9950  if (allocator != NULL && chunk_ == NULL) {
9951  chunk_ = allocator->chunk();
9952  }
9953  if (FLAG_hydrogen_stats) start_ = OS::Ticks();
9954  start_allocation_size_ = Zone::allocation_size_;
9955 }
9956 
9957 
9958 void HPhase::End() const {
9959  if (FLAG_hydrogen_stats) {
9960  int64_t end = OS::Ticks();
9961  unsigned size = Zone::allocation_size_ - start_allocation_size_;
9962  HStatistics::Instance()->SaveTiming(name_, end - start_, size);
9963  }
9964 
9965  // Produce trace output if flag is set so that the first letter of the
9966  // phase name matches the command line parameter FLAG_trace_phase.
9967  if (FLAG_trace_hydrogen &&
9968  OS::StrChr(const_cast<char*>(FLAG_trace_phase), name_[0]) != NULL) {
9969  if (graph_ != NULL) HTracer::Instance()->TraceHydrogen(name_, graph_);
9970  if (chunk_ != NULL) HTracer::Instance()->TraceLithium(name_, chunk_);
9971  if (allocator_ != NULL) {
9972  HTracer::Instance()->TraceLiveRanges(name_, allocator_);
9973  }
9974  }
9975 
9976 #ifdef DEBUG
9977  if (graph_ != NULL) graph_->Verify(false); // No full verify.
9978  if (allocator_ != NULL) allocator_->Verify();
9979 #endif
9980 }
9981 
9982 } } // namespace v8::internal
Isolate * isolate()
Definition: hydrogen.h:247
static HPhi * cast(HValue *value)
void SetInitialEnvironment(HEnvironment *env)
Definition: hydrogen.cc:199
bool HasObservableSideEffects() const
#define INLINE_FUNCTION_LIST(F)
Definition: runtime.h:503
bool IsTest() const
Definition: hydrogen.h:628
int index() const
Definition: lithium.h:62
ArgumentsAllowedFlag
Definition: hydrogen.h:617
static LUnallocated * cast(LOperand *op)
Definition: lithium.h:198
TypeFeedbackId test_id() const
Definition: ast.h:353
bool IsFinished() const
Definition: hydrogen.h:101
#define INLINE_FUNCTION_GENERATOR_ADDRESS(Name, argc, ressize)
Definition: hydrogen.cc:7817
bool IsDeoptimizing() const
Definition: hydrogen.h:149
HBasicBlock * if_true() const
Definition: hydrogen.h:728
GVNFlagSet gvn_flags() const
Handle< Map > GetCompareMap(CompareOperation *expr)
Definition: type-info.cc:355
VariableDeclaration * function() const
Definition: scopes.h:324
bool IsExternalArrayElementsKind(ElementsKind kind)
Definition: elements-kind.h:94
virtual void ReturnInstruction(HInstruction *instr, BailoutId ast_id)
Definition: hydrogen.cc:3088
bool Contains(const T &elm) const
Definition: list-inl.h:178
void DeadCodeElimination()
Definition: hydrogen.cc:3791
void set_entry(HEnterInlined *entry)
Definition: hydrogen.h:765
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 instructions(ARM only)") DEFINE_bool(enable_vfp2
bool calls_eval() const
Definition: scopes.h:297
virtual void ReturnControl(HControlInstruction *instr, BailoutId ast_id)
Definition: hydrogen.cc:3044
HValueMap * Copy(Zone *zone) const
Definition: hydrogen.h:1278
void PrintF(const char *format,...)
Definition: v8utils.cc:40
HBasicBlock * loop_header() const
Definition: hydrogen.h:224
virtual void ReturnControl(HControlInstruction *instr, BailoutId ast_id)
Definition: hydrogen.cc:3103
void set_block_id(int id)
Definition: hydrogen.h:62
virtual HValue * Canonicalize()
static int64_t Ticks()
bool IsEffect() const
Definition: hydrogen.h:626
virtual void SetDehoisted(bool is_dehoisted)=0
static uint32_t encode(boolvalue)
Definition: utils.h:262
HInstruction * previous() const
static Smi * FromInt(int value)
Definition: objects-inl.h:981
static const char * Name(Value tok)
Definition: token.h:196
Representation from() const
static bool MakeCode(CompilationInfo *info)
HEnvironment * CopyForInlining(Handle< JSFunction > target, int arguments, FunctionLiteral *function, HConstant *undefined, CallKind call_kind, InliningKind inlining_kind) const
Definition: hydrogen.cc:9573
static const int kEnumCacheBridgeIndicesCacheIndex
Definition: objects.h:2628
#define TRACE_GVN_4(msg, a1, a2, a3, a4)
Definition: hydrogen.cc:1407
void Delete(BoundsCheckKey *key)
Definition: hydrogen.cc:3613
LiveRange * parent() const
HValue * LookupContext() const
Definition: hydrogen.h:491
InliningKind inlining_kind() const
Definition: hydrogen.h:754
bool Dominates(HBasicBlock *other) const
Definition: hydrogen.cc:222
HBasicBlock * block() const
static Handle< T > cast(Handle< S > that)
Definition: handles.h:81
void set_use_optimistic_licm(bool value)
Definition: hydrogen.h:349
static Representation Integer32()
void Insert(BoundsCheckKey *key, BoundsCheckBbData *data, Zone *zone)
Definition: hydrogen.cc:3609
static bool Analyze(CompilationInfo *info)
Definition: scopes.cc:274
HConstant * GetConstant1()
Definition: hydrogen.cc:594
FunctionState(HGraphBuilder *owner, CompilationInfo *info, TypeFeedbackOracle *oracle, InliningKind inlining_kind)
Definition: hydrogen.cc:2934
bool HasIllegalRedeclaration() const
Definition: scopes.h:214
Expression * condition() const
Definition: hydrogen.h:726
void SetArgumentsObject(HArgumentsObject *object)
Definition: hydrogen.h:299
void Push(HValue *value)
Definition: hydrogen.h:885
BailoutId id() const
Definition: ast.h:352
virtual void ReturnInstruction(HInstruction *instr, BailoutId ast_id)=0
static HCheckInstanceType * NewIsString(HValue *value, Zone *zone)
void Finish(HControlInstruction *last)
Definition: hydrogen.cc:158
HInstruction * first() const
Definition: hydrogen.h:65
static TypeInfo Unknown()
Definition: type-info.h:59
HValueMap(Zone *zone)
Definition: hydrogen.h:1257
int int32_t
Definition: unicode.cc:47
#define DECLARE_FLAG(type)
HEnvironment * arguments_environment()
Definition: hydrogen.h:438
virtual void ReturnInstruction(HInstruction *instr, BailoutId ast_id)
Definition: hydrogen.cc:3037
Location location() const
Definition: variables.h:146
HValue * Lookup(Variable *variable) const
Definition: hydrogen.h:481
void AssumeRepresentation(Representation r)
void MarkAsLoopSuccessorDominator()
Definition: hydrogen.h:155
void Bind(Variable *variable, HValue *value)
Definition: hydrogen.h:471
virtual void ReturnValue(HValue *value)
Definition: hydrogen.cc:3032
HGraphBuilder * owner() const
Definition: hydrogen.h:654
void FinishExitWithDeoptimization(HDeoptimize::UseEnvironment has_uses)
Definition: hydrogen.h:132
EqualityKind
Definition: v8.h:145
static LifetimePosition FromInstructionIndex(int index)
HBasicBlock * dominator() const
Definition: hydrogen.h:82
void SetExpressionStackAt(int index_from_top, HValue *value)
Definition: hydrogen.cc:9511
virtual void ReturnControl(HControlInstruction *instr, BailoutId ast_id)=0
FrameType frame_type() const
Definition: hydrogen.h:448
bool CheckArgumentsPhiUses()
Definition: hydrogen.cc:1155
Zone * zone() const
Definition: hydrogen.h:563
HRangeAnalysis(HGraph *graph)
Definition: hydrogen.cc:1220
static GVNFlagSet ConvertChangesToDependsFlags(GVNFlagSet flags)
static GvnBasicBlockState * CreateEntry(Zone *zone, HBasicBlock *entry_block, HValueMap *entry_map)
Definition: hydrogen.cc:2032
int PredecessorIndexOf(HBasicBlock *predecessor) const
Definition: hydrogen.cc:381
static HCheckMaps * NewWithTransitions(HValue *object, Handle< Map > map, Zone *zone)
BoundsCheckBbData(BoundsCheckKey *key, int32_t lower_offset, int32_t upper_offset, HBasicBlock *bb, HBoundsCheck *lower_check, HBoundsCheck *upper_check, BoundsCheckBbData *next_in_bb, BoundsCheckBbData *father_in_dt)
Definition: hydrogen.cc:3526
List< Handle< Map > > MapHandleList
Definition: list.h:198
#define ASSERT(condition)
Definition: checks.h:270
BoundsCheckBbData * NextInBasicBlock() const
Definition: hydrogen.cc:3458
void SaveTiming(const char *name, int64_t ticks, unsigned size)
Definition: hydrogen.cc:9917
void InsertRepresentationChanges()
Definition: hydrogen.cc:2608
HBasicBlock * current_block() const
Definition: hydrogen.h:872
HConstant * GetConstantTrue()
Definition: hydrogen.cc:604
static HInstruction * NewHMod(Zone *zone, HValue *context, HValue *left, HValue *right)
FunctionState * outer()
Definition: hydrogen.h:762
const ZoneList< HBasicBlock * > * predecessors() const
Definition: hydrogen.h:71
HEnvironment * Copy() const
Definition: hydrogen.cc:9533
static bool IsCompileTimeValue(Expression *expression)
Definition: parser.cc:3830
void RegisterBackEdge(HBasicBlock *block)
Definition: hydrogen.cc:406
static HInstruction * NewHMul(Zone *zone, HValue *context, HValue *left, HValue *right)
static SharedFunctionInfo * cast(Object *obj)
HControlInstruction * end() const
Definition: hydrogen.h:69
virtual HValue * OperandAt(int index) const
bool HasPredecessor() const
Definition: hydrogen.h:72
TypeInfo IncrementType(CountOperation *expr)
Definition: type-info.cc:475
static Representation Double()
HEnvironment * CopyAsLoopHeader(HBasicBlock *block) const
Definition: hydrogen.cc:9545
virtual void ReturnValue(HValue *value)
Definition: hydrogen.cc:3017
FunctionState * function_state() const
Definition: hydrogen.h:896
bool has_osr_loop_entry()
Definition: hydrogen.h:320
int GetMaximumValueID() const
Definition: hydrogen.h:303
bool Equals(const Representation &other)
bool ContainsAnyOf(const EnumSet &set) const
Definition: utils.h:970
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 enable use of VFP2 instructions if available enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of MIPS FPU instructions if expose natives in global object expose gc extension number of stack frames to capture disable builtin natives files print a stack trace if an assertion failure occurs use random jit cookie to mask large constants trace lazy optimization use adaptive optimizations prepare for turning on always opt minimum length for automatic enable preparsing maximum number of optimization attempts before giving up cache prototype transitions automatically set the debug break flag when debugger commands are in the queue always cause a debug break before aborting maximum length of function source code printed in a stack trace max size of the new max size of the old max size of executable always perform global GCs print one trace line following each garbage collection do not print trace line after scavenger collection print more details following each garbage collection print amount of external allocated memory after each time it is adjusted flush code that we expect not to use again before full gc do incremental marking steps track object counts and memory usage use caching Perform compaction on every full GC Never perform compaction on full GC testing only Compact code space on full incremental collections Default seed for initializing random allows verbose printing trace parsing and preparsing Check icache flushes in ARM and MIPS simulator Stack alingment in bytes in print stack trace when throwing exceptions randomize hashes to avoid predictable hash Fixed seed to use to hash property activate a timer that switches between V8 threads testing_bool_flag float flag Seed used for threading test randomness A filename with extra code to be included in the Print usage including flags
int parameter_count() const
Definition: hydrogen.h:449
Factory * factory()
Definition: isolate.h:992
bool IsFastElementsKind(ElementsKind kind)
Representation representation() const
ZoneList< HUnknownOSRValue * > * osr_values()
Definition: hydrogen.h:332
Handle< String > name() const
Definition: variables.h:96
static HStatistics * Instance()
Definition: hydrogen.h:1344
TypeFeedbackOracle * oracle() const
Definition: hydrogen.h:894
void AssignCommonDominator(HBasicBlock *other)
Definition: hydrogen.cc:298
static Smi * cast(Object *object)
HEnvironment * environment() const
Definition: hydrogen.h:874
HEnvironment * last_environment() const
Definition: hydrogen.h:83
static const char * AllocationIndexToString(int index)
void DehoistSimpleArrayIndexComputations()
Definition: hydrogen.cc:3751
static Handle< ScopeInfo > Create(Scope *scope, Zone *zone)
Definition: scopeinfo.cc:41
const ZoneList< HValue * > * values() const
Definition: hydrogen.h:444
static bool Parse(CompilationInfo *info, int flags)
Definition: parser.cc:5944
int last_instruction_index() const
Definition: hydrogen.h:90
void TraceLithium(const char *name, LChunk *chunk)
Definition: hydrogen.cc:9672
Handle< String > SubString(Handle< String > str, int start, int end, PretenureFlag pretenure)
Definition: handles.cc:326
static HCheckInstanceType * NewIsJSArray(HValue *value, Zone *zone)
AstContext(HGraphBuilder *owner, Expression::Context kind)
Definition: hydrogen.cc:2983
int ContextChainLength(Scope *scope)
Definition: scopes.cc:735
void Add(Vector< const char > format, Vector< FmtElm > elms)
HConstant * GetConstantFalse()
Definition: hydrogen.cc:609
bool Covers(LifetimePosition position)
virtual BailoutId ContinueId() const =0
LOperand * CreateAssignedOperand(Zone *zone)
static HCheckInstanceType * NewIsSpecObject(HValue *value, Zone *zone)
void Initialize(CompilationInfo *info)
Definition: hydrogen.cc:9878
#define IN
BoundsCheckKey * Key() const
Definition: hydrogen.cc:3452
void RemovePhi(HPhi *phi)
Definition: hydrogen.cc:96
int virtual_register() const
Definition: lithium.h:184
void VisitDeclarations(ZoneList< Declaration * > *declarations)
Definition: hydrogen.cc:8760
bool IsEmpty() const
Definition: data-flow.h:176
virtual int OperandCount()=0
Variable * var() const
Definition: ast.h:1411
#define UNREACHABLE()
Definition: checks.h:50
bool ExpressionStackIsEmpty() const
Definition: hydrogen.cc:9505
VariableProxy * proxy() const
Definition: ast.h:443
bool Equals(HValue *other)
STATIC_ASSERT((FixedDoubleArray::kHeaderSize &kDoubleAlignmentMask)==0)
BailoutId osr_ast_id() const
Definition: compiler.h:76
void set_osr_loop_entry(HBasicBlock *entry)
Definition: hydrogen.h:328
Zone * zone() const
Definition: hydrogen.h:248
BoundsCheckBbData ** LookupOrInsert(BoundsCheckKey *key, Zone *zone)
Definition: hydrogen.cc:3604
void set_arguments_elements(HArgumentsElements *arguments_elements)
Definition: hydrogen.h:768
HBasicBlock * GetLastBackEdge() const
Definition: hydrogen.cc:412
int32_t LowerOffset() const
Definition: hydrogen.cc:3453
const ZoneList< HBasicBlock * > * dominated_blocks() const
Definition: hydrogen.h:73
EnumSet< GVNFlag > GVNFlagSet
int first_expression_index() const
Definition: hydrogen.h:467
bool IsFastPackedElementsKind(ElementsKind kind)
HConstant * GetConstantUndefined() const
Definition: hydrogen.h:287
Variable * arguments() const
Definition: scopes.h:339
int num_stack_slots() const
Definition: scopes.h:366
void AddIncomingEdge(HBasicBlock *block, HEnvironment *other)
Definition: hydrogen.cc:9461
HBasicBlock(HGraph *graph)
Definition: hydrogen.cc:55
static HUnaryOperation * cast(HValue *value)
bool HasRegisterAssigned() const
#define TRACE_GVN_1(msg, a1)
Definition: hydrogen.cc:1392
static bool IsValidElementsTransition(ElementsKind from_kind, ElementsKind to_kind)
Definition: objects.cc:10148
NilValue
Definition: v8.h:141
static bool IsValid(intptr_t value)
Definition: objects-inl.h:1059
static BailoutId Declarations()
Definition: utils.h:1016
void PostProcessLoopHeader(IterationStatement *stmt)
Definition: hydrogen.cc:243
const int kPointerSize
Definition: globals.h:220
bool HasEnvironment() const
Definition: hydrogen.h:110
void set_current_block(HBasicBlock *block)
Definition: hydrogen.h:873
T * NewArray(size_t size)
Definition: allocation.h:83
int32_t UpperOffset() const
Definition: hydrogen.cc:3454
void UpdateEnvironment(HEnvironment *env)
Definition: hydrogen.h:111
BreakAndContinueScope * break_scope() const
Definition: hydrogen.h:869
static Value NegateCompareOp(Value op)
Definition: token.h:226
#define GVN_TRACKED_FLAG_LIST(V)
HArgumentsElements * arguments_elements()
Definition: hydrogen.h:767
void EliminateUnreachablePhis()
Definition: hydrogen.cc:1111
void set_undefined_constant(HConstant *constant)
Definition: hydrogen.h:284
void RecordUint32Instruction(HInstruction *instr)
Definition: hydrogen.h:361
static HTracer * Instance()
Definition: hydrogen.h:1415
static HInstruction * NewHSub(Zone *zone, HValue *context, HValue *left, HValue *right)
bool IsMoreGeneralElementsKindTransition(ElementsKind from_kind, ElementsKind to_kind)
GvnBasicBlockState * next_in_dominator_tree_traversal(Zone *zone, HBasicBlock **dominator)
Definition: hydrogen.cc:2043
void AddInput(HValue *value)
UseInterval * first_interval() const
CompilationInfo * info() const
Definition: hydrogen.h:249
Entry * Lookup(void *key, uint32_t hash, bool insert, ZoneAllocationPolicyallocator=ZoneAllocationPolicy())
static unsigned allocation_size_
Definition: zone.h:91
HValue * ExpressionStackAt(int index_from_top) const
Definition: hydrogen.h:518
HEnvironment(HEnvironment *outer, Scope *scope, Handle< JSFunction > closure, Zone *zone)
Definition: hydrogen.cc:9375
static void VPrint(const char *format, va_list args)
SparseSet(Zone *zone, int capacity)
Definition: hydrogen.cc:1671
static TestContext * cast(AstContext *context)
Definition: hydrogen.h:721
int index() const
Definition: variables.h:147
bool IsLexicalVariableMode(VariableMode mode)
Definition: v8globals.h:521
void PropagateDeoptimizingMark()
Definition: hydrogen.cc:1064
int length() const
Definition: utils.h:384
HInstruction * next() const
HGlobalValueNumberer(HGraph *graph, CompilationInfo *info)
Definition: hydrogen.cc:1710
int local_count() const
Definition: hydrogen.h:451
static double TimeCurrentMillis()
HBasicBlock * osr_loop_entry()
Definition: hydrogen.h:324
static GVNFlag DependsOnFlagFromInt(int x)
int num_parameters() const
Definition: scopes.h:336
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 enable use of VFP2 instructions if available enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of MIPS FPU instructions if expose natives in global object expose gc extension number of stack frames to capture disable builtin natives files print a stack trace if an assertion failure occurs use random jit cookie to mask large constants trace lazy optimization use adaptive optimizations prepare for turning on always opt minimum length for automatic enable preparsing maximum number of optimization attempts before giving up cache prototype transitions automatically set the debug break flag when debugger commands are in the queue always cause a debug break before aborting maximum length of function source code printed in a stack trace max size of the new max size of the old max size of executable always perform global GCs print one trace line following each garbage collection do not print trace line after scavenger collection print more details following each garbage collection print amount of external allocated memory after each time it is adjusted flush code that we expect not to use again before full gc do incremental marking steps track object counts and memory usage use caching Perform compaction on every full GC Never perform compaction on full GC testing only Compact code space on full incremental collections Default seed for initializing random generator(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer
virtual void ReturnInstruction(HInstruction *instr, BailoutId ast_id)
Definition: hydrogen.cc:3057
HConstant * GetConstantMinus1()
Definition: hydrogen.cc:599
Definition: v8.h:105
static int SizeFor(int length)
Definition: objects.h:2434
static HInstruction * NewHShr(Zone *zone, HValue *context, HValue *left, HValue *right)
static HInstruction * NewHBitwise(Zone *zone, Token::Value op, HValue *context, HValue *left, HValue *right)
static BailoutId FunctionEntry()
Definition: utils.h:1015
static Value InvertCompareOp(Value op)
Definition: token.h:241
bool Contains(int n) const
Definition: hydrogen.cc:1682
HInstruction * last() const
Definition: hydrogen.h:66
TypeInfo CompareType(CompareOperation *expr)
Definition: type-info.cc:315
static bool IsEqualityOp(Value op)
Definition: token.h:222
SmartArrayPointer< char > GetGVNFlagsString(GVNFlagSet flags)
Definition: hydrogen.cc:1816
#define BASE_EMBEDDED
Definition: allocation.h:68
void MarkDeoptimizeOnUndefined()
Definition: hydrogen.cc:2664
virtual void ReturnControl(HControlInstruction *instr, BailoutId ast_id)
Definition: hydrogen.cc:3068
void ComputeMinusZeroChecks()
Definition: hydrogen.cc:2908
static ScopeInfo * Empty()
Definition: scopeinfo.cc:152
HStackCheckEliminator(HGraph *graph)
Definition: hydrogen.cc:1628
void AddSimulate(BailoutId ast_id)
Definition: hydrogen.h:128
LOperand * FirstHint() const
BoundsCheckBbData * FatherInDominatorTree() const
Definition: hydrogen.cc:3459
T * NewArray(int length)
Definition: zone-inl.h:72
void RecordDeletedPhi(int merge_index)
Definition: hydrogen.h:79
Handle< JSFunction > closure() const
Definition: compiler.h:70
virtual void PrintTo(StringStream *stream)
bool IsDeclaredVariableMode(VariableMode mode)
Definition: v8globals.h:516
activate correct semantics for inheriting readonliness false
Definition: flags.cc:141
void DeleteAndReplaceWith(HValue *other)
static BailoutId None()
Definition: utils.h:1014
Vector< const char > CStrVector(const char *data)
Definition: utils.h:526
static HInstruction * NewHDiv(Zone *zone, HValue *context, HValue *left, HValue *right)
void Drop(int count)
Definition: hydrogen.cc:9526
#define GVN_UNTRACKED_FLAG_LIST(V)
void SetJoinId(BailoutId ast_id)
Definition: hydrogen.cc:206
void AddEnvironmentValue(HValue *value, Zone *zone)
virtual intptr_t Hashcode()
HBasicBlock * CreateBasicBlock()
Definition: hydrogen.cc:709
bool IsInlineReturnTarget() const
Definition: hydrogen.h:146
static int SizeFor(int length)
Definition: objects.h:2353
TypeInfo BinaryType(BinaryOperation *expr)
Definition: type-info.cc:391
bool OffsetIsCovered(int32_t offset) const
Definition: hydrogen.cc:3461
void BindContext(HValue *value)
Definition: hydrogen.h:477
void PrintNameTo(StringStream *stream)
static BoundsCheckKey * Create(Zone *zone, HBoundsCheck *check, int32_t *offset)
Definition: hydrogen.cc:3385
const int kElementsKindCount
Definition: elements-kind.h:76
Definition: v8.h:1425
void InsertAfter(HInstruction *previous)
static const int kHeaderSize
Definition: objects.h:2296
int specials_count() const
Definition: hydrogen.h:450
void SetValueAt(int index, HValue *value)
Definition: hydrogen.h:555
static int SNPrintF(Vector< char > str, const char *format,...)
HLoopInformation * loop()
Definition: hydrogen.cc:781
HArgumentsObject * GetArgumentsObject() const
Definition: hydrogen.h:295
const ZoneList< HBasicBlock * > * blocks() const
Definition: hydrogen.h:251
virtual void SetSuccessorAt(int i, HBasicBlock *block)=0
int first_instruction_index() const
Definition: hydrogen.h:86
HBasicBlock * BasicBlock() const
Definition: hydrogen.cc:3455
Representation to() const
#define CHECK_ALIVE(call)
Definition: hydrogen.cc:3146
PostorderProcessor * PerformStep(Zone *zone, BitVector *visited, ZoneList< HBasicBlock * > *order)
Definition: hydrogen.cc:791
UsePosition * first_pos() const
int LoopNestingDepth() const
Definition: hydrogen.cc:232
HGraph * graph() const
Definition: lithium.h:663
const ZoneList< HPhi * > * phi_list() const
Definition: hydrogen.h:252
HBasicBlock * parent_loop_header() const
Definition: hydrogen.h:112
bool IsLoopSuccessorDominator() const
Definition: hydrogen.h:152
void InitializeInferredTypes()
Definition: hydrogen.cc:2466
HEnvironment * DiscardInlined(bool drop_extra)
Definition: hydrogen.h:540
void set_bailout_reason(const char *reason)
Definition: compiler.h:187
void SetOperandAt(int index, HValue *value)
void AddSimulate(BailoutId ast_id)
Definition: hydrogen.cc:3828
HEnterInlined * entry()
Definition: hydrogen.h:764
static PostorderProcessor * CreateEntryProcessor(Zone *zone, HBasicBlock *block, BitVector *visited)
Definition: hydrogen.cc:784
void FinishExit(HControlInstruction *instruction)
Definition: hydrogen.cc:682
ElementsKind GetInitialFastElementsKind()
bool CheckFlag(Flag f) const
FunctionSorter(int index, int ticks, int ast_length, int src_length)
Definition: hydrogen.cc:6734
T ToIntegral() const
Definition: utils.h:979
HGraph(CompilationInfo *info)
Definition: hydrogen.cc:688
Handle< SharedFunctionInfo > shared_info() const
Definition: compiler.h:71
int AppendChars(const char *filename, const char *str, int size, bool verbose)
Definition: v8utils.cc:214
static Handle< Object > CreateArrayLiteralBoilerplate(Isolate *isolate, Handle< FixedArray > literals, Handle< FixedArray > elements)
Definition: runtime.cc:461
HValue * IndexBase() const
Definition: hydrogen.cc:3378
void InsertBefore(HInstruction *next)
void Goto(HBasicBlock *block, FunctionState *state=NULL)
Definition: hydrogen.cc:168
HGraphBuilder(CompilationInfo *info, TypeFeedbackOracle *oracle)
Definition: hydrogen.cc:619
void EliminateRedundantPhis()
Definition: hydrogen.cc:1077
bool binding_needs_init() const
Definition: variables.h:125
HLoopInformation * loop_information() const
Definition: hydrogen.h:70
bool IsStartBlock() const
Definition: hydrogen.h:98
static Handle< T > null()
Definition: handles.h:86
void CoverCheck(HBoundsCheck *new_check, int32_t new_offset)
Definition: hydrogen.cc:3479
virtual void SetIndexOffset(uint32_t index_offset)=0
static HInstruction * NewHAdd(Zone *zone, HValue *context, HValue *left, HValue *right)
bool IsLoopHeader() const
Definition: hydrogen.h:97
#define ASSERT_EQ(v1, v2)
Definition: checks.h:271
virtual void ReturnValue(HValue *value)
Definition: hydrogen.cc:3022
void SetBlock(HBasicBlock *block)
void PrintTo(StringStream *stream)
Definition: hydrogen.cc:9634
static Handle< SharedFunctionInfo > BuildFunctionInfo(FunctionLiteral *node, Handle< Script > script)
Definition: compiler.cc:926
PostorderProcessor * child()
Definition: hydrogen.cc:779
void USE(T)
Definition: globals.h:289
void set_parent_loop_header(HBasicBlock *block)
Definition: hydrogen.h:114
#define TRACE_GVN_2(msg, a1, a2)
Definition: hydrogen.cc:1397
ContainedInLattice AddRange(ContainedInLattice containment, const int *ranges, int ranges_length, Interval new_range)
Definition: jsregexp.cc:111
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 enable use of VFP2 instructions if available enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of MIPS FPU instructions if NULL
HBasicBlock * CreateJoin(HBasicBlock *first, HBasicBlock *second, BailoutId join_id)
Definition: hydrogen.cc:637
#define ASSERT_NE(v1, v2)
Definition: checks.h:272
virtual HValue * OperandAt(int index) const =0
bool end_
HConstant * GetConstantHole()
Definition: hydrogen.cc:614
void AddPhi(HPhi *phi)
Definition: hydrogen.cc:89
CompilationInfo * compilation_info()
Definition: hydrogen.h:751
static const int kEnumCacheBridgeCacheIndex
Definition: objects.h:2627
const T & at(int index) const
Definition: utils.h:398
int update_type_change_checksum(int delta)
Definition: hydrogen.h:340
OptimizingCompilerThread * optimizing_compiler_thread()
Definition: isolate.h:1065
Zone * zone() const
Definition: hydrogen.h:1252
void ComputeSafeUint32Operations()
Definition: hydrogen.cc:2887
PostorderProcessor * parent()
Definition: hydrogen.cc:777
void Add(const T &element, AllocationPolicy allocator=AllocationPolicy())
void set_ast_id(BailoutId id)
Definition: hydrogen.h:457
HBasicBlock * if_false() const
Definition: hydrogen.h:729
static const char * AllocationIndexToString(int index)
Definition: assembler-arm.h:87
static bool HasCustomCallGenerator(Handle< JSFunction > function)
Definition: stub-cache.cc:1444
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 enable use of VFP2 instructions if available enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of MIPS FPU instructions if NULL
Definition: flags.cc:301
void EliminateRedundantBoundsChecks()
Definition: hydrogen.cc:3696
static GVNFlag ChangesFlagFromInt(int x)
static const int kMaxInliningLevels
Definition: compiler.h:419
bool IsSymbolCompare(CompareOperation *expr)
Definition: type-info.cc:345
void TraceHydrogen(const char *name, HGraph *graph)
Definition: hydrogen.cc:9677
void Bailout(const char *reason)
Definition: hydrogen.cc:3153
bool has_global_object() const
Definition: compiler.h:140
BailoutId EntryId() const
Definition: ast.h:386
void AddIndirectUsesTo(int *use_count)
char * StrDup(const char *str)
Definition: allocation.cc:85
const ZoneList< HPhi * > * phis() const
Definition: hydrogen.h:64
void TraceCompilation(FunctionLiteral *function)
Definition: hydrogen.cc:9663
void AssignLoopSuccessorDominators()
Definition: hydrogen.cc:325
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra code(assertions) for debugging") DEFINE_bool(code_comments
static const int kValueOffset
Definition: objects.h:6385
bool is_function_scope() const
Definition: scopes.h:276
int block_id() const
Definition: hydrogen.h:61
bool IsValue() const
Definition: hydrogen.h:627
void Push(HValue *value)
Definition: hydrogen.h:496
static Representation Tagged()
Handle< JSFunction > closure() const
Definition: hydrogen.h:443
SmartArrayPointer< const char > ToCString() const
static HCheckInstanceType * NewIsSymbol(HValue *value, Zone *zone)
T Min(T a, T b)
Definition: utils.h:229
HValue * Length() const
Definition: hydrogen.cc:3379
#define INLINE_RUNTIME_FUNCTION_LIST(F)
Definition: runtime.h:541
virtual void SetKey(HValue *key)=0
VariableMode mode() const
Definition: ast.h:444
Zone * zone() const
Definition: hydrogen.h:418
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 enable use of VFP2 instructions if available enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of MIPS FPU instructions if expose natives in global object expose gc extension number of stack frames to capture disable builtin natives files print a stack trace if an assertion failure occurs use random jit cookie to mask large constants trace lazy optimization use adaptive optimizations prepare for turning on always opt minimum length for automatic enable preparsing maximum number of optimization attempts before giving up cache prototype transitions automatically set the debug break flag when debugger commands are in the queue always cause a debug break before aborting maximum length of function source code printed in a stack trace max size of the new max size of the old max size of executable always perform global GCs print one trace line following each garbage collection do not print trace line after scavenger collection print more details following each garbage collection print amount of external allocated memory after each time it is adjusted flush code that we expect not to use again before full gc do incremental marking steps track object counts and memory usage use caching Perform compaction on every full GC Never perform compaction on full GC testing only Compact code space on full incremental collections Default seed for initializing random allows verbose printing trace parsing and preparsing Check icache flushes in ARM and MIPS simulator Stack alingment in bytes in print stack trace when throwing exceptions randomize hashes to avoid predictable hash Fixed seed to use to hash property activate a timer that switches between V8 threads testing_bool_flag float flag Seed used for threading test randomness A filename with extra code to be included in the Print usage including flags
Definition: flags.cc:495
void Add(E element)
Definition: utils.h:973
static void RecordFunctionCompilation(Logger::LogEventsAndTags tag, CompilationInfo *info, Handle< SharedFunctionInfo > shared)
Definition: compiler.cc:1017
static HInstruction * NewHSar(Zone *zone, HValue *context, HValue *left, HValue *right)
static HInstruction * NewHShl(Zone *zone, HValue *context, HValue *left, HValue *right)
#define VOID
static HValue * cast(HValue *value)
void check(i::Vector< const char > string)
HSideEffectMap * dominators()
Definition: hydrogen.cc:2041
TypeFeedbackOracle * oracle() const
Definition: hydrogen.h:727
#define CHECK_BAILOUT(call)
Definition: hydrogen.cc:3139
#define ARRAY_SIZE(a)
Definition: globals.h:281
virtual void ReturnValue(HValue *value)=0
TypeInfo UnaryType(UnaryOperation *expr)
Definition: type-info.cc:372
HBoundsCheck * UpperCheck() const
Definition: hydrogen.cc:3457
HBoundsCheck * LowerCheck() const
Definition: hydrogen.cc:3456
void TraceLiveRanges(const char *name, LAllocator *allocator)
Definition: hydrogen.cc:9794
void set_osr_values(ZoneList< HUnknownOSRValue * > *values)
Definition: hydrogen.h:336
LOperand * GetSpillOperand() const
HBasicBlock * entry_block() const
Definition: hydrogen.h:253
static JSObject * cast(Object *obj)
bool Optimize(SmartArrayPointer< char > *bailout_reason)
Definition: hydrogen.cc:3287
HInferRepresentation(HGraph *graph)
Definition: hydrogen.cc:2244
void AddInstruction(HInstruction *instr)
Definition: hydrogen.cc:106
ZoneList< Declaration * > * declarations()
Definition: scopes.h:342
static char * StrChr(char *str, int c)
void AddAll(const List< T, AllocationPolicy > &other, AllocationPolicy allocator=AllocationPolicy())
void TraceGVN(const char *msg,...)
Definition: hydrogen.cc:1383
HGraph * graph() const
Definition: hydrogen.h:868
HInstruction * AddInstruction(HInstruction *instr)
Definition: hydrogen.cc:3821
HBasicBlock * function_return()
Definition: hydrogen.h:755
bool IsFastDoubleElementsKind(ElementsKind kind)
const ZoneList< HBasicBlock * > * blocks() const
Definition: hydrogen.h:223
const ZoneList< LInstruction * > * instructions() const
Definition: lithium.h:664
HUseIterator uses() const
#define TRACE_GVN_5(msg, a1, a2, a3, a4, a5)
Definition: hydrogen.cc:1412
void AddLeaveInlined(HValue *return_value, FunctionState *state)
Definition: hydrogen.cc:183
friend class FunctionState
Definition: hydrogen.h:1245
HEnvironment * CopyWithoutHistory() const
Definition: hydrogen.cc:9538
Scope * scope() const
Definition: compiler.h:67
kPropertyAccessorsOffset kNamedPropertyHandlerOffset kInstanceTemplateOffset kAccessCheckInfoOffset kEvalFrominstructionsOffsetOffset kInstanceClassNameOffset flag
Definition: objects-inl.h:3923
static JSFunction * cast(Object *obj)