v8 3.25.30 (node 0.11.13)
V8 is Google's open source JavaScript engine
 All Data Structures Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
hydrogen.cc
Go to the documentation of this file.
1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "hydrogen.h"
29 
30 #include <algorithm>
31 
32 #include "v8.h"
33 #include "allocation-site-scopes.h"
34 #include "codegen.h"
35 #include "full-codegen.h"
36 #include "hashmap.h"
37 #include "hydrogen-bce.h"
38 #include "hydrogen-bch.h"
39 #include "hydrogen-canonicalize.h"
41 #include "hydrogen-dce.h"
42 #include "hydrogen-dehoist.h"
46 #include "hydrogen-infer-types.h"
48 #include "hydrogen-gvn.h"
51 #include "hydrogen-osr.h"
53 #include "hydrogen-redundant-phi.h"
56 #include "hydrogen-sce.h"
59 #include "lithium-allocator.h"
60 #include "parser.h"
61 #include "runtime.h"
62 #include "scopeinfo.h"
63 #include "scopes.h"
64 #include "stub-cache.h"
65 #include "typing.h"
66 
67 #if V8_TARGET_ARCH_IA32
69 #elif V8_TARGET_ARCH_X64
71 #elif V8_TARGET_ARCH_ARM64
73 #elif V8_TARGET_ARCH_ARM
75 #elif V8_TARGET_ARCH_MIPS
77 #else
78 #error Unsupported target architecture.
79 #endif
80 
81 namespace v8 {
82 namespace internal {
83 
// Constructs an empty basic block owned by |graph|. Pointer members start
// out NULL, instruction indices at -1 ("not yet assigned"), and the phi /
// predecessor / dominated-block lists are zone-allocated with small initial
// capacities. The block id is taken from the graph's running counter.
HBasicBlock::HBasicBlock(HGraph* graph)
    : block_id_(graph->GetNextBlockID()),
      graph_(graph),
      phis_(4, graph->zone()),
      first_(NULL),
      last_(NULL),
      end_(NULL),
      loop_information_(NULL),
      predecessors_(2, graph->zone()),
      dominator_(NULL),
      dominated_blocks_(4, graph->zone()),
      last_environment_(NULL),
      argument_count_(-1),
      first_instruction_index_(-1),
      last_instruction_index_(-1),
      deleted_phis_(4, graph->zone()),
      parent_loop_header_(NULL),
      inlined_entry_block_(NULL),
      is_inline_return_target_(false),
      is_reachable_(true),
      dominates_loop_successors_(false),
      is_osr_entry_(false) { }
106 
107 
// The isolate is owned by the enclosing graph; blocks simply delegate.
Isolate* HBasicBlock::isolate() const {
  return graph_->isolate();
}
111 
112 
// Flags this block as unreachable; later passes consult is_reachable().
void HBasicBlock::MarkUnreachable() {
  is_reachable_ = false;
}
116 
117 
// Turns this block into a loop header by giving it loop information.
// Must not already be a loop header.
void HBasicBlock::AttachLoopInformation() {
  ASSERT(!IsLoopHeader());
  loop_information_ = new(zone()) HLoopInformation(this, zone());
}
122 
123 
// Demotes this loop header back to an ordinary block (used for degenerate
// loops with no back edges). The loop information is zone-allocated, so it
// is simply dropped, not freed.
void HBasicBlock::DetachLoopInformation() {
  ASSERT(IsLoopHeader());
  loop_information_ = NULL;
}
128 
129 
// Registers |phi| with this block and points the phi back at the block.
// The start block never holds phis.
void HBasicBlock::AddPhi(HPhi* phi) {
  ASSERT(!IsStartBlock());
  phis_.Add(phi, zone());
  phi->SetBlock(this);
}
135 
136 
// Kills |phi| (severing its uses) and unlinks it from this block.
void HBasicBlock::RemovePhi(HPhi* phi) {
  ASSERT(phi->block() == this);
  ASSERT(phis_.Contains(phi));
  phi->Kill();
  phis_.RemoveElement(phi);
  phi->SetBlock(NULL);
}
144 
145 
// Appends |instr| to this (unfinished) block. On the very first append a
// synthetic HBlockEntry is created so every block begins with a block-entry
// node; |position| is attached to both when known.
void HBasicBlock::AddInstruction(HInstruction* instr,
                                 HSourcePosition position) {
  ASSERT(!IsStartBlock() || !IsFinished());
  ASSERT(!instr->IsLinked());
  ASSERT(!IsFinished());

  if (!position.IsUnknown()) {
    instr->set_position(position);
  }
  if (first_ == NULL) {
    // Lazily create the block entry; requires a valid environment.
    ASSERT(last_environment() != NULL);
    ASSERT(!last_environment()->ast_id().IsNone());
    HBlockEntry* entry = new(zone()) HBlockEntry();
    entry->InitializeAsFirst(this);
    if (!position.IsUnknown()) {
      entry->set_position(position);
    } else {
      // Unknown positions are only tolerated when position tracking is off
      // or we are not optimizing.
      ASSERT(!FLAG_hydrogen_track_positions ||
             !graph()->info()->IsOptimizing());
    }
    first_ = last_ = entry;
  }
  instr->InsertAfter(last_);
}
170 
171 
172 HPhi* HBasicBlock::AddNewPhi(int merged_index) {
173  if (graph()->IsInsideNoSideEffectsScope()) {
174  merged_index = HPhi::kInvalidMergedIndex;
175  }
176  HPhi* phi = new(zone()) HPhi(merged_index, zone());
177  AddPhi(phi);
178  return phi;
179 }
180 
181 
// Builds an HSimulate snapshotting the current environment (pushed values
// and assigned variables) for deoptimization bookkeeping, then clears the
// environment's change history. The caller is responsible for inserting it.
HSimulate* HBasicBlock::CreateSimulate(BailoutId ast_id,
                                       RemovableSimulate removable) {
  ASSERT(HasEnvironment());
  HEnvironment* environment = last_environment();
  ASSERT(ast_id.IsNone() ||
         ast_id == BailoutId::StubEntry() ||
         environment->closure()->shared()->VerifyBailoutId(ast_id));

  int push_count = environment->push_count();
  int pop_count = environment->pop_count();

  HSimulate* instr =
      new(zone()) HSimulate(ast_id, pop_count, zone(), removable);
#ifdef DEBUG
  instr->set_closure(environment->closure());
#endif
  // Order of pushed values: newest (top of stack) first. This allows
  // HSimulate::MergeWith() to easily append additional pushed values
  // that are older (from further down the stack).
  for (int i = 0; i < push_count; ++i) {
    instr->AddPushedValue(environment->ExpressionStackAt(i));
  }
  // Record every variable the environment has tracked as assigned since the
  // last history clear.
  for (GrowableBitVector::Iterator it(environment->assigned_variables(),
                                      zone());
       !it.Done();
       it.Advance()) {
    int index = it.Current();
    instr->AddAssignedValue(index, environment->Lookup(index));
  }
  environment->ClearHistory();
  return instr;
}
214 
215 
// Terminates the block with control instruction |end| and registers this
// block as a predecessor of each of |end|'s successors.
void HBasicBlock::Finish(HControlInstruction* end, HSourcePosition position) {
  ASSERT(!IsFinished());
  AddInstruction(end, position);
  end_ = end;
  for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
    it.Current()->RegisterPredecessor(this);
  }
}
224 
225 
// Ends this block with an unconditional jump to |block|. When the target is
// an inline return target, the inlined frame is first popped from the
// environment (emitting HLeaveInlined). A simulate is optionally inserted
// before the goto.
void HBasicBlock::Goto(HBasicBlock* block,
                       HSourcePosition position,
                       FunctionState* state,
                       bool add_simulate) {
  bool drop_extra = state != NULL &&
      state->inlining_kind() == NORMAL_RETURN;

  if (block->IsInlineReturnTarget()) {
    HEnvironment* env = last_environment();
    int argument_count = env->arguments_environment()->parameter_count();
    AddInstruction(new(zone())
                   HLeaveInlined(state->entry(), argument_count),
                   position);
    UpdateEnvironment(last_environment()->DiscardInlined(drop_extra));
  }

  if (add_simulate) AddNewSimulate(BailoutId::None(), position);
  HGoto* instr = new(zone()) HGoto(block);
  Finish(instr, position);
}
246 
247 
// Leaves an inlined function: pops the inlined frame off the environment,
// pushes |return_value| in the caller's environment, and jumps to the
// function's return-target block.
void HBasicBlock::AddLeaveInlined(HValue* return_value,
                                  FunctionState* state,
                                  HSourcePosition position) {
  HBasicBlock* target = state->function_return();
  bool drop_extra = state->inlining_kind() == NORMAL_RETURN;

  ASSERT(target->IsInlineReturnTarget());
  ASSERT(return_value != NULL);
  HEnvironment* env = last_environment();
  int argument_count = env->arguments_environment()->parameter_count();
  AddInstruction(new(zone()) HLeaveInlined(state->entry(), argument_count),
                 position);
  UpdateEnvironment(last_environment()->DiscardInlined(drop_extra));
  last_environment()->Push(return_value);
  AddNewSimulate(BailoutId::None(), position);
  HGoto* instr = new(zone()) HGoto(target);
  Finish(instr, position);
}
266 
267 
// Installs the first environment of a block that has no instructions yet.
void HBasicBlock::SetInitialEnvironment(HEnvironment* env) {
  ASSERT(!HasEnvironment());
  ASSERT(first() == NULL);
  UpdateEnvironment(env);
}
273 
274 
// Replaces the block's environment and lets the graph track the largest
// environment seen (used for frame sizing).
void HBasicBlock::UpdateEnvironment(HEnvironment* env) {
  last_environment_ = env;
  graph()->update_maximum_environment_size(env->first_expression_index());
}
279 
280 
// Stamps |ast_id| on the simulate preceding each predecessor's goto and on
// each predecessor's environment, so all paths joining here agree on the
// AST id used for deoptimization.
void HBasicBlock::SetJoinId(BailoutId ast_id) {
  int length = predecessors_.length();
  ASSERT(length > 0);
  for (int i = 0; i < length; i++) {
    HBasicBlock* predecessor = predecessors_[i];
    ASSERT(predecessor->end()->IsGoto());
    // The instruction right before the goto is expected to be the simulate.
    HSimulate* simulate = HSimulate::cast(predecessor->end()->previous());
    ASSERT(i != 0 ||
           (predecessor->last_environment()->closure().is_null() ||
            predecessor->last_environment()->closure()->shared()
                ->VerifyBailoutId(ast_id)));
    simulate->set_ast_id(ast_id);
    predecessor->last_environment()->set_ast_id(ast_id);
  }
}
296 
297 
298 bool HBasicBlock::Dominates(HBasicBlock* other) const {
299  HBasicBlock* current = other->dominator();
300  while (current != NULL) {
301  if (current == this) return true;
302  current = current->dominator();
303  }
304  return false;
305 }
306 
307 
308 bool HBasicBlock::EqualToOrDominates(HBasicBlock* other) const {
309  if (this == other) return true;
310  return Dominates(other);
311 }
312 
313 
314 int HBasicBlock::LoopNestingDepth() const {
315  const HBasicBlock* current = this;
316  int result = (current->IsLoopHeader()) ? 1 : 0;
317  while (current->parent_loop_header() != NULL) {
318  current = current->parent_loop_header();
319  result++;
320  }
321  return result;
322 }
323 
324 
// Finalizes a loop header after graph construction: sets the join AST id
// from the loop statement and registers all back edges. A header with a
// single predecessor has no back edges and is demoted to a normal block.
void HBasicBlock::PostProcessLoopHeader(IterationStatement* stmt) {
  ASSERT(IsLoopHeader());

  SetJoinId(stmt->EntryId());
  if (predecessors()->length() == 1) {
    // This is a degenerated loop.
    DetachLoopInformation();
    return;
  }

  // Only the first entry into the loop is from outside the loop. All other
  // entries must be back edges.
  for (int i = 1; i < predecessors()->length(); ++i) {
    loop_information()->RegisterBackEdge(predecessors()->at(i));
  }
}
341 
342 
// Marks successor |succ| unreachable. Valid only when that successor has no
// other way in (exactly one predecessor: this block).
void HBasicBlock::MarkSuccEdgeUnreachable(int succ) {
  ASSERT(IsFinished());
  HBasicBlock* succ_block = end()->SuccessorAt(succ);

  ASSERT(succ_block->predecessors()->length() == 1);
  succ_block->MarkUnreachable();
}
350 
351 
// Records |pred| as an incoming edge. For a loop header the predecessor's
// environment values become phi inputs; for an ordinary join the
// environments are merged; for the first edge the environment is copied in.
void HBasicBlock::RegisterPredecessor(HBasicBlock* pred) {
  if (HasPredecessor()) {
    // Only loop header blocks can have a predecessor added after
    // instructions have been added to the block (they have phis for all
    // values in the environment, these phis may be eliminated later).
    ASSERT(IsLoopHeader() || first_ == NULL);
    HEnvironment* incoming_env = pred->last_environment();
    if (IsLoopHeader()) {
      ASSERT(phis()->length() == incoming_env->length());
      for (int i = 0; i < phis_.length(); ++i) {
        phis_[i]->AddInput(incoming_env->values()->at(i));
      }
    } else {
      last_environment()->AddIncomingEdge(this, pred->last_environment());
    }
  } else if (!HasEnvironment() && !IsFinished()) {
    ASSERT(!IsLoopHeader());
    SetInitialEnvironment(pred->last_environment()->Copy());
  }

  predecessors_.Add(pred, zone());
}
374 
375 
376 void HBasicBlock::AddDominatedBlock(HBasicBlock* block) {
377  ASSERT(!dominated_blocks_.Contains(block));
378  // Keep the list of dominated blocks sorted such that if there is two
379  // succeeding block in this list, the predecessor is before the successor.
380  int index = 0;
381  while (index < dominated_blocks_.length() &&
382  dominated_blocks_[index]->block_id() < block->block_id()) {
383  ++index;
384  }
385  dominated_blocks_.InsertAt(index, block, zone());
386 }
387 
388 
// Updates this block's dominator to the common dominator of the current
// dominator and |other|, using the classic intersection walk: repeatedly
// move the block with the larger id up its dominator chain (block ids are
// assigned in reverse post order, so ids decrease towards the entry).
void HBasicBlock::AssignCommonDominator(HBasicBlock* other) {
  if (dominator_ == NULL) {
    // First edge processed: tentatively take |other| as dominator.
    dominator_ = other;
    other->AddDominatedBlock(this);
  } else if (other->dominator() != NULL) {
    HBasicBlock* first = dominator_;
    HBasicBlock* second = other;

    while (first != second) {
      if (first->block_id() > second->block_id()) {
        first = first->dominator();
      } else {
        second = second->dominator();
      }
      ASSERT(first != NULL && second != NULL);
    }

    if (dominator_ != first) {
      // Dominator changed: move this block to the new dominator's list.
      ASSERT(dominator_->dominated_blocks_.Contains(this));
      dominator_->dominated_blocks_.RemoveElement(this);
      dominator_ = first;
      first->AddDominatedBlock(this);
    }
  }
}
414 
415 
// Called on a loop header after loop membership is known.
void HBasicBlock::AssignLoopSuccessorDominators() {
  // Mark blocks that dominate all subsequent reachable blocks inside their
  // loop. Exploit the fact that blocks are sorted in reverse post order. When
  // the loop is visited in increasing block id order, if the number of
  // non-loop-exiting successor edges at the dominator_candidate block doesn't
  // exceed the number of previously encountered predecessor edges, there is no
  // path from the loop header to any block with higher id that doesn't go
  // through the dominator_candidate block. In this case, the
  // dominator_candidate block is guaranteed to dominate all blocks reachable
  // from it with higher ids.
  HBasicBlock* last = loop_information()->GetLastBackEdge();
  int outstanding_successors = 1;  // one edge from the pre-header
  // Header always dominates everything.
  MarkAsLoopSuccessorDominator();
  for (int j = block_id(); j <= last->block_id(); ++j) {
    HBasicBlock* dominator_candidate = graph_->blocks()->at(j);
    for (HPredecessorIterator it(dominator_candidate); !it.Done();
         it.Advance()) {
      HBasicBlock* predecessor = it.Current();
      // Don't count back edges.
      if (predecessor->block_id() < dominator_candidate->block_id()) {
        outstanding_successors--;
      }
    }

    // If more successors than predecessors have been seen in the loop up to
    // now, it's not possible to guarantee that the current block dominates
    // all of the blocks with higher IDs. In this case, assume conservatively
    // that those paths through loop that don't go through the current block
    // contain all of the loop's dependencies. Also be careful to record
    // dominator information about the current loop that's being processed,
    // and not nested loops, which will be processed when
    // AssignLoopSuccessorDominators gets called on their header.
    ASSERT(outstanding_successors >= 0);
    HBasicBlock* parent_loop_header = dominator_candidate->parent_loop_header();
    if (outstanding_successors == 0 &&
        (parent_loop_header == this && !dominator_candidate->IsLoopHeader())) {
      dominator_candidate->MarkAsLoopSuccessorDominator();
    }
    HControlInstruction* end = dominator_candidate->end();
    for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
      HBasicBlock* successor = it.Current();
      // Only count successors that remain inside the loop and don't loop back
      // to a loop header.
      if (successor->block_id() > dominator_candidate->block_id() &&
          successor->block_id() <= last->block_id()) {
        // Backwards edges must land on loop headers.
        // NOTE(review): the first disjunct repeats the enclosing condition,
        // so this ASSERT is vacuously true as written — confirm whether it
        // was meant to check a different relation.
        ASSERT(successor->block_id() > dominator_candidate->block_id() ||
               successor->IsLoopHeader());
        outstanding_successors++;
      }
    }
  }
}
470 
471 
472 int HBasicBlock::PredecessorIndexOf(HBasicBlock* predecessor) const {
473  for (int i = 0; i < predecessors_.length(); ++i) {
474  if (predecessors_[i] == predecessor) return i;
475  }
476  UNREACHABLE();
477  return -1;
478 }
479 
480 
481 #ifdef DEBUG
// Debug-only consistency checks for a single block.
void HBasicBlock::Verify() {
  // Check that every block is finished.
  ASSERT(IsFinished());
  ASSERT(block_id() >= 0);

  // Check that the incoming edges are in edge split form.
  // (No predecessor of a join block may itself branch.)
  if (predecessors_.length() > 1) {
    for (int i = 0; i < predecessors_.length(); ++i) {
      ASSERT(predecessors_[i]->end()->SecondSuccessor() == NULL);
    }
  }
}
494 #endif
495 
496 
// Records |block| as a back edge of this loop and transitively adds every
// block on paths from the back edge to the header as a loop member.
void HLoopInformation::RegisterBackEdge(HBasicBlock* block) {
  this->back_edges_.Add(block, block->zone());
  AddBlock(block);
}
501 
502 
503 HBasicBlock* HLoopInformation::GetLastBackEdge() const {
504  int max_id = -1;
505  HBasicBlock* result = NULL;
506  for (int i = 0; i < back_edges_.length(); ++i) {
507  HBasicBlock* cur = back_edges_[i];
508  if (cur->block_id() > max_id) {
509  max_id = cur->block_id();
510  result = cur;
511  }
512  }
513  return result;
514 }
515 
516 
// Adds |block| (and, recursively, its predecessors) to this loop's member
// set. A block already inside a nested loop contributes via that nested
// loop's header instead of being added directly.
void HLoopInformation::AddBlock(HBasicBlock* block) {
  if (block == loop_header()) return;                          // reached the top
  if (block->parent_loop_header() == loop_header()) return;    // already a member
  if (block->parent_loop_header() != NULL) {
    // Block belongs to an inner loop; recurse on that loop's header.
    AddBlock(block->parent_loop_header());
  } else {
    block->set_parent_loop_header(loop_header());
    blocks_.Add(block, block->zone());
    for (int i = 0; i < block->predecessors()->length(); ++i) {
      AddBlock(block->predecessors()->at(i));
    }
  }
}
530 
531 
532 #ifdef DEBUG
533 
534 // Checks reachability of the blocks in this graph and stores a bit in
535 // the BitVector "reachable()" for every block that can be reached
536 // from the start block of the graph. If "dont_visit" is non-null, the given
537 // block is treated as if it would not be part of the graph. "visited_count()"
538 // returns the number of reachable blocks.
class ReachabilityAnalyzer BASE_EMBEDDED {
 public:
  // Runs the whole analysis in the constructor: seeds the worklist with
  // |entry_block| and performs a DFS over successor edges. |dont_visit|
  // (may be NULL) is treated as removed from the graph.
  ReachabilityAnalyzer(HBasicBlock* entry_block,
                       int block_count,
                       HBasicBlock* dont_visit)
      : visited_count_(0),
        stack_(16, entry_block->zone()),
        reachable_(block_count, entry_block->zone()),
        dont_visit_(dont_visit) {
    PushBlock(entry_block);
    Analyze();
  }

  int visited_count() const { return visited_count_; }
  const BitVector* reachable() const { return &reachable_; }

 private:
  // Pushes |block| onto the worklist unless it is NULL, excluded, or
  // already visited.
  void PushBlock(HBasicBlock* block) {
    if (block != NULL && block != dont_visit_ &&
        !reachable_.Contains(block->block_id())) {
      reachable_.Add(block->block_id());
      stack_.Add(block, block->zone());
      visited_count_++;
    }
  }

  // Depth-first traversal along successor edges until the worklist drains.
  void Analyze() {
    while (!stack_.is_empty()) {
      HControlInstruction* end = stack_.RemoveLast()->end();
      for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
        PushBlock(it.Current());
      }
    }
  }

  int visited_count_;             // Number of distinct reachable blocks.
  ZoneList<HBasicBlock*> stack_;  // DFS worklist.
  BitVector reachable_;           // One bit per reachable block id.
  HBasicBlock* dont_visit_;       // Block treated as removed; may be NULL.
};
579 
580 
// Debug-only whole-graph consistency check. Verifies per-block invariants,
// successor/predecessor symmetry, phi arguments, and join-block AST ids;
// with |do_full_verify| it additionally checks connectivity and the
// dominator tree via reachability analysis.
void HGraph::Verify(bool do_full_verify) const {
  Heap::RelocationLock relocation_lock(isolate()->heap());
  AllowHandleDereference allow_deref;
  AllowDeferredHandleDereference allow_deferred_deref;
  for (int i = 0; i < blocks_.length(); i++) {
    HBasicBlock* block = blocks_.at(i);

    block->Verify();

    // Check that every block contains at least one node and that only the last
    // node is a control instruction.
    HInstruction* current = block->first();
    ASSERT(current != NULL && current->IsBlockEntry());
    while (current != NULL) {
      ASSERT((current->next() == NULL) == current->IsControlInstruction());
      ASSERT(current->block() == block);
      current->Verify();
      current = current->next();
    }

    // Check that successors are correctly set.
    HBasicBlock* first = block->end()->FirstSuccessor();
    HBasicBlock* second = block->end()->SecondSuccessor();
    ASSERT(second == NULL || first != NULL);

    // Check that the predecessor array is correct.
    if (first != NULL) {
      ASSERT(first->predecessors()->Contains(block));
      if (second != NULL) {
        ASSERT(second->predecessors()->Contains(block));
      }
    }

    // Check that phis have correct arguments.
    for (int j = 0; j < block->phis()->length(); j++) {
      HPhi* phi = block->phis()->at(j);
      phi->Verify();
    }

    // Check that all join blocks have predecessors that end with an
    // unconditional goto and agree on their environment node id.
    if (block->predecessors()->length() >= 2) {
      BailoutId id =
          block->predecessors()->first()->last_environment()->ast_id();
      for (int k = 0; k < block->predecessors()->length(); k++) {
        HBasicBlock* predecessor = block->predecessors()->at(k);
        ASSERT(predecessor->end()->IsGoto() ||
               predecessor->end()->IsDeoptimize());
        ASSERT(predecessor->last_environment()->ast_id() == id);
      }
    }
  }

  // Check special property of first block to have no predecessors.
  ASSERT(blocks_.at(0)->predecessors()->is_empty());

  if (do_full_verify) {
    // Check that the graph is fully connected.
    ReachabilityAnalyzer analyzer(entry_block_, blocks_.length(), NULL);
    ASSERT(analyzer.visited_count() == blocks_.length());

    // Check that entry block dominator is NULL.
    ASSERT(entry_block_->dominator() == NULL);

    // Check dominators.
    for (int i = 0; i < blocks_.length(); ++i) {
      HBasicBlock* block = blocks_.at(i);
      if (block->dominator() == NULL) {
        // Only start block may have no dominator assigned to.
        ASSERT(i == 0);
      } else {
        // Assert that block is unreachable if dominator must not be visited.
        ReachabilityAnalyzer dominator_analyzer(entry_block_,
                                                blocks_.length(),
                                                block->dominator());
        ASSERT(!dominator_analyzer.reachable()->Contains(block->block_id()));
      }
    }
  }
}
661 
662 #endif
663 
664 
// Returns the cached HConstant for |value|, creating it lazily and
// inserting it right after the graph entry on first use.
HConstant* HGraph::GetConstant(SetOncePointer<HConstant>* pointer,
                               int32_t value) {
  if (!pointer->is_set()) {
    // Can't pass GetInvalidContext() to HConstant::New, because that will
    // recursively call GetConstant
    HConstant* constant = HConstant::New(zone(), NULL, value);
    constant->InsertAfter(entry_block()->first());
    pointer->set(constant);
    return constant;
  }
  return ReinsertConstantIfNecessary(pointer->get());
}
677 
678 
// A cached constant may have been dead-code-eliminated since it was
// created; if so, revive it and put it back after the graph entry.
HConstant* HGraph::ReinsertConstantIfNecessary(HConstant* constant) {
  if (!constant->IsLinked()) {
    // The constant was removed from the graph. Reinsert.
    constant->ClearFlag(HValue::kIsDead);
    constant->InsertAfter(entry_block()->first());
  }
  return constant;
}
687 
688 
// Cached integer constant 0.
HConstant* HGraph::GetConstant0() {
  return GetConstant(&constant_0_, 0);
}
692 
693 
// Cached integer constant 1.
HConstant* HGraph::GetConstant1() {
  return GetConstant(&constant_1_, 1);
}
697 
698 
// Cached integer constant -1.
HConstant* HGraph::GetConstantMinus1() {
  return GetConstant(&constant_minus1_, -1);
}
702 
703 
// Generates HGraph::GetConstant<Name>() accessors for the oddball
// singletons (undefined, true, false, the hole, null). The constant is
// created lazily from the isolate's factory value, pinned as immovable,
// inserted after the graph entry, and reinserted if a prior pass removed it.
#define DEFINE_GET_CONSTANT(Name, name, htype, boolean_value) \
HConstant* HGraph::GetConstant##Name() { \
  if (!constant_##name##_.is_set()) { \
    HConstant* constant = new(zone()) HConstant( \
        Unique<Object>::CreateImmovable(isolate()->factory()->name##_value()), \
        Representation::Tagged(), \
        htype, \
        true, \
        boolean_value, \
        false, \
        ODDBALL_TYPE); \
    constant->InsertAfter(entry_block()->first()); \
    constant_##name##_.set(constant); \
  } \
  return ReinsertConstantIfNecessary(constant_##name##_.get()); \
}


DEFINE_GET_CONSTANT(Undefined, undefined, HType::Tagged(), false)
DEFINE_GET_CONSTANT(True, true, HType::Boolean(), true)
DEFINE_GET_CONSTANT(False, false, HType::Boolean(), false)
DEFINE_GET_CONSTANT(Hole, the_hole, HType::Tagged(), false)
DEFINE_GET_CONSTANT(Null, null, HType::Tagged(), false)


#undef DEFINE_GET_CONSTANT

// Generates HGraph::IsConstant<Name>() predicates that test pointer
// identity against the cached singleton (false if it was never created).
#define DEFINE_IS_CONSTANT(Name, name) \
bool HGraph::IsConstant##Name(HConstant* constant) { \
  return constant_##name##_.is_set() && constant == constant_##name##_.get(); \
}
DEFINE_IS_CONSTANT(Undefined, undefined)
DEFINE_IS_CONSTANT(0, 0)
DEFINE_IS_CONSTANT(1, 1)
DEFINE_IS_CONSTANT(Minus1, minus1)
DEFINE_IS_CONSTANT(True, true)
DEFINE_IS_CONSTANT(False, false)
DEFINE_IS_CONSTANT(Hole, the_hole)
DEFINE_IS_CONSTANT(Null, null)

#undef DEFINE_IS_CONSTANT
745 
746 
// Returns a cached constant used as a placeholder where no real context is
// available. The magic value 0xFFFFC0C7 is presumably chosen to be
// recognizable in dumps — TODO confirm its intended reading.
HConstant* HGraph::GetInvalidContext() {
  return GetConstant(&constant_invalid_context_, 0xFFFFC0C7);
}
750 
751 
752 bool HGraph::IsStandardConstant(HConstant* constant) {
753  if (IsConstantUndefined(constant)) return true;
754  if (IsConstant0(constant)) return true;
755  if (IsConstant1(constant)) return true;
756  if (IsConstantMinus1(constant)) return true;
757  if (IsConstantTrue(constant)) return true;
758  if (IsConstantFalse(constant)) return true;
759  if (IsConstantHole(constant)) return true;
760  if (IsConstantNull(constant)) return true;
761  return false;
762 }
763 
764 
// Starts a fresh if/then/else. The true and false target blocks are
// allocated eagerly from copies of the builder's current environment; a
// compare is still required (needs_compare_ is true).
HGraphBuilder::IfBuilder::IfBuilder(HGraphBuilder* builder)
    : builder_(builder),
      finished_(false),
      did_then_(false),
      did_else_(false),
      did_else_if_(false),
      did_and_(false),
      did_or_(false),
      captured_(false),
      needs_compare_(true),
      pending_merge_block_(false),
      split_edge_merge_block_(NULL),
      merge_at_join_blocks_(NULL),
      normal_merge_at_join_block_count_(0),
      deopt_merge_at_join_block_count_(0) {
  HEnvironment* env = builder->environment();
  first_true_block_ = builder->CreateBasicBlock(env->Copy());
  first_false_block_ = builder->CreateBasicBlock(env->Copy());
}
784 
785 
// Resumes an if whose condition was already evaluated elsewhere: the true
// and false blocks come from |continuation| and no compare is needed.
HGraphBuilder::IfBuilder::IfBuilder(
    HGraphBuilder* builder,
    HIfContinuation* continuation)
    : builder_(builder),
      finished_(false),
      did_then_(false),
      did_else_(false),
      did_else_if_(false),
      did_and_(false),
      did_or_(false),
      captured_(false),
      needs_compare_(false),
      pending_merge_block_(false),
      first_true_block_(NULL),
      first_false_block_(NULL),
      split_edge_merge_block_(NULL),
      merge_at_join_blocks_(NULL),
      normal_merge_at_join_block_count_(0),
      deopt_merge_at_join_block_count_(0) {
  continuation->Continue(&first_true_block_,
                         &first_false_block_);
}
808 
809 
// Installs |compare| as the condition of the current if. For an
// if-then-elseif chain the builder state is reset and fresh target blocks
// allocated; for a pending &&/|| chain the corresponding edge is routed
// through the shared split-edge merge block.
HControlInstruction* HGraphBuilder::IfBuilder::AddCompare(
    HControlInstruction* compare) {
  ASSERT(did_then_ == did_else_);
  if (did_else_) {
    // Handle if-then-elseif
    did_else_if_ = true;
    did_else_ = false;
    did_then_ = false;
    did_and_ = false;
    did_or_ = false;
    pending_merge_block_ = false;
    split_edge_merge_block_ = NULL;
    HEnvironment* env = builder_->environment();
    first_true_block_ = builder_->CreateBasicBlock(env->Copy());
    first_false_block_ = builder_->CreateBasicBlock(env->Copy());
  }
  if (split_edge_merge_block_ != NULL) {
    HEnvironment* env = first_false_block_->last_environment();
    HBasicBlock* split_edge =
        builder_->CreateBasicBlock(env->Copy());
    if (did_or_) {
      // For ||, the false edge continues the chain via the merge block.
      compare->SetSuccessorAt(0, split_edge);
      compare->SetSuccessorAt(1, first_false_block_);
    } else {
      // For &&, the true edge continues the chain via the merge block.
      compare->SetSuccessorAt(0, first_true_block_);
      compare->SetSuccessorAt(1, split_edge);
    }
    builder_->GotoNoSimulate(split_edge, split_edge_merge_block_);
  } else {
    compare->SetSuccessorAt(0, first_true_block_);
    compare->SetSuccessorAt(1, first_false_block_);
  }
  builder_->FinishCurrentBlock(compare);
  needs_compare_ = false;
  return compare;
}
846 
847 
// Begins the right operand of a || condition: true edges from earlier
// operands are funneled into a shared split-edge merge block, and building
// continues in the previous false block with a fresh false target.
void HGraphBuilder::IfBuilder::Or() {
  ASSERT(!needs_compare_);
  ASSERT(!did_and_);  // Cannot mix && and || in one chain.
  did_or_ = true;
  HEnvironment* env = first_false_block_->last_environment();
  if (split_edge_merge_block_ == NULL) {
    split_edge_merge_block_ =
        builder_->CreateBasicBlock(env->Copy());
    builder_->GotoNoSimulate(first_true_block_, split_edge_merge_block_);
    first_true_block_ = split_edge_merge_block_;
  }
  builder_->set_current_block(first_false_block_);
  first_false_block_ = builder_->CreateBasicBlock(env->Copy());
}
862 
863 
// Begins the right operand of a && condition: false edges from earlier
// operands are funneled into a shared split-edge merge block, and building
// continues in the previous true block with a fresh true target.
void HGraphBuilder::IfBuilder::And() {
  ASSERT(!needs_compare_);
  ASSERT(!did_or_);  // Cannot mix && and || in one chain.
  did_and_ = true;
  HEnvironment* env = first_false_block_->last_environment();
  if (split_edge_merge_block_ == NULL) {
    split_edge_merge_block_ = builder_->CreateBasicBlock(env->Copy());
    builder_->GotoNoSimulate(first_false_block_, split_edge_merge_block_);
    first_false_block_ = split_edge_merge_block_;
  }
  builder_->set_current_block(first_true_block_);
  first_true_block_ = builder_->CreateBasicBlock(env->Copy());
}
877 
878 
// Finishes the if but instead of merging, hands the (unfinished) then/else
// fall-through blocks to |continuation|. The builder is left with no
// current block.
void HGraphBuilder::IfBuilder::CaptureContinuation(
    HIfContinuation* continuation) {
  ASSERT(!did_else_if_);
  ASSERT(!finished_);
  ASSERT(!captured_);

  HBasicBlock* true_block = NULL;
  HBasicBlock* false_block = NULL;
  Finish(&true_block, &false_block);
  ASSERT(true_block != NULL);
  ASSERT(false_block != NULL);
  continuation->Capture(true_block, false_block);
  captured_ = true;
  builder_->set_current_block(NULL);
  End();
}
895 
896 
// Finishes the if and wires its then/else fall-through ends into an
// existing |continuation|'s branches rather than creating a new join block.
void HGraphBuilder::IfBuilder::JoinContinuation(HIfContinuation* continuation) {
  ASSERT(!did_else_if_);
  ASSERT(!finished_);
  ASSERT(!captured_);
  HBasicBlock* true_block = NULL;
  HBasicBlock* false_block = NULL;
  Finish(&true_block, &false_block);
  merge_at_join_blocks_ = NULL;
  if (true_block != NULL && !true_block->IsFinished()) {
    ASSERT(continuation->IsTrueReachable());
    builder_->GotoNoSimulate(true_block, continuation->true_branch());
  }
  if (false_block != NULL && !false_block->IsFinished()) {
    ASSERT(continuation->IsFalseReachable());
    builder_->GotoNoSimulate(false_block, continuation->false_branch());
  }
  captured_ = true;
  End();
}
916 
917 
// Begins the "then" branch: makes first_true_block_ the current block. If
// no compare was ever added, a constant-false branch is synthesized first.
void HGraphBuilder::IfBuilder::Then() {
  ASSERT(!captured_);
  ASSERT(!finished_);
  did_then_ = true;
  if (needs_compare_) {
    // Handle if's without any expressions, they jump directly to the "else"
    // branch. However, we must pretend that the "then" branch is reachable,
    // so that the graph builder visits it and sees any live range extending
    // constructs within it.
    HConstant* constant_false = builder_->graph()->GetConstantFalse();
    ToBooleanStub::Types boolean_type = ToBooleanStub::Types();
    boolean_type.Add(ToBooleanStub::BOOLEAN);
    HBranch* branch = builder()->New<HBranch>(
        constant_false, boolean_type, first_true_block_, first_false_block_);
    builder_->FinishCurrentBlock(branch);
  }
  builder_->set_current_block(first_true_block_);
  pending_merge_block_ = true;
}
937 
938 
// Ends the "then" branch (recording its fall-through block) and switches
// building to the "else" branch.
void HGraphBuilder::IfBuilder::Else() {
  ASSERT(did_then_);
  ASSERT(!captured_);
  ASSERT(!finished_);
  AddMergeAtJoinBlock(false);
  builder_->set_current_block(first_false_block_);
  pending_merge_block_ = true;
  did_else_ = true;
}
948 
949 
// Ends the current branch with an eager deoptimization; the branch is
// recorded as a deopt merge so the join logic can pad it appropriately.
void HGraphBuilder::IfBuilder::Deopt(const char* reason) {
  ASSERT(did_then_);
  builder_->Add<HDeoptimize>(reason, Deoptimizer::EAGER);
  AddMergeAtJoinBlock(true);
}
955 
956 
// Ends the current branch by returning |value| from the function being
// built (parameter count -1 means "pop using the dynamic count").
// NOTE(review): the -1 semantics are inferred from GetConstantMinus1 usage
// elsewhere — confirm against HReturn.
void HGraphBuilder::IfBuilder::Return(HValue* value) {
  HValue* parameter_count = builder_->graph()->GetConstantMinus1();
  builder_->FinishExitCurrentBlock(
      builder_->New<HReturn>(value, parameter_count));
  AddMergeAtJoinBlock(false);
}
963 
964 
// Records the current block as one that falls through to the eventual join
// point (prepending it to the merge_at_join_blocks_ list) and detaches it
// from the builder. |deopt| marks branches that ended in a deoptimize.
void HGraphBuilder::IfBuilder::AddMergeAtJoinBlock(bool deopt) {
  if (!pending_merge_block_) return;
  HBasicBlock* block = builder_->current_block();
  ASSERT(block == NULL || !block->IsFinished());
  MergeAtJoinBlock* record =
      new(builder_->zone()) MergeAtJoinBlock(block, deopt,
                                             merge_at_join_blocks_);
  merge_at_join_blocks_ = record;
  if (block != NULL) {
    ASSERT(block->end() == NULL);
    // NOTE(review): these increments look swapped relative to the counter
    // names — the deopt case bumps normal_merge_at_join_block_count_ and
    // vice versa. End() only visibly uses the sum of the two, where the
    // swap would be harmless; confirm against all other uses before fixing.
    if (deopt) {
      normal_merge_at_join_block_count_++;
    } else {
      deopt_merge_at_join_block_count_++;
    }
  }
  builder_->set_current_block(NULL);
  pending_merge_block_ = false;
}
984 
985 
void HGraphBuilder::IfBuilder::Finish() {
  // Close both branches, synthesizing empty Then()/Else() branches if the
  // caller never opened them, and queue each for the join block. Afterwards
  // merge_at_join_blocks_ lists the "else" record first, then the "then"
  // record (the two-argument Finish() relies on this order).
  ASSERT(!finished_);
  if (!did_then_) {
    Then();
  }
  AddMergeAtJoinBlock(false);
  if (!did_else_) {
    Else();
    AddMergeAtJoinBlock(false);
  }
  finished_ = true;
}
998 
999 
void HGraphBuilder::IfBuilder::Finish(HBasicBlock** then_continuation,
                                      HBasicBlock** else_continuation) {
  // Finish both branches and hand their unmerged end blocks back to the
  // caller. Either out-parameter may be NULL to ignore that branch.
  Finish();

  // Finish() records the "then" block first and the "else" block last,
  // so the head of the list is the else record.
  MergeAtJoinBlock* else_record = merge_at_join_blocks_;
  if (else_continuation != NULL) {
    *else_continuation = else_record->block_;
  }
  MergeAtJoinBlock* then_record = else_record->next_;
  if (then_continuation != NULL) {
    *then_continuation = then_record->block_;
  }
  // Exactly two records are expected here.
  ASSERT(then_record->next_ == NULL);
}
1014 
1015 
void HGraphBuilder::IfBuilder::End() {
  // Join all recorded branch-end blocks into a single merge block and make
  // it the builder's current block. No-op when the continuation was
  // captured (the caller owns the branches then).
  if (captured_) return;
  Finish();

  int total_merged_blocks = normal_merge_at_join_block_count_ +
    deopt_merge_at_join_block_count_;
  ASSERT(total_merged_blocks >= 1);
  // A single surviving block needs no join; merge_block stays NULL and the
  // block itself becomes the current block below.
  HBasicBlock* merge_block = total_merged_blocks == 1
      ? NULL : builder_->graph()->CreateBasicBlock();

  // Merge non-deopt blocks first to ensure environment has right size for
  // padding.
  MergeAtJoinBlock* current = merge_at_join_blocks_;
  while (current != NULL) {
    if (!current->deopt_ && current->block_ != NULL) {
      // If there is only one block that makes it through to the end of the
      // if, then just set it as the current block and continue rather then
      // creating an unnecessary merge block.
      if (total_merged_blocks == 1) {
        builder_->set_current_block(current->block_);
        return;
      }
      builder_->GotoNoSimulate(current->block_, merge_block);
    }
    current = current->next_;
  }

  // Merge deopt blocks, padding when necessary.
  current = merge_at_join_blocks_;
  while (current != NULL) {
    if (current->deopt_ && current->block_ != NULL) {
      // Deopting blocks never reach the join; terminate them with an
      // abnormal exit instead of an edge to merge_block.
      current->block_->FinishExit(
          HAbnormalExit::New(builder_->zone(), NULL),
          HSourcePosition::Unknown());
    }
    current = current->next_;
  }
  builder_->set_current_block(merge_block);
}
1055 
1056 
HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder,
                                        HValue* context,
                                        LoopBuilder::Direction direction)
    : builder_(builder),
      context_(context),
      direction_(direction),
      finished_(false) {
  // Convenience constructor: loops built with it step by the constant 1.
  // The loop header is created eagerly; body/exit blocks are created in
  // BeginBody(), and the break trampoline lazily on the first Break().
  header_block_ = builder->CreateLoopHeaderBlock();
  body_block_ = NULL;
  exit_block_ = NULL;
  exit_trampoline_block_ = NULL;
  increment_amount_ = builder_->graph()->GetConstant1();
}
1070 
1071 
HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder,
                                        HValue* context,
                                        LoopBuilder::Direction direction,
                                        HValue* increment_amount)
    : builder_(builder),
      context_(context),
      direction_(direction),
      finished_(false) {
  // Variant with a caller-supplied step. Note: |increment_amount| is only
  // applied by EndBody() for the post-increment/decrement directions;
  // BeginBody() always steps pre-directions by the constant 1.
  header_block_ = builder->CreateLoopHeaderBlock();
  body_block_ = NULL;
  exit_block_ = NULL;
  exit_trampoline_block_ = NULL;
  increment_amount_ = increment_amount;
}
1086 
1087 
HValue* HGraphBuilder::LoopBuilder::BeginBody(
    HValue* initial,
    HValue* terminating,
    Token::Value token) {
  // Emit the loop header: a phi for the induction variable seeded with
  // |initial|, and the compare |phi token terminating| that branches
  // between the body and exit blocks. Returns the value the loop body
  // should use as the induction variable.
  HEnvironment* env = builder_->environment();
  phi_ = header_block_->AddNewPhi(env->values()->length());
  phi_->AddInput(initial);
  // Carry the induction value into the header via the environment.
  env->Push(initial);
  builder_->GotoNoSimulate(header_block_);

  HEnvironment* body_env = env->Copy();
  HEnvironment* exit_env = env->Copy();
  // Remove the phi from the expression stack
  body_env->Pop();
  exit_env->Pop();
  body_block_ = builder_->CreateBasicBlock(body_env);
  exit_block_ = builder_->CreateBasicBlock(exit_env);

  builder_->set_current_block(header_block_);
  env->Pop();
  builder_->FinishCurrentBlock(builder_->New<HCompareNumericAndBranch>(
      phi_, terminating, token, body_block_, exit_block_));

  builder_->set_current_block(body_block_);
  if (direction_ == kPreIncrement || direction_ == kPreDecrement) {
    // Pre-increment/decrement: step the variable at the top of the body by
    // 1 and hand the stepped value back. The kCanOverflow flag is cleared
    // deliberately (callers presumably guarantee in-range bounds — confirm).
    HValue* one = builder_->graph()->GetConstant1();
    if (direction_ == kPreIncrement) {
      increment_ = HAdd::New(zone(), context_, phi_, one);
    } else {
      increment_ = HSub::New(zone(), context_, phi_, one);
    }
    increment_->ClearFlag(HValue::kCanOverflow);
    builder_->AddInstruction(increment_);
    return increment_;
  } else {
    return phi_;
  }
}
1126 
1127 
1129  if (exit_trampoline_block_ == NULL) {
1130  // Its the first time we saw a break.
1131  HEnvironment* env = exit_block_->last_environment()->Copy();
1132  exit_trampoline_block_ = builder_->CreateBasicBlock(env);
1133  builder_->GotoNoSimulate(exit_block_, exit_trampoline_block_);
1134  }
1135 
1136  builder_->GotoNoSimulate(exit_trampoline_block_);
1137  builder_->set_current_block(NULL);
1138 }
1139 
1140 
void HGraphBuilder::LoopBuilder::EndBody() {
  // Close the loop: emit the post-direction step (by increment_amount_),
  // wire the back edge into the header phi, and leave the builder
  // positioned at the loop exit — or at the break trampoline when Break()
  // was used inside the body.
  ASSERT(!finished_);

  if (direction_ == kPostIncrement || direction_ == kPostDecrement) {
    if (direction_ == kPostIncrement) {
      increment_ = HAdd::New(zone(), context_, phi_, increment_amount_);
    } else {
      increment_ = HSub::New(zone(), context_, phi_, increment_amount_);
    }
    // Overflow flag cleared deliberately, mirroring BeginBody().
    increment_->ClearFlag(HValue::kCanOverflow);
    builder_->AddInstruction(increment_);
  }

  // Push the new increment value on the expression stack to merge into the phi.
  builder_->environment()->Push(increment_);
  HBasicBlock* last_block = builder_->current_block();
  builder_->GotoNoSimulate(last_block, header_block_);
  header_block_->loop_information()->RegisterBackEdge(last_block);

  if (exit_trampoline_block_ != NULL) {
    builder_->set_current_block(exit_trampoline_block_);
  } else {
    builder_->set_current_block(exit_block_);
  }
  finished_ = true;
}
1167 
1168 
HGraph* HGraphBuilder::CreateGraph() {
  // Build the Hydrogen graph for the current compilation info.
  // Returns NULL when graph construction bails out.
  graph_ = new(zone()) HGraph(info_);
  if (FLAG_hydrogen_stats) isolate()->GetHStatistics()->Initialize(info_);
  // Scoped phase object: times/attributes the block-building phase.
  CompilationPhase phase("H_Block building", info_);
  set_current_block(graph()->entry_block());
  if (!BuildGraph()) return NULL;
  graph()->FinalizeUniqueness();
  return graph_;
}
1178 
1179 
HInstruction* HGraphBuilder::AddInstruction(HInstruction* instr) {
  // Append |instr| to the current block at the current source position and
  // return it for chaining.
  ASSERT(current_block() != NULL);
  // With position tracking enabled, optimizing compilations must have a
  // known source position by the time instructions are emitted.
  ASSERT(!FLAG_hydrogen_track_positions ||
         !position_.IsUnknown() ||
         !info_->IsOptimizing());
  current_block()->AddInstruction(instr, source_position());
  if (graph()->IsInsideNoSideEffectsScope()) {
    instr->SetFlag(HValue::kHasNoObservableSideEffects);
  }
  return instr;
}
1191 
1192 
1193 void HGraphBuilder::FinishCurrentBlock(HControlInstruction* last) {
1194  ASSERT(!FLAG_hydrogen_track_positions ||
1195  !info_->IsOptimizing() ||
1196  !position_.IsUnknown());
1197  current_block()->Finish(last, source_position());
1198  if (last->IsReturn() || last->IsAbnormalExit()) {
1199  set_current_block(NULL);
1200  }
1201 }
1202 
1203 
1204 void HGraphBuilder::FinishExitCurrentBlock(HControlInstruction* instruction) {
1205  ASSERT(!FLAG_hydrogen_track_positions || !info_->IsOptimizing() ||
1206  !position_.IsUnknown());
1207  current_block()->FinishExit(instruction, source_position());
1208  if (instruction->IsReturn() || instruction->IsAbnormalExit()) {
1209  set_current_block(NULL);
1210  }
1211 }
1212 
1213 
void HGraphBuilder::AddIncrementCounter(StatsCounter* counter) {
  // Emit graph code that bumps the native StatsCounter |counter| by one.
  // Nothing is emitted unless --native-code-counters is on and the counter
  // is enabled.
  if (FLAG_native_code_counters && counter->Enabled()) {
    HValue* reference = Add<HConstant>(ExternalReference(counter));
    HValue* old_value = Add<HLoadNamedField>(
        reference, static_cast<HValue*>(NULL), HObjectAccess::ForCounter());
    HValue* new_value = AddUncasted<HAdd>(old_value, graph()->GetConstant1());
    new_value->ClearFlag(HValue::kCanOverflow);  // Ignore counter overflow
    Add<HStoreNamedField>(reference, HObjectAccess::ForCounter(),
                          new_value, STORE_TO_INITIALIZED_ENTRY);
  }
}
1225 
1226 
void HGraphBuilder::AddSimulate(BailoutId id,
                                RemovableSimulate removable) {
  // Record a simulate (deoptimization environment snapshot) for bailout id
  // |id|. Illegal inside a no-side-effects scope, where simulates are
  // suppressed.
  ASSERT(current_block() != NULL);
  ASSERT(!graph()->IsInsideNoSideEffectsScope());
  current_block()->AddNewSimulate(id, source_position(), removable);
}
1233 
1234 
1235 HBasicBlock* HGraphBuilder::CreateBasicBlock(HEnvironment* env) {
1236  HBasicBlock* b = graph()->CreateBasicBlock();
1237  b->SetInitialEnvironment(env);
1238  return b;
1239 }
1240 
1241 
1242 HBasicBlock* HGraphBuilder::CreateLoopHeaderBlock() {
1243  HBasicBlock* header = graph()->CreateBasicBlock();
1244  HEnvironment* entry_env = environment()->CopyAsLoopHeader(header);
1245  header->SetInitialEnvironment(entry_env);
1246  header->AttachLoopInformation();
1247  return header;
1248 }
1249 
1250 
1251 HValue* HGraphBuilder::BuildCheckHeapObject(HValue* obj) {
1252  if (obj->type().IsHeapObject()) return obj;
1253  return Add<HCheckHeapObject>(obj);
1254 }
1255 
1256 
void HGraphBuilder::FinishExitWithHardDeoptimization(const char* reason) {
  // Unconditionally deoptimize (eagerly) and terminate the current block
  // with an abnormal exit.
  Add<HDeoptimize>(reason, Deoptimizer::EAGER);
  FinishExitCurrentBlock(New<HAbnormalExit>());
}
1261 
1262 
HValue* HGraphBuilder::BuildCheckMap(HValue* obj, Handle<Map> map) {
  // Emit a map check of |obj| against |map|; returns the checked value.
  return Add<HCheckMaps>(obj, map, top_info());
}
1266 
1267 
1268 HValue* HGraphBuilder::BuildCheckString(HValue* string) {
1269  if (!string->type().IsString()) {
1270  ASSERT(!string->IsConstant() ||
1271  !HConstant::cast(string)->HasStringValue());
1272  BuildCheckHeapObject(string);
1273  return Add<HCheckInstanceType>(string, HCheckInstanceType::IS_STRING);
1274  }
1275  return string;
1276 }
1277 
1278 
1279 HValue* HGraphBuilder::BuildWrapReceiver(HValue* object, HValue* function) {
1280  if (object->type().IsJSObject()) return object;
1281  if (function->IsConstant() &&
1282  HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
1284  HConstant::cast(function)->handle(isolate()));
1285  SharedFunctionInfo* shared = f->shared();
1286  if (shared->strict_mode() == STRICT || shared->native()) return object;
1287  }
1288  return Add<HWrapReceiver>(object, function);
1289 }
1290 
1291 
HValue* HGraphBuilder::BuildCheckForCapacityGrow(
    HValue* object,
    HValue* elements,
    ElementsKind kind,
    HValue* length,
    HValue* key,
    bool is_js_array,
    PropertyAccessType access_type) {
  // Out-of-bounds element access path: when |key| is at or past |length|,
  // grow the backing store (and, for JSArrays, the length) as needed;
  // otherwise just bounds-check the key. Returns the elements array that
  // the caller should access.
  IfBuilder length_checker(this);

  // Holey kinds may be written anywhere at/past the length; packed kinds
  // only at exactly |length| (an append).
  Token::Value token = IsHoleyElementsKind(kind) ? Token::GTE : Token::EQ;
  length_checker.If<HCompareNumericAndBranch>(key, length, token);

  length_checker.Then();

  HValue* current_capacity = AddLoadFixedArrayLength(elements);

  IfBuilder capacity_checker(this);

  capacity_checker.If<HCompareNumericAndBranch>(key, current_capacity,
                                                Token::GTE);
  capacity_checker.Then();

  // Refuse to grow across a gap larger than kMaxGap beyond the current
  // capacity — such stores deopt instead of growing here.
  HValue* max_gap = Add<HConstant>(static_cast<int32_t>(JSObject::kMaxGap));
  HValue* max_capacity = AddUncasted<HAdd>(current_capacity, max_gap);
  IfBuilder key_checker(this);
  key_checker.If<HCompareNumericAndBranch>(key, max_capacity, Token::LT);
  key_checker.Then();
  key_checker.ElseDeopt("Key out of capacity range");
  key_checker.End();

  HValue* new_capacity = BuildNewElementsCapacity(key);
  HValue* new_elements = BuildGrowElementsCapacity(object, elements,
                                                   kind, kind, length,
                                                   new_capacity);

  // The chosen elements array is threaded through the if-merges below on
  // the environment's expression stack.
  environment()->Push(new_elements);
  capacity_checker.Else();

  environment()->Push(elements);
  capacity_checker.End();

  if (is_js_array) {
    // Stores past the current length extend the JSArray length to key + 1.
    HValue* new_length = AddUncasted<HAdd>(key, graph_->GetConstant1());
    new_length->ClearFlag(HValue::kCanOverflow);

    Add<HStoreNamedField>(object, HObjectAccess::ForArrayLength(kind),
                          new_length);
  }

  if (access_type == STORE && kind == FAST_SMI_ELEMENTS) {
    HValue* checked_elements = environment()->Top();

    // Write zero to ensure that the new element is initialized with some smi.
    Add<HStoreKeyed>(checked_elements, key, graph()->GetConstant0(), kind);
  }

  length_checker.Else();
  // In-bounds case: only a bounds check is required.
  Add<HBoundsCheck>(key, length);

  environment()->Push(elements);
  length_checker.End();

  return environment()->Pop();
}
1357 
1358 
HValue* HGraphBuilder::BuildCopyElementsOnWrite(HValue* object,
                                                HValue* elements,
                                                ElementsKind kind,
                                                HValue* length) {
  // Copy-on-write support: if |elements| is a COW fixed array, make a
  // private, writable copy before any store. Returns the writable elements
  // array (the original if it was not COW).
  Factory* factory = isolate()->factory();

  IfBuilder cow_checker(this);

  cow_checker.If<HCompareMap>(elements, factory->fixed_cow_array_map());
  cow_checker.Then();

  // "Growing" to the same capacity copies the array and installs the copy
  // on the object.
  HValue* capacity = AddLoadFixedArrayLength(elements);

  HValue* new_elements = BuildGrowElementsCapacity(object, elements, kind,
                                                   kind, length, capacity);

  environment()->Push(new_elements);

  cow_checker.Else();

  environment()->Push(elements);

  cow_checker.End();

  return environment()->Pop();
}
1385 
1386 
void HGraphBuilder::BuildTransitionElementsKind(HValue* object,
                                                HValue* map,
                                                ElementsKind from_kind,
                                                ElementsKind to_kind,
                                                bool is_jsarray) {
  // Transition |object| from |from_kind| to |to_kind| and install |map|.
  // Transitioning from a holey kind back to a packed kind is not supported.
  ASSERT(!IsFastHoleyElementsKind(from_kind) ||
         IsFastHoleyElementsKind(to_kind));

  if (AllocationSite::GetMode(from_kind, to_kind) == TRACK_ALLOCATION_SITE) {
    Add<HTrapAllocationMemento>(object);
  }

  if (!IsSimpleMapChangeTransition(from_kind, to_kind)) {
    // Non-trivial transitions must rewrite the backing store — unless it is
    // the shared empty fixed array, which can be kept as-is.
    HInstruction* elements = AddLoadElements(object);

    HInstruction* empty_fixed_array = Add<HConstant>(
        isolate()->factory()->empty_fixed_array());

    IfBuilder if_builder(this);

    if_builder.IfNot<HCompareObjectEqAndBranch>(elements, empty_fixed_array);

    if_builder.Then();

    HInstruction* elements_length = AddLoadFixedArrayLength(elements);

    // For JSArrays only the portion up to the array length needs copying;
    // otherwise copy the whole backing store.
    HInstruction* array_length = is_jsarray
        ? Add<HLoadNamedField>(object, static_cast<HValue*>(NULL),
                               HObjectAccess::ForArrayLength(from_kind))
        : elements_length;

    // Growing to the same capacity re-allocates in the target kind.
    BuildGrowElementsCapacity(object, elements, from_kind, to_kind,
                              array_length, elements_length);

    if_builder.End();
  }

  Add<HStoreNamedField>(object, HObjectAccess::ForMap(), map);
}
1426 
1427 
HValue* HGraphBuilder::BuildUncheckedDictionaryElementLoadHelper(
    HValue* elements,
    HValue* key,
    HValue* hash,
    HValue* mask,
    int current_probe) {
  // Emit one probe of the open-addressed seeded number dictionary; further
  // probes are expanded by recursion. Returns the loaded value, or NULL
  // once all kNumberDictionaryProbes probes have been expanded (the caller
  // then emits a deopt).
  if (current_probe == kNumberDictionaryProbes) {
    return NULL;
  }

  // entry = (hash + probe_offset(current_probe)) & mask, scaled by the
  // dictionary entry size to get a raw FixedArray index.
  int32_t offset = SeededNumberDictionary::GetProbeOffset(current_probe);
  HValue* raw_index = (current_probe == 0)
      ? hash
      : AddUncasted<HAdd>(hash, Add<HConstant>(offset));
  raw_index = AddUncasted<HBitwise>(Token::BIT_AND, raw_index, mask);
  int32_t entry_size = SeededNumberDictionary::kEntrySize;
  raw_index = AddUncasted<HMul>(raw_index, Add<HConstant>(entry_size));
  raw_index->ClearFlag(HValue::kCanOverflow);

  // Entry layout relative to kElementsStartIndex: +0 key, +1 value,
  // +2 details (see the offsets used below).
  int32_t base_offset = SeededNumberDictionary::kElementsStartIndex;
  HValue* key_index = AddUncasted<HAdd>(raw_index, Add<HConstant>(base_offset));
  key_index->ClearFlag(HValue::kCanOverflow);

  HValue* candidate_key = Add<HLoadKeyed>(elements, key_index,
                                          static_cast<HValue*>(NULL),
                                          FAST_ELEMENTS);

  IfBuilder key_compare(this);
  key_compare.IfNot<HCompareObjectEqAndBranch>(key, candidate_key);
  key_compare.Then();
  {
    // Key at the current probe doesn't match, try at the next probe.
    HValue* result = BuildUncheckedDictionaryElementLoadHelper(
        elements, key, hash, mask, current_probe + 1);
    if (result == NULL) {
      // Probe budget exhausted: this arm deopts and never reaches the join.
      key_compare.Deopt("probes exhausted in keyed load dictionary lookup");
      result = graph()->GetConstantUndefined();
    } else {
      Push(result);
    }
  }
  key_compare.Else();
  {
    // Key at current probe matches. Details must be zero, otherwise the
    // dictionary element requires special handling.
    HValue* details_index = AddUncasted<HAdd>(
        raw_index, Add<HConstant>(base_offset + 2));
    details_index->ClearFlag(HValue::kCanOverflow);

    HValue* details = Add<HLoadKeyed>(elements, details_index,
                                      static_cast<HValue*>(NULL),
                                      FAST_ELEMENTS);
    IfBuilder details_compare(this);
    details_compare.If<HCompareNumericAndBranch>(details,
                                                 graph()->GetConstant0(),
                                                 Token::NE);
    details_compare.ThenDeopt("keyed load dictionary element not fast case");

    details_compare.Else();
    {
      // Key matches and details are zero --> fast case. Load and return the
      // value.
      HValue* result_index = AddUncasted<HAdd>(
          raw_index, Add<HConstant>(base_offset + 1));
      result_index->ClearFlag(HValue::kCanOverflow);

      Push(Add<HLoadKeyed>(elements, result_index,
                           static_cast<HValue*>(NULL),
                           FAST_ELEMENTS));
    }
    details_compare.End();
  }
  key_compare.End();

  // Every surviving arm pushed exactly one value; the deopting arms never
  // reach this point.
  return Pop();
}
1504 
1505 
HValue* HGraphBuilder::BuildElementIndexHash(HValue* index) {
  // Emit the seeded integer hash over |index| as graph instructions (the
  // same mixing sequence the runtime uses for number-dictionary keys).
  // NOTE(review): the seed is cast to uint32_t but stored in an int32_t,
  // relying on implementation-defined narrowing — consider
  // static_cast<int32_t> instead.
  int32_t seed_value = static_cast<uint32_t>(isolate()->heap()->HashSeed());
  HValue* seed = Add<HConstant>(seed_value);
  HValue* hash = AddUncasted<HBitwise>(Token::BIT_XOR, index, seed);

  // hash = ~hash + (hash << 15);
  HValue* shifted_hash = AddUncasted<HShl>(hash, Add<HConstant>(15));
  // ~hash is expressed as hash XOR -1.
  HValue* not_hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash,
                                           graph()->GetConstantMinus1());
  hash = AddUncasted<HAdd>(shifted_hash, not_hash);

  // hash = hash ^ (hash >> 12);
  shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(12));
  hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);

  // hash = hash + (hash << 2);
  shifted_hash = AddUncasted<HShl>(hash, Add<HConstant>(2));
  hash = AddUncasted<HAdd>(hash, shifted_hash);

  // hash = hash ^ (hash >> 4);
  shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(4));
  hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);

  // hash = hash * 2057;
  hash = AddUncasted<HMul>(hash, Add<HConstant>(2057));
  hash->ClearFlag(HValue::kCanOverflow);

  // hash = hash ^ (hash >> 16);
  shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(16));
  return AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);
}
1537 
1538 
1539 HValue* HGraphBuilder::BuildUncheckedDictionaryElementLoad(HValue* receiver,
1540  HValue* key) {
1541  HValue* elements = AddLoadElements(receiver);
1542 
1543  HValue* hash = BuildElementIndexHash(key);
1544 
1545  HValue* capacity = Add<HLoadKeyed>(
1546  elements,
1547  Add<HConstant>(NameDictionary::kCapacityIndex),
1548  static_cast<HValue*>(NULL),
1549  FAST_ELEMENTS);
1550 
1551  HValue* mask = AddUncasted<HSub>(capacity, graph()->GetConstant1());
1552  mask->ChangeRepresentation(Representation::Integer32());
1553  mask->ClearFlag(HValue::kCanOverflow);
1554 
1555  return BuildUncheckedDictionaryElementLoadHelper(elements, key,
1556  hash, mask, 0);
1557 }
1558 
1559 
HValue* HGraphBuilder::BuildRegExpConstructResult(HValue* length,
                                                  HValue* index,
                                                  HValue* input) {
  // Allocate and initialize a JSRegExpResult array of |length| elements
  // with the given match |index| and |input| string, elements filled with
  // undefined. No simulates are emitted inside this helper.
  NoObservableSideEffectsScope scope(this);

  // Compute the size of the RegExpResult followed by FixedArray with length.
  HValue* size = length;
  size = AddUncasted<HShl>(size, Add<HConstant>(kPointerSizeLog2));
  size = AddUncasted<HAdd>(size, Add<HConstant>(static_cast<int32_t>(
      JSRegExpResult::kSize + FixedArray::kHeaderSize)));

  // Make sure size does not exceed max regular heap object size.
  Add<HBoundsCheck>(size, Add<HConstant>(Page::kMaxRegularHeapObjectSize));

  // Allocate the JSRegExpResult and the FixedArray in one step.
  HValue* result = Add<HAllocate>(
      size, HType::JSArray(), NOT_TENURED, JS_ARRAY_TYPE);

  // Determine the elements FixedArray.
  HValue* elements = Add<HInnerAllocatedObject>(
      result, Add<HConstant>(JSRegExpResult::kSize));

  // Initialize the JSRegExpResult header.
  HValue* global_object = Add<HLoadNamedField>(
      context(), static_cast<HValue*>(NULL),
      HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
  HValue* native_context = Add<HLoadNamedField>(
      global_object, static_cast<HValue*>(NULL),
      HObjectAccess::ForGlobalObjectNativeContext());
  AddStoreMapNoWriteBarrier(result, Add<HLoadNamedField>(
      native_context, static_cast<HValue*>(NULL),
      HObjectAccess::ForContextSlot(Context::REGEXP_RESULT_MAP_INDEX)));
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSArray::kPropertiesOffset),
      Add<HConstant>(isolate()->factory()->empty_fixed_array()));
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSArray::kElementsOffset),
      elements);
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSArray::kLengthOffset), length);

  // Initialize the additional fields.
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSRegExpResult::kIndexOffset),
      index);
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSRegExpResult::kInputOffset),
      input);

  // Initialize the elements header.
  AddStoreMapConstantNoWriteBarrier(elements,
                                    isolate()->factory()->fixed_array_map());
  Add<HStoreNamedField>(elements, HObjectAccess::ForFixedArrayLength(), length);

  // Initialize the elements contents with undefined.
  LoopBuilder loop(this, context(), LoopBuilder::kPostIncrement);
  index = loop.BeginBody(graph()->GetConstant0(), length, Token::LT);
  {
    Add<HStoreKeyed>(elements, index, graph()->GetConstantUndefined(),
                     FAST_ELEMENTS);
  }
  loop.EndBody();

  return result;
}
1625 
1626 
HValue* HGraphBuilder::BuildNumberToString(HValue* object, Type* type) {
  // Convert the number |object| to its string representation, using the
  // number-string cache and falling back to the runtime on a cache miss.
  // NOTE(review): several continuation lines (the trailing elements-kind
  // arguments of the HLoadKeyed calls below) were lost in extraction,
  // leaving those calls syntactically incomplete — restore them from the
  // upstream file before compiling.
  NoObservableSideEffectsScope scope(this);

  // Convert constant numbers at compile time.
  if (object->IsConstant() && HConstant::cast(object)->HasNumberValue()) {
    Handle<Object> number = HConstant::cast(object)->handle(isolate());
    Handle<String> result = isolate()->factory()->NumberToString(number);
    return Add<HConstant>(result);
  }

  // Create a joinable continuation.
  HIfContinuation found(graph()->CreateBasicBlock(),
                        graph()->CreateBasicBlock());

  // Load the number string cache.
  HValue* number_string_cache =
      Add<HLoadRoot>(Heap::kNumberStringCacheRootIndex);

  // Make the hash mask from the length of the number string cache. It
  // contains two elements (number and string) for each cache entry.
  HValue* mask = AddLoadFixedArrayLength(number_string_cache);
  mask->set_type(HType::Smi());
  mask = AddUncasted<HSar>(mask, graph()->GetConstant1());
  mask = AddUncasted<HSub>(mask, graph()->GetConstant1());

  // Check whether object is a smi.
  IfBuilder if_objectissmi(this);
  if_objectissmi.If<HIsSmiAndBranch>(object);
  if_objectissmi.Then();
  {
    // Compute hash for smi similar to smi_get_hash().
    HValue* hash = AddUncasted<HBitwise>(Token::BIT_AND, object, mask);

    // Load the key.
    HValue* key_index = AddUncasted<HShl>(hash, graph()->GetConstant1());
    HValue* key = Add<HLoadKeyed>(number_string_cache, key_index,
                                  static_cast<HValue*>(NULL),

    // Check if object == key.
    IfBuilder if_objectiskey(this);
    if_objectiskey.If<HCompareObjectEqAndBranch>(object, key);
    if_objectiskey.Then();
    {
      // Make the key_index available.
      Push(key_index);
    }
    if_objectiskey.JoinContinuation(&found);
  }
  if_objectissmi.Else();
  {
    if (type->Is(Type::SignedSmall())) {
      // Type info promised a smi; a non-smi here means stale feedback.
      if_objectissmi.Deopt("Expected smi");
    } else {
      // Check if the object is a heap number.
      IfBuilder if_objectisnumber(this);
      HValue* objectisnumber = if_objectisnumber.If<HCompareMap>(
          object, isolate()->factory()->heap_number_map());
      if_objectisnumber.Then();
      {
        // Compute hash for heap number similar to double_get_hash().
        HValue* low = Add<HLoadNamedField>(
            object, objectisnumber,
            HObjectAccess::ForHeapNumberValueLowestBits());
        HValue* high = Add<HLoadNamedField>(
            object, objectisnumber,
            HObjectAccess::ForHeapNumberValueHighestBits());
        HValue* hash = AddUncasted<HBitwise>(Token::BIT_XOR, low, high);
        hash = AddUncasted<HBitwise>(Token::BIT_AND, hash, mask);

        // Load the key.
        HValue* key_index = AddUncasted<HShl>(hash, graph()->GetConstant1());
        HValue* key = Add<HLoadKeyed>(number_string_cache, key_index,
                                      static_cast<HValue*>(NULL),

        // Check if key is a heap number (the number string cache contains only
        // SMIs and heap number, so it is sufficient to do a SMI check here).
        IfBuilder if_keyisnotsmi(this);
        HValue* keyisnotsmi = if_keyisnotsmi.IfNot<HIsSmiAndBranch>(key);
        if_keyisnotsmi.Then();
        {
          // Check if values of key and object match.
          IfBuilder if_keyeqobject(this);
          if_keyeqobject.If<HCompareNumericAndBranch>(
              Add<HLoadNamedField>(key, keyisnotsmi,
                                   HObjectAccess::ForHeapNumberValue()),
              Add<HLoadNamedField>(object, objectisnumber,
                                   HObjectAccess::ForHeapNumberValue()),
              Token::EQ);
          if_keyeqobject.Then();
          {
            // Make the key_index available.
            Push(key_index);
          }
          if_keyeqobject.JoinContinuation(&found);
        }
        if_keyisnotsmi.JoinContinuation(&found);
      }
      if_objectisnumber.Else();
      {
        if (type->Is(Type::Number())) {
          if_objectisnumber.Deopt("Expected heap number");
        }
      }
      if_objectisnumber.JoinContinuation(&found);
    }
  }
  if_objectissmi.JoinContinuation(&found);

  // Check for cache hit.
  IfBuilder if_found(this, &found);
  if_found.Then();
  {
    // Count number to string operation in native code.
    AddIncrementCounter(isolate()->counters()->number_to_string_native());

    // Load the value in case of cache hit.
    HValue* key_index = Pop();
    HValue* value_index = AddUncasted<HAdd>(key_index, graph()->GetConstant1());
    Push(Add<HLoadKeyed>(number_string_cache, value_index,
                         static_cast<HValue*>(NULL),
  }
  if_found.Else();
  {
    // Cache miss, fallback to runtime.
    Add<HPushArgument>(object);
    Push(Add<HCallRuntime>(
        isolate()->factory()->empty_string(),
        Runtime::FunctionForId(Runtime::kHiddenNumberToStringSkipCache),
        1));
  }
  if_found.End();

  return Pop();
}
1764 
1765 
1766 HAllocate* HGraphBuilder::BuildAllocate(
1767  HValue* object_size,
1768  HType type,
1769  InstanceType instance_type,
1770  HAllocationMode allocation_mode) {
1771  // Compute the effective allocation size.
1772  HValue* size = object_size;
1773  if (allocation_mode.CreateAllocationMementos()) {
1774  size = AddUncasted<HAdd>(size, Add<HConstant>(AllocationMemento::kSize));
1775  size->ClearFlag(HValue::kCanOverflow);
1776  }
1777 
1778  // Perform the actual allocation.
1779  HAllocate* object = Add<HAllocate>(
1780  size, type, allocation_mode.GetPretenureMode(),
1781  instance_type, allocation_mode.feedback_site());
1782 
1783  // Setup the allocation memento.
1784  if (allocation_mode.CreateAllocationMementos()) {
1785  BuildCreateAllocationMemento(
1786  object, object_size, allocation_mode.current_site());
1787  }
1788 
1789  return object;
1790 }
1791 
1792 
1793 HValue* HGraphBuilder::BuildAddStringLengths(HValue* left_length,
1794  HValue* right_length) {
1795  // Compute the combined string length and check against max string length.
1796  HValue* length = AddUncasted<HAdd>(left_length, right_length);
1797  HValue* max_length = Add<HConstant>(String::kMaxLength);
1798  Add<HBoundsCheck>(length, max_length);
1799  return length;
1800 }
1801 
1802 
HValue* HGraphBuilder::BuildCreateConsString(
    HValue* length,
    HValue* left,
    HValue* right,
    HAllocationMode allocation_mode) {
  // Build a cons string of |length| characters referencing |left| and
  // |right|, choosing a one-byte or two-byte map from the operands'
  // instance types.
  // NOTE(review): several continuation lines (the ASSERT's second argument
  // and parts of the one-byte data-hint checks) were lost in extraction,
  // leaving the statements below syntactically incomplete — restore them
  // from the upstream file before compiling.
  // Determine the string instance types.
  HInstruction* left_instance_type = AddLoadStringInstanceType(left);
  HInstruction* right_instance_type = AddLoadStringInstanceType(right);

  // Allocate the cons string object. HAllocate does not care whether we
  // pass CONS_STRING_TYPE or CONS_ASCII_STRING_TYPE here, so we just use
  // CONS_STRING_TYPE here. Below we decide whether the cons string is
  // one-byte or two-byte and set the appropriate map.
  ASSERT(HAllocate::CompatibleInstanceTypes(CONS_STRING_TYPE,
  HAllocate* result = BuildAllocate(Add<HConstant>(ConsString::kSize),
                                    HType::String(), CONS_STRING_TYPE,
                                    allocation_mode);

  // Compute intersection and difference of instance types.
  HValue* anded_instance_types = AddUncasted<HBitwise>(
      Token::BIT_AND, left_instance_type, right_instance_type);
  HValue* xored_instance_types = AddUncasted<HBitwise>(
      Token::BIT_XOR, left_instance_type, right_instance_type);

  // We create a one-byte cons string if
  // 1. both strings are one-byte, or
  // 2. at least one of the strings is two-byte, but happens to contain only
  //    one-byte characters.
  // To do this, we check
  // 1. if both strings are one-byte, or if the one-byte data hint is set in
  //    both strings, or
  // 2. if one of the strings has the one-byte data hint set and the other
  //    string is one-byte.
  IfBuilder if_onebyte(this);
  if_onebyte.If<HCompareNumericAndBranch>(
      AddUncasted<HBitwise>(
          Token::BIT_AND, anded_instance_types,
          Add<HConstant>(static_cast<int32_t>(
      graph()->GetConstant0(), Token::NE);
  if_onebyte.Or();
      kOneByteDataHintTag != 0 &&
  if_onebyte.If<HCompareNumericAndBranch>(
      AddUncasted<HBitwise>(
          Token::BIT_AND, xored_instance_types,
          Add<HConstant>(static_cast<int32_t>(
      Add<HConstant>(static_cast<int32_t>(
  if_onebyte.Then();
  {
    // We can safely skip the write barrier for storing the map here.
    Handle<Map> map = isolate()->factory()->cons_ascii_string_map();
    AddStoreMapConstantNoWriteBarrier(result, map);
  }
  if_onebyte.Else();
  {
    // We can safely skip the write barrier for storing the map here.
    Handle<Map> map = isolate()->factory()->cons_string_map();
    AddStoreMapConstantNoWriteBarrier(result, map);
  }
  if_onebyte.End();

  // Initialize the cons string fields.
  Add<HStoreNamedField>(result, HObjectAccess::ForStringHashField(),
                        Add<HConstant>(String::kEmptyHashField));
  Add<HStoreNamedField>(result, HObjectAccess::ForStringLength(), length);
  Add<HStoreNamedField>(result, HObjectAccess::ForConsStringFirst(), left);
  Add<HStoreNamedField>(result, HObjectAccess::ForConsStringSecond(), right);

  // Count the native string addition.
  AddIncrementCounter(isolate()->counters()->string_add_native());

  return result;
}
1883 
1884 
1885 void HGraphBuilder::BuildCopySeqStringChars(HValue* src,
1886  HValue* src_offset,
1887  String::Encoding src_encoding,
1888  HValue* dst,
1889  HValue* dst_offset,
1890  String::Encoding dst_encoding,
1891  HValue* length) {
1892  ASSERT(dst_encoding != String::ONE_BYTE_ENCODING ||
1893  src_encoding == String::ONE_BYTE_ENCODING);
1894  LoopBuilder loop(this, context(), LoopBuilder::kPostIncrement);
1895  HValue* index = loop.BeginBody(graph()->GetConstant0(), length, Token::LT);
1896  {
1897  HValue* src_index = AddUncasted<HAdd>(src_offset, index);
1898  HValue* value =
1899  AddUncasted<HSeqStringGetChar>(src_encoding, src, src_index);
1900  HValue* dst_index = AddUncasted<HAdd>(dst_offset, index);
1901  Add<HSeqStringSetChar>(dst_encoding, dst, dst_index, value);
1902  }
1903  loop.EndBody();
1904 }
1905 
1906 
1907 HValue* HGraphBuilder::BuildObjectSizeAlignment(
1908  HValue* unaligned_size, int header_size) {
1909  ASSERT((header_size & kObjectAlignmentMask) == 0);
1910  HValue* size = AddUncasted<HAdd>(
1911  unaligned_size, Add<HConstant>(static_cast<int32_t>(
1912  header_size + kObjectAlignmentMask)));
1913  size->ClearFlag(HValue::kCanOverflow);
1914  return AddUncasted<HBitwise>(
1915  Token::BIT_AND, size, Add<HConstant>(static_cast<int32_t>(
1916  ~kObjectAlignmentMask)));
1917 }
1918 
1919 
1920 HValue* HGraphBuilder::BuildUncheckedStringAdd(
1921  HValue* left,
1922  HValue* right,
1923  HAllocationMode allocation_mode) {
1924  // Determine the string lengths.
1925  HValue* left_length = AddLoadStringLength(left);
1926  HValue* right_length = AddLoadStringLength(right);
1927 
1928  // Compute the combined string length.
1929  HValue* length = BuildAddStringLengths(left_length, right_length);
1930 
1931  // Do some manual constant folding here.
1932  if (left_length->IsConstant()) {
1933  HConstant* c_left_length = HConstant::cast(left_length);
1934  ASSERT_NE(0, c_left_length->Integer32Value());
1935  if (c_left_length->Integer32Value() + 1 >= ConsString::kMinLength) {
1936  // The right string contains at least one character.
1937  return BuildCreateConsString(length, left, right, allocation_mode);
1938  }
1939  } else if (right_length->IsConstant()) {
1940  HConstant* c_right_length = HConstant::cast(right_length);
1941  ASSERT_NE(0, c_right_length->Integer32Value());
1942  if (c_right_length->Integer32Value() + 1 >= ConsString::kMinLength) {
1943  // The left string contains at least one character.
1944  return BuildCreateConsString(length, left, right, allocation_mode);
1945  }
1946  }
1947 
1948  // Check if we should create a cons string.
1949  IfBuilder if_createcons(this);
1950  if_createcons.If<HCompareNumericAndBranch>(
1951  length, Add<HConstant>(ConsString::kMinLength), Token::GTE);
1952  if_createcons.Then();
1953  {
1954  // Create a cons string.
1955  Push(BuildCreateConsString(length, left, right, allocation_mode));
1956  }
1957  if_createcons.Else();
1958  {
1959  // Determine the string instance types.
1960  HValue* left_instance_type = AddLoadStringInstanceType(left);
1961  HValue* right_instance_type = AddLoadStringInstanceType(right);
1962 
1963  // Compute union and difference of instance types.
1964  HValue* ored_instance_types = AddUncasted<HBitwise>(
1965  Token::BIT_OR, left_instance_type, right_instance_type);
1966  HValue* xored_instance_types = AddUncasted<HBitwise>(
1967  Token::BIT_XOR, left_instance_type, right_instance_type);
1968 
1969  // Check if both strings have the same encoding and both are
1970  // sequential.
1971  IfBuilder if_sameencodingandsequential(this);
1972  if_sameencodingandsequential.If<HCompareNumericAndBranch>(
1973  AddUncasted<HBitwise>(
1974  Token::BIT_AND, xored_instance_types,
1975  Add<HConstant>(static_cast<int32_t>(kStringEncodingMask))),
1976  graph()->GetConstant0(), Token::EQ);
1977  if_sameencodingandsequential.And();
1979  if_sameencodingandsequential.If<HCompareNumericAndBranch>(
1980  AddUncasted<HBitwise>(
1981  Token::BIT_AND, ored_instance_types,
1982  Add<HConstant>(static_cast<int32_t>(kStringRepresentationMask))),
1983  graph()->GetConstant0(), Token::EQ);
1984  if_sameencodingandsequential.Then();
1985  {
1986  HConstant* string_map =
1987  Add<HConstant>(isolate()->factory()->string_map());
1988  HConstant* ascii_string_map =
1989  Add<HConstant>(isolate()->factory()->ascii_string_map());
1990 
1991  // Determine map and size depending on whether result is one-byte string.
1992  IfBuilder if_onebyte(this);
1994  if_onebyte.If<HCompareNumericAndBranch>(
1995  AddUncasted<HBitwise>(
1996  Token::BIT_AND, ored_instance_types,
1997  Add<HConstant>(static_cast<int32_t>(kStringEncodingMask))),
1998  graph()->GetConstant0(), Token::NE);
1999  if_onebyte.Then();
2000  {
2001  // Allocate sequential one-byte string object.
2002  Push(length);
2003  Push(ascii_string_map);
2004  }
2005  if_onebyte.Else();
2006  {
2007  // Allocate sequential two-byte string object.
2008  HValue* size = AddUncasted<HShl>(length, graph()->GetConstant1());
2009  size->ClearFlag(HValue::kCanOverflow);
2010  size->SetFlag(HValue::kUint32);
2011  Push(size);
2012  Push(string_map);
2013  }
2014  if_onebyte.End();
2015  HValue* map = Pop();
2016 
2017  // Calculate the number of bytes needed for the characters in the
2018  // string while observing object alignment.
2019  STATIC_ASSERT((SeqString::kHeaderSize & kObjectAlignmentMask) == 0);
2020  HValue* size = BuildObjectSizeAlignment(Pop(), SeqString::kHeaderSize);
2021 
2022  // Allocate the string object. HAllocate does not care whether we pass
2023  // STRING_TYPE or ASCII_STRING_TYPE here, so we just use STRING_TYPE here.
2024  HAllocate* result = BuildAllocate(
2025  size, HType::String(), STRING_TYPE, allocation_mode);
2026 
2027  // We can safely skip the write barrier for storing map here.
2028  AddStoreMapNoWriteBarrier(result, map);
2029 
2030  // Initialize the string fields.
2031  Add<HStoreNamedField>(result, HObjectAccess::ForStringHashField(),
2032  Add<HConstant>(String::kEmptyHashField));
2033  Add<HStoreNamedField>(result, HObjectAccess::ForStringLength(), length);
2034 
2035  // Copy characters to the result string.
2036  IfBuilder if_twobyte(this);
2037  if_twobyte.If<HCompareObjectEqAndBranch>(map, string_map);
2038  if_twobyte.Then();
2039  {
2040  // Copy characters from the left string.
2041  BuildCopySeqStringChars(
2042  left, graph()->GetConstant0(), String::TWO_BYTE_ENCODING,
2043  result, graph()->GetConstant0(), String::TWO_BYTE_ENCODING,
2044  left_length);
2045 
2046  // Copy characters from the right string.
2047  BuildCopySeqStringChars(
2048  right, graph()->GetConstant0(), String::TWO_BYTE_ENCODING,
2049  result, left_length, String::TWO_BYTE_ENCODING,
2050  right_length);
2051  }
2052  if_twobyte.Else();
2053  {
2054  // Copy characters from the left string.
2055  BuildCopySeqStringChars(
2056  left, graph()->GetConstant0(), String::ONE_BYTE_ENCODING,
2057  result, graph()->GetConstant0(), String::ONE_BYTE_ENCODING,
2058  left_length);
2059 
2060  // Copy characters from the right string.
2061  BuildCopySeqStringChars(
2062  right, graph()->GetConstant0(), String::ONE_BYTE_ENCODING,
2063  result, left_length, String::ONE_BYTE_ENCODING,
2064  right_length);
2065  }
2066  if_twobyte.End();
2067 
2068  // Count the native string addition.
2069  AddIncrementCounter(isolate()->counters()->string_add_native());
2070 
2071  // Return the sequential string.
2072  Push(result);
2073  }
2074  if_sameencodingandsequential.Else();
2075  {
2076  // Fallback to the runtime to add the two strings.
2077  Add<HPushArgument>(left);
2078  Add<HPushArgument>(right);
2079  Push(Add<HCallRuntime>(
2080  isolate()->factory()->empty_string(),
2081  Runtime::FunctionForId(Runtime::kHiddenStringAdd),
2082  2));
2083  }
2084  if_sameencodingandsequential.End();
2085  }
2086  if_createcons.End();
2087 
2088  return Pop();
2089 }
2090 
2091 
2092 HValue* HGraphBuilder::BuildStringAdd(
2093  HValue* left,
2094  HValue* right,
2095  HAllocationMode allocation_mode) {
2096  NoObservableSideEffectsScope no_effects(this);
2097 
2098  // Determine string lengths.
2099  HValue* left_length = AddLoadStringLength(left);
2100  HValue* right_length = AddLoadStringLength(right);
2101 
2102  // Check if left string is empty.
2103  IfBuilder if_leftempty(this);
2104  if_leftempty.If<HCompareNumericAndBranch>(
2105  left_length, graph()->GetConstant0(), Token::EQ);
2106  if_leftempty.Then();
2107  {
2108  // Count the native string addition.
2109  AddIncrementCounter(isolate()->counters()->string_add_native());
2110 
2111  // Just return the right string.
2112  Push(right);
2113  }
2114  if_leftempty.Else();
2115  {
2116  // Check if right string is empty.
2117  IfBuilder if_rightempty(this);
2118  if_rightempty.If<HCompareNumericAndBranch>(
2119  right_length, graph()->GetConstant0(), Token::EQ);
2120  if_rightempty.Then();
2121  {
2122  // Count the native string addition.
2123  AddIncrementCounter(isolate()->counters()->string_add_native());
2124 
2125  // Just return the left string.
2126  Push(left);
2127  }
2128  if_rightempty.Else();
2129  {
2130  // Add the two non-empty strings.
2131  Push(BuildUncheckedStringAdd(left, right, allocation_mode));
2132  }
2133  if_rightempty.End();
2134  }
2135  if_leftempty.End();
2136 
2137  return Pop();
2138 }
2139 
2140 
// Emits the IR for a monomorphic keyed element load or store on
// |checked_object| (already map/type checked by the caller). Handles fast
// smi/object/double kinds as well as external and fixed typed arrays, and
// applies the bounds-check / grow / copy-on-write policy selected by
// |store_mode|. Returns the load or store instruction.
HInstruction* HGraphBuilder::BuildUncheckedMonomorphicElementAccess(
    HValue* checked_object,
    HValue* key,
    HValue* val,
    bool is_js_array,
    ElementsKind elements_kind,
    PropertyAccessType access_type,
    LoadKeyedHoleMode load_mode,
    KeyedAccessStoreMode store_mode) {
  // Typed/external array accesses never come in through the JSArray path.
  ASSERT((!IsExternalArrayElementsKind(elements_kind) &&
          !IsFixedTypedArrayElementsKind(elements_kind)) ||
         !is_js_array);
  // No GVNFlag is necessary for ElementsKind if there is an explicit dependency
  // on a HElementsTransition instruction. The flag can also be removed if the
  // map to check has FAST_HOLEY_ELEMENTS, since there can be no further
  // ElementsKind transitions. Finally, the dependency can be removed for stores
  // for FAST_ELEMENTS, since a transition to HOLEY elements won't change the
  // generated store code.
  if ((elements_kind == FAST_HOLEY_ELEMENTS) ||
      (elements_kind == FAST_ELEMENTS && access_type == STORE)) {
    checked_object->ClearDependsOnFlag(kElementsKind);
  }

  bool fast_smi_only_elements = IsFastSmiElementsKind(elements_kind);
  bool fast_elements = IsFastObjectElementsKind(elements_kind);
  HValue* elements = AddLoadElements(checked_object);
  // Stores into fast elements must not write into a copy-on-write backing
  // store; STORE_NO_TRANSITION_HANDLE_COW handles COW explicitly below.
  if (access_type == STORE && (fast_elements || fast_smi_only_elements) &&
      store_mode != STORE_NO_TRANSITION_HANDLE_COW) {
    HCheckMaps* check_cow_map = Add<HCheckMaps>(
        elements, isolate()->factory()->fixed_array_map(), top_info());
    check_cow_map->ClearDependsOnFlag(kElementsKind);
  }
  // JSArrays carry their own length; otherwise use the backing store length.
  HInstruction* length = NULL;
  if (is_js_array) {
    length = Add<HLoadNamedField>(
        checked_object, static_cast<HValue*>(NULL),
        HObjectAccess::ForArrayLength(elements_kind));
  } else {
    length = AddLoadFixedArrayLength(elements);
  }
  length->set_type(HType::Smi());
  HValue* checked_key = NULL;
  if (IsExternalArrayElementsKind(elements_kind) ||
      IsFixedTypedArrayElementsKind(elements_kind)) {
    // External arrays store their data out-of-line; fixed typed arrays keep
    // it inside the elements object.
    HValue* backing_store;
    if (IsExternalArrayElementsKind(elements_kind)) {
      backing_store = Add<HLoadNamedField>(
          elements, static_cast<HValue*>(NULL),
          HObjectAccess::ForExternalArrayExternalPointer());
    } else {
      backing_store = elements;
    }
    if (store_mode == STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS) {
      // Out-of-bounds typed-array stores are silently dropped (per spec), so
      // guard the access with an explicit length check instead of deopting;
      // only negative keys deopt.
      NoObservableSideEffectsScope no_effects(this);
      IfBuilder length_checker(this);
      length_checker.If<HCompareNumericAndBranch>(key, length, Token::LT);
      length_checker.Then();
      IfBuilder negative_checker(this);
      HValue* bounds_check = negative_checker.If<HCompareNumericAndBranch>(
          key, graph()->GetConstant0(), Token::GTE);
      negative_checker.Then();
      HInstruction* result = AddElementAccess(
          backing_store, key, val, bounds_check, elements_kind, access_type);
      negative_checker.ElseDeopt("Negative key encountered");
      negative_checker.End();
      length_checker.End();
      return result;
    } else {
      ASSERT(store_mode == STANDARD_STORE);
      checked_key = Add<HBoundsCheck>(key, length);
      return AddElementAccess(
          backing_store, checked_key, val,
          checked_object, elements_kind, access_type);
    }
  }
  ASSERT(fast_smi_only_elements ||
         fast_elements ||
         IsFastDoubleElementsKind(elements_kind));

  // In case val is stored into a fast smi array, assure that the value is a smi
  // before manipulating the backing store. Otherwise the actual store may
  // deopt, leaving the backing store in an invalid state.
  if (access_type == STORE && IsFastSmiElementsKind(elements_kind) &&
      !val->type().IsSmi()) {
    val = AddUncasted<HForceRepresentation>(val, Representation::Smi());
  }

  if (IsGrowStoreMode(store_mode)) {
    // Growing stores may enlarge the backing store; the grow helper performs
    // its own key validation, so the raw key is used afterwards.
    NoObservableSideEffectsScope no_effects(this);
    elements = BuildCheckForCapacityGrow(checked_object, elements,
                                         elements_kind, length, key,
                                         is_js_array, access_type);
    checked_key = key;
  } else {
    checked_key = Add<HBoundsCheck>(key, length);

    if (access_type == STORE && (fast_elements || fast_smi_only_elements)) {
      if (store_mode == STORE_NO_TRANSITION_HANDLE_COW) {
        // Copy a COW backing store before writing into it.
        NoObservableSideEffectsScope no_effects(this);
        elements = BuildCopyElementsOnWrite(checked_object, elements,
                                            elements_kind, length);
      } else {
        HCheckMaps* check_cow_map = Add<HCheckMaps>(
            elements, isolate()->factory()->fixed_array_map(), top_info());
        check_cow_map->ClearDependsOnFlag(kElementsKind);
      }
    }
  }
  return AddElementAccess(elements, checked_key, val, checked_object,
                          elements_kind, access_type, load_mode);
}
2252 
2253 
2254 
2255 HValue* HGraphBuilder::BuildAllocateArrayFromLength(
2256  JSArrayBuilder* array_builder,
2257  HValue* length_argument) {
2258  if (length_argument->IsConstant() &&
2259  HConstant::cast(length_argument)->HasSmiValue()) {
2260  int array_length = HConstant::cast(length_argument)->Integer32Value();
2261  HValue* new_object = array_length == 0
2262  ? array_builder->AllocateEmptyArray()
2263  : array_builder->AllocateArray(length_argument, length_argument);
2264  return new_object;
2265  }
2266 
2267  HValue* constant_zero = graph()->GetConstant0();
2268  HConstant* max_alloc_length =
2269  Add<HConstant>(JSObject::kInitialMaxFastElementArray);
2270  HInstruction* checked_length = Add<HBoundsCheck>(length_argument,
2271  max_alloc_length);
2272  IfBuilder if_builder(this);
2273  if_builder.If<HCompareNumericAndBranch>(checked_length, constant_zero,
2274  Token::EQ);
2275  if_builder.Then();
2276  const int initial_capacity = JSArray::kPreallocatedArrayElements;
2277  HConstant* initial_capacity_node = Add<HConstant>(initial_capacity);
2278  Push(initial_capacity_node); // capacity
2279  Push(constant_zero); // length
2280  if_builder.Else();
2281  if (!(top_info()->IsStub()) &&
2282  IsFastPackedElementsKind(array_builder->kind())) {
2283  // We'll come back later with better (holey) feedback.
2284  if_builder.Deopt("Holey array despite packed elements_kind feedback");
2285  } else {
2286  Push(checked_length); // capacity
2287  Push(checked_length); // length
2288  }
2289  if_builder.End();
2290 
2291  // Figure out total size
2292  HValue* length = Pop();
2293  HValue* capacity = Pop();
2294  return array_builder->AllocateArray(capacity, length);
2295 }
2296 
2297 HValue* HGraphBuilder::BuildAllocateElements(ElementsKind kind,
2298  HValue* capacity) {
2299  int elements_size;
2300  InstanceType instance_type;
2301 
2302  if (IsFastDoubleElementsKind(kind)) {
2303  elements_size = kDoubleSize;
2304  instance_type = FIXED_DOUBLE_ARRAY_TYPE;
2305  } else {
2306  elements_size = kPointerSize;
2307  instance_type = FIXED_ARRAY_TYPE;
2308  }
2309 
2310  HConstant* elements_size_value = Add<HConstant>(elements_size);
2311  HValue* mul = AddUncasted<HMul>(capacity, elements_size_value);
2312  mul->ClearFlag(HValue::kCanOverflow);
2313 
2314  HConstant* header_size = Add<HConstant>(FixedArray::kHeaderSize);
2315  HValue* total_size = AddUncasted<HAdd>(mul, header_size);
2316  total_size->ClearFlag(HValue::kCanOverflow);
2317 
2318  PretenureFlag pretenure_flag = !FLAG_allocation_site_pretenuring ?
2319  isolate()->heap()->GetPretenureMode() : NOT_TENURED;
2320 
2321  return Add<HAllocate>(total_size, HType::Tagged(), pretenure_flag,
2322  instance_type);
2323 }
2324 
2325 
2326 void HGraphBuilder::BuildInitializeElementsHeader(HValue* elements,
2327  ElementsKind kind,
2328  HValue* capacity) {
2329  Factory* factory = isolate()->factory();
2331  ? factory->fixed_double_array_map()
2332  : factory->fixed_array_map();
2333 
2334  AddStoreMapConstant(elements, map);
2335  Add<HStoreNamedField>(elements, HObjectAccess::ForFixedArrayLength(),
2336  capacity);
2337 }
2338 
2339 
2340 HValue* HGraphBuilder::BuildAllocateElementsAndInitializeElementsHeader(
2341  ElementsKind kind,
2342  HValue* capacity) {
2343  // The HForceRepresentation is to prevent possible deopt on int-smi
2344  // conversion after allocation but before the new object fields are set.
2345  capacity = AddUncasted<HForceRepresentation>(capacity, Representation::Smi());
2346  HValue* new_elements = BuildAllocateElements(kind, capacity);
2347  BuildInitializeElementsHeader(new_elements, kind, capacity);
2348  return new_elements;
2349 }
2350 
2351 
2352 HInnerAllocatedObject* HGraphBuilder::BuildJSArrayHeader(HValue* array,
2353  HValue* array_map,
2355  ElementsKind elements_kind,
2356  HValue* allocation_site_payload,
2357  HValue* length_field) {
2358 
2359  Add<HStoreNamedField>(array, HObjectAccess::ForMap(), array_map);
2360 
2361  HConstant* empty_fixed_array =
2362  Add<HConstant>(isolate()->factory()->empty_fixed_array());
2363 
2364  HObjectAccess access = HObjectAccess::ForPropertiesPointer();
2365  Add<HStoreNamedField>(array, access, empty_fixed_array);
2366  Add<HStoreNamedField>(array, HObjectAccess::ForArrayLength(elements_kind),
2367  length_field);
2368 
2369  if (mode == TRACK_ALLOCATION_SITE) {
2370  BuildCreateAllocationMemento(
2371  array, Add<HConstant>(JSArray::kSize), allocation_site_payload);
2372  }
2373 
2374  int elements_location = JSArray::kSize;
2375  if (mode == TRACK_ALLOCATION_SITE) {
2376  elements_location += AllocationMemento::kSize;
2377  }
2378 
2379  HInnerAllocatedObject* elements = Add<HInnerAllocatedObject>(
2380  array, Add<HConstant>(elements_location));
2381  Add<HStoreNamedField>(array, HObjectAccess::ForElementsPointer(), elements);
2382  return elements;
2383 }
2384 
2385 
2386 HInstruction* HGraphBuilder::AddElementAccess(
2387  HValue* elements,
2388  HValue* checked_key,
2389  HValue* val,
2390  HValue* dependency,
2391  ElementsKind elements_kind,
2392  PropertyAccessType access_type,
2393  LoadKeyedHoleMode load_mode) {
2394  if (access_type == STORE) {
2395  ASSERT(val != NULL);
2396  if (elements_kind == EXTERNAL_UINT8_CLAMPED_ELEMENTS ||
2397  elements_kind == UINT8_CLAMPED_ELEMENTS) {
2398  val = Add<HClampToUint8>(val);
2399  }
2400  return Add<HStoreKeyed>(elements, checked_key, val, elements_kind,
2401  elements_kind == FAST_SMI_ELEMENTS
2403  : INITIALIZING_STORE);
2404  }
2405 
2406  ASSERT(access_type == LOAD);
2407  ASSERT(val == NULL);
2408  HLoadKeyed* load = Add<HLoadKeyed>(
2409  elements, checked_key, dependency, elements_kind, load_mode);
2410  if (FLAG_opt_safe_uint32_operations &&
2411  (elements_kind == EXTERNAL_UINT32_ELEMENTS ||
2412  elements_kind == UINT32_ELEMENTS)) {
2413  graph()->RecordUint32Instruction(load);
2414  }
2415  return load;
2416 }
2417 
2418 
2419 HLoadNamedField* HGraphBuilder::AddLoadElements(HValue* object) {
2420  return Add<HLoadNamedField>(
2421  object, static_cast<HValue*>(NULL), HObjectAccess::ForElementsPointer());
2422 }
2423 
2424 
2425 HLoadNamedField* HGraphBuilder::AddLoadFixedArrayLength(HValue* object) {
2426  return Add<HLoadNamedField>(
2427  object, static_cast<HValue*>(NULL), HObjectAccess::ForFixedArrayLength());
2428 }
2429 
2430 
2431 HValue* HGraphBuilder::BuildNewElementsCapacity(HValue* old_capacity) {
2432  HValue* half_old_capacity = AddUncasted<HShr>(old_capacity,
2433  graph_->GetConstant1());
2434 
2435  HValue* new_capacity = AddUncasted<HAdd>(half_old_capacity, old_capacity);
2436  new_capacity->ClearFlag(HValue::kCanOverflow);
2437 
2438  HValue* min_growth = Add<HConstant>(16);
2439 
2440  new_capacity = AddUncasted<HAdd>(new_capacity, min_growth);
2441  new_capacity->ClearFlag(HValue::kCanOverflow);
2442 
2443  return new_capacity;
2444 }
2445 
2446 
2447 void HGraphBuilder::BuildNewSpaceArrayCheck(HValue* length, ElementsKind kind) {
2448  int element_size = IsFastDoubleElementsKind(kind) ? kDoubleSize
2449  : kPointerSize;
2450  int max_size = Page::kMaxRegularHeapObjectSize / element_size;
2451  max_size -= JSArray::kSize / element_size;
2452  HConstant* max_size_constant = Add<HConstant>(max_size);
2453  Add<HBoundsCheck>(length, max_size_constant);
2454 }
2455 
2456 
2457 HValue* HGraphBuilder::BuildGrowElementsCapacity(HValue* object,
2458  HValue* elements,
2459  ElementsKind kind,
2460  ElementsKind new_kind,
2461  HValue* length,
2462  HValue* new_capacity) {
2463  BuildNewSpaceArrayCheck(new_capacity, new_kind);
2464 
2465  HValue* new_elements = BuildAllocateElementsAndInitializeElementsHeader(
2466  new_kind, new_capacity);
2467 
2468  BuildCopyElements(elements, kind,
2469  new_elements, new_kind,
2470  length, new_capacity);
2471 
2472  Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
2473  new_elements);
2474 
2475  return new_elements;
2476 }
2477 
2478 
2479 void HGraphBuilder::BuildFillElementsWithHole(HValue* elements,
2480  ElementsKind elements_kind,
2481  HValue* from,
2482  HValue* to) {
2483  // Fast elements kinds need to be initialized in case statements below cause
2484  // a garbage collection.
2485  Factory* factory = isolate()->factory();
2486 
2487  double nan_double = FixedDoubleArray::hole_nan_as_double();
2488  HValue* hole = IsFastSmiOrObjectElementsKind(elements_kind)
2489  ? Add<HConstant>(factory->the_hole_value())
2490  : Add<HConstant>(nan_double);
2491 
2492  // Special loop unfolding case
2493  static const int kLoopUnfoldLimit = 8;
2494  STATIC_ASSERT(JSArray::kPreallocatedArrayElements <= kLoopUnfoldLimit);
2495  int initial_capacity = -1;
2496  if (from->IsInteger32Constant() && to->IsInteger32Constant()) {
2497  int constant_from = from->GetInteger32Constant();
2498  int constant_to = to->GetInteger32Constant();
2499 
2500  if (constant_from == 0 && constant_to <= kLoopUnfoldLimit) {
2501  initial_capacity = constant_to;
2502  }
2503  }
2504 
2505  // Since we're about to store a hole value, the store instruction below must
2506  // assume an elements kind that supports heap object values.
2507  if (IsFastSmiOrObjectElementsKind(elements_kind)) {
2508  elements_kind = FAST_HOLEY_ELEMENTS;
2509  }
2510 
2511  if (initial_capacity >= 0) {
2512  for (int i = 0; i < initial_capacity; i++) {
2513  HInstruction* key = Add<HConstant>(i);
2514  Add<HStoreKeyed>(elements, key, hole, elements_kind);
2515  }
2516  } else {
2517  LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);
2518 
2519  HValue* key = builder.BeginBody(from, to, Token::LT);
2520 
2521  Add<HStoreKeyed>(elements, key, hole, elements_kind);
2522 
2523  builder.EndBody();
2524  }
2525 }
2526 
2527 
2528 void HGraphBuilder::BuildCopyElements(HValue* from_elements,
2529  ElementsKind from_elements_kind,
2530  HValue* to_elements,
2531  ElementsKind to_elements_kind,
2532  HValue* length,
2533  HValue* capacity) {
2534  bool pre_fill_with_holes =
2535  IsFastDoubleElementsKind(from_elements_kind) &&
2536  IsFastObjectElementsKind(to_elements_kind);
2537 
2538  if (pre_fill_with_holes) {
2539  // If the copy might trigger a GC, make sure that the FixedArray is
2540  // pre-initialized with holes to make sure that it's always in a consistent
2541  // state.
2542  BuildFillElementsWithHole(to_elements, to_elements_kind,
2543  graph()->GetConstant0(), capacity);
2544  }
2545 
2546  LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);
2547 
2548  HValue* key = builder.BeginBody(graph()->GetConstant0(), length, Token::LT);
2549 
2550  HValue* element = Add<HLoadKeyed>(from_elements, key,
2551  static_cast<HValue*>(NULL),
2552  from_elements_kind,
2554 
2555  ElementsKind kind = (IsHoleyElementsKind(from_elements_kind) &&
2556  IsFastSmiElementsKind(to_elements_kind))
2557  ? FAST_HOLEY_ELEMENTS : to_elements_kind;
2558 
2559  if (IsHoleyElementsKind(from_elements_kind) &&
2560  from_elements_kind != to_elements_kind) {
2561  IfBuilder if_hole(this);
2562  if_hole.If<HCompareHoleAndBranch>(element);
2563  if_hole.Then();
2564  HConstant* hole_constant = IsFastDoubleElementsKind(to_elements_kind)
2565  ? Add<HConstant>(FixedDoubleArray::hole_nan_as_double())
2566  : graph()->GetConstantHole();
2567  Add<HStoreKeyed>(to_elements, key, hole_constant, kind);
2568  if_hole.Else();
2569  HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind);
2570  store->SetFlag(HValue::kAllowUndefinedAsNaN);
2571  if_hole.End();
2572  } else {
2573  HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind);
2574  store->SetFlag(HValue::kAllowUndefinedAsNaN);
2575  }
2576 
2577  builder.EndBody();
2578 
2579  if (!pre_fill_with_holes && length != capacity) {
2580  // Fill unused capacity with the hole.
2581  BuildFillElementsWithHole(to_elements, to_elements_kind,
2582  key, capacity);
2583  }
2584 }
2585 
2586 
2587 HValue* HGraphBuilder::BuildCloneShallowArray(HValue* boilerplate,
2588  HValue* allocation_site,
2590  ElementsKind kind,
2591  int length) {
2592  NoObservableSideEffectsScope no_effects(this);
2593 
2594  // All sizes here are multiples of kPointerSize.
2595  int size = JSArray::kSize;
2596  if (mode == TRACK_ALLOCATION_SITE) {
2597  size += AllocationMemento::kSize;
2598  }
2599 
2600  HValue* size_in_bytes = Add<HConstant>(size);
2601  HInstruction* object = Add<HAllocate>(size_in_bytes,
2602  HType::JSObject(),
2603  NOT_TENURED,
2604  JS_OBJECT_TYPE);
2605 
2606  // Copy the JS array part.
2607  for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
2608  if ((i != JSArray::kElementsOffset) || (length == 0)) {
2609  HObjectAccess access = HObjectAccess::ForJSArrayOffset(i);
2610  Add<HStoreNamedField>(
2611  object, access, Add<HLoadNamedField>(
2612  boilerplate, static_cast<HValue*>(NULL), access));
2613  }
2614  }
2615 
2616  // Create an allocation site info if requested.
2617  if (mode == TRACK_ALLOCATION_SITE) {
2618  BuildCreateAllocationMemento(
2619  object, Add<HConstant>(JSArray::kSize), allocation_site);
2620  }
2621 
2622  if (length > 0) {
2623  HValue* boilerplate_elements = AddLoadElements(boilerplate);
2624  HValue* object_elements;
2625  if (IsFastDoubleElementsKind(kind)) {
2626  HValue* elems_size = Add<HConstant>(FixedDoubleArray::SizeFor(length));
2627  object_elements = Add<HAllocate>(elems_size, HType::Tagged(),
2629  } else {
2630  HValue* elems_size = Add<HConstant>(FixedArray::SizeFor(length));
2631  object_elements = Add<HAllocate>(elems_size, HType::Tagged(),
2633  }
2634  Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
2635  object_elements);
2636 
2637  // Copy the elements array header.
2638  for (int i = 0; i < FixedArrayBase::kHeaderSize; i += kPointerSize) {
2639  HObjectAccess access = HObjectAccess::ForFixedArrayHeader(i);
2640  Add<HStoreNamedField>(
2641  object_elements, access, Add<HLoadNamedField>(
2642  boilerplate_elements, static_cast<HValue*>(NULL), access));
2643  }
2644 
2645  // Copy the elements array contents.
2646  // TODO(mstarzinger): Teach HGraphBuilder::BuildCopyElements to unfold
2647  // copying loops with constant length up to a given boundary and use this
2648  // helper here instead.
2649  for (int i = 0; i < length; i++) {
2650  HValue* key_constant = Add<HConstant>(i);
2651  HInstruction* value = Add<HLoadKeyed>(boilerplate_elements, key_constant,
2652  static_cast<HValue*>(NULL), kind);
2653  Add<HStoreKeyed>(object_elements, key_constant, value, kind);
2654  }
2655  }
2656 
2657  return object;
2658 }
2659 
2660 
2661 void HGraphBuilder::BuildCompareNil(
2662  HValue* value,
2663  Type* type,
2664  HIfContinuation* continuation) {
2665  IfBuilder if_nil(this);
2666  bool some_case_handled = false;
2667  bool some_case_missing = false;
2668 
2669  if (type->Maybe(Type::Null())) {
2670  if (some_case_handled) if_nil.Or();
2671  if_nil.If<HCompareObjectEqAndBranch>(value, graph()->GetConstantNull());
2672  some_case_handled = true;
2673  } else {
2674  some_case_missing = true;
2675  }
2676 
2677  if (type->Maybe(Type::Undefined())) {
2678  if (some_case_handled) if_nil.Or();
2679  if_nil.If<HCompareObjectEqAndBranch>(value,
2680  graph()->GetConstantUndefined());
2681  some_case_handled = true;
2682  } else {
2683  some_case_missing = true;
2684  }
2685 
2686  if (type->Maybe(Type::Undetectable())) {
2687  if (some_case_handled) if_nil.Or();
2688  if_nil.If<HIsUndetectableAndBranch>(value);
2689  some_case_handled = true;
2690  } else {
2691  some_case_missing = true;
2692  }
2693 
2694  if (some_case_missing) {
2695  if_nil.Then();
2696  if_nil.Else();
2697  if (type->NumClasses() == 1) {
2698  BuildCheckHeapObject(value);
2699  // For ICs, the map checked below is a sentinel map that gets replaced by
2700  // the monomorphic map when the code is used as a template to generate a
2701  // new IC. For optimized functions, there is no sentinel map, the map
2702  // emitted below is the actual monomorphic map.
2703  BuildCheckMap(value, type->Classes().Current());
2704  } else {
2705  if_nil.Deopt("Too many undetectable types");
2706  }
2707  }
2708 
2709  if_nil.CaptureContinuation(continuation);
2710 }
2711 
2712 
2713 void HGraphBuilder::BuildCreateAllocationMemento(
2714  HValue* previous_object,
2715  HValue* previous_object_size,
2716  HValue* allocation_site) {
2717  ASSERT(allocation_site != NULL);
2718  HInnerAllocatedObject* allocation_memento = Add<HInnerAllocatedObject>(
2719  previous_object, previous_object_size);
2720  AddStoreMapConstant(
2721  allocation_memento, isolate()->factory()->allocation_memento_map());
2722  Add<HStoreNamedField>(
2723  allocation_memento,
2724  HObjectAccess::ForAllocationMementoSite(),
2725  allocation_site);
2726  if (FLAG_allocation_site_pretenuring) {
2727  HValue* memento_create_count = Add<HLoadNamedField>(
2728  allocation_site, static_cast<HValue*>(NULL),
2729  HObjectAccess::ForAllocationSiteOffset(
2730  AllocationSite::kPretenureCreateCountOffset));
2731  memento_create_count = AddUncasted<HAdd>(
2732  memento_create_count, graph()->GetConstant1());
2733  // This smi value is reset to zero after every gc, overflow isn't a problem
2734  // since the counter is bounded by the new space size.
2735  memento_create_count->ClearFlag(HValue::kCanOverflow);
2736  HStoreNamedField* store = Add<HStoreNamedField>(
2737  allocation_site, HObjectAccess::ForAllocationSiteOffset(
2738  AllocationSite::kPretenureCreateCountOffset), memento_create_count);
2739  // No write barrier needed to store a smi.
2740  store->SkipWriteBarrier();
2741  }
2742 }
2743 
2744 
2745 HInstruction* HGraphBuilder::BuildGetNativeContext(HValue* closure) {
2746  // Get the global context, then the native context
2747  HInstruction* context =
2748  Add<HLoadNamedField>(closure, static_cast<HValue*>(NULL),
2749  HObjectAccess::ForFunctionContextPointer());
2750  HInstruction* global_object = Add<HLoadNamedField>(
2751  context, static_cast<HValue*>(NULL),
2752  HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
2753  HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(
2754  GlobalObject::kNativeContextOffset);
2755  return Add<HLoadNamedField>(
2756  global_object, static_cast<HValue*>(NULL), access);
2757 }
2758 
2759 
2760 HInstruction* HGraphBuilder::BuildGetNativeContext() {
2761  // Get the global context, then the native context
2762  HValue* global_object = Add<HLoadNamedField>(
2763  context(), static_cast<HValue*>(NULL),
2764  HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
2765  return Add<HLoadNamedField>(
2766  global_object, static_cast<HValue*>(NULL),
2767  HObjectAccess::ForObservableJSObjectOffset(
2768  GlobalObject::kNativeContextOffset));
2769 }
2770 
2771 
2772 HInstruction* HGraphBuilder::BuildGetArrayFunction() {
2773  HInstruction* native_context = BuildGetNativeContext();
2774  HInstruction* index =
2775  Add<HConstant>(static_cast<int32_t>(Context::ARRAY_FUNCTION_INDEX));
2776  return Add<HLoadKeyed>(
2777  native_context, index, static_cast<HValue*>(NULL), FAST_ELEMENTS);
2778 }
2779 
2780 
2781 HGraphBuilder::JSArrayBuilder::JSArrayBuilder(HGraphBuilder* builder,
2782  ElementsKind kind,
2783  HValue* allocation_site_payload,
2784  HValue* constructor_function,
2785  AllocationSiteOverrideMode override_mode) :
2786  builder_(builder),
2787  kind_(kind),
2788  allocation_site_payload_(allocation_site_payload),
2789  constructor_function_(constructor_function) {
2790  ASSERT(!allocation_site_payload->IsConstant() ||
2791  HConstant::cast(allocation_site_payload)->handle(
2792  builder_->isolate())->IsAllocationSite());
2793  mode_ = override_mode == DISABLE_ALLOCATION_SITES
2795  : AllocationSite::GetMode(kind);
2796 }
2797 
2798 
2799 HGraphBuilder::JSArrayBuilder::JSArrayBuilder(HGraphBuilder* builder,
2800  ElementsKind kind,
2801  HValue* constructor_function) :
2802  builder_(builder),
2803  kind_(kind),
2805  allocation_site_payload_(NULL),
2806  constructor_function_(constructor_function) {
2807 }
2808 
2809 
// Produces the HValue for the map to install on a new JSArray. A full
// (non-stub) compile embeds the initial map as a constant; a stub must
// load it, either from the constructor's initial-map slot or from the
// native context's JSArray map table.
HValue* HGraphBuilder::JSArrayBuilder::EmitMapCode() {
  if (!builder()->top_info()->IsStub()) {
    // A constant map is fine.
    Handle<Map> map(builder()->isolate()->get_initial_js_array_map(kind_),
                    builder()->isolate());
    return builder()->Add<HConstant>(map);
  }

  if (constructor_function_ != NULL && kind_ == GetInitialFastElementsKind()) {
    // No need for a context lookup if the kind_ matches the initial
    // map, because we can just load the map in that case.
    HObjectAccess access = HObjectAccess::ForPrototypeOrInitialMap();
    return builder()->Add<HLoadNamedField>(
        constructor_function_, static_cast<HValue*>(NULL), access);
  }

  // TODO(mvstanton): we should always have a constructor function if we
  // are creating a stub.
  HInstruction* native_context = constructor_function_ != NULL
      ? builder()->BuildGetNativeContext(constructor_function_)
      : builder()->BuildGetNativeContext();

  HInstruction* index = builder()->Add<HConstant>(
      static_cast<int32_t>(Context::JS_ARRAY_MAPS_INDEX));

  // Load the per-kind JSArray map table from the native context...
  HInstruction* map_array = builder()->Add<HLoadKeyed>(
      native_context, index, static_cast<HValue*>(NULL), FAST_ELEMENTS);

  HInstruction* kind_index = builder()->Add<HConstant>(kind_);

  // ...and pick the map matching this builder's elements kind.
  return builder()->Add<HLoadKeyed>(
      map_array, kind_index, static_cast<HValue*>(NULL), FAST_ELEMENTS);
}
2843 
2844 
2845 HValue* HGraphBuilder::JSArrayBuilder::EmitInternalMapCode() {
2846  // Find the map near the constructor function
2847  HObjectAccess access = HObjectAccess::ForPrototypeOrInitialMap();
2848  return builder()->Add<HLoadNamedField>(
2849  constructor_function_, static_cast<HValue*>(NULL), access);
2850 }
2851 
2852 
2853 HValue* HGraphBuilder::JSArrayBuilder::EstablishAllocationSize(
2854  HValue* length_node) {
2855  ASSERT(length_node != NULL);
2856 
2857  int base_size = JSArray::kSize;
2858  if (mode_ == TRACK_ALLOCATION_SITE) {
2859  base_size += AllocationMemento::kSize;
2860  }
2861 
2863  base_size += FixedArray::kHeaderSize;
2864 
2865  HInstruction* elements_size_value =
2866  builder()->Add<HConstant>(elements_size());
2867  HInstruction* mul = HMul::NewImul(builder()->zone(), builder()->context(),
2868  length_node, elements_size_value);
2869  builder()->AddInstruction(mul);
2870  HInstruction* base = builder()->Add<HConstant>(base_size);
2871  HInstruction* total_size = HAdd::New(builder()->zone(), builder()->context(),
2872  base, mul);
2873  total_size->ClearFlag(HValue::kCanOverflow);
2874  builder()->AddInstruction(total_size);
2875  return total_size;
2876 }
2877 
2878 
2879 HValue* HGraphBuilder::JSArrayBuilder::EstablishEmptyArrayAllocationSize() {
2880  int base_size = JSArray::kSize;
2881  if (mode_ == TRACK_ALLOCATION_SITE) {
2882  base_size += AllocationMemento::kSize;
2883  }
2884 
2885  base_size += IsFastDoubleElementsKind(kind_)
2886  ? FixedDoubleArray::SizeFor(initial_capacity())
2887  : FixedArray::SizeFor(initial_capacity());
2888 
2889  return builder()->Add<HConstant>(base_size);
2890 }
2891 
2892 
2893 HValue* HGraphBuilder::JSArrayBuilder::AllocateEmptyArray() {
2894  HValue* size_in_bytes = EstablishEmptyArrayAllocationSize();
2895  HConstant* capacity = builder()->Add<HConstant>(initial_capacity());
2896  return AllocateArray(size_in_bytes,
2897  capacity,
2898  builder()->graph()->GetConstant0());
2899 }
2900 
2901 
2902 HValue* HGraphBuilder::JSArrayBuilder::AllocateArray(HValue* capacity,
2903  HValue* length_field,
2904  FillMode fill_mode) {
2905  HValue* size_in_bytes = EstablishAllocationSize(capacity);
2906  return AllocateArray(size_in_bytes, capacity, length_field, fill_mode);
2907 }
2908 
2909 
2910 HValue* HGraphBuilder::JSArrayBuilder::AllocateArray(HValue* size_in_bytes,
2911  HValue* capacity,
2912  HValue* length_field,
2913  FillMode fill_mode) {
2914  // These HForceRepresentations are because we store these as fields in the
2915  // objects we construct, and an int32-to-smi HChange could deopt. Accept
2916  // the deopt possibility now, before allocation occurs.
2917  capacity =
2918  builder()->AddUncasted<HForceRepresentation>(capacity,
2920  length_field =
2921  builder()->AddUncasted<HForceRepresentation>(length_field,
2923  // Allocate (dealing with failure appropriately)
2924  HAllocate* new_object = builder()->Add<HAllocate>(size_in_bytes,
2925  HType::JSArray(), NOT_TENURED, JS_ARRAY_TYPE);
2926 
2927  // Folded array allocation should be aligned if it has fast double elements.
2928  if (IsFastDoubleElementsKind(kind_)) {
2929  new_object->MakeDoubleAligned();
2930  }
2931 
2932  // Fill in the fields: map, properties, length
2933  HValue* map;
2934  if (allocation_site_payload_ == NULL) {
2935  map = EmitInternalMapCode();
2936  } else {
2937  map = EmitMapCode();
2938  }
2939  elements_location_ = builder()->BuildJSArrayHeader(new_object,
2940  map,
2941  mode_,
2942  kind_,
2943  allocation_site_payload_,
2944  length_field);
2945 
2946  // Initialize the elements
2947  builder()->BuildInitializeElementsHeader(elements_location_, kind_, capacity);
2948 
2949  if (fill_mode == FILL_WITH_HOLE) {
2950  builder()->BuildFillElementsWithHole(elements_location_, kind_,
2951  graph()->GetConstant0(), capacity);
2952  }
2953 
2954  return new_object;
2955 }
2956 
2957 
2958 HStoreNamedField* HGraphBuilder::AddStoreMapConstant(HValue *object,
2959  Handle<Map> map) {
2960  return Add<HStoreNamedField>(object, HObjectAccess::ForMap(),
2961  Add<HConstant>(map));
2962 }
2963 
2964 
2966  HValue* global_object = Add<HLoadNamedField>(
2967  context(), static_cast<HValue*>(NULL),
2968  HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
2969  HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(
2971  HValue* builtins = Add<HLoadNamedField>(
2972  global_object, static_cast<HValue*>(NULL), access);
2973  HObjectAccess function_access = HObjectAccess::ForObservableJSObjectOffset(
2975  return Add<HLoadNamedField>(
2976  builtins, static_cast<HValue*>(NULL), function_access);
2977 }
2978 
2979 
2981  : HGraphBuilder(info),
2982  function_state_(NULL),
2983  initial_function_state_(this, info, NORMAL_RETURN, 0),
2984  ast_context_(NULL),
2985  break_scope_(NULL),
2986  inlined_count_(0),
2987  globals_(10, info->zone()),
2988  inline_bailout_(false),
2989  osr_(new(info->zone()) HOsrBuilder(this)) {
2990  // This is not initialized in the initializer list because the
2991  // constructor for the initial state relies on function_state_ == NULL
2992  // to know it's the initial state.
2993  function_state_= &initial_function_state_;
2994  InitializeAstVisitor(info->zone());
2995  if (FLAG_hydrogen_track_positions) {
2996  SetSourcePosition(info->shared_info()->start_position());
2997  }
2998 }
2999 
3000 
3001 HBasicBlock* HOptimizedGraphBuilder::CreateJoin(HBasicBlock* first,
3002  HBasicBlock* second,
3003  BailoutId join_id) {
3004  if (first == NULL) {
3005  return second;
3006  } else if (second == NULL) {
3007  return first;
3008  } else {
3009  HBasicBlock* join_block = graph()->CreateBasicBlock();
3010  Goto(first, join_block);
3011  Goto(second, join_block);
3012  join_block->SetJoinId(join_id);
3013  return join_block;
3014  }
3015 }
3016 
3017 
3019  HBasicBlock* exit_block,
3020  HBasicBlock* continue_block) {
3021  if (continue_block != NULL) {
3022  if (exit_block != NULL) Goto(exit_block, continue_block);
3023  continue_block->SetJoinId(statement->ContinueId());
3024  return continue_block;
3025  }
3026  return exit_block;
3027 }
3028 
3029 
3031  HBasicBlock* loop_entry,
3032  HBasicBlock* body_exit,
3033  HBasicBlock* loop_successor,
3034  HBasicBlock* break_block) {
3035  if (body_exit != NULL) Goto(body_exit, loop_entry);
3036  loop_entry->PostProcessLoopHeader(statement);
3037  if (break_block != NULL) {
3038  if (loop_successor != NULL) Goto(loop_successor, break_block);
3039  break_block->SetJoinId(statement->ExitId());
3040  return break_block;
3041  }
3042  return loop_successor;
3043 }
3044 
3045 
3046 // Build a new loop header block and set it as the current block.
3048  HBasicBlock* loop_entry = CreateLoopHeaderBlock();
3049  Goto(loop_entry);
3050  set_current_block(loop_entry);
3051  return loop_entry;
3052 }
3053 
3054 
3056  IterationStatement* statement) {
3057  HBasicBlock* loop_entry = osr()->HasOsrEntryAt(statement)
3058  ? osr()->BuildOsrLoopEntry(statement)
3059  : BuildLoopEntry();
3060  return loop_entry;
3061 }
3062 
3063 
// Finishes the block with |instruction| and drops its environment.
void HBasicBlock::FinishExit(HControlInstruction* instruction,
                             HSourcePosition position) {
  Finish(instruction, position);
  ClearEnvironment();
}
3069 
3070 
// Constructs an empty graph for |info|. Stubs get a fixed-size start
// environment from their interface descriptor; normal functions get one
// derived from the function's scope and closure.
HGraph::HGraph(CompilationInfo* info)
    : isolate_(info->isolate()),
      next_block_id_(0),
      entry_block_(NULL),
      blocks_(8, info->zone()),
      values_(16, info->zone()),
      phi_list_(NULL),
      uint32_instructions_(NULL),
      osr_(NULL),
      info_(info),
      zone_(info->zone()),
      is_recursive_(false),
      use_optimistic_licm_(false),
      depends_on_empty_array_proto_elements_(false),
      type_change_checksum_(0),
      maximum_environment_size_(0),
      no_side_effects_scope_count_(0),
      disallow_adding_new_values_(false),
      next_inline_id_(0),
      inlined_functions_(5, info->zone()) {
  if (info->IsStub()) {
    HydrogenCodeStub* stub = info->code_stub();
    CodeStubInterfaceDescriptor* descriptor =
        stub->GetInterfaceDescriptor(isolate_);
    start_environment_ =
        new(zone_) HEnvironment(zone_, descriptor->environment_length());
  } else {
    // Register the top-level function for source-position tracking.
    TraceInlinedFunction(info->shared_info(), HSourcePosition::Unknown());
    start_environment_ =
        new(zone_) HEnvironment(NULL, info->scope(), info->closure(), zone_);
  }
  start_environment_->set_ast_id(BailoutId::FunctionEntry());
  entry_block_ = CreateBasicBlock();
  entry_block_->SetInitialEnvironment(start_environment_);
}
3106 
3107 
3108 HBasicBlock* HGraph::CreateBasicBlock() {
3109  HBasicBlock* result = new(zone()) HBasicBlock(this);
3110  blocks_.Add(result, zone());
3111  return result;
3112 }
3113 
3114 
3115 void HGraph::FinalizeUniqueness() {
3116  DisallowHeapAllocation no_gc;
3117  ASSERT(!OptimizingCompilerThread::IsOptimizerThread(isolate()));
3118  for (int i = 0; i < blocks()->length(); ++i) {
3119  for (HInstructionIterator it(blocks()->at(i)); !it.Done(); it.Advance()) {
3120  it.Current()->FinalizeUniqueness();
3121  }
3122  }
3123 }
3124 
3125 
// Records |shared| in inlined_functions_ (dumping its source to the code
// tracer the first time it is seen) and returns a fresh inline id for this
// inlining. Returns 0 when position tracking is disabled.
int HGraph::TraceInlinedFunction(
    Handle<SharedFunctionInfo> shared,
    HSourcePosition position) {
  if (!FLAG_hydrogen_track_positions) {
    return 0;
  }

  // Look for an existing entry for this function.
  int id = 0;
  for (; id < inlined_functions_.length(); id++) {
    if (inlined_functions_[id].shared().is_identical_to(shared)) {
      break;
    }
  }

  if (id == inlined_functions_.length()) {
    // First time: register and dump the function source, if available.
    inlined_functions_.Add(InlinedFunctionInfo(shared), zone());

    if (!shared->script()->IsUndefined()) {
      Handle<Script> script(Script::cast(shared->script()));
      if (!script->source()->IsUndefined()) {
        CodeTracer::Scope tracing_scope(isolate()->GetCodeTracer());
        PrintF(tracing_scope.file(),
               "--- FUNCTION SOURCE (%s) id{%d,%d} ---\n",
               shared->DebugName()->ToCString().get(),
               info()->optimization_id(),
               id);

        {
          ConsStringIteratorOp op;
          StringCharacterStream stream(String::cast(script->source()),
                                       &op,
                                       shared->start_position());
          // fun->end_position() points to the last character in the stream. We
          // need to compensate by adding one to calculate the length.
          int source_len =
              shared->end_position() - shared->start_position() + 1;
          for (int i = 0; i < source_len; i++) {
            if (stream.HasMore()) {
              PrintF(tracing_scope.file(), "%c", stream.GetNext());
            }
          }
        }

        PrintF(tracing_scope.file(), "\n--- END ---\n");
      }
    }
  }

  int inline_id = next_inline_id_++;

  // The top-level function (inline id 0) is not traced as an inlining.
  if (inline_id != 0) {
    CodeTracer::Scope tracing_scope(isolate()->GetCodeTracer());
    PrintF(tracing_scope.file(), "INLINE (%s) id{%d,%d} AS %d AT ",
           shared->DebugName()->ToCString().get(),
           info()->optimization_id(),
           id,
           inline_id);
    position.PrintTo(tracing_scope.file());
    PrintF(tracing_scope.file(), "\n");
  }

  return inline_id;
}
3189 
3190 
3191 int HGraph::SourcePositionToScriptPosition(HSourcePosition pos) {
3192  if (!FLAG_hydrogen_track_positions || pos.IsUnknown()) {
3193  return pos.raw();
3194  }
3195 
3196  return inlined_functions_[pos.inlining_id()].start_position() +
3197  pos.position();
3198 }
3199 
3200 
3201 // Block ordering was implemented with two mutually recursive methods,
3202 // HGraph::Postorder and HGraph::PostorderLoopBlocks.
3203 // The recursion could lead to stack overflow so the algorithm has been
3204 // implemented iteratively.
3205 // At a high level the algorithm looks like this:
3206 //
3207 // Postorder(block, loop_header) : {
3208 // if (block has already been visited or is of another loop) return;
3209 // mark block as visited;
3210 // if (block is a loop header) {
3211 // VisitLoopMembers(block, loop_header);
3212 // VisitSuccessorsOfLoopHeader(block);
3213 // } else {
3214 // VisitSuccessors(block)
3215 // }
3216 // put block in result list;
3217 // }
3218 //
3219 // VisitLoopMembers(block, outer_loop_header) {
3220 // foreach (block b in block loop members) {
3221 // VisitSuccessorsOfLoopMember(b, outer_loop_header);
3222 // if (b is loop header) VisitLoopMembers(b);
3223 // }
3224 // }
3225 //
3226 // VisitSuccessorsOfLoopMember(block, outer_loop_header) {
3227 // foreach (block b in block successors) Postorder(b, outer_loop_header)
3228 // }
3229 //
3230 // VisitSuccessorsOfLoopHeader(block) {
3231 // foreach (block b in block successors) Postorder(b, block)
3232 // }
3233 //
3234 // VisitSuccessors(block, loop_header) {
3235 // foreach (block b in block successors) Postorder(b, loop_header)
3236 // }
3237 //
// The ordering is started by calling Postorder(entry, NULL).
3239 //
3240 // Each instance of PostorderProcessor represents the "stack frame" of the
3241 // recursion, and particularly keeps the state of the loop (iteration) of the
3242 // "Visit..." function it represents.
3243 // To recycle memory we keep all the frames in a double linked list but
3244 // this means that we cannot use constructors to initialize the frames.
3245 //
3247  public:
3248  // Back link (towards the stack bottom).
3249  PostorderProcessor* parent() {return father_; }
3250  // Forward link (towards the stack top).
3251  PostorderProcessor* child() {return child_; }
3252  HBasicBlock* block() { return block_; }
3253  HLoopInformation* loop() { return loop_; }
3254  HBasicBlock* loop_header() { return loop_header_; }
3255 
3257  HBasicBlock* block,
3258  BitVector* visited) {
3259  PostorderProcessor* result = new(zone) PostorderProcessor(NULL);
3260  return result->SetupSuccessors(zone, block, NULL, visited);
3261  }
3262 
3264  BitVector* visited,
3265  ZoneList<HBasicBlock*>* order) {
3266  PostorderProcessor* next =
3267  PerformNonBacktrackingStep(zone, visited, order);
3268  if (next != NULL) {
3269  return next;
3270  } else {
3271  return Backtrack(zone, visited, order);
3272  }
3273  }
3274 
3275  private:
3276  explicit PostorderProcessor(PostorderProcessor* father)
3277  : father_(father), child_(NULL), successor_iterator(NULL) { }
3278 
3279  // Each enum value states the cycle whose state is kept by this instance.
3280  enum LoopKind {
3281  NONE,
3282  SUCCESSORS,
3283  SUCCESSORS_OF_LOOP_HEADER,
3284  LOOP_MEMBERS,
3285  SUCCESSORS_OF_LOOP_MEMBER
3286  };
3287 
3288  // Each "Setup..." method is like a constructor for a cycle state.
3289  PostorderProcessor* SetupSuccessors(Zone* zone,
3290  HBasicBlock* block,
3291  HBasicBlock* loop_header,
3292  BitVector* visited) {
3293  if (block == NULL || visited->Contains(block->block_id()) ||
3294  block->parent_loop_header() != loop_header) {
3295  kind_ = NONE;
3296  block_ = NULL;
3297  loop_ = NULL;
3298  loop_header_ = NULL;
3299  return this;
3300  } else {
3301  block_ = block;
3302  loop_ = NULL;
3303  visited->Add(block->block_id());
3304 
3305  if (block->IsLoopHeader()) {
3306  kind_ = SUCCESSORS_OF_LOOP_HEADER;
3307  loop_header_ = block;
3308  InitializeSuccessors();
3309  PostorderProcessor* result = Push(zone);
3310  return result->SetupLoopMembers(zone, block, block->loop_information(),
3311  loop_header);
3312  } else {
3313  ASSERT(block->IsFinished());
3314  kind_ = SUCCESSORS;
3315  loop_header_ = loop_header;
3316  InitializeSuccessors();
3317  return this;
3318  }
3319  }
3320  }
3321 
3322  PostorderProcessor* SetupLoopMembers(Zone* zone,
3323  HBasicBlock* block,
3324  HLoopInformation* loop,
3325  HBasicBlock* loop_header) {
3326  kind_ = LOOP_MEMBERS;
3327  block_ = block;
3328  loop_ = loop;
3329  loop_header_ = loop_header;
3330  InitializeLoopMembers();
3331  return this;
3332  }
3333 
3334  PostorderProcessor* SetupSuccessorsOfLoopMember(
3335  HBasicBlock* block,
3336  HLoopInformation* loop,
3337  HBasicBlock* loop_header) {
3338  kind_ = SUCCESSORS_OF_LOOP_MEMBER;
3339  block_ = block;
3340  loop_ = loop;
3341  loop_header_ = loop_header;
3342  InitializeSuccessors();
3343  return this;
3344  }
3345 
3346  // This method "allocates" a new stack frame.
3347  PostorderProcessor* Push(Zone* zone) {
3348  if (child_ == NULL) {
3349  child_ = new(zone) PostorderProcessor(this);
3350  }
3351  return child_;
3352  }
3353 
3354  void ClosePostorder(ZoneList<HBasicBlock*>* order, Zone* zone) {
3355  ASSERT(block_->end()->FirstSuccessor() == NULL ||
3356  order->Contains(block_->end()->FirstSuccessor()) ||
3357  block_->end()->FirstSuccessor()->IsLoopHeader());
3358  ASSERT(block_->end()->SecondSuccessor() == NULL ||
3359  order->Contains(block_->end()->SecondSuccessor()) ||
3360  block_->end()->SecondSuccessor()->IsLoopHeader());
3361  order->Add(block_, zone);
3362  }
3363 
3364  // This method is the basic block to walk up the stack.
3365  PostorderProcessor* Pop(Zone* zone,
3366  BitVector* visited,
3367  ZoneList<HBasicBlock*>* order) {
3368  switch (kind_) {
3369  case SUCCESSORS:
3370  case SUCCESSORS_OF_LOOP_HEADER:
3371  ClosePostorder(order, zone);
3372  return father_;
3373  case LOOP_MEMBERS:
3374  return father_;
3375  case SUCCESSORS_OF_LOOP_MEMBER:
3376  if (block()->IsLoopHeader() && block() != loop_->loop_header()) {
3377  // In this case we need to perform a LOOP_MEMBERS cycle so we
3378  // initialize it and return this instead of father.
3379  return SetupLoopMembers(zone, block(),
3380  block()->loop_information(), loop_header_);
3381  } else {
3382  return father_;
3383  }
3384  case NONE:
3385  return father_;
3386  }
3387  UNREACHABLE();
3388  return NULL;
3389  }
3390 
3391  // Walks up the stack.
3392  PostorderProcessor* Backtrack(Zone* zone,
3393  BitVector* visited,
3394  ZoneList<HBasicBlock*>* order) {
3395  PostorderProcessor* parent = Pop(zone, visited, order);
3396  while (parent != NULL) {
3397  PostorderProcessor* next =
3398  parent->PerformNonBacktrackingStep(zone, visited, order);
3399  if (next != NULL) {
3400  return next;
3401  } else {
3402  parent = parent->Pop(zone, visited, order);
3403  }
3404  }
3405  return NULL;
3406  }
3407 
3408  PostorderProcessor* PerformNonBacktrackingStep(
3409  Zone* zone,
3410  BitVector* visited,
3411  ZoneList<HBasicBlock*>* order) {
3412  HBasicBlock* next_block;
3413  switch (kind_) {
3414  case SUCCESSORS:
3415  next_block = AdvanceSuccessors();
3416  if (next_block != NULL) {
3417  PostorderProcessor* result = Push(zone);
3418  return result->SetupSuccessors(zone, next_block,
3419  loop_header_, visited);
3420  }
3421  break;
3422  case SUCCESSORS_OF_LOOP_HEADER:
3423  next_block = AdvanceSuccessors();
3424  if (next_block != NULL) {
3425  PostorderProcessor* result = Push(zone);
3426  return result->SetupSuccessors(zone, next_block,
3427  block(), visited);
3428  }
3429  break;
3430  case LOOP_MEMBERS:
3431  next_block = AdvanceLoopMembers();
3432  if (next_block != NULL) {
3433  PostorderProcessor* result = Push(zone);
3434  return result->SetupSuccessorsOfLoopMember(next_block,
3435  loop_, loop_header_);
3436  }
3437  break;
3438  case SUCCESSORS_OF_LOOP_MEMBER:
3439  next_block = AdvanceSuccessors();
3440  if (next_block != NULL) {
3441  PostorderProcessor* result = Push(zone);
3442  return result->SetupSuccessors(zone, next_block,
3443  loop_header_, visited);
3444  }
3445  break;
3446  case NONE:
3447  return NULL;
3448  }
3449  return NULL;
3450  }
3451 
3452  // The following two methods implement a "foreach b in successors" cycle.
3453  void InitializeSuccessors() {
3454  loop_index = 0;
3455  loop_length = 0;
3456  successor_iterator = HSuccessorIterator(block_->end());
3457  }
3458 
3459  HBasicBlock* AdvanceSuccessors() {
3460  if (!successor_iterator.Done()) {
3461  HBasicBlock* result = successor_iterator.Current();
3462  successor_iterator.Advance();
3463  return result;
3464  }
3465  return NULL;
3466  }
3467 
3468  // The following two methods implement a "foreach b in loop members" cycle.
3469  void InitializeLoopMembers() {
3470  loop_index = 0;
3471  loop_length = loop_->blocks()->length();
3472  }
3473 
3474  HBasicBlock* AdvanceLoopMembers() {
3475  if (loop_index < loop_length) {
3476  HBasicBlock* result = loop_->blocks()->at(loop_index);
3477  loop_index++;
3478  return result;
3479  } else {
3480  return NULL;
3481  }
3482  }
3483 
3484  LoopKind kind_;
3485  PostorderProcessor* father_;
3486  PostorderProcessor* child_;
3487  HLoopInformation* loop_;
3488  HBasicBlock* block_;
3489  HBasicBlock* loop_header_;
3490  int loop_index;
3491  int loop_length;
3492  HSuccessorIterator successor_iterator;
3493 };
3494 
3495 
// Computes a block ordering via the iterative postorder walk and
// renumbers block ids to match the new order.
void HGraph::OrderBlocks() {
  CompilationPhase phase("H_Block ordering", info());
  BitVector visited(blocks_.length(), zone());

  ZoneList<HBasicBlock*> reverse_result(8, zone());
  HBasicBlock* start = blocks_[0];
  PostorderProcessor* postorder =
      PostorderProcessor::CreateEntryProcessor(zone(), start, &visited);
  while (postorder != NULL) {
    postorder = postorder->PerformStep(zone(), &visited, &reverse_result);
  }
  // The walk produced a postorder; reversing it gives the final order.
  blocks_.Rewind(0);
  int index = 0;
  for (int i = reverse_result.length() - 1; i >= 0; --i) {
    HBasicBlock* b = reverse_result[i];
    blocks_.Add(b, zone());
    b->set_block_id(index++);
  }
}
3515 
3516 
// Assigns each block's dominator as the common dominator of its
// predecessors, visiting blocks in the already-computed order.
void HGraph::AssignDominators() {
  HPhase phase("H_Assign dominators", this);
  for (int i = 0; i < blocks_.length(); ++i) {
    HBasicBlock* block = blocks_[i];
    if (block->IsLoopHeader()) {
      // Only the first predecessor of a loop header is from outside the loop.
      // All others are back edges, and thus cannot dominate the loop header.
      block->AssignCommonDominator(block->predecessors()->first());
      block->AssignLoopSuccessorDominators();
    } else {
      for (int j = blocks_[i]->predecessors()->length() - 1; j >= 0; --j) {
        blocks_[i]->AssignCommonDominator(blocks_[i]->predecessors()->at(j));
      }
    }
  }
}
3533 
3534 
3535 bool HGraph::CheckArgumentsPhiUses() {
3536  int block_count = blocks_.length();
3537  for (int i = 0; i < block_count; ++i) {
3538  for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
3539  HPhi* phi = blocks_[i]->phis()->at(j);
3540  // We don't support phi uses of arguments for now.
3541  if (phi->CheckFlag(HValue::kIsArguments)) return false;
3542  }
3543  }
3544  return true;
3545 }
3546 
3547 
3548 bool HGraph::CheckConstPhiUses() {
3549  int block_count = blocks_.length();
3550  for (int i = 0; i < block_count; ++i) {
3551  for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
3552  HPhi* phi = blocks_[i]->phis()->at(j);
3553  // Check for the hole value (from an uninitialized const).
3554  for (int k = 0; k < phi->OperandCount(); k++) {
3555  if (phi->OperandAt(k) == GetConstantHole()) return false;
3556  }
3557  }
3558  }
3559  return true;
3560 }
3561 
3562 
3563 void HGraph::CollectPhis() {
3564  int block_count = blocks_.length();
3565  phi_list_ = new(zone()) ZoneList<HPhi*>(block_count, zone());
3566  for (int i = 0; i < block_count; ++i) {
3567  for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
3568  HPhi* phi = blocks_[i]->phis()->at(j);
3569  phi_list_->Add(phi, zone());
3570  }
3571  }
3572 }
3573 
3574 
3575 // Implementation of utility class to encapsulate the translation state for
3576 // a (possibly inlined) function.
// Pushes a new (possibly inlined) function state onto the builder. For an
// inlined call, return targets are wired up according to the surrounding
// AST context (test vs. value/effect).
FunctionState::FunctionState(HOptimizedGraphBuilder* owner,
                             CompilationInfo* info,
                             InliningKind inlining_kind,
                             int inlining_id)
    : owner_(owner),
      compilation_info_(info),
      call_context_(NULL),
      inlining_kind_(inlining_kind),
      function_return_(NULL),
      test_context_(NULL),
      entry_(NULL),
      arguments_object_(NULL),
      arguments_elements_(NULL),
      inlining_id_(inlining_id),
      outer_source_position_(HSourcePosition::Unknown()),
      outer_(owner->function_state()) {
  if (outer_ != NULL) {
    // State for an inline function.
    if (owner->ast_context()->IsTest()) {
      // Inlining into a test context: returns branch to dedicated
      // true/false target blocks.
      HBasicBlock* if_true = owner->graph()->CreateBasicBlock();
      HBasicBlock* if_false = owner->graph()->CreateBasicBlock();
      if_true->MarkAsInlineReturnTarget(owner->current_block());
      if_false->MarkAsInlineReturnTarget(owner->current_block());
      TestContext* outer_test_context = TestContext::cast(owner->ast_context());
      Expression* cond = outer_test_context->condition();
      // The AstContext constructor pushed on the context stack. This newed
      // instance is the reason that AstContext can't be BASE_EMBEDDED.
      test_context_ = new TestContext(owner, cond, if_true, if_false);
    } else {
      // Otherwise all returns funnel into a single return target block.
      function_return_ = owner->graph()->CreateBasicBlock();
      function_return()->MarkAsInlineReturnTarget(owner->current_block());
    }
    // Set this after possibly allocating a new TestContext above.
    call_context_ = owner->ast_context();
  }

  // Push on the state stack.
  owner->set_function_state(this);

  if (FLAG_hydrogen_track_positions) {
    // Remember the caller's position so the destructor can restore it.
    outer_source_position_ = owner->source_position();
    owner->EnterInlinedSource(
        info->shared_info()->start_position(),
        inlining_id);
    owner->SetSourcePosition(info->shared_info()->start_position());
  }
}
3624 
3625 
// Pops this state off the builder and restores the outer function's
// source position.
FunctionState::~FunctionState() {
  delete test_context_;
  owner_->set_function_state(outer_);

  if (FLAG_hydrogen_track_positions) {
    owner_->set_source_position(outer_source_position_);
    owner_->EnterInlinedSource(
        outer_->compilation_info()->shared_info()->start_position(),
        outer_->inlining_id());
  }
}
3637 
3638 
3639 // Implementation of utility classes to represent an expression's context in
3640 // the AST.
3642  : owner_(owner),
3643  kind_(kind),
3644  outer_(owner->ast_context()),
3645  for_typeof_(false) {
3646  owner->set_ast_context(this); // Push.
3647 #ifdef DEBUG
3648  ASSERT(owner->environment()->frame_type() == JS_FUNCTION);
3649  original_length_ = owner->environment()->length();
3650 #endif
3651 }
3652 
3653 
3655  owner_->set_ast_context(outer_); // Pop.
3656 }
3657 
3658 
// On destruction the environment must be back to the state it had when
// the context was pushed (unless we bailed out or the block died).
EffectContext::~EffectContext() {
  ASSERT(owner()->HasStackOverflow() ||
         owner()->current_block() == NULL ||
         (owner()->environment()->length() == original_length_ &&
          owner()->environment()->frame_type() == JS_FUNCTION));
}
3665 
3666 
// A value context must leave exactly one extra value (the expression
// result) on the environment (unless we bailed out or the block died).
ValueContext::~ValueContext() {
  ASSERT(owner()->HasStackOverflow() ||
         owner()->current_block() == NULL ||
         (owner()->environment()->length() == original_length_ + 1 &&
          owner()->environment()->frame_type() == JS_FUNCTION));
}
3673 
3674 
// In an effect context the produced value is not used.
void EffectContext::ReturnValue(HValue* value) {
  // The value is simply ignored.
}
3678 
3679 
// Pushes |value| as the expression result; an arguments object is only
// legal here when the context explicitly allows it.
void ValueContext::ReturnValue(HValue* value) {
  // The value is tracked in the bailout environment, and communicated
  // through the environment as the result of the expression.
  if (!arguments_allowed() && value->CheckFlag(HValue::kIsArguments)) {
    owner()->Bailout(kBadValueContextForArgumentsValue);
  }
  owner()->Push(value);
}
3688 
3689 
// In a test context the value is consumed by branching on it.
void TestContext::ReturnValue(HValue* value) {
  BuildBranch(value);
}
3693 
3694 
// Adds |instr| for its effect only; a simulate is appended after
// observable side effects so deoptimization can resume at |ast_id|.
void EffectContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
  ASSERT(!instr->IsControlInstruction());
  owner()->AddInstruction(instr);
  if (instr->HasObservableSideEffects()) {
    owner()->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
  }
}
3702 
3703 
// Ends the current block with |instr| and immediately rejoins both empty
// branches, since the outcome is unused in an effect context.
void EffectContext::ReturnControl(HControlInstruction* instr,
                                  BailoutId ast_id) {
  ASSERT(!instr->HasObservableSideEffects());
  HBasicBlock* empty_true = owner()->graph()->CreateBasicBlock();
  HBasicBlock* empty_false = owner()->graph()->CreateBasicBlock();
  instr->SetSuccessorAt(0, empty_true);
  instr->SetSuccessorAt(1, empty_false);
  owner()->FinishCurrentBlock(instr);
  HBasicBlock* join = owner()->CreateJoin(empty_true, empty_false, ast_id);
  owner()->set_current_block(join);
}
3715 
3716 
// Resumes building after |continuation|: picks the single reachable
// branch, or joins both branches when both are reachable.
void EffectContext::ReturnContinuation(HIfContinuation* continuation,
                                       BailoutId ast_id) {
  HBasicBlock* true_branch = NULL;
  HBasicBlock* false_branch = NULL;
  continuation->Continue(&true_branch, &false_branch);
  if (!continuation->IsTrueReachable()) {
    owner()->set_current_block(false_branch);
  } else if (!continuation->IsFalseReachable()) {
    owner()->set_current_block(true_branch);
  } else {
    HBasicBlock* join = owner()->CreateJoin(true_branch, false_branch, ast_id);
    owner()->set_current_block(join);
  }
}
3731 
3732 
3733 void ValueContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
3734  ASSERT(!instr->IsControlInstruction());
3735  if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
3736  return owner()->Bailout(kBadValueContextForArgumentsObjectValue);
3737  }
3738  owner()->AddInstruction(instr);
3739  owner()->Push(instr);
3740  if (instr->HasObservableSideEffects()) {
3741  owner()->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
3742  }
3743 }
3744 
3745 
3746 void ValueContext::ReturnControl(HControlInstruction* instr, BailoutId ast_id) {
3747  ASSERT(!instr->HasObservableSideEffects());
3748  if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
3749  return owner()->Bailout(kBadValueContextForArgumentsObjectValue);
3750  }
3751  HBasicBlock* materialize_false = owner()->graph()->CreateBasicBlock();
3752  HBasicBlock* materialize_true = owner()->graph()->CreateBasicBlock();
3753  instr->SetSuccessorAt(0, materialize_true);
3754  instr->SetSuccessorAt(1, materialize_false);
3755  owner()->FinishCurrentBlock(instr);
3756  owner()->set_current_block(materialize_true);
3757  owner()->Push(owner()->graph()->GetConstantTrue());
3758  owner()->set_current_block(materialize_false);
3759  owner()->Push(owner()->graph()->GetConstantFalse());
3760  HBasicBlock* join =
3761  owner()->CreateJoin(materialize_true, materialize_false, ast_id);
3762  owner()->set_current_block(join);
3763 }
3764 
3765 
3766 void ValueContext::ReturnContinuation(HIfContinuation* continuation,
3767  BailoutId ast_id) {
3768  HBasicBlock* materialize_true = NULL;
3769  HBasicBlock* materialize_false = NULL;
3770  continuation->Continue(&materialize_true, &materialize_false);
3771  if (continuation->IsTrueReachable()) {
3772  owner()->set_current_block(materialize_true);
3773  owner()->Push(owner()->graph()->GetConstantTrue());
3774  owner()->set_current_block(materialize_true);
3775  }
3776  if (continuation->IsFalseReachable()) {
3777  owner()->set_current_block(materialize_false);
3778  owner()->Push(owner()->graph()->GetConstantFalse());
3779  owner()->set_current_block(materialize_false);
3780  }
3781  if (continuation->TrueAndFalseReachable()) {
3782  HBasicBlock* join =
3783  owner()->CreateJoin(materialize_true, materialize_false, ast_id);
3784  owner()->set_current_block(join);
3785  }
3786 }
3787 
3788 
3789 void TestContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
3790  ASSERT(!instr->IsControlInstruction());
3791  HOptimizedGraphBuilder* builder = owner();
3792  builder->AddInstruction(instr);
3793  // We expect a simulate after every expression with side effects, though
3794  // this one isn't actually needed (and wouldn't work if it were targeted).
3795  if (instr->HasObservableSideEffects()) {
3796  builder->Push(instr);
3797  builder->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
3798  builder->Pop();
3799  }
3800  BuildBranch(instr);
3801 }
3802 
3803 
3804 void TestContext::ReturnControl(HControlInstruction* instr, BailoutId ast_id) {
3805  ASSERT(!instr->HasObservableSideEffects());
3806  HBasicBlock* empty_true = owner()->graph()->CreateBasicBlock();
3807  HBasicBlock* empty_false = owner()->graph()->CreateBasicBlock();
3808  instr->SetSuccessorAt(0, empty_true);
3809  instr->SetSuccessorAt(1, empty_false);
3810  owner()->FinishCurrentBlock(instr);
3811  owner()->Goto(empty_true, if_true(), owner()->function_state());
3812  owner()->Goto(empty_false, if_false(), owner()->function_state());
3813  owner()->set_current_block(NULL);
3814 }
3815 
3816 
3817 void TestContext::ReturnContinuation(HIfContinuation* continuation,
3818  BailoutId ast_id) {
3819  HBasicBlock* true_branch = NULL;
3820  HBasicBlock* false_branch = NULL;
3821  continuation->Continue(&true_branch, &false_branch);
3822  if (continuation->IsTrueReachable()) {
3823  owner()->Goto(true_branch, if_true(), owner()->function_state());
3824  }
3825  if (continuation->IsFalseReachable()) {
3826  owner()->Goto(false_branch, if_false(), owner()->function_state());
3827  }
3828  owner()->set_current_block(NULL);
3829 }
3830 
3831 
3832 void TestContext::BuildBranch(HValue* value) {
3833  // We expect the graph to be in edge-split form: there is no edge that
3834  // connects a branch node to a join node. We conservatively ensure that
3835  // property by always adding an empty block on the outgoing edges of this
3836  // branch.
3837  HOptimizedGraphBuilder* builder = owner();
3838  if (value != NULL && value->CheckFlag(HValue::kIsArguments)) {
3839  builder->Bailout(kArgumentsObjectValueInATestContext);
3840  }
3841  ToBooleanStub::Types expected(condition()->to_boolean_types());
3842  ReturnControl(owner()->New<HBranch>(value, expected), BailoutId::None());
3843 }
3844 
3845 
// HOptimizedGraphBuilder infrastructure for bailing out and checking bailouts.
// CHECK_BAILOUT: evaluate |call| and return early if it triggered a bailout
// (signalled via the stack-overflow flag, which Bailout() sets).
#define CHECK_BAILOUT(call) \
  do { \
    call; \
    if (HasStackOverflow()) return; \
  } while (false)


// CHECK_ALIVE: like CHECK_BAILOUT, but also return early when control flow
// has ended (the current block is NULL).
#define CHECK_ALIVE(call) \
  do { \
    call; \
    if (HasStackOverflow() || current_block() == NULL) return; \
  } while (false)


// CHECK_ALIVE_OR_RETURN: CHECK_ALIVE for functions that return a value.
#define CHECK_ALIVE_OR_RETURN(call, value) \
  do { \
    call; \
    if (HasStackOverflow() || current_block() == NULL) return value; \
  } while (false)
3866 
3867 
3869  current_info()->set_bailout_reason(reason);
3870  SetStackOverflow();
3871 }
3872 
3873 
3875  EffectContext for_effect(this);
3876  Visit(expr);
3877 }
3878 
3879 
3882  ValueContext for_value(this, flag);
3883  Visit(expr);
3884 }
3885 
3886 
3888  ValueContext for_value(this, ARGUMENTS_NOT_ALLOWED);
3889  for_value.set_for_typeof(true);
3890  Visit(expr);
3891 }
3892 
3893 
3895  HBasicBlock* true_block,
3896  HBasicBlock* false_block) {
3897  TestContext for_test(this, expr, true_block, false_block);
3898  Visit(expr);
3899 }
3900 
3901 
3903  ZoneList<Expression*>* exprs) {
3904  for (int i = 0; i < exprs->length(); ++i) {
3905  CHECK_ALIVE(VisitForValue(exprs->at(i)));
3906  }
3907 }
3908 
3909 
3911  if (current_info()->function()->is_generator()) {
3912  Bailout(kFunctionIsAGenerator);
3913  return false;
3914  }
3915  Scope* scope = current_info()->scope();
3916  if (scope->HasIllegalRedeclaration()) {
3917  Bailout(kFunctionWithIllegalRedeclaration);
3918  return false;
3919  }
3920  if (scope->calls_eval()) {
3921  Bailout(kFunctionCallsEval);
3922  return false;
3923  }
3924  SetUpScope(scope);
3925 
3926  // Add an edge to the body entry. This is warty: the graph's start
3927  // environment will be used by the Lithium translation as the initial
3928  // environment on graph entry, but it has now been mutated by the
3929  // Hydrogen translation of the instructions in the start block. This
3930  // environment uses values which have not been defined yet. These
3931  // Hydrogen instructions will then be replayed by the Lithium
3932  // translation, so they cannot have an environment effect. The edge to
3933  // the body's entry block (along with some special logic for the start
3934  // block in HInstruction::InsertAfter) seals the start block from
3935  // getting unwanted instructions inserted.
3936  //
3937  // TODO(kmillikin): Fix this. Stop mutating the initial environment.
3938  // Make the Hydrogen instructions in the initial block into Hydrogen
3939  // values (but not instructions), present in the initial environment and
3940  // not replayed by the Lithium translation.
3941  HEnvironment* initial_env = environment()->CopyWithoutHistory();
3942  HBasicBlock* body_entry = CreateBasicBlock(initial_env);
3943  Goto(body_entry);
3944  body_entry->SetJoinId(BailoutId::FunctionEntry());
3945  set_current_block(body_entry);
3946 
3947  // Handle implicit declaration of the function name in named function
3948  // expressions before other declarations.
3949  if (scope->is_function_scope() && scope->function() != NULL) {
3950  VisitVariableDeclaration(scope->function());
3951  }
3952  VisitDeclarations(scope->declarations());
3953  Add<HSimulate>(BailoutId::Declarations());
3954 
3955  Add<HStackCheck>(HStackCheck::kFunctionEntry);
3956 
3957  VisitStatements(current_info()->function()->body());
3958  if (HasStackOverflow()) return false;
3959 
3960  if (current_block() != NULL) {
3961  Add<HReturn>(graph()->GetConstantUndefined());
3963  }
3964 
3965  // If the checksum of the number of type info changes is the same as the
3966  // last time this function was compiled, then this recompile is likely not
3967  // due to missing/inadequate type feedback, but rather too aggressive
3968  // optimization. Disable optimistic LICM in that case.
3969  Handle<Code> unoptimized_code(current_info()->shared_info()->code());
3970  ASSERT(unoptimized_code->kind() == Code::FUNCTION);
3971  Handle<TypeFeedbackInfo> type_info(
3972  TypeFeedbackInfo::cast(unoptimized_code->type_feedback_info()));
3973  int checksum = type_info->own_type_change_checksum();
3974  int composite_checksum = graph()->update_type_change_checksum(checksum);
3975  graph()->set_use_optimistic_licm(
3976  !type_info->matches_inlined_type_change_checksum(composite_checksum));
3977  type_info->set_inlined_type_change_checksum(composite_checksum);
3978 
3979  // Perform any necessary OSR-specific cleanups or changes to the graph.
3980  osr()->FinishGraph();
3981 
3982  return true;
3983 }
3984 
3985 
// Runs the Hydrogen optimization pipeline over the freshly built graph.
// Returns false (with *bailout_reason set) if a phi-use check fails;
// otherwise runs the phase sequence below.  The order of the phases is
// significant -- see the individual comments.
bool HGraph::Optimize(BailoutReason* bailout_reason) {
  OrderBlocks();
  AssignDominators();

  // We need to create a HConstant "zero" now so that GVN will fold every
  // zero-valued constant in the graph together.
  // The constant is needed to make idef-based bounds check work: the pass
  // evaluates relations with "zero" and that zero cannot be created after GVN.
  GetConstant0();

#ifdef DEBUG
  // Do a full verify after building the graph and computing dominators.
  Verify(true);
#endif

  if (FLAG_analyze_environment_liveness && maximum_environment_size() != 0) {
    Run<HEnvironmentLivenessAnalysisPhase>();
  }

  // Unsupported phi uses of const or arguments abort optimization; the
  // redundant-phi pass runs in between because it can eliminate phis that
  // would otherwise fail the arguments check.
  if (!CheckConstPhiUses()) {
    *bailout_reason = kUnsupportedPhiUseOfConstVariable;
    return false;
  }
  Run<HRedundantPhiEliminationPhase>();
  if (!CheckArgumentsPhiUses()) {
    *bailout_reason = kUnsupportedPhiUseOfArguments;
    return false;
  }

  // Find and mark unreachable code to simplify optimizations, especially gvn,
  // where unreachable code could unnecessarily defeat LICM.
  Run<HMarkUnreachableBlocksPhase>();

  if (FLAG_dead_code_elimination) Run<HDeadCodeEliminationPhase>();
  if (FLAG_use_escape_analysis) Run<HEscapeAnalysisPhase>();

  if (FLAG_load_elimination) Run<HLoadEliminationPhase>();

  CollectPhis();

  if (has_osr()) osr()->FinishOsrValues();

  Run<HInferRepresentationPhase>();

  // Remove HSimulate instructions that have turned out not to be needed
  // after all by folding them into the following HSimulate.
  // This must happen after inferring representations.
  Run<HMergeRemovableSimulatesPhase>();

  Run<HMarkDeoptimizeOnUndefinedPhase>();
  Run<HRepresentationChangesPhase>();

  Run<HInferTypesPhase>();

  // Must be performed before canonicalization to ensure that Canonicalize
  // will not remove semantically meaningful ToInt32 operations e.g. BIT_OR with
  // zero.
  if (FLAG_opt_safe_uint32_operations) Run<HUint32AnalysisPhase>();

  if (FLAG_use_canonicalizing) Run<HCanonicalizePhase>();

  if (FLAG_use_gvn) Run<HGlobalValueNumberingPhase>();

  if (FLAG_check_elimination) Run<HCheckEliminationPhase>();

  if (FLAG_store_elimination) Run<HStoreEliminationPhase>();

  Run<HRangeAnalysisPhase>();

  Run<HComputeChangeUndefinedToNaN>();

  // Eliminate redundant stack checks on backwards branches.
  Run<HStackCheckEliminationPhase>();

  if (FLAG_array_bounds_checks_elimination) Run<HBoundsCheckEliminationPhase>();
  if (FLAG_array_bounds_checks_hoisting) Run<HBoundsCheckHoistingPhase>();
  if (FLAG_array_index_dehoisting) Run<HDehoistIndexComputationsPhase>();
  if (FLAG_dead_code_elimination) Run<HDeadCodeEliminationPhase>();

  RestoreActualValues();

  // Find unreachable code a second time, GVN and other optimizations may have
  // made blocks unreachable that were previously reachable.
  Run<HMarkUnreachableBlocksPhase>();

  return true;
}
4073 
4074 
// Rewrites instructions whose ActualValue() differs from themselves so
// that subsequent passes and code generation refer to the actual value.
void HGraph::RestoreActualValues() {
  HPhase phase("H_Restore actual values", this);

  for (int block_index = 0; block_index < blocks()->length(); block_index++) {
    HBasicBlock* block = blocks()->at(block_index);

#ifdef DEBUG
    // Phis are expected to already be their own actual value.
    for (int i = 0; i < block->phis()->length(); i++) {
      HPhi* phi = block->phis()->at(i);
      ASSERT(phi->ActualValue() == phi);
    }
#endif

    for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
      HInstruction* instruction = it.Current();
      if (instruction->ActualValue() == instruction) continue;
      if (instruction->CheckFlag(HValue::kIsDead)) {
        // The instruction was marked as deleted but left in the graph
        // as a control flow dependency point for subsequent
        // instructions.
        instruction->DeleteAndReplaceWith(instruction->ActualValue());
      } else {
        ASSERT(instruction->IsInformativeDefinition());
        if (instruction->IsPurelyInformativeDefinition()) {
          // Purely informative redefinitions can be removed entirely,
          // replaced by the operand they redefine.
          instruction->DeleteAndReplaceWith(instruction->RedefinedOperand());
        } else {
          // Otherwise keep the instruction but redirect its uses to the
          // actual value.
          instruction->ReplaceAllUsesWith(instruction->ActualValue());
        }
      }
    }
  }
}
4107 
4108 
4110  ZoneList<HValue*> arguments(count, zone());
4111  for (int i = 0; i < count; ++i) {
4112  arguments.Add(Pop(), zone());
4113  }
4114 
4115  while (!arguments.is_empty()) {
4116  Add<HPushArgument>(arguments.RemoveLast());
4117  }
4118 }
4119 
4120 
4121 template <class Instruction>
4123  PushArgumentsFromEnvironment(call->argument_count());
4124  return call;
4125 }
4126 
4127 
4129  // First special is HContext.
4130  HInstruction* context = Add<HContext>();
4131  environment()->BindContext(context);
4132 
4133  // Create an arguments object containing the initial parameters. Set the
4134  // initial values of parameters including "this" having parameter index 0.
4135  ASSERT_EQ(scope->num_parameters() + 1, environment()->parameter_count());
4136  HArgumentsObject* arguments_object =
4137  New<HArgumentsObject>(environment()->parameter_count());
4138  for (int i = 0; i < environment()->parameter_count(); ++i) {
4139  HInstruction* parameter = Add<HParameter>(i);
4140  arguments_object->AddArgument(parameter, zone());
4141  environment()->Bind(i, parameter);
4142  }
4143  AddInstruction(arguments_object);
4144  graph()->SetArgumentsObject(arguments_object);
4145 
4146  HConstant* undefined_constant = graph()->GetConstantUndefined();
4147  // Initialize specials and locals to undefined.
4148  for (int i = environment()->parameter_count() + 1;
4149  i < environment()->length();
4150  ++i) {
4151  environment()->Bind(i, undefined_constant);
4152  }
4153 
4154  // Handle the arguments and arguments shadow variables specially (they do
4155  // not have declarations).
4156  if (scope->arguments() != NULL) {
4157  if (!scope->arguments()->IsStackAllocated()) {
4158  return Bailout(kContextAllocatedArguments);
4159  }
4160 
4161  environment()->Bind(scope->arguments(),
4162  graph()->GetArgumentsObject());
4163  }
4164 }
4165 
4166 
4168  for (int i = 0; i < statements->length(); i++) {
4169  Statement* stmt = statements->at(i);
4170  CHECK_ALIVE(Visit(stmt));
4171  if (stmt->IsJump()) break;
4172  }
4173 }
4174 
4175 
void HOptimizedGraphBuilder::VisitBlock(Block* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  // Blocks that carry their own scope are not supported; bail out.
  if (stmt->scope() != NULL) {
    return Bailout(kScopedBlock);
  }
  // Visit the statements inside a break scope so labelled breaks that
  // target this block are routed to break_info's break block.
  BreakAndContinueInfo break_info(stmt);
  { BreakAndContinueScope push(&break_info, this);
    CHECK_BAILOUT(VisitStatements(stmt->statements()));
  }
  // If anything broke out of the block, merge the break edges with the
  // fall-through edge (when still live) at the block's exit.
  HBasicBlock* break_block = break_info.break_block();
  if (break_block != NULL) {
    if (current_block() != NULL) Goto(break_block);
    break_block->SetJoinId(stmt->ExitId());
    set_current_block(break_block);
  }
}
4194 
4195 
void HOptimizedGraphBuilder::VisitExpressionStatement(
    ExpressionStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  // An expression statement is evaluated only for its side effects.
  VisitForEffect(stmt->expression());
}
4203 
4204 
void HOptimizedGraphBuilder::VisitEmptyStatement(EmptyStatement* stmt) {
  // Nothing to emit; just check the builder invariants.
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
}
4210 
4211 
void HOptimizedGraphBuilder::VisitIfStatement(IfStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  // Constant conditions: visit only the live arm, preceded by a simulate
  // at that arm's entry id.
  if (stmt->condition()->ToBooleanIsTrue()) {
    Add<HSimulate>(stmt->ThenId());
    Visit(stmt->then_statement());
  } else if (stmt->condition()->ToBooleanIsFalse()) {
    Add<HSimulate>(stmt->ElseId());
    Visit(stmt->else_statement());
  } else {
    HBasicBlock* cond_true = graph()->CreateBasicBlock();
    HBasicBlock* cond_false = graph()->CreateBasicBlock();
    CHECK_BAILOUT(VisitForControl(stmt->condition(), cond_true, cond_false));

    // Each arm is translated only if the condition can actually reach it.
    // After visiting, the variable is rebound to the arm's exit block
    // (NULL when the arm is unreachable or ends in a jump) for the join.
    if (cond_true->HasPredecessor()) {
      cond_true->SetJoinId(stmt->ThenId());
      set_current_block(cond_true);
      CHECK_BAILOUT(Visit(stmt->then_statement()));
      cond_true = current_block();
    } else {
      cond_true = NULL;
    }

    if (cond_false->HasPredecessor()) {
      cond_false->SetJoinId(stmt->ElseId());
      set_current_block(cond_false);
      CHECK_BAILOUT(Visit(stmt->else_statement()));
      cond_false = current_block();
    } else {
      cond_false = NULL;
    }

    HBasicBlock* join = CreateJoin(cond_true, cond_false, stmt->IfId());
    set_current_block(join);
  }
}
4249 
4250 
4251 HBasicBlock* HOptimizedGraphBuilder::BreakAndContinueScope::Get(
4252  BreakableStatement* stmt,
4253  BreakType type,
4254  int* drop_extra) {
4255  *drop_extra = 0;
4256  BreakAndContinueScope* current = this;
4257  while (current != NULL && current->info()->target() != stmt) {
4258  *drop_extra += current->info()->drop_extra();
4259  current = current->next();
4260  }
4261  ASSERT(current != NULL); // Always found (unless stack is malformed).
4262 
4263  if (type == BREAK) {
4264  *drop_extra += current->info()->drop_extra();
4265  }
4266 
4267  HBasicBlock* block = NULL;
4268  switch (type) {
4269  case BREAK:
4270  block = current->info()->break_block();
4271  if (block == NULL) {
4272  block = current->owner()->graph()->CreateBasicBlock();
4273  current->info()->set_break_block(block);
4274  }
4275  break;
4276 
4277  case CONTINUE:
4278  block = current->info()->continue_block();
4279  if (block == NULL) {
4280  block = current->owner()->graph()->CreateBasicBlock();
4281  current->info()->set_continue_block(block);
4282  }
4283  break;
4284  }
4285 
4286  return block;
4287 }
4288 
4289 
void HOptimizedGraphBuilder::VisitContinueStatement(
    ContinueStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  // Unwind any extra environment values accumulated between here and the
  // continue target, then jump to its (lazily created) continue block.
  int drop_extra = 0;
  HBasicBlock* continue_block = break_scope()->Get(
      stmt->target(), BreakAndContinueScope::CONTINUE, &drop_extra);
  Drop(drop_extra);
  Goto(continue_block);
  // NOTE(review): this listing appears to be missing a trailing
  // set_current_block(NULL) (cf. TestContext::ReturnControl, which clears
  // the current block after dispatching) -- confirm against the full source.
}
4302 
4303 
void HOptimizedGraphBuilder::VisitBreakStatement(BreakStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  // Unwind any extra environment values accumulated between here and the
  // break target, then jump to its (lazily created) break block.
  int drop_extra = 0;
  HBasicBlock* break_block = break_scope()->Get(
      stmt->target(), BreakAndContinueScope::BREAK, &drop_extra);
  Drop(drop_extra);
  Goto(break_block);
  // NOTE(review): this listing appears to be missing a trailing
  // set_current_block(NULL) after the jump -- confirm against the full source.
}
4315 
4316 
void HOptimizedGraphBuilder::VisitReturnStatement(ReturnStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  FunctionState* state = function_state();
  AstContext* context = call_context();
  if (context == NULL) {
    // Not an inlined return, so an actual one.
    CHECK_ALIVE(VisitForValue(stmt->expression()));
    HValue* result = environment()->Pop();
    Add<HReturn>(result);
  } else if (state->inlining_kind() == CONSTRUCT_CALL_RETURN) {
    // Return from an inlined construct call. In a test context the return value
    // will always evaluate to true, in a value context the return value needs
    // to be a JSObject.
    if (context->IsTest()) {
      TestContext* test = TestContext::cast(context);
      CHECK_ALIVE(VisitForEffect(stmt->expression()));
      Goto(test->if_true(), state);
    } else if (context->IsEffect()) {
      CHECK_ALIVE(VisitForEffect(stmt->expression()));
      Goto(function_return(), state);
    } else {
      ASSERT(context->IsValue());
      CHECK_ALIVE(VisitForValue(stmt->expression()));
      HValue* return_value = Pop();
      // If the returned value is a spec object, it is the constructor
      // result; otherwise the implicit receiver is returned instead.
      HValue* receiver = environment()->arguments_environment()->Lookup(0);
      HHasInstanceTypeAndBranch* typecheck =
          New<HHasInstanceTypeAndBranch>(return_value,
      // NOTE(review): the instance-type bound arguments and closing paren
      // of this call are missing from this listing excerpt -- restore them
      // from the full source.
      HBasicBlock* if_spec_object = graph()->CreateBasicBlock();
      HBasicBlock* not_spec_object = graph()->CreateBasicBlock();
      typecheck->SetSuccessorAt(0, if_spec_object);
      typecheck->SetSuccessorAt(1, not_spec_object);
      FinishCurrentBlock(typecheck);
      AddLeaveInlined(if_spec_object, return_value, state);
      AddLeaveInlined(not_spec_object, receiver, state);
    }
  } else if (state->inlining_kind() == SETTER_CALL_RETURN) {
    // Return from an inlined setter call. The returned value is never used, the
    // value of an assignment is always the value of the RHS of the assignment.
    CHECK_ALIVE(VisitForEffect(stmt->expression()));
    if (context->IsTest()) {
      HValue* rhs = environment()->arguments_environment()->Lookup(1);
      context->ReturnValue(rhs);
    } else if (context->IsEffect()) {
      Goto(function_return(), state);
    } else {
      ASSERT(context->IsValue());
      HValue* rhs = environment()->arguments_environment()->Lookup(1);
      AddLeaveInlined(rhs, state);
    }
  } else {
    // Return from a normal inlined function. Visit the subexpression in the
    // expression context of the call.
    if (context->IsTest()) {
      TestContext* test = TestContext::cast(context);
      VisitForControl(stmt->expression(), test->if_true(), test->if_false());
    } else if (context->IsEffect()) {
      // Visit in value context and ignore the result. This is needed to keep
      // environment in sync with full-codegen since some visitors (e.g.
      // VisitCountOperation) use the operand stack differently depending on
      // context.
      CHECK_ALIVE(VisitForValue(stmt->expression()));
      Pop();
      Goto(function_return(), state);
    } else {
      ASSERT(context->IsValue());
      CHECK_ALIVE(VisitForValue(stmt->expression()));
      AddLeaveInlined(Pop(), state);
    }
  }
  // NOTE(review): this listing appears to be missing a trailing
  // set_current_block(NULL) -- confirm against the full source.
}
4392 
4393 
void HOptimizedGraphBuilder::VisitWithStatement(WithStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  // 'with' statements are not supported by the optimizing compiler.
  return Bailout(kWithStatement);
}
4400 
4401 
void HOptimizedGraphBuilder::VisitSwitchStatement(SwitchStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());

  // We only optimize switch statements with a bounded number of clauses.
  const int kCaseClauseLimit = 128;
  ZoneList<CaseClause*>* clauses = stmt->cases();
  int clause_count = clauses->length();
  ZoneList<HBasicBlock*> body_blocks(clause_count, zone());
  if (clause_count > kCaseClauseLimit) {
    return Bailout(kSwitchStatementTooManyClauses);
  }

  // Keep the tag on the stack (Top, not Pop) while the label comparisons
  // are built; each body block and the final fall-off drop it.
  CHECK_ALIVE(VisitForValue(stmt->tag()));
  Add<HSimulate>(stmt->EntryId());
  HValue* tag_value = Top();
  Type* tag_type = stmt->tag()->bounds().lower;

  // 1. Build all the tests, with dangling true branches
  // NOTE(review): default_id is recorded here but not read again in this
  // excerpt -- presumably leftover; confirm against the full source.
  BailoutId default_id = BailoutId::None();
  for (int i = 0; i < clause_count; ++i) {
    CaseClause* clause = clauses->at(i);
    if (clause->is_default()) {
      // The default clause gets no test; a NULL placeholder keeps
      // body_blocks index-aligned with clauses.
      body_blocks.Add(NULL, zone());
      if (default_id.IsNone()) default_id = clause->EntryId();
      continue;
    }

    // Generate a compare and branch.
    CHECK_ALIVE(VisitForValue(clause->label()));
    HValue* label_value = Pop();

    Type* label_type = clause->label()->bounds().lower;
    Type* combined_type = clause->compare_type();
    HControlInstruction* compare = BuildCompareInstruction(
        Token::EQ_STRICT, tag_value, label_value, tag_type, label_type,
        combined_type,
        ScriptPositionToSourcePosition(stmt->tag()->position()),
        ScriptPositionToSourcePosition(clause->label()->position()),
        PUSH_BEFORE_SIMULATE, clause->id());

    HBasicBlock* next_test_block = graph()->CreateBasicBlock();
    HBasicBlock* body_block = graph()->CreateBasicBlock();
    body_blocks.Add(body_block, zone());
    compare->SetSuccessorAt(0, body_block);
    compare->SetSuccessorAt(1, next_test_block);
    FinishCurrentBlock(compare);

    set_current_block(body_block);
    Drop(1);  // tag_value

    set_current_block(next_test_block);
  }

  // Save the current block to use for the default or to join with the
  // exit.
  HBasicBlock* last_block = current_block();
  Drop(1);  // tag_value

  // 2. Loop over the clauses and the linked list of tests in lockstep,
  // translating the clause bodies.
  HBasicBlock* fall_through_block = NULL;

  BreakAndContinueInfo break_info(stmt);
  { BreakAndContinueScope push(&break_info, this);
    for (int i = 0; i < clause_count; ++i) {
      CaseClause* clause = clauses->at(i);

      // Identify the block where normal (non-fall-through) control flow
      // goes to.
      HBasicBlock* normal_block = NULL;
      if (clause->is_default()) {
        if (last_block == NULL) continue;
        normal_block = last_block;
        last_block = NULL;  // Cleared to indicate we've handled it.
      } else {
        normal_block = body_blocks[i];
      }

      // Merge the fall-through edge from the previous clause body with
      // the normal entry edge of this clause.
      if (fall_through_block == NULL) {
        set_current_block(normal_block);
      } else {
        HBasicBlock* join = CreateJoin(fall_through_block,
                                       normal_block,
                                       clause->EntryId());
        set_current_block(join);
      }

      CHECK_BAILOUT(VisitStatements(clause->statements()));
      fall_through_block = current_block();
    }
  }

  // Create an up-to-3-way join. Use the break block if it exists since
  // it's already a join block.
  HBasicBlock* break_block = break_info.break_block();
  if (break_block == NULL) {
    set_current_block(CreateJoin(fall_through_block,
                                 last_block,
                                 stmt->ExitId()));
  } else {
    if (fall_through_block != NULL) Goto(fall_through_block, break_block);
    if (last_block != NULL) Goto(last_block, break_block);
    break_block->SetJoinId(stmt->ExitId());
    set_current_block(break_block);
  }
}
4510 
4511 
4513  HBasicBlock* loop_entry,
4514  BreakAndContinueInfo* break_info) {
4515  BreakAndContinueScope push(break_info, this);
4516  Add<HSimulate>(stmt->StackCheckId());
4517  HStackCheck* stack_check =
4518  HStackCheck::cast(Add<HStackCheck>(HStackCheck::kBackwardsBranch));
4519  ASSERT(loop_entry->IsLoopHeader());
4520  loop_entry->loop_information()->set_stack_check(stack_check);
4521  CHECK_BAILOUT(Visit(stmt->body()));
4522 }
4523 
4524 
void HOptimizedGraphBuilder::VisitDoWhileStatement(DoWhileStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  ASSERT(current_block() != NULL);
  HBasicBlock* loop_entry = BuildLoopEntry(stmt);

  BreakAndContinueInfo break_info(stmt);
  CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info));
  // Merge the body's normal exit with any continue edges; the result
  // feeds the condition check (do-while tests after the body).
  HBasicBlock* body_exit =
      JoinContinue(stmt, current_block(), break_info.continue_block());
  HBasicBlock* loop_successor = NULL;
  // A constant-true condition needs no branch: body_exit becomes the
  // back edge directly and the loop has no normal exit.
  if (body_exit != NULL && !stmt->cond()->ToBooleanIsTrue()) {
    set_current_block(body_exit);
    loop_successor = graph()->CreateBasicBlock();
    if (stmt->cond()->ToBooleanIsFalse()) {
      // Constant-false condition: the body runs exactly once.
      Goto(loop_successor);
      body_exit = NULL;
    } else {
      // The block for a true condition, the actual predecessor block of the
      // back edge.
      body_exit = graph()->CreateBasicBlock();
      CHECK_BAILOUT(VisitForControl(stmt->cond(), body_exit, loop_successor));
    }
    // Blocks that turned out unreachable are recorded as NULL for
    // CreateLoop below.
    if (body_exit != NULL && body_exit->HasPredecessor()) {
      body_exit->SetJoinId(stmt->BackEdgeId());
    } else {
      body_exit = NULL;
    }
    if (loop_successor->HasPredecessor()) {
      loop_successor->SetJoinId(stmt->ExitId());
    } else {
      loop_successor = NULL;
    }
  }
  HBasicBlock* loop_exit = CreateLoop(stmt,
                                      loop_entry,
                                      body_exit,
                                      loop_successor,
                                      break_info.break_block());
  set_current_block(loop_exit);
}
4567 
4568 
void HOptimizedGraphBuilder::VisitWhileStatement(WhileStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  ASSERT(current_block() != NULL);
  HBasicBlock* loop_entry = BuildLoopEntry(stmt);

  // If the condition is constant true, do not generate a branch.
  HBasicBlock* loop_successor = NULL;
  if (!stmt->cond()->ToBooleanIsTrue()) {
    HBasicBlock* body_entry = graph()->CreateBasicBlock();
    loop_successor = graph()->CreateBasicBlock();
    CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor));
    // Only continue into the body if the condition can actually reach it;
    // otherwise current_block() stays NULL and the body is skipped below.
    if (body_entry->HasPredecessor()) {
      body_entry->SetJoinId(stmt->BodyId());
      set_current_block(body_entry);
    }
    if (loop_successor->HasPredecessor()) {
      loop_successor->SetJoinId(stmt->ExitId());
    } else {
      loop_successor = NULL;
    }
  }

  BreakAndContinueInfo break_info(stmt);
  if (current_block() != NULL) {
    CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info));
  }
  // Merge the body's normal exit with any continue edges to form the
  // back-edge predecessor.
  HBasicBlock* body_exit =
      JoinContinue(stmt, current_block(), break_info.continue_block());
  HBasicBlock* loop_exit = CreateLoop(stmt,
                                      loop_entry,
                                      body_exit,
                                      loop_successor,
                                      break_info.break_block());
  set_current_block(loop_exit);
}
4606 
4607 
void HOptimizedGraphBuilder::VisitForStatement(ForStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  // The init clause runs once, before the loop header.
  if (stmt->init() != NULL) {
    CHECK_ALIVE(Visit(stmt->init()));
  }
  ASSERT(current_block() != NULL);
  HBasicBlock* loop_entry = BuildLoopEntry(stmt);

  // A missing condition means an unconditional loop: no branch, no
  // normal successor.
  HBasicBlock* loop_successor = NULL;
  if (stmt->cond() != NULL) {
    HBasicBlock* body_entry = graph()->CreateBasicBlock();
    loop_successor = graph()->CreateBasicBlock();
    CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor));
    if (body_entry->HasPredecessor()) {
      body_entry->SetJoinId(stmt->BodyId());
      set_current_block(body_entry);
    }
    if (loop_successor->HasPredecessor()) {
      loop_successor->SetJoinId(stmt->ExitId());
    } else {
      loop_successor = NULL;
    }
  }

  BreakAndContinueInfo break_info(stmt);
  if (current_block() != NULL) {
    CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info));
  }
  // Merge the body's normal exit with any continue edges; continue jumps
  // here so that the next clause still runs.
  HBasicBlock* body_exit =
      JoinContinue(stmt, current_block(), break_info.continue_block());

  // The next clause runs on every back edge, after the body.
  if (stmt->next() != NULL && body_exit != NULL) {
    set_current_block(body_exit);
    CHECK_BAILOUT(Visit(stmt->next()));
    body_exit = current_block();
  }

  HBasicBlock* loop_exit = CreateLoop(stmt,
                                      loop_entry,
                                      body_exit,
                                      loop_successor,
                                      break_info.break_block());
  set_current_block(loop_exit);
}
4654 
4655 
// Builds optimized code for a for-in loop. Only the "fast" case is handled:
// a stack-local 'each' variable iterating the enum cache of the enumerable's
// map; everything else bails out to full codegen.
void HOptimizedGraphBuilder::VisitForInStatement(ForInStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());

  if (!FLAG_optimize_for_in) {
    return Bailout(kForInStatementOptimizationIsDisabled);
  }

  if (stmt->for_in_type() != ForInStatement::FAST_FOR_IN) {
    return Bailout(kForInStatementIsNotFastCase);
  }

  // Only a stack-allocated local loop variable is supported.
  if (!stmt->each()->IsVariableProxy() ||
      !stmt->each()->AsVariableProxy()->var()->IsStackLocal()) {
    return Bailout(kForInStatementWithNonLocalEachVariable);
  }

  Variable* each_var = stmt->each()->AsVariableProxy()->var();

  CHECK_ALIVE(VisitForValue(stmt->enumerable()));
  HValue* enumerable = Top();  // Leave enumerable at the top.

  HInstruction* map = Add<HForInPrepareMap>(enumerable);
  Add<HSimulate>(stmt->PrepareId());

  // NOTE(review): the argument line of this call was lost in extraction —
  // confirm against the upstream source.
  HInstruction* array = Add<HForInCacheArray>(

  HInstruction* enum_length = Add<HMapEnumLength>(map);

  HInstruction* start_index = Add<HConstant>(0);

  // Environment stack layout from here (top = slot 0):
  // index, enum_length (limit), array (enum cache), map, enumerable.
  Push(map);
  Push(array);
  Push(enum_length);
  Push(start_index);

  // NOTE(review): the argument line of this call was lost in extraction —
  // confirm against the upstream source.
  HInstruction* index_cache = Add<HForInCacheArray>(
  HForInCacheArray::cast(array)->set_index_cache(
      HForInCacheArray::cast(index_cache));

  HBasicBlock* loop_entry = BuildLoopEntry(stmt);

  HValue* index = environment()->ExpressionStackAt(0);
  HValue* limit = environment()->ExpressionStackAt(1);

  // Check that we still have more keys.
  HCompareNumericAndBranch* compare_index =
      New<HCompareNumericAndBranch>(index, limit, Token::LT);
  // NOTE(review): the representation arguments were lost in extraction.
  compare_index->set_observed_input_representation(

  HBasicBlock* loop_body = graph()->CreateBasicBlock();
  HBasicBlock* loop_successor = graph()->CreateBasicBlock();

  compare_index->SetSuccessorAt(0, loop_body);
  compare_index->SetSuccessorAt(1, loop_successor);
  FinishCurrentBlock(compare_index);

  set_current_block(loop_successor);
  // Pop the five loop bookkeeping slots (incl. the enumerable) on exit.
  Drop(5);

  set_current_block(loop_body);

  HValue* key = Add<HLoadKeyed>(
      environment()->ExpressionStackAt(2),  // Enum cache.
      environment()->ExpressionStackAt(0),  // Iteration index.
      environment()->ExpressionStackAt(0),
      FAST_ELEMENTS);

  // Check if the expected map still matches that of the enumerable.
  // If not just deoptimize.
  Add<HCheckMapValue>(environment()->ExpressionStackAt(4),
                      environment()->ExpressionStackAt(3));

  Bind(each_var, key);

  // 5 = number of stack slots to drop when 'break' leaves the loop.
  BreakAndContinueInfo break_info(stmt, 5);
  CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info));

  HBasicBlock* body_exit =
      JoinContinue(stmt, current_block(), break_info.continue_block());

  if (body_exit != NULL) {
    set_current_block(body_exit);

    // Advance the iteration index for the back edge.
    HValue* current_index = Pop();
    Push(AddUncasted<HAdd>(current_index, graph()->GetConstant1()));
    body_exit = current_block();
  }

  HBasicBlock* loop_exit = CreateLoop(stmt,
                                      loop_entry,
                                      body_exit,
                                      loop_successor,
                                      break_info.break_block());

  set_current_block(loop_exit);
}
4757 
4758 
// for-of is not handled by this optimizing compiler; bail out to full
// codegen.
void HOptimizedGraphBuilder::VisitForOfStatement(ForOfStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  return Bailout(kForOfStatement);
}
4765 
4766 
// try/catch is not handled by this optimizing compiler; bail out to full
// codegen.
void HOptimizedGraphBuilder::VisitTryCatchStatement(TryCatchStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  return Bailout(kTryCatchStatement);
}
4773 
4774 
// try/finally is not handled by this optimizing compiler; bail out to full
// codegen.
void HOptimizedGraphBuilder::VisitTryFinallyStatement(
    TryFinallyStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  return Bailout(kTryFinallyStatement);
}
4782 
4783 
// 'debugger' statements are not handled by this optimizing compiler; bail
// out to full codegen.
void HOptimizedGraphBuilder::VisitDebuggerStatement(DebuggerStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  return Bailout(kDebuggerStatement);
}
4790 
4791 
// Never dispatched to directly; case clauses are not visited through the
// generic AST visitor.
void HOptimizedGraphBuilder::VisitCaseClause(CaseClause* clause) {
  UNREACHABLE();
}
4795 
4796 
4797 void HOptimizedGraphBuilder::VisitFunctionLiteral(FunctionLiteral* expr) {
4798  ASSERT(!HasStackOverflow());
4799  ASSERT(current_block() != NULL);
4800  ASSERT(current_block()->HasPredecessor());
4801  Handle<SharedFunctionInfo> shared_info = expr->shared_info();
4802  if (shared_info.is_null()) {
4803  shared_info = Compiler::BuildFunctionInfo(expr, current_info()->script());
4804  }
4805  // We also have a stack overflow if the recursive compilation did.
4806  if (HasStackOverflow()) return;
4807  HFunctionLiteral* instr =
4808  New<HFunctionLiteral>(shared_info, expr->pretenure());
4809  return ast_context()->ReturnInstruction(instr, expr->id());
4810 }
4811 
4812 
// Native function literals are not handled by this optimizing compiler;
// bail out to full codegen.
void HOptimizedGraphBuilder::VisitNativeFunctionLiteral(
    NativeFunctionLiteral* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  return Bailout(kNativeFunctionLiteral);
}
4820 
4821 
4822 void HOptimizedGraphBuilder::VisitConditional(Conditional* expr) {
4823  ASSERT(!HasStackOverflow());
4824  ASSERT(current_block() != NULL);
4825  ASSERT(current_block()->HasPredecessor());
4826  HBasicBlock* cond_true = graph()->CreateBasicBlock();
4827  HBasicBlock* cond_false = graph()->CreateBasicBlock();
4828  CHECK_BAILOUT(VisitForControl(expr->condition(), cond_true, cond_false));
4829 
4830  // Visit the true and false subexpressions in the same AST context as the
4831  // whole expression.
4832  if (cond_true->HasPredecessor()) {
4833  cond_true->SetJoinId(expr->ThenId());
4834  set_current_block(cond_true);
4835  CHECK_BAILOUT(Visit(expr->then_expression()));
4836  cond_true = current_block();
4837  } else {
4838  cond_true = NULL;
4839  }
4840 
4841  if (cond_false->HasPredecessor()) {
4842  cond_false->SetJoinId(expr->ElseId());
4843  set_current_block(cond_false);
4844  CHECK_BAILOUT(Visit(expr->else_expression()));
4845  cond_false = current_block();
4846  } else {
4847  cond_false = NULL;
4848  }
4849 
4850  if (!ast_context()->IsTest()) {
4851  HBasicBlock* join = CreateJoin(cond_true, cond_false, expr->id());
4852  set_current_block(join);
4853  if (join != NULL && !ast_context()->IsEffect()) {
4854  return ast_context()->ReturnValue(Pop());
4855  }
4856  }
4857 }
4858 
4859 
4860 HOptimizedGraphBuilder::GlobalPropertyAccess
4861  HOptimizedGraphBuilder::LookupGlobalProperty(
4862  Variable* var, LookupResult* lookup, PropertyAccessType access_type) {
4863  if (var->is_this() || !current_info()->has_global_object()) {
4864  return kUseGeneric;
4865  }
4866  Handle<GlobalObject> global(current_info()->global_object());
4867  global->Lookup(*var->name(), lookup);
4868  if (!lookup->IsNormal() ||
4869  (access_type == STORE && lookup->IsReadOnly()) ||
4870  lookup->holder() != *global) {
4871  return kUseGeneric;
4872  }
4873 
4874  return kUseCell;
4875 }
4876 
4877 
4878 HValue* HOptimizedGraphBuilder::BuildContextChainWalk(Variable* var) {
4879  ASSERT(var->IsContextSlot());
4880  HValue* context = environment()->context();
4881  int length = current_info()->scope()->ContextChainLength(var->scope());
4882  while (length-- > 0) {
4883  context = Add<HLoadNamedField>(
4884  context, static_cast<HValue*>(NULL),
4885  HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
4886  }
4887  return context;
4888 }
4889 
4890 
// Emits the load for a variable reference, dispatching on where the
// variable lives: global (cell or generic), stack local/parameter, context
// slot, or dynamic lookup (bailout).
void HOptimizedGraphBuilder::VisitVariableProxy(VariableProxy* expr) {
  if (expr->is_this()) {
    // NOTE(review): the statement inside this guard was lost in extraction
    // (presumably 'this'-usage bookkeeping) — confirm against upstream.
  }

  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  Variable* variable = expr->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      if (IsLexicalVariableMode(variable->mode())) {
        // TODO(rossberg): should this be an ASSERT?
        return Bailout(kReferenceToGlobalLexicalVariable);
      }
      // Handle known global constants like 'undefined' specially to avoid a
      // load from a global cell for them.
      Handle<Object> constant_value =
          isolate()->factory()->GlobalConstantFor(variable->name());
      if (!constant_value.is_null()) {
        HConstant* instr = New<HConstant>(constant_value);
        return ast_context()->ReturnInstruction(instr, expr->id());
      }

      LookupResult lookup(isolate());
      GlobalPropertyAccess type = LookupGlobalProperty(variable, &lookup, LOAD);

      // Globals needing access checks must use the generic path.
      if (type == kUseCell &&
          current_info()->global_object()->IsAccessCheckNeeded()) {
        type = kUseGeneric;
      }

      if (type == kUseCell) {
        Handle<GlobalObject> global(current_info()->global_object());
        Handle<PropertyCell> cell(global->GetPropertyCell(&lookup));
        if (cell->type()->IsConstant()) {
          // Embed the cell's constant value directly and register a
          // dependency so the code is invalidated if the cell changes.
          cell->AddDependentCompilationInfo(top_info());
          Handle<Object> constant_object = cell->type()->AsConstant();
          if (constant_object->IsConsString()) {
            constant_object =
                FlattenGetString(Handle<String>::cast(constant_object));
          }
          HConstant* constant = New<HConstant>(constant_object);
          return ast_context()->ReturnInstruction(constant, expr->id());
        } else {
          HLoadGlobalCell* instr =
              New<HLoadGlobalCell>(cell, lookup.GetPropertyDetails());
          return ast_context()->ReturnInstruction(instr, expr->id());
        }
      } else {
        // Generic path: load the global object from the context, then do a
        // generic named load.
        HValue* global_object = Add<HLoadNamedField>(
            context(), static_cast<HValue*>(NULL),
            HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
        HLoadGlobalGeneric* instr =
            New<HLoadGlobalGeneric>(global_object,
                                    variable->name(),
                                    ast_context()->is_for_typeof());
        return ast_context()->ReturnInstruction(instr, expr->id());
      }
    }

    case Variable::PARAMETER:
    case Variable::LOCAL: {
      HValue* value = LookupAndMakeLive(variable);
      if (value == graph()->GetConstantHole()) {
        // The hole marks an uninitialized declared (non-VAR) binding.
        ASSERT(IsDeclaredVariableMode(variable->mode()) &&
               variable->mode() != VAR);
        return Bailout(kReferenceToUninitializedVariable);
      }
      return ast_context()->ReturnValue(value);
    }

    case Variable::CONTEXT: {
      HValue* context = BuildContextChainWalk(variable);
      HLoadContextSlot* instr = new(zone()) HLoadContextSlot(context, variable);
      return ast_context()->ReturnInstruction(instr, expr->id());
    }

    case Variable::LOOKUP:
      return Bailout(kReferenceToAVariableWhichRequiresDynamicLookup);
  }
}
4973 
4974 
4975 void HOptimizedGraphBuilder::VisitLiteral(Literal* expr) {
4976  ASSERT(!HasStackOverflow());
4977  ASSERT(current_block() != NULL);
4978  ASSERT(current_block()->HasPredecessor());
4979  HConstant* instr = New<HConstant>(expr->value());
4980  return ast_context()->ReturnInstruction(instr, expr->id());
4981 }
4982 
4983 
4984 void HOptimizedGraphBuilder::VisitRegExpLiteral(RegExpLiteral* expr) {
4985  ASSERT(!HasStackOverflow());
4986  ASSERT(current_block() != NULL);
4987  ASSERT(current_block()->HasPredecessor());
4988  Handle<JSFunction> closure = function_state()->compilation_info()->closure();
4989  Handle<FixedArray> literals(closure->literals());
4990  HRegExpLiteral* instr = New<HRegExpLiteral>(literals,
4991  expr->pattern(),
4992  expr->flags(),
4993  expr->literal_index());
4994  return ast_context()->ReturnInstruction(instr, expr->id());
4995 }
4996 
4997 
4998 static bool CanInlinePropertyAccess(Type* type) {
4999  if (type->Is(Type::NumberOrString())) return true;
5000  if (!type->IsClass()) return false;
5001  Handle<Map> map = type->AsClass();
5002  return map->IsJSObjectMap() &&
5003  !map->is_dictionary_map() &&
5004  !map->has_named_interceptor();
5005 }
5006 
5007 
// Determines whether the given array or object literal boilerplate satisfies
// all limits to be considered for fast deep-copying and computes the total
// size of all objects that are part of the graph.
// |max_depth| bounds recursion into nested literals; |max_properties| is a
// shared budget decremented for every element/field encountered.
static bool IsFastLiteral(Handle<JSObject> boilerplate,
                          int max_depth,
                          int* max_properties) {
  // Migrate a deprecated map first; if migration fails, the boilerplate is
  // not eligible.
  if (boilerplate->map()->is_deprecated()) {
    Handle<Object> result = JSObject::TryMigrateInstance(boilerplate);
    if (result.is_null()) return false;
  }

  ASSERT(max_depth >= 0 && *max_properties >= 0);
  if (max_depth == 0) return false;

  Isolate* isolate = boilerplate->GetIsolate();
  Handle<FixedArrayBase> elements(boilerplate->elements());
  // Copy-on-write element arrays are shared rather than copied, so they do
  // not count against the budget.
  if (elements->length() > 0 &&
      elements->map() != isolate->heap()->fixed_cow_array_map()) {
    if (boilerplate->HasFastObjectElements()) {
      Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements);
      int length = elements->length();
      for (int i = 0; i < length; i++) {
        // Each element consumes one unit of the property budget.
        if ((*max_properties)-- == 0) return false;
        Handle<Object> value(fast_elements->get(i), isolate);
        if (value->IsJSObject()) {
          // Recurse into nested literals with one less level of depth.
          Handle<JSObject> value_object = Handle<JSObject>::cast(value);
          if (!IsFastLiteral(value_object,
                             max_depth - 1,
                             max_properties)) {
            return false;
          }
        }
      }
    } else if (!boilerplate->HasFastDoubleElements()) {
      return false;
    }
  }

  // Out-of-object properties disqualify the boilerplate; only in-object
  // fields described by the map's own descriptors are walked.
  Handle<FixedArray> properties(boilerplate->properties());
  if (properties->length() > 0) {
    return false;
  } else {
    Handle<DescriptorArray> descriptors(
        boilerplate->map()->instance_descriptors());
    int limit = boilerplate->map()->NumberOfOwnDescriptors();
    for (int i = 0; i < limit; i++) {
      PropertyDetails details = descriptors->GetDetails(i);
      if (details.type() != FIELD) continue;
      int index = descriptors->GetFieldIndex(i);
      if ((*max_properties)-- == 0) return false;
      Handle<Object> value(boilerplate->InObjectPropertyAt(index), isolate);
      if (value->IsJSObject()) {
        Handle<JSObject> value_object = Handle<JSObject>::cast(value);
        if (!IsFastLiteral(value_object,
                           max_depth - 1,
                           max_properties)) {
          return false;
        }
      }
    }
  }
  return true;
}
5071 
5072 
// Builds the graph for an object literal: materializes the boilerplate via
// fast deep-copy (when IsFastLiteral allows) or a runtime call, then stores
// the non-compile-time properties one by one.
void HOptimizedGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  expr->BuildConstantProperties(isolate());
  Handle<JSFunction> closure = function_state()->compilation_info()->closure();
  HInstruction* literal;

  // Check whether to use fast or slow deep-copying for boilerplate.
  int max_properties = kMaxFastLiteralProperties;
  Handle<Object> literals_cell(closure->literals()->get(expr->literal_index()),
                               isolate());
  Handle<AllocationSite> site;
  Handle<JSObject> boilerplate;
  if (!literals_cell->IsUndefined()) {
    // Retrieve the boilerplate
    site = Handle<AllocationSite>::cast(literals_cell);
    boilerplate = Handle<JSObject>(JSObject::cast(site->transition_info()),
                                   isolate());
  }

  if (!boilerplate.is_null() &&
      IsFastLiteral(boilerplate, kMaxFastLiteralDepth, &max_properties)) {
    // Fast path: deep-copy the boilerplate inline.
    AllocationSiteUsageContext usage_context(isolate(), site, false);
    usage_context.EnterNewScope();
    literal = BuildFastLiteral(boilerplate, &usage_context);
    usage_context.ExitScope(site, boilerplate);
  } else {
    // Slow path: call the runtime to create the literal.
    NoObservableSideEffectsScope no_effects(this);
    Handle<FixedArray> closure_literals(closure->literals(), isolate());
    Handle<FixedArray> constant_properties = expr->constant_properties();
    int literal_index = expr->literal_index();
    int flags = expr->fast_elements()
        ? ObjectLiteral::kFastElements : ObjectLiteral::kNoFlags;
    flags |= expr->has_function()
        ? ObjectLiteral::kHasFunction : ObjectLiteral::kNoFlags;

    Add<HPushArgument>(Add<HConstant>(closure_literals));
    Add<HPushArgument>(Add<HConstant>(literal_index));
    Add<HPushArgument>(Add<HConstant>(constant_properties));
    Add<HPushArgument>(Add<HConstant>(flags));

    // TODO(mvstanton): Add a flag to turn off creation of any
    // AllocationMementos for this call: we are in crankshaft and should have
    // learned enough about transition behavior to stop emitting mementos.
    Runtime::FunctionId function_id = Runtime::kHiddenCreateObjectLiteral;
    literal = Add<HCallRuntime>(isolate()->factory()->empty_string(),
                                Runtime::FunctionForId(function_id),
                                4);
  }

  // The object is expected in the bailout environment during computation
  // of the property values and is the value of the entire expression.
  Push(literal);

  expr->CalculateEmitStore(zone());

  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();

    switch (property->kind()) {
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        // NOTE(review): a statement here was lost in extraction (presumably
        // an assertion) — confirm against upstream.
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            CHECK_ALIVE(VisitForValue(value));
            HValue* value = Pop();
            Handle<Map> map = property->GetReceiverType();
            Handle<String> name = property->key()->AsPropertyName();
            HInstruction* store;
            if (map.is_null()) {
              // If we don't know the monomorphic type, do a generic store.
              CHECK_ALIVE(store = BuildNamedGeneric(
                  STORE, literal, name, value));
            } else {
              PropertyAccessInfo info(this, STORE, ToType(map), name);
              if (info.CanAccessMonomorphic()) {
                HValue* checked_literal = BuildCheckMap(literal, map);
                ASSERT(!info.lookup()->IsPropertyCallbacks());
                // NOTE(review): the final argument/closing of this call was
                // lost in extraction — confirm against upstream.
                store = BuildMonomorphicAccess(
                    &info, literal, checked_literal, value,
              } else {
                CHECK_ALIVE(store = BuildNamedGeneric(
                    STORE, literal, name, value));
              }
            }
            AddInstruction(store);
            if (store->HasObservableSideEffects()) {
              Add<HSimulate>(key->id(), REMOVABLE_SIMULATE);
            }
          } else {
            CHECK_ALIVE(VisitForEffect(value));
          }
          break;
        }
        // Fall through.
      case ObjectLiteral::Property::PROTOTYPE:
      case ObjectLiteral::Property::SETTER:
      case ObjectLiteral::Property::GETTER:
        return Bailout(kObjectLiteralWithComplexProperty);
      default: UNREACHABLE();
    }
  }

  if (expr->has_function()) {
    // Return the result of the transformation to fast properties
    // instead of the original since this operation changes the map
    // of the object. This makes sure that the original object won't
    // be used by other optimized code before it is transformed
    // (e.g. because of code motion).
    HToFastProperties* result = Add<HToFastProperties>(Pop());
    return ast_context()->ReturnValue(result);
  } else {
    return ast_context()->ReturnValue(Pop());
  }
}
5196 
5197 
// Builds the graph for an array literal: creates or reuses the cached
// boilerplate/allocation site, materializes the array via fast deep-copy or
// a runtime call, then stores the non-compile-time elements.
void HOptimizedGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  expr->BuildConstantElements(isolate());
  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();
  HInstruction* literal;

  Handle<AllocationSite> site;
  Handle<FixedArray> literals(environment()->closure()->literals(), isolate());
  bool uninitialized = false;
  Handle<Object> literals_cell(literals->get(expr->literal_index()),
                               isolate());
  Handle<JSObject> boilerplate_object;
  if (literals_cell->IsUndefined()) {
    // First visit: create the boilerplate and allocation site now and cache
    // the site in the closure's literals array.
    uninitialized = true;
    Handle<Object> raw_boilerplate = Runtime::CreateArrayLiteralBoilerplate(
        isolate(), literals, expr->constant_elements());
    if (raw_boilerplate.is_null()) {
      return Bailout(kArrayBoilerplateCreationFailed);
    }

    boilerplate_object = Handle<JSObject>::cast(raw_boilerplate);
    AllocationSiteCreationContext creation_context(isolate());
    site = creation_context.EnterNewScope();
    if (JSObject::DeepWalk(boilerplate_object, &creation_context).is_null()) {
      return Bailout(kArrayBoilerplateCreationFailed);
    }
    creation_context.ExitScope(site, boilerplate_object);
    literals->set(expr->literal_index(), *site);

    if (boilerplate_object->elements()->map() ==
        isolate()->heap()->fixed_cow_array_map()) {
      isolate()->counters()->cow_arrays_created_runtime()->Increment();
    }
  } else {
    // Cached: the literals slot holds the allocation site that points to
    // the boilerplate.
    ASSERT(literals_cell->IsAllocationSite());
    site = Handle<AllocationSite>::cast(literals_cell);
    boilerplate_object = Handle<JSObject>(
        JSObject::cast(site->transition_info()), isolate());
  }

  ASSERT(!boilerplate_object.is_null());
  ASSERT(site->SitePointsToLiteral());

  ElementsKind boilerplate_elements_kind =
      boilerplate_object->GetElementsKind();

  // Check whether to use fast or slow deep-copying for boilerplate.
  int max_properties = kMaxFastLiteralProperties;
  // NOTE(review): the depth-limit argument of this call was lost in
  // extraction (presumably kMaxFastLiteralDepth) — confirm upstream.
  if (IsFastLiteral(boilerplate_object,
                    &max_properties)) {
    AllocationSiteUsageContext usage_context(isolate(), site, false);
    usage_context.EnterNewScope();
    literal = BuildFastLiteral(boilerplate_object, &usage_context);
    usage_context.ExitScope(site, boilerplate_object);
  } else {
    NoObservableSideEffectsScope no_effects(this);
    // Boilerplate already exists and constant elements are never accessed,
    // pass an empty fixed array to the runtime function instead.
    Handle<FixedArray> constants = isolate()->factory()->empty_fixed_array();
    int literal_index = expr->literal_index();
    int flags = expr->depth() == 1
        ? ArrayLiteral::kShallowElements
        : ArrayLiteral::kNoFlags;
    flags |= ArrayLiteral::kDisableMementos;

    Add<HPushArgument>(Add<HConstant>(literals));
    Add<HPushArgument>(Add<HConstant>(literal_index));
    Add<HPushArgument>(Add<HConstant>(constants));
    Add<HPushArgument>(Add<HConstant>(flags));

    // TODO(mvstanton): Consider a flag to turn off creation of any
    // AllocationMementos for this call: we are in crankshaft and should have
    // learned enough about transition behavior to stop emitting mementos.
    Runtime::FunctionId function_id = Runtime::kHiddenCreateArrayLiteral;
    literal = Add<HCallRuntime>(isolate()->factory()->empty_string(),
                                Runtime::FunctionForId(function_id),
                                4);

    // De-opt if elements kind changed from boilerplate_elements_kind.
    Handle<Map> map = Handle<Map>(boilerplate_object->map(), isolate());
    literal = Add<HCheckMaps>(literal, map, top_info());
  }

  // The array is expected in the bailout environment during computation
  // of the property values and is the value of the entire expression.
  Push(literal);
  // The literal index is on the stack, too.
  Push(Add<HConstant>(expr->literal_index()));

  HInstruction* elements = NULL;

  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    CHECK_ALIVE(VisitForValue(subexpr));
    HValue* value = Pop();
    if (!Smi::IsValid(i)) return Bailout(kNonSmiKeyInArrayLiteral);

    elements = AddLoadElements(literal);

    HValue* key = Add<HConstant>(i);

    switch (boilerplate_elements_kind) {
      case FAST_SMI_ELEMENTS:
      // NOTE(review): a case label was lost in extraction here (presumably
      // FAST_HOLEY_SMI_ELEMENTS) — confirm upstream.
      case FAST_ELEMENTS:
      case FAST_HOLEY_ELEMENTS:
      case FAST_DOUBLE_ELEMENTS:
      // NOTE(review): a case label with an opening brace was lost in
      // extraction here (presumably FAST_HOLEY_DOUBLE_ELEMENTS) — confirm
      // upstream.
        HStoreKeyed* instr = Add<HStoreKeyed>(elements, key, value,
                                              boilerplate_elements_kind);
        instr->SetUninitialized(uninitialized);
        break;
      }
      default:
        UNREACHABLE();
        break;
    }

    Add<HSimulate>(expr->GetIdForElement(i));
  }

  Drop(1);  // array literal index
  return ast_context()->ReturnValue(Pop());
}
5330 
5331 
// Emits a heap-object check on |object| followed by a map check against
// |map|, returning the map-check instruction.
HCheckMaps* HOptimizedGraphBuilder::AddCheckMap(HValue* object,
                                                Handle<Map> map) {
  BuildCheckHeapObject(object);
  return Add<HCheckMaps>(object, map, top_info());
}
5337 
5338 
5339 HInstruction* HOptimizedGraphBuilder::BuildLoadNamedField(
5340  PropertyAccessInfo* info,
5341  HValue* checked_object) {
5342  HObjectAccess access = info->access();
5343  if (access.representation().IsDouble()) {
5344  // Load the heap number.
5345  checked_object = Add<HLoadNamedField>(
5346  checked_object, static_cast<HValue*>(NULL),
5347  access.WithRepresentation(Representation::Tagged()));
5348  checked_object->set_type(HType::HeapNumber());
5349  // Load the double value from it.
5350  access = HObjectAccess::ForHeapNumberValue();
5351  }
5352  return New<HLoadNamedField>(
5353  checked_object, static_cast<HValue*>(NULL), access);
5354 }
5355 
5356 
// Builds the store for a named field access, handling boxed double fields
// and stores that transition the receiver's map.
HInstruction* HOptimizedGraphBuilder::BuildStoreNamedField(
    PropertyAccessInfo* info,
    HValue* checked_object,
    HValue* value) {
  bool transition_to_field = info->lookup()->IsTransition();
  // TODO(verwaest): Move this logic into PropertyAccessInfo.
  HObjectAccess field_access = HObjectAccess::ForField(
      info->map(), info->lookup(), info->name());

  HStoreNamedField *instr;
  if (field_access.representation().IsDouble()) {
    HObjectAccess heap_number_access =
        field_access.WithRepresentation(Representation::Tagged());
    if (transition_to_field) {
      // The store requires a mutable HeapNumber to be allocated.
      NoObservableSideEffectsScope no_side_effects(this);
      HInstruction* heap_number_size = Add<HConstant>(HeapNumber::kSize);

      // NOTE(review): the second half of this conditional expression was
      // lost in extraction — confirm the pretenure choice upstream.
      PretenureFlag pretenure_flag = !FLAG_allocation_site_pretenuring ?

      // NOTE(review): the trailing allocation argument(s) of this call were
      // lost in extraction — confirm upstream.
      HInstruction* heap_number = Add<HAllocate>(heap_number_size,
                                                 HType::HeapNumber(),
                                                 pretenure_flag,
      AddStoreMapConstant(heap_number, isolate()->factory()->heap_number_map());
      Add<HStoreNamedField>(heap_number, HObjectAccess::ForHeapNumberValue(),
                            value);
      // Then store the freshly allocated box into the field itself.
      instr = New<HStoreNamedField>(checked_object->ActualValue(),
                                    heap_number_access,
                                    heap_number);
    } else {
      // Already holds a HeapNumber; load the box and write its value field.
      // NOTE(review): the stored-value argument of the New<> call below was
      // lost in extraction — confirm upstream.
      HInstruction* heap_number = Add<HLoadNamedField>(
          checked_object, static_cast<HValue*>(NULL), heap_number_access);
      heap_number->set_type(HType::HeapNumber());
      instr = New<HStoreNamedField>(heap_number,
                                    HObjectAccess::ForHeapNumberValue(),
    }
  } else {
    // This is a normal store.
    instr = New<HStoreNamedField>(
        checked_object->ActualValue(), field_access, value,
        transition_to_field ? INITIALIZING_STORE : STORE_TO_INITIALIZED_ENTRY);
  }

  if (transition_to_field) {
    // Attach the transition target map; the store also changes the object's
    // map, so the kMaps flag must be marked as changed.
    HConstant* transition_constant = Add<HConstant>(info->transition());
    instr->SetTransition(transition_constant, top_info());
    instr->SetChangesFlag(kMaps);
  }
  return instr;
}
5411 
5412 
// Returns true when |info| can share the same inlined access code as this
// access; on success, field representations are generalized so one handler
// covers both maps.
bool HOptimizedGraphBuilder::PropertyAccessInfo::IsCompatible(
    PropertyAccessInfo* info) {
  if (!CanInlinePropertyAccess(type_)) return false;

  // Currently only handle Type::Number as a polymorphic case.
  // TODO(verwaest): Support monomorphic handling of numbers with a HCheckNumber
  // instruction.
  if (type_->Is(Type::Number())) return false;

  // Values are only compatible for monomorphic load if they all behave the same
  // regarding value wrappers.
  if (type_->Is(Type::NumberOrString())) {
    if (!info->type_->Is(Type::NumberOrString())) return false;
  } else {
    if (info->type_->Is(Type::NumberOrString())) return false;
  }

  if (!LookupDescriptor()) return false;

  // Not found on this receiver map: compatible only if the other access
  // would continue its search in the same prototype.
  if (!lookup_.IsFound()) {
    return (!info->lookup_.IsFound() || info->has_holder()) &&
        map()->prototype() == info->map()->prototype();
  }

  // Mismatch if the other access info found the property in the prototype
  // chain.
  if (info->has_holder()) return false;

  // Callback accesses must use the identical accessor and API holder.
  if (lookup_.IsPropertyCallbacks()) {
    return accessor_.is_identical_to(info->accessor_) &&
        api_holder_.is_identical_to(info->api_holder_);
  }

  // Constant loads must yield the identical constant.
  if (lookup_.IsConstant()) {
    return constant_.is_identical_to(info->constant_);
  }

  ASSERT(lookup_.IsField());
  if (!info->lookup_.IsField()) return false;

  // Field accesses must agree on offset and in-objectness, and their
  // representations must be compatible for the access direction.
  Representation r = access_.representation();
  if (IsLoad()) {
    if (!info->access_.representation().IsCompatibleForLoad(r)) return false;
  } else {
    if (!info->access_.representation().IsCompatibleForStore(r)) return false;
  }
  if (info->access_.offset() != access_.offset()) return false;
  if (info->access_.IsInobject() != access_.IsInobject()) return false;
  info->GeneralizeRepresentation(r);
  return true;
}
5464 
5465 
// Looks up name_ in the receiver map's own descriptors and records the
// result. Non-class types (no concrete map) trivially succeed with an empty
// lookup.
bool HOptimizedGraphBuilder::PropertyAccessInfo::LookupDescriptor() {
  if (!type_->IsClass()) return true;
  map()->LookupDescriptor(NULL, *name_, &lookup_);
  return LoadResult(map());
}
5471 
5472 
// Translates the current LookupResult on |map| into this access info: a
// field access, a JSFunction accessor, or a constant. Returns false when
// the property cannot be handled inline.
bool HOptimizedGraphBuilder::PropertyAccessInfo::LoadResult(Handle<Map> map) {
  // Stores to read-only or non-cacheable properties cannot be inlined.
  if (!IsLoad() && lookup_.IsProperty() &&
      (lookup_.IsReadOnly() || !lookup_.IsCacheable())) {
    return false;
  }

  if (lookup_.IsField()) {
    access_ = HObjectAccess::ForField(map, &lookup_, name_);
  } else if (lookup_.IsPropertyCallbacks()) {
    Handle<Object> callback(lookup_.GetValueFromMap(*map), isolate());
    // Only AccessorPairs whose relevant side is a JSFunction qualify.
    if (!callback->IsAccessorPair()) return false;
    Object* raw_accessor = IsLoad()
        ? Handle<AccessorPair>::cast(callback)->getter()
        : Handle<AccessorPair>::cast(callback)->setter();
    if (!raw_accessor->IsJSFunction()) return false;
    Handle<JSFunction> accessor = handle(JSFunction::cast(raw_accessor));
    if (accessor->shared()->IsApiFunction()) {
      // API accessors are only supported as simple API calls with a
      // resolvable holder.
      CallOptimization call_optimization(accessor);
      if (!call_optimization.is_simple_api_call()) return false;
      CallOptimization::HolderLookup holder_lookup;
      api_holder_ = call_optimization.LookupHolderOfExpectedType(
          map, &holder_lookup);
      switch (holder_lookup) {
        case CallOptimization::kHolderNotFound:
          return false;
        case CallOptimization::kHolderIsReceiver:
        case CallOptimization::kHolderFound:
          break;
      }
    }
    accessor_ = accessor;
  } else if (lookup_.IsConstant()) {
    constant_ = handle(lookup_.GetConstantFromMap(*map), isolate());
  }

  return true;
}
5510 
5511 
// Walks the prototype chain of the receiver's map looking for the named
// property. On a hit, records the holder and delegates to LoadResult();
// returns false if any prototype's type cannot be inlined. Falling off the
// end of the chain leaves the lookup in the not-found state and returns true.
bool HOptimizedGraphBuilder::PropertyAccessInfo::LookupInPrototypes() {
  Handle<Map> map = this->map();

  while (map->prototype()->IsJSObject()) {
    holder_ = handle(JSObject::cast(map->prototype()));
    if (holder_->map()->is_deprecated()) {
      // NOTE(review): this if-body is empty; a statement (presumably an
      // instance-migration call for the deprecated map) appears to have been
      // dropped from this copy of the file — confirm against upstream.
    }
    map = Handle<Map>(holder_->map());
    if (!CanInlinePropertyAccess(ToType(map))) {
      // A prototype we cannot reason about poisons the whole chain.
      lookup_.NotFound();
      return false;
    }
    map->LookupDescriptor(*holder_, *name_, &lookup_);
    if (lookup_.IsFound()) return LoadResult(map);
  }
  lookup_.NotFound();
  return true;
}
5531 
5532 
5533 bool HOptimizedGraphBuilder::PropertyAccessInfo::CanAccessMonomorphic() {
5534  if (!CanInlinePropertyAccess(type_)) return false;
5535  if (IsJSObjectFieldAccessor()) return IsLoad();
5536  if (!LookupDescriptor()) return false;
5537  if (lookup_.IsFound()) {
5538  if (IsLoad()) return true;
5539  return !lookup_.IsReadOnly() && lookup_.IsCacheable();
5540  }
5541  if (!LookupInPrototypes()) return false;
5542  if (IsLoad()) return true;
5543 
5544  if (lookup_.IsPropertyCallbacks()) return true;
5545  Handle<Map> map = this->map();
5546  map->LookupTransition(NULL, *name_, &lookup_);
5547  if (lookup_.IsTransitionToField() && map->unused_property_fields() > 0) {
5548  return true;
5549  }
5550  return false;
5551 }
5552 
5553 
5554 bool HOptimizedGraphBuilder::PropertyAccessInfo::CanAccessAsMonomorphic(
5555  SmallMapList* types) {
5556  ASSERT(type_->Is(ToType(types->first())));
5557  if (!CanAccessMonomorphic()) return false;
5558  STATIC_ASSERT(kMaxLoadPolymorphism == kMaxStorePolymorphism);
5559  if (types->length() > kMaxLoadPolymorphism) return false;
5560 
5561  HObjectAccess access = HObjectAccess::ForMap(); // bogus default
5562  if (GetJSObjectFieldAccess(&access)) {
5563  for (int i = 1; i < types->length(); ++i) {
5564  PropertyAccessInfo test_info(
5565  builder_, access_type_, ToType(types->at(i)), name_);
5566  HObjectAccess test_access = HObjectAccess::ForMap(); // bogus default
5567  if (!test_info.GetJSObjectFieldAccess(&test_access)) return false;
5568  if (!access.Equals(test_access)) return false;
5569  }
5570  return true;
5571  }
5572 
5573  // Currently only handle Type::Number as a polymorphic case.
5574  // TODO(verwaest): Support monomorphic handling of numbers with a HCheckNumber
5575  // instruction.
5576  if (type_->Is(Type::Number())) return false;
5577 
5578  // Multiple maps cannot transition to the same target map.
5579  ASSERT(!IsLoad() || !lookup_.IsTransition());
5580  if (lookup_.IsTransition() && types->length() > 1) return false;
5581 
5582  for (int i = 1; i < types->length(); ++i) {
5583  PropertyAccessInfo test_info(
5584  builder_, access_type_, ToType(types->at(i)), name_);
5585  if (!test_info.IsCompatible(this)) return false;
5586  }
5587 
5588  return true;
5589 }
5590 
5591 
5592 static bool NeedsWrappingFor(Type* type, Handle<JSFunction> target) {
5593  return type->Is(Type::NumberOrString()) &&
5594  target->shared()->strict_mode() == SLOPPY &&
5595  !target->shared()->native();
5596 }
5597 
5598 
// Emits the graph for a single-map named property access described by
// |info|. Depending on the lookup result this is a direct field load/store,
// a map-transitioning store, an accessor call (possibly inlined), or a
// constant. Returns the resulting instruction, or NULL when an accessor was
// inlined (the inlinee produced the value) or on stack overflow.
HInstruction* HOptimizedGraphBuilder::BuildMonomorphicAccess(
    PropertyAccessInfo* info,
    HValue* object,
    HValue* checked_object,
    HValue* value,
    BailoutId ast_id,
    BailoutId return_id,
    bool can_inline_accessor) {

  // Special JSObject fields (handled without a property lookup) are
  // load-only.
  HObjectAccess access = HObjectAccess::ForMap();  // bogus default
  if (info->GetJSObjectFieldAccess(&access)) {
    ASSERT(info->IsLoad());
    return New<HLoadNamedField>(object, checked_object, access);
  }

  // If the property lives on a prototype, guard the whole chain up to the
  // holder so shape changes deoptimize this code.
  HValue* checked_holder = checked_object;
  if (info->has_holder()) {
    Handle<JSObject> prototype(JSObject::cast(info->map()->prototype()));
    checked_holder = BuildCheckPrototypeMaps(prototype, info->holder());
  }

  // Property proven absent anywhere on the chain: a load yields undefined.
  if (!info->lookup()->IsFound()) {
    ASSERT(info->IsLoad());
    return graph()->GetConstantUndefined();
  }

  if (info->lookup()->IsField()) {
    if (info->IsLoad()) {
      return BuildLoadNamedField(info, checked_holder);
    } else {
      return BuildStoreNamedField(info, checked_object, value);
    }
  }

  if (info->lookup()->IsTransition()) {
    ASSERT(!info->IsLoad());
    return BuildStoreNamedField(info, checked_object, value);
  }

  if (info->lookup()->IsPropertyCallbacks()) {
    // Accessor call: receiver (and the value, for setters) become the call
    // arguments.
    Push(checked_object);
    int argument_count = 1;
    if (!info->IsLoad()) {
      argument_count = 2;
      Push(value);
    }

    if (NeedsWrappingFor(info->type(), info->accessor())) {
      // Primitive receiver with a sloppy-mode accessor: go through the
      // wrapping call path instead of a direct constant-function call.
      HValue* function = Add<HConstant>(info->accessor());
      PushArgumentsFromEnvironment(argument_count);
      return New<HCallFunction>(function, argument_count, WRAP_AND_CALL);
    } else if (FLAG_inline_accessors && can_inline_accessor) {
      bool success = info->IsLoad()
          ? TryInlineGetter(info->accessor(), info->map(), ast_id, return_id)
          : TryInlineSetter(
              info->accessor(), info->map(), ast_id, return_id, value);
      // NULL signals "inlined": the caller must not add an instruction.
      if (success) return NULL;
    }

    PushArgumentsFromEnvironment(argument_count);
    return BuildCallConstantFunction(info->accessor(), argument_count);
  }

  ASSERT(info->lookup()->IsConstant());
  if (info->IsLoad()) {
    return New<HConstant>(info->constant());
  } else {
    // Storing to a constant property: emit a check that the stored value
    // equals the known constant (deopts otherwise).
    return New<HCheckValue>(value, Handle<JSFunction>::cast(info->constant()));
  }
}
5669 
5670 
// Compiles a polymorphic named property access as a chain of map compares,
// one arm per inlinable map, joined into a single continuation block. Maps
// that cannot be handled fall through either to a hard deoptimization (when
// all maps were covered) or to a generic IC.
void HOptimizedGraphBuilder::HandlePolymorphicNamedFieldAccess(
    PropertyAccessType access_type,
    BailoutId ast_id,
    BailoutId return_id,
    HValue* object,
    HValue* value,
    SmallMapList* types,
    Handle<String> name) {
  // Something did not match; must use a polymorphic load.
  int count = 0;
  HBasicBlock* join = NULL;
  HBasicBlock* number_block = NULL;
  bool handled_string = false;

  // First pass: detect whether any inlinable map is a Number map, since the
  // dispatch then needs an up-front smi check feeding a shared number block.
  bool handle_smi = false;
  for (int i = 0; i < types->length() && count < kMaxLoadPolymorphism; ++i) {
    PropertyAccessInfo info(this, access_type, ToType(types->at(i)), name);
    if (info.type()->Is(Type::String())) {
      // All string maps collapse into one HIsStringAndBranch arm.
      if (handled_string) continue;
      handled_string = true;
    }
    if (info.CanAccessMonomorphic()) {
      count++;
      if (info.type()->Is(Type::Number())) {
        handle_smi = true;
        break;
      }
    }
  }

  // Second pass: actually emit one dispatch arm per inlinable map.
  count = 0;
  HControlInstruction* smi_check = NULL;
  handled_string = false;

  for (int i = 0; i < types->length() && count < kMaxLoadPolymorphism; ++i) {
    PropertyAccessInfo info(this, access_type, ToType(types->at(i)), name);
    if (info.type()->Is(Type::String())) {
      if (handled_string) continue;
      handled_string = true;
    }
    if (!info.CanAccessMonomorphic()) continue;

    if (count == 0) {
      // Lazily create the join block and the receiver pre-checks on the
      // first handled map.
      join = graph()->CreateBasicBlock();
      if (handle_smi) {
        // Smis take a shortcut straight into the shared number block.
        HBasicBlock* empty_smi_block = graph()->CreateBasicBlock();
        HBasicBlock* not_smi_block = graph()->CreateBasicBlock();
        number_block = graph()->CreateBasicBlock();
        smi_check = New<HIsSmiAndBranch>(
            object, empty_smi_block, not_smi_block);
        FinishCurrentBlock(smi_check);
        GotoNoSimulate(empty_smi_block, number_block);
        set_current_block(not_smi_block);
      } else {
        BuildCheckHeapObject(object);
      }
    }
    ++count;
    HBasicBlock* if_true = graph()->CreateBasicBlock();
    HBasicBlock* if_false = graph()->CreateBasicBlock();
    HUnaryControlInstruction* compare;

    // |dependency| is the check the monomorphic access may rely on.
    HValue* dependency;
    if (info.type()->Is(Type::Number())) {
      Handle<Map> heap_number_map = isolate()->factory()->heap_number_map();
      compare = New<HCompareMap>(object, heap_number_map, if_true, if_false);
      dependency = smi_check;
    } else if (info.type()->Is(Type::String())) {
      compare = New<HIsStringAndBranch>(object, if_true, if_false);
      dependency = compare;
    } else {
      compare = New<HCompareMap>(object, info.map(), if_true, if_false);
      dependency = compare;
    }
    FinishCurrentBlock(compare);

    if (info.type()->Is(Type::Number())) {
      // Heap numbers merge with the smi shortcut in the shared number block.
      GotoNoSimulate(if_true, number_block);
      if_true = number_block;
    }

    set_current_block(if_true);

    HInstruction* access = BuildMonomorphicAccess(
        &info, object, dependency, value, ast_id,
        return_id, FLAG_polymorphic_inlining);

    // Loads produce the accessed value; stores produce the stored value.
    HValue* result = NULL;
    switch (access_type) {
      case LOAD:
        result = access;
        break;
      case STORE:
        result = value;
        break;
    }

    if (access == NULL) {
      // NULL means the accessor was inlined (or we overflowed the stack).
      if (HasStackOverflow()) return;
    } else {
      if (!access->IsLinked()) AddInstruction(access);
      if (!ast_context()->IsEffect()) Push(result);
    }

    if (current_block() != NULL) Goto(join);
    set_current_block(if_false);
  }

  // Finish up. Unconditionally deoptimize if we've handled all the maps we
  // know about and do not want to handle ones we've never seen. Otherwise
  // use a generic IC.
  if (count == types->length() && FLAG_deoptimize_uncommon_cases) {
    // NOTE(review): "Uknown" is a typo for "Unknown" in this deopt-reason
    // string. It is runtime-visible text, so it is only flagged here rather
    // than silently changed.
    FinishExitWithHardDeoptimization("Uknown map in polymorphic access");
  } else {
    HInstruction* instr = BuildNamedGeneric(access_type, object, name, value);
    AddInstruction(instr);
    if (!ast_context()->IsEffect()) Push(access_type == LOAD ? instr : value);

    if (join != NULL) {
      Goto(join);
    } else {
      Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
      if (!ast_context()->IsEffect()) ast_context()->ReturnValue(Pop());
      return;
    }
  }

  ASSERT(join != NULL);
  if (join->HasPredecessor()) {
    join->SetJoinId(ast_id);
    set_current_block(join);
    if (!ast_context()->IsEffect()) ast_context()->ReturnValue(Pop());
  } else {
    // NOTE(review): this else-body is empty; a statement (presumably
    // clearing the current block) appears to have been dropped from this
    // copy of the file — confirm against upstream.
  }
}
5808 
5809 
5810 static bool ComputeReceiverTypes(Expression* expr,
5811  HValue* receiver,
5812  SmallMapList** t,
5813  Zone* zone) {
5814  SmallMapList* types = expr->GetReceiverTypes();
5815  *t = types;
5816  bool monomorphic = expr->IsMonomorphic();
5817  if (types != NULL && receiver->HasMonomorphicJSObjectType()) {
5818  Map* root_map = receiver->GetMonomorphicJSObjectMap()->FindRootMap();
5819  types->FilterForPossibleTransitions(root_map);
5820  monomorphic = types->length() == 1;
5821  }
5822  return monomorphic && CanInlinePropertyAccess(
5823  IC::MapToType<Type>(types->first(), zone));
5824 }
5825 
5826 
5827 static bool AreStringTypes(SmallMapList* types) {
5828  for (int i = 0; i < types->length(); i++) {
5829  if (types->at(i)->instance_type() >= FIRST_NONSTRING_TYPE) return false;
5830  }
5831  return true;
5832 }
5833 
5834 
// Emits a keyed or named store for |prop|. Operands are taken from the
// expression stack (keyed: value/key/object on top; named: value/object),
// and the stored value is returned to the AST context.
void HOptimizedGraphBuilder::BuildStore(Expression* expr,
                                        Property* prop,
                                        BailoutId ast_id,
                                        BailoutId return_id,
                                        bool is_uninitialized) {
  if (!prop->key()->IsPropertyName()) {
    // Keyed store.
    HValue* value = environment()->ExpressionStackAt(0);
    HValue* key = environment()->ExpressionStackAt(1);
    HValue* object = environment()->ExpressionStackAt(2);
    bool has_side_effects = false;
    HandleKeyedElementAccess(object, key, value, expr,
                             STORE, &has_side_effects);
    // Replace object/key/value with just the stored value as the result.
    Drop(3);
    Push(value);
    Add<HSimulate>(return_id, REMOVABLE_SIMULATE);
    return ast_context()->ReturnValue(Pop());
  }

  // Named store.
  HValue* value = Pop();
  HValue* object = Pop();

  Literal* key = prop->key()->AsLiteral();
  Handle<String> name = Handle<String>::cast(key->value());
  ASSERT(!name.is_null());

  HInstruction* instr = BuildNamedAccess(STORE, ast_id, return_id, expr,
                                         object, name, value,
                                         is_uninitialized);
  // NULL means the access was fully handled (e.g. inlined) by the builder.
  if (instr == NULL) return;

  // Keep the value live across the (possibly deoptimizing) store.
  if (!ast_context()->IsEffect()) Push(value);
  AddInstruction(instr);
  if (instr->HasObservableSideEffects()) {
    Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
  }
  if (!ast_context()->IsEffect()) Drop(1);
  return ast_context()->ReturnValue(value);
}
5874 
5875 
// Handles a non-compound assignment whose target is a property: evaluates
// the receiver, the key (for keyed stores), and the value, then emits the
// store via BuildStore.
void HOptimizedGraphBuilder::HandlePropertyAssignment(Assignment* expr) {
  Property* prop = expr->target()->AsProperty();
  ASSERT(prop != NULL);
  CHECK_ALIVE(VisitForValue(prop->obj()));
  if (!prop->key()->IsPropertyName()) {
    CHECK_ALIVE(VisitForValue(prop->key()));
  }
  CHECK_ALIVE(VisitForValue(expr->value()));
  BuildStore(expr, prop, expr->id(),
             expr->AssignmentId(), expr->IsUninitialized());
}
5887 
5888 
// Because not every expression has a position and there is not common
// superclass of Assignment and CountOperation, we cannot just pass the
// owning expression instead of position and ast_id separately.
//
// Stores |value| into the global variable |var|: through its property cell
// when one exists (deoptimizing on writes that would break a known constant
// cell), otherwise via a generic named store on the global object.
void HOptimizedGraphBuilder::HandleGlobalVariableAssignment(
    Variable* var,
    HValue* value,
    BailoutId ast_id) {
  LookupResult lookup(isolate());
  GlobalPropertyAccess type = LookupGlobalProperty(var, &lookup, STORE);
  if (type == kUseCell) {
    Handle<GlobalObject> global(current_info()->global_object());
    Handle<PropertyCell> cell(global->GetPropertyCell(&lookup));
    if (cell->type()->IsConstant()) {
      // The cell is specialized to a single constant value; any store of a
      // different value must deoptimize.
      Handle<Object> constant = cell->type()->AsConstant();
      if (value->IsConstant()) {
        HConstant* c_value = HConstant::cast(value);
        if (!constant.is_identical_to(c_value->handle(isolate()))) {
          Add<HDeoptimize>("Constant global variable assignment",
          // NOTE(review): this Add<HDeoptimize> call is truncated in this
          // copy of the file — the deoptimizer bailout-type argument and
          // closing parenthesis appear to have been dropped; confirm
          // against upstream.
        }
      } else {
        // Unknown value: emit a runtime comparison against the constant and
        // deoptimize on mismatch.
        HValue* c_constant = Add<HConstant>(constant);
        IfBuilder builder(this);
        if (constant->IsNumber()) {
          builder.If<HCompareNumericAndBranch>(value, c_constant, Token::EQ);
        } else {
          builder.If<HCompareObjectEqAndBranch>(value, c_constant);
        }
        builder.Then();
        builder.Else();
        Add<HDeoptimize>("Constant global variable assignment",
        // NOTE(review): this Add<HDeoptimize> call is likewise truncated
        // here — confirm against upstream.
        builder.End();
      }
    }
    HInstruction* instr =
        Add<HStoreGlobalCell>(value, cell, lookup.GetPropertyDetails());
    if (instr->HasObservableSideEffects()) {
      Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
    }
  } else {
    // No usable cell: store generically through the global object loaded
    // from the context.
    HValue* global_object = Add<HLoadNamedField>(
        context(), static_cast<HValue*>(NULL),
        HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
    HStoreNamedGeneric* instr =
        Add<HStoreNamedGeneric>(global_object, var->name(),
                                value, function_strict_mode());
    USE(instr);
    ASSERT(instr->HasObservableSideEffects());
    Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
  }
}
5941 
5942 
// Handles compound assignments (e.g. x += y): evaluates the binary
// operation, then stores the result either into a variable (by location)
// or into a property via a load/compute/store sequence.
void HOptimizedGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
  Expression* target = expr->target();
  VariableProxy* proxy = target->AsVariableProxy();
  Property* prop = target->AsProperty();
  ASSERT(proxy == NULL || prop == NULL);

  // We have a second position recorded in the FullCodeGenerator to have
  // type feedback for the binary operation.
  BinaryOperation* operation = expr->binary_operation();

  if (proxy != NULL) {
    Variable* var = proxy->var();
    if (var->mode() == LET) {
      return Bailout(kUnsupportedLetCompoundAssignment);
    }

    // Result of the binary operation is left on top of the stack.
    CHECK_ALIVE(VisitForValue(operation));

    switch (var->location()) {
      case Variable::UNALLOCATED:
        HandleGlobalVariableAssignment(var,
                                       Top(),
                                       expr->AssignmentId());
        break;

      case Variable::PARAMETER:
      case Variable::LOCAL:
        if (var->mode() == CONST_LEGACY) {
          return Bailout(kUnsupportedConstCompoundAssignment);
        }
        BindIfLive(var, Top());
        break;

      case Variable::CONTEXT: {
        // Bail out if we try to mutate a parameter value in a function
        // using the arguments object. We do not (yet) correctly handle the
        // arguments property of the function.
        if (current_info()->scope()->arguments() != NULL) {
          // Parameters will be allocated to context slots. We have no
          // direct way to detect that the variable is a parameter so we do
          // a linear search of the parameter variables.
          int count = current_info()->scope()->num_parameters();
          for (int i = 0; i < count; ++i) {
            if (var == current_info()->scope()->parameter(i)) {
              Bailout(kAssignmentToParameterFunctionUsesArgumentsObject);
            }
          }
        }

        HStoreContextSlot::Mode mode;

        switch (var->mode()) {
          case LET:
            mode = HStoreContextSlot::kCheckDeoptimize;
            break;
          case CONST:
            // This case is checked statically so no need to
            // perform checks here
            // (UNREACHABLE() is expected not to return, so the apparent
            // fall-through into CONST_LEGACY should never be taken.)
            UNREACHABLE();
          case CONST_LEGACY:
            // Legacy const: silently ignore the store, return the value.
            return ast_context()->ReturnValue(Pop());
          default:
            mode = HStoreContextSlot::kNoCheck;
        }

        HValue* context = BuildContextChainWalk(var);
        HStoreContextSlot* instr = Add<HStoreContextSlot>(
            context, var->index(), mode, Top());
        if (instr->HasObservableSideEffects()) {
          Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
        }
        break;
      }

      case Variable::LOOKUP:
        return Bailout(kCompoundAssignmentToLookupSlot);
    }
    return ast_context()->ReturnValue(Pop());

  } else if (prop != NULL) {
    CHECK_ALIVE(VisitForValue(prop->obj()));
    HValue* object = Top();
    HValue* key = NULL;
    if ((!prop->IsFunctionPrototype() && !prop->key()->IsPropertyName()) ||
        prop->IsStringAccess()) {
      CHECK_ALIVE(VisitForValue(prop->key()));
      key = Top();
    }

    // Load the current property value, compute the new value, store it.
    CHECK_ALIVE(PushLoad(prop, object, key));

    CHECK_ALIVE(VisitForValue(expr->value()));
    HValue* right = Pop();
    HValue* left = Pop();

    Push(BuildBinaryOperation(operation, left, right, PUSH_BEFORE_SIMULATE));

    BuildStore(expr, prop, expr->id(),
               expr->AssignmentId(), expr->IsUninitialized());
  } else {
    return Bailout(kInvalidLhsInCompoundAssignment);
  }
}
6046 
6047 
// Visitor for assignment expressions: dispatches compound assignments,
// property assignments, and variable assignments (the latter handled here,
// per variable location and mode).
void HOptimizedGraphBuilder::VisitAssignment(Assignment* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  VariableProxy* proxy = expr->target()->AsVariableProxy();
  Property* prop = expr->target()->AsProperty();
  ASSERT(proxy == NULL || prop == NULL);

  if (expr->is_compound()) {
    HandleCompoundAssignment(expr);
    return;
  }

  if (prop != NULL) {
    HandlePropertyAssignment(expr);
  } else if (proxy != NULL) {
    Variable* var = proxy->var();

    if (var->mode() == CONST) {
      // ES6 const: only the initializing assignment is allowed.
      if (expr->op() != Token::INIT_CONST) {
        return Bailout(kNonInitializerAssignmentToConst);
      }
    } else if (var->mode() == CONST_LEGACY) {
      // Legacy const: non-initializing assignments evaluate the RHS but
      // leave the variable untouched.
      if (expr->op() != Token::INIT_CONST_LEGACY) {
        CHECK_ALIVE(VisitForValue(expr->value()));
        return ast_context()->ReturnValue(Pop());
      }

      if (var->IsStackAllocated()) {
        // We insert a use of the old value to detect unsupported uses of const
        // variables (e.g. initialization inside a loop).
        HValue* old_value = environment()->Lookup(var);
        Add<HUseConst>(old_value);
      }
    }

    if (proxy->IsArguments()) return Bailout(kAssignmentToArguments);

    // Handle the assignment.
    switch (var->location()) {
      case Variable::UNALLOCATED:
        CHECK_ALIVE(VisitForValue(expr->value()));
        HandleGlobalVariableAssignment(var,
                                       Top(),
                                       expr->AssignmentId());
        return ast_context()->ReturnValue(Pop());

      case Variable::PARAMETER:
      case Variable::LOCAL: {
        // Perform an initialization check for let declared variables
        // or parameters.
        if (var->mode() == LET && expr->op() == Token::ASSIGN) {
          HValue* env_value = environment()->Lookup(var);
          if (env_value == graph()->GetConstantHole()) {
            return Bailout(kAssignmentToLetVariableBeforeInitialization);
          }
        }
        // We do not allow the arguments object to occur in a context where it
        // may escape, but assignments to stack-allocated locals are
        // permitted.
        CHECK_ALIVE(VisitForValue(expr->value(), ARGUMENTS_ALLOWED));
        HValue* value = Pop();
        BindIfLive(var, value);
        return ast_context()->ReturnValue(value);
      }

      case Variable::CONTEXT: {
        // Bail out if we try to mutate a parameter value in a function using
        // the arguments object. We do not (yet) correctly handle the
        // arguments property of the function.
        if (current_info()->scope()->arguments() != NULL) {
          // Parameters will rewrite to context slots. We have no direct way
          // to detect that the variable is a parameter.
          int count = current_info()->scope()->num_parameters();
          for (int i = 0; i < count; ++i) {
            if (var == current_info()->scope()->parameter(i)) {
              return Bailout(kAssignmentToParameterInArgumentsObject);
            }
          }
        }

        CHECK_ALIVE(VisitForValue(expr->value()));
        // Select the store-check mode from the variable mode and operator.
        HStoreContextSlot::Mode mode;
        if (expr->op() == Token::ASSIGN) {
          switch (var->mode()) {
            case LET:
              mode = HStoreContextSlot::kCheckDeoptimize;
              break;
            case CONST:
              // This case is checked statically so no need to
              // perform checks here
              // (UNREACHABLE() is expected not to return, so the apparent
              // fall-through into CONST_LEGACY should never be taken.)
              UNREACHABLE();
            case CONST_LEGACY:
              return ast_context()->ReturnValue(Pop());
            default:
              mode = HStoreContextSlot::kNoCheck;
          }
        } else if (expr->op() == Token::INIT_VAR ||
                   expr->op() == Token::INIT_LET ||
                   expr->op() == Token::INIT_CONST) {
          mode = HStoreContextSlot::kNoCheck;
        } else {
          ASSERT(expr->op() == Token::INIT_CONST_LEGACY);

          mode = HStoreContextSlot::kCheckIgnoreAssignment;
        }

        HValue* context = BuildContextChainWalk(var);
        HStoreContextSlot* instr = Add<HStoreContextSlot>(
            context, var->index(), mode, Top());
        if (instr->HasObservableSideEffects()) {
          Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
        }
        return ast_context()->ReturnValue(Pop());
      }

      case Variable::LOOKUP:
        return Bailout(kAssignmentToLOOKUPVariable);
    }
  } else {
    return Bailout(kInvalidLeftHandSideInAssignment);
  }
}
6171 
6172 
// Visitor for yield expressions.
void HOptimizedGraphBuilder::VisitYield(Yield* expr) {
  // Generators are not optimized, so we should never get here.
  UNREACHABLE();
}
6177 
6178 
// Visitor for throw expressions: evaluates the exception value and emits a
// runtime call to throw it, followed by dead-end control flow when the
// throw cannot be observed by an inlining caller.
void HOptimizedGraphBuilder::VisitThrow(Throw* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  // We don't optimize functions with invalid left-hand sides in
  // assignments, count operations, or for-in. Consequently throw can
  // currently only occur in an effect context.
  ASSERT(ast_context()->IsEffect());
  CHECK_ALIVE(VisitForValue(expr->exception()));

  HValue* value = environment()->Pop();
  if (!FLAG_hydrogen_track_positions) SetSourcePosition(expr->position());
  Add<HPushArgument>(value);
  Add<HCallRuntime>(isolate()->factory()->empty_string(),
                    Runtime::FunctionForId(Runtime::kHiddenThrow), 1);
  Add<HSimulate>(expr->id());

  // If the throw definitely exits the function, we can finish with a dummy
  // control flow at this point. This is not the case if the throw is inside
  // an inlined function which may be replaced.
  if (call_context() == NULL) {
    FinishExitCurrentBlock(New<HAbnormalExit>());
  }
}
6203 
6204 
  // NOTE(review): the signature line of this definition was lost in
  // extraction. From the body this reads like a helper that loads a string's
  // map instance type — constant-folding when |string| is an HConstant with
  // a known string value, otherwise emitting map + instance-type loads —
  // but the name and return type must be confirmed against upstream.
  if (string->IsConstant()) {
    HConstant* c_string = HConstant::cast(string);
    if (c_string->HasStringValue()) {
      // Known string: fold to a constant instance type.
      return Add<HConstant>(c_string->StringValue()->map()->instance_type());
    }
  }
  return Add<HLoadNamedField>(
      Add<HLoadNamedField>(string, static_cast<HValue*>(NULL),
                           HObjectAccess::ForMap()),
      static_cast<HValue*>(NULL), HObjectAccess::ForMapInstanceType());
}
6217 
6218 
  // NOTE(review): the signature line of this definition was lost in
  // extraction. From the body this reads like a helper that loads a string's
  // length — constant-folding when |string| is an HConstant with a known
  // string value — but the name and return type must be confirmed against
  // upstream.
  if (string->IsConstant()) {
    HConstant* c_string = HConstant::cast(string);
    if (c_string->HasStringValue()) {
      return Add<HConstant>(c_string->StringValue()->length());
    }
  }
  return Add<HLoadNamedField>(string, static_cast<HValue*>(NULL),
                              HObjectAccess::ForStringLength());
}
6229 
6230 
// Emits a generic (IC-based) named load or store, optionally preceded by a
// soft deoptimization when the access site had no type feedback yet.
HInstruction* HOptimizedGraphBuilder::BuildNamedGeneric(
    PropertyAccessType access_type,
    HValue* object,
    Handle<String> name,
    HValue* value,
    bool is_uninitialized) {
  if (is_uninitialized) {
    Add<HDeoptimize>("Insufficient type feedback for generic named access",
    // NOTE(review): this Add<HDeoptimize> call is truncated in this copy of
    // the file — the deoptimizer bailout-type argument and closing
    // parenthesis appear to have been dropped; confirm against upstream.
  }
  if (access_type == LOAD) {
    return New<HLoadNamedGeneric>(object, name);
  } else {
    return New<HStoreNamedGeneric>(object, name, value, function_strict_mode());
  }
}
6247 
6248 
6249 
6250 HInstruction* HOptimizedGraphBuilder::BuildKeyedGeneric(
6251  PropertyAccessType access_type,
6252  HValue* object,
6253  HValue* key,
6254  HValue* value) {
6255  if (access_type == LOAD) {
6256  return New<HLoadKeyedGeneric>(object, key);
6257  } else {
6258  return New<HStoreKeyedGeneric>(object, key, value, function_strict_mode());
6259  }
6260 }
6261 
6262 
// Decides whether keyed loads from |map| may return the hole directly
// instead of deoptimizing: allowed for the "stock" fast holey double array
// map when the Array prototype chain is still intact (and guarded so that
// chain changes invalidate this code).
LoadKeyedHoleMode HOptimizedGraphBuilder::BuildKeyedHoleMode(Handle<Map> map) {
  // Loads from a "stock" fast holey double arrays can elide the hole check.
  // NOTE(review): the declaration/initialization of |load_mode| (presumably
  // to the conservative "never return hole" mode) appears to have been
  // dropped from this copy of the file — confirm against upstream.
  if (*map == isolate()->get_initial_js_array_map(FAST_HOLEY_DOUBLE_ELEMENTS) &&
      isolate()->IsFastArrayConstructorPrototypeChainIntact()) {
    Handle<JSObject> prototype(JSObject::cast(map->prototype()), isolate());
    Handle<JSObject> object_prototype = isolate()->initial_object_prototype();
    BuildCheckPrototypeMaps(prototype, object_prototype);
    load_mode = ALLOW_RETURN_HOLE;
    // Make the compiled code depend on the prototype chain staying empty.
    graph()->MarkDependsOnEmptyArrayProtoElements();
  }

  return load_mode;
}
6277 
6278 
// Emits a single-map keyed element access: checks the receiver map, guards
// the prototype chain for stores, then performs the unchecked element
// access with the appropriate hole mode.
HInstruction* HOptimizedGraphBuilder::BuildMonomorphicElementAccess(
    HValue* object,
    HValue* key,
    HValue* val,
    HValue* dependency,
    Handle<Map> map,
    PropertyAccessType access_type,
    KeyedAccessStoreMode store_mode) {
  HCheckMaps* checked_object = Add<HCheckMaps>(object, map, top_info(),
                                               dependency);
  if (dependency) {
    // The dependency already establishes the elements kind; the map check
    // need not additionally depend on it.
    checked_object->ClearDependsOnFlag(kElementsKind);
  }

  if (access_type == STORE && map->prototype()->IsJSObject()) {
    // monomorphic stores need a prototype chain check because shape
    // changes could allow callbacks on elements in the chain that
    // aren't compatible with monomorphic keyed stores.
    Handle<JSObject> prototype(JSObject::cast(map->prototype()));
    Object* holder = map->prototype();
    // Walk to the end of the prototype chain to guard every object on it.
    while (holder->GetPrototype(isolate())->IsJSObject()) {
      holder = holder->GetPrototype(isolate());
    }
    ASSERT(holder->GetPrototype(isolate())->IsNull());

    BuildCheckPrototypeMaps(prototype,
                            Handle<JSObject>(JSObject::cast(holder)));
  }

  LoadKeyedHoleMode load_mode = BuildKeyedHoleMode(map);
  // NOTE(review): the head of the final call (presumably
  // "return BuildUncheckedMonomorphicElementAccess(") appears to have been
  // dropped from this copy of the file — confirm against upstream.
      checked_object, key, val,
      map->instance_type() == JS_ARRAY_TYPE,
      map->elements_kind(), access_type,
      load_mode, store_mode);
}
6315 
6316 
// Attempts to compile a polymorphic element load as one consolidated access
// using the most general compatible elements kind among |maps|. Returns
// NULL when the maps mix incompatible categories (JSArray vs. plain object,
// double vs. smi/object elements, non-fast kinds).
HInstruction* HOptimizedGraphBuilder::TryBuildConsolidatedElementLoad(
    HValue* object,
    HValue* key,
    HValue* val,
    SmallMapList* maps) {
  // For polymorphic loads of similar elements kinds (i.e. all tagged or all
  // double), always use the "worst case" code without a transition. This is
  // much faster than transitioning the elements to the worst case, trading a
  // HTransitionElements for a HCheckMaps, and avoiding mutation of the array.
  bool has_double_maps = false;
  bool has_smi_or_object_maps = false;
  bool has_js_array_access = false;
  bool has_non_js_array_access = false;
  bool has_seen_holey_elements = false;
  Handle<Map> most_general_consolidated_map;
  for (int i = 0; i < maps->length(); ++i) {
    Handle<Map> map = maps->at(i);
    if (!map->IsJSObjectMap()) return NULL;
    // Don't allow mixing of JSArrays with JSObjects.
    if (map->instance_type() == JS_ARRAY_TYPE) {
      if (has_non_js_array_access) return NULL;
      has_js_array_access = true;
    } else if (has_js_array_access) {
      return NULL;
    } else {
      has_non_js_array_access = true;
    }
    // Don't allow mixed, incompatible elements kinds.
    if (map->has_fast_double_elements()) {
      if (has_smi_or_object_maps) return NULL;
      has_double_maps = true;
    } else if (map->has_fast_smi_or_object_elements()) {
      if (has_double_maps) return NULL;
      has_smi_or_object_maps = true;
    } else {
      return NULL;
    }
    // Remember if we've ever seen holey elements.
    if (IsHoleyElementsKind(map->elements_kind())) {
      has_seen_holey_elements = true;
    }
    // Remember the most general elements kind, the code for its load will
    // properly handle all of the more specific cases.
    if ((i == 0) || IsMoreGeneralElementsKindTransition(
            most_general_consolidated_map->elements_kind(),
            map->elements_kind())) {
      most_general_consolidated_map = map;
    }
  }
  if (!has_double_maps && !has_smi_or_object_maps) return NULL;

  HCheckMaps* checked_object = Add<HCheckMaps>(object, maps);
  // FAST_ELEMENTS is considered more general than FAST_HOLEY_SMI_ELEMENTS.
  // If we've seen both, the consolidated load must use FAST_HOLEY_ELEMENTS.
  ElementsKind consolidated_elements_kind = has_seen_holey_elements
      ? GetHoleyElementsKind(most_general_consolidated_map->elements_kind())
      : most_general_consolidated_map->elements_kind();
  HInstruction* instr = BuildUncheckedMonomorphicElementAccess(
      checked_object, key, val,
      most_general_consolidated_map->instance_type() == JS_ARRAY_TYPE,
      consolidated_elements_kind,
      // NOTE(review): the final argument line of this call (presumably the
      // access type, hole mode, and store mode, plus the closing
      // parenthesis) appears to have been dropped from this copy of the
      // file — confirm against upstream.
  return instr;
}
6381 
6382 
6383 HValue* HOptimizedGraphBuilder::HandlePolymorphicElementAccess(
6384  HValue* object,
6385  HValue* key,
6386  HValue* val,
6387  SmallMapList* maps,
6388  PropertyAccessType access_type,
6389  KeyedAccessStoreMode store_mode,
6390  bool* has_side_effects) {
6391  *has_side_effects = false;
6392  BuildCheckHeapObject(object);
6393 
6394  if (access_type == LOAD) {
6395  HInstruction* consolidated_load =
6396  TryBuildConsolidatedElementLoad(object, key, val, maps);
6397  if (consolidated_load != NULL) {
6398  *has_side_effects |= consolidated_load->HasObservableSideEffects();
6399  return consolidated_load;
6400  }
6401  }
6402 
6403  // Elements_kind transition support.
6404  MapHandleList transition_target(maps->length());
6405  // Collect possible transition targets.
6406  MapHandleList possible_transitioned_maps(maps->length());
6407  for (int i = 0; i < maps->length(); ++i) {
6408  Handle<Map> map = maps->at(i);
6409  ElementsKind elements_kind = map->elements_kind();
6410  if (IsFastElementsKind(elements_kind) &&
6411  elements_kind != GetInitialFastElementsKind()) {
6412  possible_transitioned_maps.Add(map);
6413  }
6414  if (elements_kind == SLOPPY_ARGUMENTS_ELEMENTS) {
6415  HInstruction* result = BuildKeyedGeneric(access_type, object, key, val);
6416  *has_side_effects = result->HasObservableSideEffects();
6417  return AddInstruction(result);
6418  }
6419  }
6420  // Get transition target for each map (NULL == no transition).
6421  for (int i = 0; i < maps->length(); ++i) {
6422  Handle<Map> map = maps->at(i);
6423  Handle<Map> transitioned_map =
6424  map->FindTransitionedMap(&possible_transitioned_maps);
6425  transition_target.Add(transitioned_map);
6426  }
6427 
6428  MapHandleList untransitionable_maps(maps->length());
6429  HTransitionElementsKind* transition = NULL;
6430  for (int i = 0; i < maps->length(); ++i) {
6431  Handle<Map> map = maps->at(i);
6432  ASSERT(map->IsMap());
6433  if (!transition_target.at(i).is_null()) {
6435  map->elements_kind(),
6436  transition_target.at(i)->elements_kind()));
6437  transition = Add<HTransitionElementsKind>(object, map,
6438  transition_target.at(i));
6439  } else {
6440  untransitionable_maps.Add(map);
6441  }
6442  }
6443 
6444  // If only one map is left after transitioning, handle this case
6445  // monomorphically.
6446  ASSERT(untransitionable_maps.length() >= 1);
6447  if (untransitionable_maps.length() == 1) {
6448  Handle<Map> untransitionable_map = untransitionable_maps[0];
6449  HInstruction* instr = NULL;
6450  if (untransitionable_map->has_slow_elements_kind() ||
6451  !untransitionable_map->IsJSObjectMap()) {
6452  instr = AddInstruction(BuildKeyedGeneric(access_type, object, key, val));
6453  } else {
6454  instr = BuildMonomorphicElementAccess(
6455  object, key, val, transition, untransitionable_map, access_type,
6456  store_mode);
6457  }
6458  *has_side_effects |= instr->HasObservableSideEffects();
6459  return access_type == STORE ? NULL : instr;
6460  }
6461 
6462  HBasicBlock* join = graph()->CreateBasicBlock();
6463 
6464  for (int i = 0; i < untransitionable_maps.length(); ++i) {
6465  Handle<Map> map = untransitionable_maps[i];
6466  if (!map->IsJSObjectMap()) continue;
6467  ElementsKind elements_kind = map->elements_kind();
6468  HBasicBlock* this_map = graph()->CreateBasicBlock();
6469  HBasicBlock* other_map = graph()->CreateBasicBlock();
6470  HCompareMap* mapcompare =
6471  New<HCompareMap>(object, map, this_map, other_map);
6472  FinishCurrentBlock(mapcompare);
6473 
6474  set_current_block(this_map);
6475  HInstruction* access = NULL;
6476  if (IsDictionaryElementsKind(elements_kind)) {
6477  access = AddInstruction(BuildKeyedGeneric(access_type, object, key, val));
6478  } else {
6479  ASSERT(IsFastElementsKind(elements_kind) ||
6480  IsExternalArrayElementsKind(elements_kind) ||
6481  IsFixedTypedArrayElementsKind(elements_kind));
6482  LoadKeyedHoleMode load_mode = BuildKeyedHoleMode(map);
6483  // Happily, mapcompare is a checked object.
6485  mapcompare, key, val,
6486  map->instance_type() == JS_ARRAY_TYPE,
6487  elements_kind, access_type,
6488  load_mode,
6489  store_mode);
6490  }
6491  *has_side_effects |= access->HasObservableSideEffects();
6492  // The caller will use has_side_effects and add a correct Simulate.
6493  access->SetFlag(HValue::kHasNoObservableSideEffects);
6494  if (access_type == LOAD) {
6495  Push(access);
6496  }
6497  NoObservableSideEffectsScope scope(this);
6498  GotoNoSimulate(join);
6499  set_current_block(other_map);
6500  }
6501 
6502  // Ensure that we visited at least one map above that goes to join. This is
6503  // necessary because FinishExitWithHardDeoptimization does an AbnormalExit
6504  // rather than joining the join block. If this becomes an issue, insert a
6505  // generic access in the case length() == 0.
6506  ASSERT(join->predecessors()->length() > 0);
6507  // Deopt if none of the cases matched.
6508  NoObservableSideEffectsScope scope(this);
6509  FinishExitWithHardDeoptimization("Unknown map in polymorphic element access");
6510  set_current_block(join);
6511  return access_type == STORE ? NULL : Pop();
6512 }
6513 
6514 
6515 HValue* HOptimizedGraphBuilder::HandleKeyedElementAccess(
6516  HValue* obj,
6517  HValue* key,
6518  HValue* val,
6519  Expression* expr,
6520  PropertyAccessType access_type,
6521  bool* has_side_effects) {
6522  ASSERT(!expr->IsPropertyName());
6523  HInstruction* instr = NULL;
6524 
6525  SmallMapList* types;
6526  bool monomorphic = ComputeReceiverTypes(expr, obj, &types, zone());
6527 
6528  bool force_generic = false;
6529  if (access_type == STORE &&
6530  (monomorphic || (types != NULL && !types->is_empty()))) {
6531  // Stores can't be mono/polymorphic if their prototype chain has dictionary
6532  // elements. However a receiver map that has dictionary elements itself
6533  // should be left to normal mono/poly behavior (the other maps may benefit
6534  // from highly optimized stores).
6535  for (int i = 0; i < types->length(); i++) {
6536  Handle<Map> current_map = types->at(i);
6537  if (current_map->DictionaryElementsInPrototypeChainOnly()) {
6538  force_generic = true;
6539  monomorphic = false;
6540  break;
6541  }
6542  }
6543  }
6544 
6545  if (monomorphic) {
6546  Handle<Map> map = types->first();
6547  if (map->has_slow_elements_kind() || !map->IsJSObjectMap()) {
6548  instr = AddInstruction(BuildKeyedGeneric(access_type, obj, key, val));
6549  } else {
6550  BuildCheckHeapObject(obj);
6551  instr = BuildMonomorphicElementAccess(
6552  obj, key, val, NULL, map, access_type, expr->GetStoreMode());
6553  }
6554  } else if (!force_generic && (types != NULL && !types->is_empty())) {
6555  return HandlePolymorphicElementAccess(
6556  obj, key, val, types, access_type,
6557  expr->GetStoreMode(), has_side_effects);
6558  } else {
6559  if (access_type == STORE) {
6560  if (expr->IsAssignment() &&
6561  expr->AsAssignment()->HasNoTypeInformation()) {
6562  Add<HDeoptimize>("Insufficient type feedback for keyed store",
6564  }
6565  } else {
6566  if (expr->AsProperty()->HasNoTypeInformation()) {
6567  Add<HDeoptimize>("Insufficient type feedback for keyed load",
6569  }
6570  }
6571  instr = AddInstruction(BuildKeyedGeneric(access_type, obj, key, val));
6572  }
6573  *has_side_effects = instr->HasObservableSideEffects();
6574  return instr;
6575 }
6576 
6577 
6578 void HOptimizedGraphBuilder::EnsureArgumentsArePushedForAccess() {
6579  // Outermost function already has arguments on the stack.
6580  if (function_state()->outer() == NULL) return;
6581 
6582  if (function_state()->arguments_pushed()) return;
6583 
6584  // Push arguments when entering inlined function.
6585  HEnterInlined* entry = function_state()->entry();
6586  entry->set_arguments_pushed();
6587 
6588  HArgumentsObject* arguments = entry->arguments_object();
6589  const ZoneList<HValue*>* arguments_values = arguments->arguments_values();
6590 
6591  HInstruction* insert_after = entry;
6592  for (int i = 0; i < arguments_values->length(); i++) {
6593  HValue* argument = arguments_values->at(i);
6594  HInstruction* push_argument = New<HPushArgument>(argument);
6595  push_argument->InsertAfter(insert_after);
6596  insert_after = push_argument;
6597  }
6598 
6599  HArgumentsElements* arguments_elements = New<HArgumentsElements>(true);
6600  arguments_elements->ClearFlag(HValue::kUseGVN);
6601  arguments_elements->InsertAfter(insert_after);
6602  function_state()->set_arguments_elements(arguments_elements);
6603 }
6604 
6605 
6606 bool HOptimizedGraphBuilder::TryArgumentsAccess(Property* expr) {
6607  VariableProxy* proxy = expr->obj()->AsVariableProxy();
6608  if (proxy == NULL) return false;
6609  if (!proxy->var()->IsStackAllocated()) return false;
6610  if (!environment()->Lookup(proxy->var())->CheckFlag(HValue::kIsArguments)) {
6611  return false;
6612  }
6613 
6614  HInstruction* result = NULL;
6615  if (expr->key()->IsPropertyName()) {
6616  Handle<String> name = expr->key()->AsLiteral()->AsPropertyName();
6617  if (!name->IsOneByteEqualTo(STATIC_ASCII_VECTOR("length"))) return false;
6618 
6619  if (function_state()->outer() == NULL) {
6620  HInstruction* elements = Add<HArgumentsElements>(false);
6621  result = New<HArgumentsLength>(elements);
6622  } else {
6623  // Number of arguments without receiver.
6624  int argument_count = environment()->
6625  arguments_environment()->parameter_count() - 1;
6626  result = New<HConstant>(argument_count);
6627  }
6628  } else {
6629  Push(graph()->GetArgumentsObject());
6630  CHECK_ALIVE_OR_RETURN(VisitForValue(expr->key()), true);
6631  HValue* key = Pop();
6632  Drop(1); // Arguments object.
6633  if (function_state()->outer() == NULL) {
6634  HInstruction* elements = Add<HArgumentsElements>(false);
6635  HInstruction* length = Add<HArgumentsLength>(elements);
6636  HInstruction* checked_key = Add<HBoundsCheck>(key, length);
6637  result = New<HAccessArgumentsAt>(elements, length, checked_key);
6638  } else {
6639  EnsureArgumentsArePushedForAccess();
6640 
6641  // Number of arguments without receiver.
6642  HInstruction* elements = function_state()->arguments_elements();
6643  int argument_count = environment()->
6644  arguments_environment()->parameter_count() - 1;
6645  HInstruction* length = Add<HConstant>(argument_count);
6646  HInstruction* checked_key = Add<HBoundsCheck>(key, length);
6647  result = New<HAccessArgumentsAt>(elements, length, checked_key);
6648  }
6649  }
6650  ast_context()->ReturnInstruction(result, expr->id());
6651  return true;
6652 }
6653 
6654 
6655 HInstruction* HOptimizedGraphBuilder::BuildNamedAccess(
6656  PropertyAccessType access,
6657  BailoutId ast_id,
6658  BailoutId return_id,
6659  Expression* expr,
6660  HValue* object,
6661  Handle<String> name,
6662  HValue* value,
6663  bool is_uninitialized) {
6664  SmallMapList* types;
6665  ComputeReceiverTypes(expr, object, &types, zone());
6666  ASSERT(types != NULL);
6667 
6668  if (types->length() > 0) {
6669  PropertyAccessInfo info(this, access, ToType(types->first()), name);
6670  if (!info.CanAccessAsMonomorphic(types)) {
6671  HandlePolymorphicNamedFieldAccess(
6672  access, ast_id, return_id, object, value, types, name);
6673  return NULL;
6674  }
6675 
6676  HValue* checked_object;
6677  // Type::Number() is only supported by polymorphic load/call handling.
6678  ASSERT(!info.type()->Is(Type::Number()));
6679  BuildCheckHeapObject(object);
6680  if (AreStringTypes(types)) {
6681  checked_object =
6682  Add<HCheckInstanceType>(object, HCheckInstanceType::IS_STRING);
6683  } else {
6684  checked_object = Add<HCheckMaps>(object, types);
6685  }
6686  return BuildMonomorphicAccess(
6687  &info, object, checked_object, value, ast_id, return_id);
6688  }
6689 
6690  return BuildNamedGeneric(access, object, name, value, is_uninitialized);
6691 }
6692 
6693 
6694 void HOptimizedGraphBuilder::PushLoad(Property* expr,
6695  HValue* object,
6696  HValue* key) {
6697  ValueContext for_value(this, ARGUMENTS_NOT_ALLOWED);
6698  Push(object);
6699  if (key != NULL) Push(key);
6700  BuildLoad(expr, expr->LoadId());
6701 }
6702 
6703 
6704 void HOptimizedGraphBuilder::BuildLoad(Property* expr,
6705  BailoutId ast_id) {
6706  HInstruction* instr = NULL;
6707  if (expr->IsStringAccess()) {
6708  HValue* index = Pop();
6709  HValue* string = Pop();
6710  HInstruction* char_code = BuildStringCharCodeAt(string, index);
6711  AddInstruction(char_code);
6712  instr = NewUncasted<HStringCharFromCode>(char_code);
6713 
6714  } else if (expr->IsFunctionPrototype()) {
6715  HValue* function = Pop();
6716  BuildCheckHeapObject(function);
6717  instr = New<HLoadFunctionPrototype>(function);
6718 
6719  } else if (expr->key()->IsPropertyName()) {
6720  Handle<String> name = expr->key()->AsLiteral()->AsPropertyName();
6721  HValue* object = Pop();
6722 
6723  instr = BuildNamedAccess(LOAD, ast_id, expr->LoadId(), expr,
6724  object, name, NULL, expr->IsUninitialized());
6725  if (instr == NULL) return;
6726  if (instr->IsLinked()) return ast_context()->ReturnValue(instr);
6727 
6728  } else {
6729  HValue* key = Pop();
6730  HValue* obj = Pop();
6731 
6732  bool has_side_effects = false;
6733  HValue* load = HandleKeyedElementAccess(
6734  obj, key, NULL, expr, LOAD, &has_side_effects);
6735  if (has_side_effects) {
6736  if (ast_context()->IsEffect()) {
6737  Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
6738  } else {
6739  Push(load);
6740  Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
6741  Drop(1);
6742  }
6743  }
6744  return ast_context()->ReturnValue(load);
6745  }
6746  return ast_context()->ReturnInstruction(instr, ast_id);
6747 }
6748 
6749 
6750 void HOptimizedGraphBuilder::VisitProperty(Property* expr) {
6751  ASSERT(!HasStackOverflow());
6752  ASSERT(current_block() != NULL);
6753  ASSERT(current_block()->HasPredecessor());
6754 
6755  if (TryArgumentsAccess(expr)) return;
6756 
6757  CHECK_ALIVE(VisitForValue(expr->obj()));
6758  if ((!expr->IsFunctionPrototype() && !expr->key()->IsPropertyName()) ||
6759  expr->IsStringAccess()) {
6760  CHECK_ALIVE(VisitForValue(expr->key()));
6761  }
6762 
6763  BuildLoad(expr, expr->id());
6764 }
6765 
6766 
6768  CompilationInfo* info) {
6769  HConstant* constant_value = New<HConstant>(constant);
6770 
6771  if (constant->map()->CanOmitMapChecks()) {
6772  constant->map()->AddDependentCompilationInfo(
6774  return constant_value;
6775  }
6776 
6777  AddInstruction(constant_value);
6778  HCheckMaps* check =
6779  Add<HCheckMaps>(constant_value, handle(constant->map()), info);
6780  check->ClearDependsOnFlag(kElementsKind);
6781  return check;
6782 }
6783 
6784 
6786  Handle<JSObject> holder) {
6787  while (!prototype.is_identical_to(holder)) {
6788  BuildConstantMapCheck(prototype, top_info());
6789  prototype = handle(JSObject::cast(prototype->GetPrototype()));
6790  }
6791 
6792  HInstruction* checked_object = BuildConstantMapCheck(prototype, top_info());
6793  if (!checked_object->IsLinked()) AddInstruction(checked_object);
6794  return checked_object;
6795 }
6796 
6797 
6798 void HOptimizedGraphBuilder::AddCheckPrototypeMaps(Handle<JSObject> holder,
6799  Handle<Map> receiver_map) {
6800  if (!holder.is_null()) {
6801  Handle<JSObject> prototype(JSObject::cast(receiver_map->prototype()));
6802  BuildCheckPrototypeMaps(prototype, holder);
6803  }
6804 }
6805 
6806 
6807 HInstruction* HOptimizedGraphBuilder::NewPlainFunctionCall(
6808  HValue* fun, int argument_count, bool pass_argument_count) {
6809  return New<HCallJSFunction>(
6810  fun, argument_count, pass_argument_count);
6811 }
6812 
6813 
6814 HInstruction* HOptimizedGraphBuilder::NewArgumentAdaptorCall(
6815  HValue* fun, HValue* context,
6816  int argument_count, HValue* expected_param_count) {
6817  CallInterfaceDescriptor* descriptor =
6819 
6820  HValue* arity = Add<HConstant>(argument_count - 1);
6821 
6822  HValue* op_vals[] = { fun, context, arity, expected_param_count };
6823 
6824  Handle<Code> adaptor =
6825  isolate()->builtins()->ArgumentsAdaptorTrampoline();
6826  HConstant* adaptor_value = Add<HConstant>(adaptor);
6827 
6828  return New<HCallWithDescriptor>(
6829  adaptor_value, argument_count, descriptor,
6830  Vector<HValue*>(op_vals, descriptor->environment_length()));
6831 }
6832 
6833 
6834 HInstruction* HOptimizedGraphBuilder::BuildCallConstantFunction(
6835  Handle<JSFunction> jsfun, int argument_count) {
6836  HValue* target = Add<HConstant>(jsfun);
6837  // For constant functions, we try to avoid calling the
6838  // argument adaptor and instead call the function directly
6839  int formal_parameter_count = jsfun->shared()->formal_parameter_count();
6840  bool dont_adapt_arguments =
6841  (formal_parameter_count ==
6843  int arity = argument_count - 1;
6844  bool can_invoke_directly =
6845  dont_adapt_arguments || formal_parameter_count == arity;
6846  if (can_invoke_directly) {
6847  if (jsfun.is_identical_to(current_info()->closure())) {
6848  graph()->MarkRecursive();
6849  }
6850  return NewPlainFunctionCall(target, argument_count, dont_adapt_arguments);
6851  } else {
6852  HValue* param_count_value = Add<HConstant>(formal_parameter_count);
6853  HValue* context = Add<HLoadNamedField>(
6854  target, static_cast<HValue*>(NULL),
6855  HObjectAccess::ForFunctionContextPointer());
6856  return NewArgumentAdaptorCall(target, context,
6857  argument_count, param_count_value);
6858  }
6859  UNREACHABLE();
6860  return NULL;
6861 }
6862 
6863 
6864 void HOptimizedGraphBuilder::HandlePolymorphicCallNamed(
6865  Call* expr,
6866  HValue* receiver,
6867  SmallMapList* types,
6868  Handle<String> name) {
6869  int argument_count = expr->arguments()->length() + 1; // Includes receiver.
6870  int order[kMaxCallPolymorphism];
6871 
6872  bool handle_smi = false;
6873  bool handled_string = false;
6874  int ordered_functions = 0;
6875 
6876  for (int i = 0;
6877  i < types->length() && ordered_functions < kMaxCallPolymorphism;
6878  ++i) {
6879  PropertyAccessInfo info(this, LOAD, ToType(types->at(i)), name);
6880  if (info.CanAccessMonomorphic() &&
6881  info.lookup()->IsConstant() &&
6882  info.constant()->IsJSFunction()) {
6883  if (info.type()->Is(Type::String())) {
6884  if (handled_string) continue;
6885  handled_string = true;
6886  }
6887  Handle<JSFunction> target = Handle<JSFunction>::cast(info.constant());
6888  if (info.type()->Is(Type::Number())) {
6889  handle_smi = true;
6890  }
6891  expr->set_target(target);
6892  order[ordered_functions++] = i;
6893  }
6894  }
6895 
6896  HBasicBlock* number_block = NULL;
6897  HBasicBlock* join = NULL;
6898  handled_string = false;
6899  int count = 0;
6900 
6901  for (int fn = 0; fn < ordered_functions; ++fn) {
6902  int i = order[fn];
6903  PropertyAccessInfo info(this, LOAD, ToType(types->at(i)), name);
6904  if (info.type()->Is(Type::String())) {
6905  if (handled_string) continue;
6906  handled_string = true;
6907  }
6908  // Reloads the target.
6909  info.CanAccessMonomorphic();
6910  Handle<JSFunction> target = Handle<JSFunction>::cast(info.constant());
6911 
6912  expr->set_target(target);
6913  if (count == 0) {
6914  // Only needed once.
6915  join = graph()->CreateBasicBlock();
6916  if (handle_smi) {
6917  HBasicBlock* empty_smi_block = graph()->CreateBasicBlock();
6918  HBasicBlock* not_smi_block = graph()->CreateBasicBlock();
6919  number_block = graph()->CreateBasicBlock();
6920  FinishCurrentBlock(New<HIsSmiAndBranch>(
6921  receiver, empty_smi_block, not_smi_block));
6922  GotoNoSimulate(empty_smi_block, number_block);
6923  set_current_block(not_smi_block);
6924  } else {
6925  BuildCheckHeapObject(receiver);
6926  }
6927  }
6928  ++count;
6929  HBasicBlock* if_true = graph()->CreateBasicBlock();
6930  HBasicBlock* if_false = graph()->CreateBasicBlock();
6931  HUnaryControlInstruction* compare;
6932 
6933  Handle<Map> map = info.map();
6934  if (info.type()->Is(Type::Number())) {
6935  Handle<Map> heap_number_map = isolate()->factory()->heap_number_map();
6936  compare = New<HCompareMap>(receiver, heap_number_map, if_true, if_false);
6937  } else if (info.type()->Is(Type::String())) {
6938  compare = New<HIsStringAndBranch>(receiver, if_true, if_false);
6939  } else {
6940  compare = New<HCompareMap>(receiver, map, if_true, if_false);
6941  }
6942  FinishCurrentBlock(compare);
6943 
6944  if (info.type()->Is(Type::Number())) {
6945  GotoNoSimulate(if_true, number_block);
6946  if_true = number_block;
6947  }
6948 
6949  set_current_block(if_true);
6950 
6951  AddCheckPrototypeMaps(info.holder(), map);
6952 
6953  HValue* function = Add<HConstant>(expr->target());
6954  environment()->SetExpressionStackAt(0, function);
6955  Push(receiver);
6956  CHECK_ALIVE(VisitExpressions(expr->arguments()));
6957  bool needs_wrapping = NeedsWrappingFor(info.type(), target);
6958  bool try_inline = FLAG_polymorphic_inlining && !needs_wrapping;
6959  if (FLAG_trace_inlining && try_inline) {
6960  Handle<JSFunction> caller = current_info()->closure();
6961  SmartArrayPointer<char> caller_name =
6962  caller->shared()->DebugName()->ToCString();
6963  PrintF("Trying to inline the polymorphic call to %s from %s\n",
6964  name->ToCString().get(),
6965  caller_name.get());
6966  }
6967  if (try_inline && TryInlineCall(expr)) {
6968  // Trying to inline will signal that we should bailout from the
6969  // entire compilation by setting stack overflow on the visitor.
6970  if (HasStackOverflow()) return;
6971  } else {
6972  // Since HWrapReceiver currently cannot actually wrap numbers and strings,
6973  // use the regular CallFunctionStub for method calls to wrap the receiver.
6974  // TODO(verwaest): Support creation of value wrappers directly in
6975  // HWrapReceiver.
6976  HInstruction* call = needs_wrapping
6977  ? NewUncasted<HCallFunction>(
6978  function, argument_count, WRAP_AND_CALL)
6979  : BuildCallConstantFunction(target, argument_count);
6980  PushArgumentsFromEnvironment(argument_count);
6981  AddInstruction(call);
6982  Drop(1); // Drop the function.
6983  if (!ast_context()->IsEffect()) Push(call);
6984  }
6985 
6986  if (current_block() != NULL) Goto(join);
6987  set_current_block(if_false);
6988  }
6989 
6990  // Finish up. Unconditionally deoptimize if we've handled all the maps we
6991  // know about and do not want to handle ones we've never seen. Otherwise
6992  // use a generic IC.
6993  if (ordered_functions == types->length() && FLAG_deoptimize_uncommon_cases) {
6994  FinishExitWithHardDeoptimization("Unknown map in polymorphic call");
6995  } else {
6996  Property* prop = expr->expression()->AsProperty();
6997  HInstruction* function = BuildNamedGeneric(
6998  LOAD, receiver, name, NULL, prop->IsUninitialized());
6999  AddInstruction(function);
7000  Push(function);
7001  AddSimulate(prop->LoadId(), REMOVABLE_SIMULATE);
7002 
7003  environment()->SetExpressionStackAt(1, function);
7004  environment()->SetExpressionStackAt(0, receiver);
7005  CHECK_ALIVE(VisitExpressions(expr->arguments()));
7006 
7007  CallFunctionFlags flags = receiver->type().IsJSObject()
7009  HInstruction* call = New<HCallFunction>(
7010  function, argument_count, flags);
7011 
7012  PushArgumentsFromEnvironment(argument_count);
7013 
7014  Drop(1); // Function.
7015 
7016  if (join != NULL) {
7017  AddInstruction(call);
7018  if (!ast_context()->IsEffect()) Push(call);
7019  Goto(join);
7020  } else {
7021  return ast_context()->ReturnInstruction(call, expr->id());
7022  }
7023  }
7024 
7025  // We assume that control flow is always live after an expression. So
7026  // even without predecessors to the join block, we set it as the exit
7027  // block and continue by adding instructions there.
7028  ASSERT(join != NULL);
7029  if (join->HasPredecessor()) {
7030  set_current_block(join);
7031  join->SetJoinId(expr->id());
7032  if (!ast_context()->IsEffect()) return ast_context()->ReturnValue(Pop());
7033  } else {
7034  set_current_block(NULL);
7035  }
7036 }
7037 
7038 
7039 void HOptimizedGraphBuilder::TraceInline(Handle<JSFunction> target,
7040  Handle<JSFunction> caller,
7041  const char* reason) {
7042  if (FLAG_trace_inlining) {
7043  SmartArrayPointer<char> target_name =
7044  target->shared()->DebugName()->ToCString();
7045  SmartArrayPointer<char> caller_name =
7046  caller->shared()->DebugName()->ToCString();
7047  if (reason == NULL) {
7048  PrintF("Inlined %s called from %s.\n", target_name.get(),
7049  caller_name.get());
7050  } else {
7051  PrintF("Did not inline %s called from %s (%s).\n",
7052  target_name.get(), caller_name.get(), reason);
7053  }
7054  }
7055 }
7056 
7057 
// Sentinel AST-size value returned by InliningAstSize to mean
// "this function must not be inlined".
static const int kNotInlinable = 1000000000;
7059 
7060 
7061 int HOptimizedGraphBuilder::InliningAstSize(Handle<JSFunction> target) {
7062  if (!FLAG_use_inlining) return kNotInlinable;
7063 
7064  // Precondition: call is monomorphic and we have found a target with the
7065  // appropriate arity.
7066  Handle<JSFunction> caller = current_info()->closure();
7067  Handle<SharedFunctionInfo> target_shared(target->shared());
7068 
7069  // Always inline builtins marked for inlining.
7070  if (target->IsBuiltin()) {
7071  return target_shared->inline_builtin() ? 0 : kNotInlinable;
7072  }
7073 
7074  // Do a quick check on source code length to avoid parsing large
7075  // inlining candidates.
7076  if (target_shared->SourceSize() >
7077  Min(FLAG_max_inlined_source_size, kUnlimitedMaxInlinedSourceSize)) {
7078  TraceInline(target, caller, "target text too big");
7079  return kNotInlinable;
7080  }
7081 
7082  // Target must be inlineable.
7083  if (!target_shared->IsInlineable()) {
7084  TraceInline(target, caller, "target not inlineable");
7085  return kNotInlinable;
7086  }
7087  if (target_shared->dont_inline() || target_shared->dont_optimize()) {
7088  TraceInline(target, caller, "target contains unsupported syntax [early]");
7089  return kNotInlinable;
7090  }
7091 
7092  int nodes_added = target_shared->ast_node_count();
7093  return nodes_added;
7094 }
7095 
7096 
7097 bool HOptimizedGraphBuilder::TryInline(Handle<JSFunction> target,
7098  int arguments_count,
7099  HValue* implicit_return_value,
7100  BailoutId ast_id,
7101  BailoutId return_id,
7102  InliningKind inlining_kind,
7103  HSourcePosition position) {
7104  int nodes_added = InliningAstSize(target);
7105  if (nodes_added == kNotInlinable) return false;
7106 
7107  Handle<JSFunction> caller = current_info()->closure();
7108 
7109  if (nodes_added > Min(FLAG_max_inlined_nodes, kUnlimitedMaxInlinedNodes)) {
7110  TraceInline(target, caller, "target AST is too large [early]");
7111  return false;
7112  }
7113 
7114  // Don't inline deeper than the maximum number of inlining levels.
7115  HEnvironment* env = environment();
7116  int current_level = 1;
7117  while (env->outer() != NULL) {
7118  if (current_level == FLAG_max_inlining_levels) {
7119  TraceInline(target, caller, "inline depth limit reached");
7120  return false;
7121  }
7122  if (env->outer()->frame_type() == JS_FUNCTION) {
7123  current_level++;
7124  }
7125  env = env->outer();
7126  }
7127 
7128  // Don't inline recursive functions.
7129  for (FunctionState* state = function_state();
7130  state != NULL;
7131  state = state->outer()) {
7132  if (*state->compilation_info()->closure() == *target) {
7133  TraceInline(target, caller, "target is recursive");
7134  return false;
7135  }
7136  }
7137 
7138  // We don't want to add more than a certain number of nodes from inlining.
7139  if (inlined_count_ > Min(FLAG_max_inlined_nodes_cumulative,
7141  TraceInline(target, caller, "cumulative AST node limit reached");
7142  return false;
7143  }
7144 
7145  // Parse and allocate variables.
7146  CompilationInfo target_info(target, zone());
7147  Handle<SharedFunctionInfo> target_shared(target->shared());
7148  if (!Parser::Parse(&target_info) || !Scope::Analyze(&target_info)) {
7149  if (target_info.isolate()->has_pending_exception()) {
7150  // Parse or scope error, never optimize this function.
7151  SetStackOverflow();
7152  target_shared->DisableOptimization(kParseScopeError);
7153  }
7154  TraceInline(target, caller, "parse failure");
7155  return false;
7156  }
7157 
7158  if (target_info.scope()->num_heap_slots() > 0) {
7159  TraceInline(target, caller, "target has context-allocated variables");
7160  return false;
7161  }
7162  FunctionLiteral* function = target_info.function();
7163 
7164  // The following conditions must be checked again after re-parsing, because
7165  // earlier the information might not have been complete due to lazy parsing.
7166  nodes_added = function->ast_node_count();
7167  if (nodes_added > Min(FLAG_max_inlined_nodes, kUnlimitedMaxInlinedNodes)) {
7168  TraceInline(target, caller, "target AST is too large [late]");
7169  return false;
7170  }
7171  AstProperties::Flags* flags(function->flags());
7172  if (flags->Contains(kDontInline) || function->dont_optimize()) {
7173  TraceInline(target, caller, "target contains unsupported syntax [late]");
7174  return false;
7175  }
7176 
7177  // If the function uses the arguments object check that inlining of functions
7178  // with arguments object is enabled and the arguments-variable is
7179  // stack allocated.
7180  if (function->scope()->arguments() != NULL) {
7181  if (!FLAG_inline_arguments) {
7182  TraceInline(target, caller, "target uses arguments object");
7183  return false;
7184  }
7185 
7186  if (!function->scope()->arguments()->IsStackAllocated()) {
7187  TraceInline(target,
7188  caller,
7189  "target uses non-stackallocated arguments object");
7190  return false;
7191  }
7192  }
7193 
7194  // All declarations must be inlineable.
7195  ZoneList<Declaration*>* decls = target_info.scope()->declarations();
7196  int decl_count = decls->length();
7197  for (int i = 0; i < decl_count; ++i) {
7198  if (!decls->at(i)->IsInlineable()) {
7199  TraceInline(target, caller, "target has non-trivial declaration");
7200  return false;
7201  }
7202  }
7203 
7204  // Generate the deoptimization data for the unoptimized version of
7205  // the target function if we don't already have it.
7206  if (!target_shared->has_deoptimization_support()) {
7207  // Note that we compile here using the same AST that we will use for
7208  // generating the optimized inline code.
7209  target_info.EnableDeoptimizationSupport();
7210  if (!FullCodeGenerator::MakeCode(&target_info)) {
7211  TraceInline(target, caller, "could not generate deoptimization info");
7212  return false;
7213  }
7214  if (target_shared->scope_info() == ScopeInfo::Empty(isolate())) {
7215  // The scope info might not have been set if a lazily compiled
7216  // function is inlined before being called for the first time.
7217  Handle<ScopeInfo> target_scope_info =
7218  ScopeInfo::Create(target_info.scope(), zone());
7219  target_shared->set_scope_info(*target_scope_info);
7220  }
7221  target_shared->EnableDeoptimizationSupport(*target_info.code());
7222  Compiler::RecordFunctionCompilation(Logger::FUNCTION_TAG,
7223  &target_info,
7224  target_shared);
7225  }
7226 
7227  // ----------------------------------------------------------------
7228  // After this point, we've made a decision to inline this function (so
7229  // TryInline should always return true).
7230 
7231  // Type-check the inlined function.
7232  ASSERT(target_shared->has_deoptimization_support());
7233  AstTyper::Run(&target_info);
7234 
7235  int function_id = graph()->TraceInlinedFunction(target_shared, position);
7236 
7237  // Save the pending call context. Set up new one for the inlined function.
7238  // The function state is new-allocated because we need to delete it
7239  // in two different places.
7240  FunctionState* target_state = new FunctionState(
7241  this, &target_info, inlining_kind, function_id);
7242 
7243  HConstant* undefined = graph()->GetConstantUndefined();
7244 
7245  HEnvironment* inner_env =
7246  environment()->CopyForInlining(target,
7247  arguments_count,
7248  function,
7249  undefined,
7250  function_state()->inlining_kind());
7251 
7252  HConstant* context = Add<HConstant>(Handle<Context>(target->context()));
7253  inner_env->BindContext(context);
7254 
7255  Add<HSimulate>(return_id);
7256  current_block()->UpdateEnvironment(inner_env);
7257  HArgumentsObject* arguments_object = NULL;
7258 
7259  // If the function uses arguments object create and bind one, also copy
7260  // current arguments values to use them for materialization.
7261  if (function->scope()->arguments() != NULL) {
7262  ASSERT(function->scope()->arguments()->IsStackAllocated());
7263  HEnvironment* arguments_env = inner_env->arguments_environment();
7264  int arguments_count = arguments_env->parameter_count();
7265  arguments_object = Add<HArgumentsObject>(arguments_count);
7266  inner_env->Bind(function->scope()->arguments(), arguments_object);
7267  for (int i = 0; i < arguments_count; i++) {
7268  arguments_object->AddArgument(arguments_env->Lookup(i), zone());
7269  }
7270  }
7271 
7272  HEnterInlined* enter_inlined =
7273  Add<HEnterInlined>(target, arguments_count, function,
7274  function_state()->inlining_kind(),
7275  function->scope()->arguments(),
7276  arguments_object);
7277  function_state()->set_entry(enter_inlined);
7278 
7279  VisitDeclarations(target_info.scope()->declarations());
7280  VisitStatements(function->body());
7281  if (HasStackOverflow()) {
7282  // Bail out if the inline function did, as we cannot residualize a call
7283  // instead.
7284  TraceInline(target, caller, "inline graph construction failed");
7285  target_shared->DisableOptimization(kInliningBailedOut);
7286  inline_bailout_ = true;
7287  delete target_state;
7288  return true;
7289  }
7290 
7291  // Update inlined nodes count.
7292  inlined_count_ += nodes_added;
7293 
7294  Handle<Code> unoptimized_code(target_shared->code());
7295  ASSERT(unoptimized_code->kind() == Code::FUNCTION);
7296  Handle<TypeFeedbackInfo> type_info(
7297  TypeFeedbackInfo::cast(unoptimized_code->type_feedback_info()));
7298  graph()->update_type_change_checksum(type_info->own_type_change_checksum());
7299 
7300  TraceInline(target, caller, NULL);
7301 
7302  if (current_block() != NULL) {
7303  FunctionState* state = function_state();
7304  if (state->inlining_kind() == CONSTRUCT_CALL_RETURN) {
7305  // Falling off the end of an inlined construct call. In a test context the
7306  // return value will always evaluate to true, in a value context the
7307  // return value is the newly allocated receiver.
7308  if (call_context()->IsTest()) {
7309  Goto(inlined_test_context()->if_true(), state);
7310  } else if (call_context()->IsEffect()) {
7311  Goto(function_return(), state);
7312  } else {
7313  ASSERT(call_context()->IsValue());
7314  AddLeaveInlined(implicit_return_value, state);
7315  }
7316  } else if (state->inlining_kind() == SETTER_CALL_RETURN) {
7317  // Falling off the end of an inlined setter call. The returned value is
7318  // never used, the value of an assignment is always the value of the RHS
7319  // of the assignment.
7320  if (call_context()->IsTest()) {
7321  inlined_test_context()->ReturnValue(implicit_return_value);
7322  } else if (call_context()->IsEffect()) {
7323  Goto(function_return(), state);
7324  } else {
7325  ASSERT(call_context()->IsValue());
7326  AddLeaveInlined(implicit_return_value, state);
7327  }
7328  } else {
7329  // Falling off the end of a normal inlined function. This basically means
7330  // returning undefined.
7331  if (call_context()->IsTest()) {
7332  Goto(inlined_test_context()->if_false(), state);
7333  } else if (call_context()->IsEffect()) {
7334  Goto(function_return(), state);
7335  } else {
7336  ASSERT(call_context()->IsValue());
7337  AddLeaveInlined(undefined, state);
7338  }
7339  }
7340  }
7341 
7342  // Fix up the function exits.
7343  if (inlined_test_context() != NULL) {
7344  HBasicBlock* if_true = inlined_test_context()->if_true();
7345  HBasicBlock* if_false = inlined_test_context()->if_false();
7346 
7347  HEnterInlined* entry = function_state()->entry();
7348 
7349  // Pop the return test context from the expression context stack.
7352  delete target_state;
7353 
7354  // Forward to the real test context.
7355  if (if_true->HasPredecessor()) {
7356  entry->RegisterReturnTarget(if_true, zone());
7357  if_true->SetJoinId(ast_id);
7358  HBasicBlock* true_target = TestContext::cast(ast_context())->if_true();
7359  Goto(if_true, true_target, function_state());
7360  }
7361  if (if_false->HasPredecessor()) {
7362  entry->RegisterReturnTarget(if_false, zone());
7363  if_false->SetJoinId(ast_id);
7364  HBasicBlock* false_target = TestContext::cast(ast_context())->if_false();
7365  Goto(if_false, false_target, function_state());
7366  }
7367  set_current_block(NULL);
7368  return true;
7369 
7370  } else if (function_return()->HasPredecessor()) {
7371  function_state()->entry()->RegisterReturnTarget(function_return(), zone());
7372  function_return()->SetJoinId(ast_id);
7374  } else {
7375  set_current_block(NULL);
7376  }
7377  delete target_state;
7378  return true;
7379 }
7380 
7381 
7382 bool HOptimizedGraphBuilder::TryInlineCall(Call* expr) {
7383  return TryInline(expr->target(),
7384  expr->arguments()->length(),
7385  NULL,
7386  expr->id(),
7387  expr->ReturnId(),
7388  NORMAL_RETURN,
7389  ScriptPositionToSourcePosition(expr->position()));
7390 }
7391 
7392 
7393 bool HOptimizedGraphBuilder::TryInlineConstruct(CallNew* expr,
7394  HValue* implicit_return_value) {
7395  return TryInline(expr->target(),
7396  expr->arguments()->length(),
7397  implicit_return_value,
7398  expr->id(),
7399  expr->ReturnId(),
7401  ScriptPositionToSourcePosition(expr->position()));
7402 }
7403 
7404 
7405 bool HOptimizedGraphBuilder::TryInlineGetter(Handle<JSFunction> getter,
7406  Handle<Map> receiver_map,
7407  BailoutId ast_id,
7408  BailoutId return_id) {
7409  if (TryInlineApiGetter(getter, receiver_map, ast_id)) return true;
7410  return TryInline(getter,
7411  0,
7412  NULL,
7413  ast_id,
7414  return_id,
7416  source_position());
7417 }
7418 
7419 
7420 bool HOptimizedGraphBuilder::TryInlineSetter(Handle<JSFunction> setter,
7421  Handle<Map> receiver_map,
7422  BailoutId id,
7423  BailoutId assignment_id,
7424  HValue* implicit_return_value) {
7425  if (TryInlineApiSetter(setter, receiver_map, id)) return true;
7426  return TryInline(setter,
7427  1,
7428  implicit_return_value,
7429  id, assignment_id,
7431  source_position());
7432 }
7433 
7434 
7435 bool HOptimizedGraphBuilder::TryInlineApply(Handle<JSFunction> function,
7436  Call* expr,
7437  int arguments_count) {
7438  return TryInline(function,
7439  arguments_count,
7440  NULL,
7441  expr->id(),
7442  expr->ReturnId(),
7443  NORMAL_RETURN,
7444  ScriptPositionToSourcePosition(expr->position()));
7445 }
7446 
7447 
7448 bool HOptimizedGraphBuilder::TryInlineBuiltinFunctionCall(Call* expr) {
7449  if (!expr->target()->shared()->HasBuiltinFunctionId()) return false;
7450  BuiltinFunctionId id = expr->target()->shared()->builtin_function_id();
7451  switch (id) {
7452  case kMathExp:
7453  if (!FLAG_fast_math) break;
7454  // Fall through if FLAG_fast_math.
7455  case kMathRound:
7456  case kMathFloor:
7457  case kMathAbs:
7458  case kMathSqrt:
7459  case kMathLog:
7460  case kMathClz32:
7461  if (expr->arguments()->length() == 1) {
7462  HValue* argument = Pop();
7463  Drop(2); // Receiver and function.
7464  HInstruction* op = NewUncasted<HUnaryMathOperation>(argument, id);
7465  ast_context()->ReturnInstruction(op, expr->id());
7466  return true;
7467  }
7468  break;
7469  case kMathImul:
7470  if (expr->arguments()->length() == 2) {
7471  HValue* right = Pop();
7472  HValue* left = Pop();
7473  Drop(2); // Receiver and function.
7474  HInstruction* op = HMul::NewImul(zone(), context(), left, right);
7475  ast_context()->ReturnInstruction(op, expr->id());
7476  return true;
7477  }
7478  break;
7479  default:
7480  // Not supported for inlining yet.
7481  break;
7482  }
7483  return false;
7484 }
7485 
7486 
7487 bool HOptimizedGraphBuilder::TryInlineBuiltinMethodCall(
7488  Call* expr,
7489  HValue* receiver,
7490  Handle<Map> receiver_map) {
7491  // Try to inline calls like Math.* as operations in the calling function.
7492  if (!expr->target()->shared()->HasBuiltinFunctionId()) return false;
7493  BuiltinFunctionId id = expr->target()->shared()->builtin_function_id();
7494  int argument_count = expr->arguments()->length() + 1; // Plus receiver.
7495  switch (id) {
7496  case kStringCharCodeAt:
7497  case kStringCharAt:
7498  if (argument_count == 2) {
7499  HValue* index = Pop();
7500  HValue* string = Pop();
7501  Drop(1); // Function.
7502  HInstruction* char_code =
7503  BuildStringCharCodeAt(string, index);
7504  if (id == kStringCharCodeAt) {
7505  ast_context()->ReturnInstruction(char_code, expr->id());
7506  return true;
7507  }
7508  AddInstruction(char_code);
7509  HInstruction* result = NewUncasted<HStringCharFromCode>(char_code);
7510  ast_context()->ReturnInstruction(result, expr->id());
7511  return true;
7512  }
7513  break;
7514  case kStringFromCharCode:
7515  if (argument_count == 2) {
7516  HValue* argument = Pop();
7517  Drop(2); // Receiver and function.
7518  HInstruction* result = NewUncasted<HStringCharFromCode>(argument);
7519  ast_context()->ReturnInstruction(result, expr->id());
7520  return true;
7521  }
7522  break;
7523  case kMathExp:
7524  if (!FLAG_fast_math) break;
7525  // Fall through if FLAG_fast_math.
7526  case kMathRound:
7527  case kMathFloor:
7528  case kMathAbs:
7529  case kMathSqrt:
7530  case kMathLog:
7531  case kMathClz32:
7532  if (argument_count == 2) {
7533  HValue* argument = Pop();
7534  Drop(2); // Receiver and function.
7535  HInstruction* op = NewUncasted<HUnaryMathOperation>(argument, id);
7536  ast_context()->ReturnInstruction(op, expr->id());
7537  return true;
7538  }
7539  break;
7540  case kMathPow:
7541  if (argument_count == 3) {
7542  HValue* right = Pop();
7543  HValue* left = Pop();
7544  Drop(2); // Receiver and function.
7545  HInstruction* result = NULL;
7546  // Use sqrt() if exponent is 0.5 or -0.5.
7547  if (right->IsConstant() && HConstant::cast(right)->HasDoubleValue()) {
7548  double exponent = HConstant::cast(right)->DoubleValue();
7549  if (exponent == 0.5) {
7550  result = NewUncasted<HUnaryMathOperation>(left, kMathPowHalf);
7551  } else if (exponent == -0.5) {
7552  HValue* one = graph()->GetConstant1();
7553  HInstruction* sqrt = AddUncasted<HUnaryMathOperation>(
7554  left, kMathPowHalf);
7555  // MathPowHalf doesn't have side effects so there's no need for
7556  // an environment simulation here.
7557  ASSERT(!sqrt->HasObservableSideEffects());
7558  result = NewUncasted<HDiv>(one, sqrt);
7559  } else if (exponent == 2.0) {
7560  result = NewUncasted<HMul>(left, left);
7561  }
7562  }
7563 
7564  if (result == NULL) {
7565  result = NewUncasted<HPower>(left, right);
7566  }
7567  ast_context()->ReturnInstruction(result, expr->id());
7568  return true;
7569  }
7570  break;
7571  case kMathMax:
7572  case kMathMin:
7573  if (argument_count == 3) {
7574  HValue* right = Pop();
7575  HValue* left = Pop();
7576  Drop(2); // Receiver and function.
7577  HMathMinMax::Operation op = (id == kMathMin) ? HMathMinMax::kMathMin
7578  : HMathMinMax::kMathMax;
7579  HInstruction* result = NewUncasted<HMathMinMax>(left, right, op);
7580  ast_context()->ReturnInstruction(result, expr->id());
7581  return true;
7582  }
7583  break;
7584  case kMathImul:
7585  if (argument_count == 3) {
7586  HValue* right = Pop();
7587  HValue* left = Pop();
7588  Drop(2); // Receiver and function.
7589  HInstruction* result = HMul::NewImul(zone(), context(), left, right);
7590  ast_context()->ReturnInstruction(result, expr->id());
7591  return true;
7592  }
7593  break;
7594  case kArrayPop: {
7595  if (receiver_map.is_null()) return false;
7596  if (receiver_map->instance_type() != JS_ARRAY_TYPE) return false;
7597  ElementsKind elements_kind = receiver_map->elements_kind();
7598  if (!IsFastElementsKind(elements_kind)) return false;
7599 
7600  Drop(expr->arguments()->length());
7601  HValue* result;
7602  HValue* reduced_length;
7603  HValue* receiver = Pop();
7604 
7605  HValue* checked_object = AddCheckMap(receiver, receiver_map);
7606  HValue* length = Add<HLoadNamedField>(
7607  checked_object, static_cast<HValue*>(NULL),
7608  HObjectAccess::ForArrayLength(elements_kind));
7609 
7610  Drop(1); // Function.
7611 
7612  { NoObservableSideEffectsScope scope(this);
7613  IfBuilder length_checker(this);
7614 
7615  HValue* bounds_check = length_checker.If<HCompareNumericAndBranch>(
7616  length, graph()->GetConstant0(), Token::EQ);
7617  length_checker.Then();
7618 
7619  if (!ast_context()->IsEffect()) Push(graph()->GetConstantUndefined());
7620 
7621  length_checker.Else();
7622  HValue* elements = AddLoadElements(checked_object);
7623  // Ensure that we aren't popping from a copy-on-write array.
7624  if (IsFastSmiOrObjectElementsKind(elements_kind)) {
7625  elements = BuildCopyElementsOnWrite(checked_object, elements,
7626  elements_kind, length);
7627  }
7628  reduced_length = AddUncasted<HSub>(length, graph()->GetConstant1());
7629  result = AddElementAccess(elements, reduced_length, NULL,
7630  bounds_check, elements_kind, LOAD);
7631  Factory* factory = isolate()->factory();
7632  double nan_double = FixedDoubleArray::hole_nan_as_double();
7633  HValue* hole = IsFastSmiOrObjectElementsKind(elements_kind)
7634  ? Add<HConstant>(factory->the_hole_value())
7635  : Add<HConstant>(nan_double);
7636  if (IsFastSmiOrObjectElementsKind(elements_kind)) {
7637  elements_kind = FAST_HOLEY_ELEMENTS;
7638  }
7640  elements, reduced_length, hole, bounds_check, elements_kind, STORE);
7641  Add<HStoreNamedField>(
7642  checked_object, HObjectAccess::ForArrayLength(elements_kind),
7643  reduced_length, STORE_TO_INITIALIZED_ENTRY);
7644 
7645  if (!ast_context()->IsEffect()) Push(result);
7646 
7647  length_checker.End();
7648  }
7649  result = ast_context()->IsEffect() ? graph()->GetConstant0() : Top();
7650  Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
7651  if (!ast_context()->IsEffect()) Drop(1);
7652 
7653  ast_context()->ReturnValue(result);
7654  return true;
7655  }
7656  case kArrayPush: {
7657  if (receiver_map.is_null()) return false;
7658  if (receiver_map->instance_type() != JS_ARRAY_TYPE) return false;
7659  ElementsKind elements_kind = receiver_map->elements_kind();
7660  if (!IsFastElementsKind(elements_kind)) return false;
7661 
7662  HValue* op_vals[] = {
7663  context(),
7664  // Receiver.
7665  environment()->ExpressionStackAt(expr->arguments()->length())
7666  };
7667 
7668  const int argc = expr->arguments()->length();
7669  // Includes receiver.
7670  PushArgumentsFromEnvironment(argc + 1);
7671 
7672  CallInterfaceDescriptor* descriptor =
7674 
7675  ArrayPushStub stub(receiver_map->elements_kind(), argc);
7676  Handle<Code> code = stub.GetCode(isolate());
7677  HConstant* code_value = Add<HConstant>(code);
7678 
7679  ASSERT((sizeof(op_vals) / kPointerSize) ==
7680  descriptor->environment_length());
7681 
7682  HInstruction* call = New<HCallWithDescriptor>(
7683  code_value, argc + 1, descriptor,
7684  Vector<HValue*>(op_vals, descriptor->environment_length()));
7685  Drop(1); // Drop function.
7686  ast_context()->ReturnInstruction(call, expr->id());
7687  return true;
7688  }
7689  default:
7690  // Not yet supported for inlining.
7691  break;
7692  }
7693  return false;
7694 }
7695 
7696 
7697 bool HOptimizedGraphBuilder::TryInlineApiFunctionCall(Call* expr,
7698  HValue* receiver) {
7699  Handle<JSFunction> function = expr->target();
7700  int argc = expr->arguments()->length();
7701  SmallMapList receiver_maps;
7702  return TryInlineApiCall(function,
7703  receiver,
7704  &receiver_maps,
7705  argc,
7706  expr->id(),
7707  kCallApiFunction);
7708 }
7709 
7710 
7711 bool HOptimizedGraphBuilder::TryInlineApiMethodCall(
7712  Call* expr,
7713  HValue* receiver,
7714  SmallMapList* receiver_maps) {
7715  Handle<JSFunction> function = expr->target();
7716  int argc = expr->arguments()->length();
7717  return TryInlineApiCall(function,
7718  receiver,
7719  receiver_maps,
7720  argc,
7721  expr->id(),
7722  kCallApiMethod);
7723 }
7724 
7725 
7726 bool HOptimizedGraphBuilder::TryInlineApiGetter(Handle<JSFunction> function,
7727  Handle<Map> receiver_map,
7728  BailoutId ast_id) {
7729  SmallMapList receiver_maps(1, zone());
7730  receiver_maps.Add(receiver_map, zone());
7731  return TryInlineApiCall(function,
7732  NULL, // Receiver is on expression stack.
7733  &receiver_maps,
7734  0,
7735  ast_id,
7736  kCallApiGetter);
7737 }
7738 
7739 
7740 bool HOptimizedGraphBuilder::TryInlineApiSetter(Handle<JSFunction> function,
7741  Handle<Map> receiver_map,
7742  BailoutId ast_id) {
7743  SmallMapList receiver_maps(1, zone());
7744  receiver_maps.Add(receiver_map, zone());
7745  return TryInlineApiCall(function,
7746  NULL, // Receiver is on expression stack.
7747  &receiver_maps,
7748  1,
7749  ast_id,
7750  kCallApiSetter);
7751 }
7752 
7753 
7754 bool HOptimizedGraphBuilder::TryInlineApiCall(Handle<JSFunction> function,
7755  HValue* receiver,
7756  SmallMapList* receiver_maps,
7757  int argc,
7758  BailoutId ast_id,
7759  ApiCallType call_type) {
7760  CallOptimization optimization(function);
7761  if (!optimization.is_simple_api_call()) return false;
7762  Handle<Map> holder_map;
7763  if (call_type == kCallApiFunction) {
7764  // Cannot embed a direct reference to the global proxy map
7765  // as it maybe dropped on deserialization.
7767  ASSERT_EQ(0, receiver_maps->length());
7768  receiver_maps->Add(handle(
7769  function->context()->global_object()->global_receiver()->map()),
7770  zone());
7771  }
7772  CallOptimization::HolderLookup holder_lookup =
7773  CallOptimization::kHolderNotFound;
7774  Handle<JSObject> api_holder = optimization.LookupHolderOfExpectedType(
7775  receiver_maps->first(), &holder_lookup);
7776  if (holder_lookup == CallOptimization::kHolderNotFound) return false;
7777 
7778  if (FLAG_trace_inlining) {
7779  PrintF("Inlining api function ");
7780  function->ShortPrint();
7781  PrintF("\n");
7782  }
7783 
7784  bool drop_extra = false;
7785  bool is_store = false;
7786  switch (call_type) {
7787  case kCallApiFunction:
7788  case kCallApiMethod:
7789  // Need to check that none of the receiver maps could have changed.
7790  Add<HCheckMaps>(receiver, receiver_maps);
7791  // Need to ensure the chain between receiver and api_holder is intact.
7792  if (holder_lookup == CallOptimization::kHolderFound) {
7793  AddCheckPrototypeMaps(api_holder, receiver_maps->first());
7794  } else {
7795  ASSERT_EQ(holder_lookup, CallOptimization::kHolderIsReceiver);
7796  }
7797  // Includes receiver.
7798  PushArgumentsFromEnvironment(argc + 1);
7799  // Drop function after call.
7800  drop_extra = true;
7801  break;
7802  case kCallApiGetter:
7803  // Receiver and prototype chain cannot have changed.
7804  ASSERT_EQ(0, argc);
7805  ASSERT_EQ(NULL, receiver);
7806  // Receiver is on expression stack.
7807  receiver = Pop();
7808  Add<HPushArgument>(receiver);
7809  break;
7810  case kCallApiSetter:
7811  {
7812  is_store = true;
7813  // Receiver and prototype chain cannot have changed.
7814  ASSERT_EQ(1, argc);
7815  ASSERT_EQ(NULL, receiver);
7816  // Receiver and value are on expression stack.
7817  HValue* value = Pop();
7818  receiver = Pop();
7819  Add<HPushArgument>(receiver);
7820  Add<HPushArgument>(value);
7821  break;
7822  }
7823  }
7824 
7825  HValue* holder = NULL;
7826  switch (holder_lookup) {
7827  case CallOptimization::kHolderFound:
7828  holder = Add<HConstant>(api_holder);
7829  break;
7830  case CallOptimization::kHolderIsReceiver:
7831  holder = receiver;
7832  break;
7833  case CallOptimization::kHolderNotFound:
7834  UNREACHABLE();
7835  break;
7836  }
7837  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
7838  Handle<Object> call_data_obj(api_call_info->data(), isolate());
7839  bool call_data_is_undefined = call_data_obj->IsUndefined();
7840  HValue* call_data = Add<HConstant>(call_data_obj);
7841  ApiFunction fun(v8::ToCData<Address>(api_call_info->callback()));
7842  ExternalReference ref = ExternalReference(&fun,
7843  ExternalReference::DIRECT_API_CALL,
7844  isolate());
7845  HValue* api_function_address = Add<HConstant>(ExternalReference(ref));
7846 
7847  HValue* op_vals[] = {
7848  Add<HConstant>(function),
7849  call_data,
7850  holder,
7851  api_function_address,
7852  context()
7853  };
7854 
7855  CallInterfaceDescriptor* descriptor =
7857 
7858  CallApiFunctionStub stub(is_store, call_data_is_undefined, argc);
7859  Handle<Code> code = stub.GetCode(isolate());
7860  HConstant* code_value = Add<HConstant>(code);
7861 
7862  ASSERT((sizeof(op_vals) / kPointerSize) ==
7863  descriptor->environment_length());
7864 
7865  HInstruction* call = New<HCallWithDescriptor>(
7866  code_value, argc + 1, descriptor,
7867  Vector<HValue*>(op_vals, descriptor->environment_length()));
7868 
7869  if (drop_extra) Drop(1); // Drop function.
7870  ast_context()->ReturnInstruction(call, ast_id);
7871  return true;
7872 }
7873 
7874 
7875 bool HOptimizedGraphBuilder::TryCallApply(Call* expr) {
7876  ASSERT(expr->expression()->IsProperty());
7877 
7878  if (!expr->IsMonomorphic()) {
7879  return false;
7880  }
7881  Handle<Map> function_map = expr->GetReceiverTypes()->first();
7882  if (function_map->instance_type() != JS_FUNCTION_TYPE ||
7883  !expr->target()->shared()->HasBuiltinFunctionId() ||
7884  expr->target()->shared()->builtin_function_id() != kFunctionApply) {
7885  return false;
7886  }
7887 
7888  if (current_info()->scope()->arguments() == NULL) return false;
7889 
7890  ZoneList<Expression*>* args = expr->arguments();
7891  if (args->length() != 2) return false;
7892 
7893  VariableProxy* arg_two = args->at(1)->AsVariableProxy();
7894  if (arg_two == NULL || !arg_two->var()->IsStackAllocated()) return false;
7895  HValue* arg_two_value = LookupAndMakeLive(arg_two->var());
7896  if (!arg_two_value->CheckFlag(HValue::kIsArguments)) return false;
7897 
7898  // Found pattern f.apply(receiver, arguments).
7899  CHECK_ALIVE_OR_RETURN(VisitForValue(args->at(0)), true);
7900  HValue* receiver = Pop(); // receiver
7901  HValue* function = Pop(); // f
7902  Drop(1); // apply
7903 
7904  if (function_state()->outer() == NULL) {
7905  HInstruction* elements = Add<HArgumentsElements>(false);
7906  HInstruction* length = Add<HArgumentsLength>(elements);
7907  HValue* wrapped_receiver = BuildWrapReceiver(receiver, function);
7908  HInstruction* result = New<HApplyArguments>(function,
7909  wrapped_receiver,
7910  length,
7911  elements);
7912  ast_context()->ReturnInstruction(result, expr->id());
7913  return true;
7914  } else {
7915  // We are inside inlined function and we know exactly what is inside
7916  // arguments object. But we need to be able to materialize at deopt.
7917  ASSERT_EQ(environment()->arguments_environment()->parameter_count(),
7918  function_state()->entry()->arguments_object()->arguments_count());
7919  HArgumentsObject* args = function_state()->entry()->arguments_object();
7920  const ZoneList<HValue*>* arguments_values = args->arguments_values();
7921  int arguments_count = arguments_values->length();
7922  Push(function);
7923  Push(BuildWrapReceiver(receiver, function));
7924  for (int i = 1; i < arguments_count; i++) {
7925  Push(arguments_values->at(i));
7926  }
7927 
7928  Handle<JSFunction> known_function;
7929  if (function->IsConstant() &&
7930  HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
7931  known_function = Handle<JSFunction>::cast(
7932  HConstant::cast(function)->handle(isolate()));
7933  int args_count = arguments_count - 1; // Excluding receiver.
7934  if (TryInlineApply(known_function, expr, args_count)) return true;
7935  }
7936 
7937  PushArgumentsFromEnvironment(arguments_count);
7938  HInvokeFunction* call = New<HInvokeFunction>(
7939  function, known_function, arguments_count);
7940  Drop(1); // Function.
7941  ast_context()->ReturnInstruction(call, expr->id());
7942  return true;
7943  }
7944 }
7945 
7946 
7947 HValue* HOptimizedGraphBuilder::ImplicitReceiverFor(HValue* function,
7948  Handle<JSFunction> target) {
7949  SharedFunctionInfo* shared = target->shared();
7950  if (shared->strict_mode() == SLOPPY && !shared->native()) {
7951  // Cannot embed a direct reference to the global proxy
7952  // as is it dropped on deserialization.
7954  Handle<JSObject> global_receiver(
7955  target->context()->global_object()->global_receiver());
7956  return Add<HConstant>(global_receiver);
7957  }
7958  return graph()->GetConstantUndefined();
7959 }
7960 
7961 
7962 void HOptimizedGraphBuilder::VisitCall(Call* expr) {
7963  ASSERT(!HasStackOverflow());
7964  ASSERT(current_block() != NULL);
7965  ASSERT(current_block()->HasPredecessor());
7966  Expression* callee = expr->expression();
7967  int argument_count = expr->arguments()->length() + 1; // Plus receiver.
7968  HInstruction* call = NULL;
7969 
7970  Property* prop = callee->AsProperty();
7971  if (prop != NULL) {
7972  CHECK_ALIVE(VisitForValue(prop->obj()));
7973  HValue* receiver = Top();
7974 
7975  SmallMapList* types;
7976  ComputeReceiverTypes(expr, receiver, &types, zone());
7977 
7978  if (prop->key()->IsPropertyName() && types->length() > 0) {
7979  Handle<String> name = prop->key()->AsLiteral()->AsPropertyName();
7980  PropertyAccessInfo info(this, LOAD, ToType(types->first()), name);
7981  if (!info.CanAccessAsMonomorphic(types)) {
7982  HandlePolymorphicCallNamed(expr, receiver, types, name);
7983  return;
7984  }
7985  }
7986 
7987  HValue* key = NULL;
7988  if (!prop->key()->IsPropertyName()) {
7989  CHECK_ALIVE(VisitForValue(prop->key()));
7990  key = Pop();
7991  }
7992 
7993  CHECK_ALIVE(PushLoad(prop, receiver, key));
7994  HValue* function = Pop();
7995 
7996  if (FLAG_hydrogen_track_positions) SetSourcePosition(expr->position());
7997 
7998  // Push the function under the receiver.
7999  environment()->SetExpressionStackAt(0, function);
8000 
8001  Push(receiver);
8002 
8003  if (function->IsConstant() &&
8004  HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
8005  Handle<JSFunction> known_function = Handle<JSFunction>::cast(
8006  HConstant::cast(function)->handle(isolate()));
8007  expr->set_target(known_function);
8008 
8009  if (TryCallApply(expr)) return;
8010  CHECK_ALIVE(VisitExpressions(expr->arguments()));
8011 
8012  Handle<Map> map = types->length() == 1 ? types->first() : Handle<Map>();
8013  if (TryInlineBuiltinMethodCall(expr, receiver, map)) {
8014  if (FLAG_trace_inlining) {
8015  PrintF("Inlining builtin ");
8016  known_function->ShortPrint();
8017  PrintF("\n");
8018  }
8019  return;
8020  }
8021  if (TryInlineApiMethodCall(expr, receiver, types)) return;
8022 
8023  // Wrap the receiver if necessary.
8024  if (NeedsWrappingFor(ToType(types->first()), known_function)) {
8025  // Since HWrapReceiver currently cannot actually wrap numbers and
8026  // strings, use the regular CallFunctionStub for method calls to wrap
8027  // the receiver.
8028  // TODO(verwaest): Support creation of value wrappers directly in
8029  // HWrapReceiver.
8030  call = New<HCallFunction>(
8031  function, argument_count, WRAP_AND_CALL);
8032  } else if (TryInlineCall(expr)) {
8033  return;
8034  } else {
8035  call = BuildCallConstantFunction(known_function, argument_count);
8036  }
8037 
8038  } else {
8039  CHECK_ALIVE(VisitExpressions(expr->arguments()));
8040  CallFunctionFlags flags = receiver->type().IsJSObject()
8042  call = New<HCallFunction>(function, argument_count, flags);
8043  }
8044  PushArgumentsFromEnvironment(argument_count);
8045 
8046  } else {
8047  VariableProxy* proxy = expr->expression()->AsVariableProxy();
8048  if (proxy != NULL && proxy->var()->is_possibly_eval(isolate())) {
8049  return Bailout(kPossibleDirectCallToEval);
8050  }
8051 
8052  // The function is on the stack in the unoptimized code during
8053  // evaluation of the arguments.
8054  CHECK_ALIVE(VisitForValue(expr->expression()));
8055  HValue* function = Top();
8056  bool global_call = proxy != NULL && proxy->var()->IsUnallocated();
8057  if (global_call) {
8058  Variable* var = proxy->var();
8059  bool known_global_function = false;
8060  // If there is a global property cell for the name at compile time and
8061  // access check is not enabled we assume that the function will not change
8062  // and generate optimized code for calling the function.
8063  LookupResult lookup(isolate());
8064  GlobalPropertyAccess type = LookupGlobalProperty(var, &lookup, LOAD);
8065  if (type == kUseCell &&
8066  !current_info()->global_object()->IsAccessCheckNeeded()) {
8067  Handle<GlobalObject> global(current_info()->global_object());
8068  known_global_function = expr->ComputeGlobalTarget(global, &lookup);
8069  }
8070  if (known_global_function) {
8071  Add<HCheckValue>(function, expr->target());
8072 
8073  // Placeholder for the receiver.
8074  Push(graph()->GetConstantUndefined());
8075  CHECK_ALIVE(VisitExpressions(expr->arguments()));
8076 
8077  // Patch the global object on the stack by the expected receiver.
8078  HValue* receiver = ImplicitReceiverFor(function, expr->target());
8079  const int receiver_index = argument_count - 1;
8080  environment()->SetExpressionStackAt(receiver_index, receiver);
8081 
8082  if (TryInlineBuiltinFunctionCall(expr)) {
8083  if (FLAG_trace_inlining) {
8084  PrintF("Inlining builtin ");
8085  expr->target()->ShortPrint();
8086  PrintF("\n");
8087  }
8088  return;
8089  }
8090  if (TryInlineApiFunctionCall(expr, receiver)) return;
8091  if (TryInlineCall(expr)) return;
8092 
8093  PushArgumentsFromEnvironment(argument_count);
8094  call = BuildCallConstantFunction(expr->target(), argument_count);
8095  } else {
8096  Push(graph()->GetConstantUndefined());
8097  CHECK_ALIVE(VisitExpressions(expr->arguments()));
8098  PushArgumentsFromEnvironment(argument_count);
8099  call = New<HCallFunction>(function, argument_count);
8100  }
8101 
8102  } else if (expr->IsMonomorphic()) {
8103  Add<HCheckValue>(function, expr->target());
8104 
8105  Push(graph()->GetConstantUndefined());
8106  CHECK_ALIVE(VisitExpressions(expr->arguments()));
8107 
8108  HValue* receiver = ImplicitReceiverFor(function, expr->target());
8109  const int receiver_index = argument_count - 1;
8110  environment()->SetExpressionStackAt(receiver_index, receiver);
8111 
8112  if (TryInlineBuiltinFunctionCall(expr)) {
8113  if (FLAG_trace_inlining) {
8114  PrintF("Inlining builtin ");
8115  expr->target()->ShortPrint();
8116  PrintF("\n");
8117  }
8118  return;
8119  }
8120  if (TryInlineApiFunctionCall(expr, receiver)) return;
8121 
8122  if (TryInlineCall(expr)) return;
8123 
8124  call = PreProcessCall(New<HInvokeFunction>(
8125  function, expr->target(), argument_count));
8126 
8127  } else {
8128  Push(graph()->GetConstantUndefined());
8129  CHECK_ALIVE(VisitExpressions(expr->arguments()));
8130  PushArgumentsFromEnvironment(argument_count);
8131  call = New<HCallFunction>(function, argument_count);
8132  }
8133  }
8134 
8135  Drop(1); // Drop the function.
8136  return ast_context()->ReturnInstruction(call, expr->id());
8137 }
8138 
8139 
// Builds an inlined allocation of a JSArray for a call to the native Array
// constructor whose AllocationSite feedback permits inlining (gated by
// IsCallNewArrayInlineable). On entry the constructor and the call's
// arguments are on the expression stack; on exit they are dropped and the
// new array becomes the ast context's return value.
// NOTE(review): this doxygen listing is missing original lines 8152-8153
// (presumably the AllocationSite dependency registration announced by the
// comment at line 8151) and line 8175 (the trailing JSArrayBuilder
// constructor argument) — confirm against the full source before editing.
8140 void HOptimizedGraphBuilder::BuildInlinedCallNewArray(CallNew* expr) {
8141  NoObservableSideEffectsScope no_effects(this);
8142 
8143  int argument_count = expr->arguments()->length();
8144  // We should at least have the constructor on the expression stack.
8145  HValue* constructor = environment()->ExpressionStackAt(argument_count);
8146 
8147  ElementsKind kind = expr->elements_kind();
8148  Handle<AllocationSite> site = expr->allocation_site();
8149  ASSERT(!site.is_null());
8150 
8151  // Register on the site for deoptimization if the transition feedback changes.
8154  HInstruction* site_instruction = Add<HConstant>(site);
8155 
8156  // In the single constant argument case, we may have to adjust elements kind
8157  // to avoid creating a packed non-empty array.
8158  if (argument_count == 1 && !IsHoleyElementsKind(kind)) {
8159  HValue* argument = environment()->Top();
8160  if (argument->IsConstant()) {
8161  HConstant* constant_argument = HConstant::cast(argument);
8162  ASSERT(constant_argument->HasSmiValue());
8163  int constant_array_size = constant_argument->Integer32Value();
8164  if (constant_array_size != 0) {
8165  kind = GetHoleyElementsKind(kind);
8166  }
8167  }
8168  }
8169 
8170  // Build the array.
8171  JSArrayBuilder array_builder(this,
8172  kind,
8173  site_instruction,
8174  constructor,
8176  HValue* new_object;
8177  if (argument_count == 0) {
8178  new_object = array_builder.AllocateEmptyArray();
8179  } else if (argument_count == 1) {
8180  HValue* argument = environment()->Top();
8181  new_object = BuildAllocateArrayFromLength(&array_builder, argument);
8182  } else {
8183  HValue* length = Add<HConstant>(argument_count);
8184  // Smi arrays need to initialize array elements with the hole because
8185  // bailout could occur if the arguments don't fit in a smi.
8186  //
8187  // TODO(mvstanton): If all the arguments are constants in smi range, then
8188  // we could set fill_with_hole to false and save a few instructions.
8189  JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind)
8190  ? JSArrayBuilder::FILL_WITH_HOLE
8191  : JSArrayBuilder::DONT_FILL_WITH_HOLE;
8192  new_object = array_builder.AllocateArray(length, length, fill_mode);
8193  HValue* elements = array_builder.GetElementsLocation();
// Copy the call arguments into the freshly allocated backing store; the
// deepest expression-stack slot holds the first argument.
8194  for (int i = 0; i < argument_count; i++) {
8195  HValue* value = environment()->ExpressionStackAt(argument_count - i - 1);
8196  HValue* constant_i = Add<HConstant>(i);
8197  Add<HStoreKeyed>(elements, constant_i, value, kind);
8198  }
8199  }
8200 
8201  Drop(argument_count + 1); // drop constructor and args.
8202  ast_context()->ReturnValue(new_object);
8203 }
8204 
8205 
8206 // Checks whether allocation using the given constructor can be inlined.
8207 static bool IsAllocationInlineable(Handle<JSFunction> constructor) {
8208  return constructor->has_initial_map() &&
8209  constructor->initial_map()->instance_type() == JS_OBJECT_TYPE &&
8210  constructor->initial_map()->instance_size() < HAllocate::kMaxInlineSize &&
8211  constructor->initial_map()->InitialPropertiesLength() == 0;
8212 }
8213 
8214 
// Decides whether 'new Array(...)' can be lowered to an inlined allocation.
// The AllocationSite must allow inlining, and for a single constant length
// argument the value must be a Smi within the valid fast-array range.
// Every outcome is reported through TraceInline for --trace-inlining.
// NOTE(review): original line 8237 — the upper-bound half of the range
// check started at line 8236 ('value >= 0 &&') — is missing from this
// doxygen listing; confirm the bound against the full source.
8215 bool HOptimizedGraphBuilder::IsCallNewArrayInlineable(CallNew* expr) {
8216  Handle<JSFunction> caller = current_info()->closure();
8217  Handle<JSFunction> target(isolate()->native_context()->array_function(),
8218  isolate());
8219  int argument_count = expr->arguments()->length();
8220  // We should have the function plus array arguments on the environment stack.
8221  ASSERT(environment()->length() >= (argument_count + 1));
8222  Handle<AllocationSite> site = expr->allocation_site();
8223  ASSERT(!site.is_null());
8224 
8225  bool inline_ok = false;
8226  if (site->CanInlineCall()) {
8227  // We also want to avoid inlining in certain 1 argument scenarios.
8228  if (argument_count == 1) {
8229  HValue* argument = Top();
8230  if (argument->IsConstant()) {
8231  // Do not inline if the constant length argument is not a smi or
8232  // outside the valid range for a fast array.
8233  HConstant* constant_argument = HConstant::cast(argument);
8234  if (constant_argument->HasSmiValue()) {
8235  int value = constant_argument->Integer32Value();
8236  inline_ok = value >= 0 &&
8238  if (!inline_ok) {
8239  TraceInline(target, caller,
8240  "Length outside of valid array range");
8241  }
8242  }
8243  } else {
8244  inline_ok = true;
8245  }
8246  } else {
8247  inline_ok = true;
8248  }
8249  } else {
8250  TraceInline(target, caller, "AllocationSite requested no inlining.");
8251  }
8252 
8253  if (inline_ok) {
8254  TraceInline(target, caller, NULL);
8255  }
8256  return inline_ok;
8257 }
8258 
8259 
// Graph construction for 'new f(...)'. When the call is monomorphic and
// the allocation is inlineable, the receiver is allocated and initialized
// inline and TryInlineConstruct is attempted; if that fails, the
// speculatively emitted instructions are deleted again and a generic
// HCallNew is issued. Calls to the native Array constructor are instead
// routed through HCallNewArray / BuildInlinedCallNewArray.
// NOTE(review): this doxygen listing is missing original lines 8301-8302
// (the AllocationSite dependency registration), 8321, 8332 and 8336 (the
// field-offset arguments of the surrounding HObjectAccess calls) —
// consult the full source before editing this function.
8260 void HOptimizedGraphBuilder::VisitCallNew(CallNew* expr) {
8261  ASSERT(!HasStackOverflow());
8262  ASSERT(current_block() != NULL);
8263  ASSERT(current_block()->HasPredecessor());
8264  if (!FLAG_hydrogen_track_positions) SetSourcePosition(expr->position());
8265  int argument_count = expr->arguments()->length() + 1; // Plus constructor.
8266  Factory* factory = isolate()->factory();
8267 
8268  // The constructor function is on the stack in the unoptimized code
8269  // during evaluation of the arguments.
8270  CHECK_ALIVE(VisitForValue(expr->expression()));
8271  HValue* function = Top();
8272  CHECK_ALIVE(VisitExpressions(expr->arguments()));
8273 
8274  if (FLAG_inline_construct &&
8275  expr->IsMonomorphic() &&
8276  IsAllocationInlineable(expr->target())) {
8277  Handle<JSFunction> constructor = expr->target();
8278  HValue* check = Add<HCheckValue>(function, constructor);
8279 
8280  // Force completion of inobject slack tracking before generating
8281  // allocation code to finalize instance size.
8282  if (constructor->shared()->IsInobjectSlackTrackingInProgress()) {
8283  constructor->shared()->CompleteInobjectSlackTracking();
8284  }
8285 
8286  // Calculate instance size from initial map of constructor.
8287  ASSERT(constructor->has_initial_map());
8288  Handle<Map> initial_map(constructor->initial_map());
8289  int instance_size = initial_map->instance_size();
8290  ASSERT(initial_map->InitialPropertiesLength() == 0);
8291 
8292  // Allocate an instance of the implicit receiver object.
8293  HValue* size_in_bytes = Add<HConstant>(instance_size);
8294  HAllocationMode allocation_mode;
8295  if (FLAG_pretenuring_call_new) {
8296  if (FLAG_allocation_site_pretenuring) {
8297  // Try to use pretenuring feedback.
8298  Handle<AllocationSite> allocation_site = expr->allocation_site();
8299  allocation_mode = HAllocationMode(allocation_site);
8300  // Take a dependency on allocation site.
8303  top_info());
8304  } else {
8305  allocation_mode = HAllocationMode(
8306  isolate()->heap()->GetPretenureMode());
8307  }
8308  }
8309 
8310  HAllocate* receiver =
8311  BuildAllocate(size_in_bytes, HType::JSObject(), JS_OBJECT_TYPE,
8312  allocation_mode);
8313  receiver->set_known_initial_map(initial_map);
8314 
8315  // Load the initial map from the constructor.
8316  HValue* constructor_value = Add<HConstant>(constructor);
8317  HValue* initial_map_value =
8318  Add<HLoadNamedField>(constructor_value, static_cast<HValue*>(NULL),
8319  HObjectAccess::ForMapAndOffset(
8320  handle(constructor->map()),
8322 
8323  // Initialize map and fields of the newly allocated object.
8324  { NoObservableSideEffectsScope no_effects(this);
8325  ASSERT(initial_map->instance_type() == JS_OBJECT_TYPE);
8326  Add<HStoreNamedField>(receiver,
8327  HObjectAccess::ForMapAndOffset(initial_map, JSObject::kMapOffset),
8328  initial_map_value);
8329  HValue* empty_fixed_array = Add<HConstant>(factory->empty_fixed_array());
8330  Add<HStoreNamedField>(receiver,
8331  HObjectAccess::ForMapAndOffset(initial_map,
8333  empty_fixed_array);
8334  Add<HStoreNamedField>(receiver,
8335  HObjectAccess::ForMapAndOffset(initial_map,
8337  empty_fixed_array);
8338  if (initial_map->inobject_properties() != 0) {
8339  HConstant* undefined = graph()->GetConstantUndefined();
8340  for (int i = 0; i < initial_map->inobject_properties(); i++) {
8341  int property_offset = initial_map->GetInObjectPropertyOffset(i);
8342  Add<HStoreNamedField>(receiver,
8343  HObjectAccess::ForMapAndOffset(initial_map, property_offset),
8344  undefined);
8345  }
8346  }
8347  }
8348 
8349  // Replace the constructor function with a newly allocated receiver using
8350  // the index of the receiver from the top of the expression stack.
8351  const int receiver_index = argument_count - 1;
8352  ASSERT(environment()->ExpressionStackAt(receiver_index) == function);
8353  environment()->SetExpressionStackAt(receiver_index, receiver);
8354 
8355  if (TryInlineConstruct(expr, receiver)) return;
8356 
8357  // TODO(mstarzinger): For now we remove the previous HAllocate and all
8358  // corresponding instructions and instead add HPushArgument for the
8359  // arguments in case inlining failed. What we actually should do is for
8360  // inlining to try to build a subgraph without mutating the parent graph.
8361  HInstruction* instr = current_block()->last();
8362  while (instr != initial_map_value) {
8363  HInstruction* prev_instr = instr->previous();
8364  instr->DeleteAndReplaceWith(NULL);
8365  instr = prev_instr;
8366  }
8367  initial_map_value->DeleteAndReplaceWith(NULL);
8368  receiver->DeleteAndReplaceWith(NULL);
8369  check->DeleteAndReplaceWith(NULL);
8370  environment()->SetExpressionStackAt(receiver_index, function);
8371  HInstruction* call =
8372  PreProcessCall(New<HCallNew>(function, argument_count));
8373  return ast_context()->ReturnInstruction(call, expr->id());
8374  } else {
8375  // The constructor function is both an operand to the instruction and an
8376  // argument to the construct call.
8377  Handle<JSFunction> array_function(
8378  isolate()->native_context()->array_function(), isolate());
8379  bool use_call_new_array = expr->target().is_identical_to(array_function);
8380  if (use_call_new_array && IsCallNewArrayInlineable(expr)) {
8381  // Verify we are still calling the array function for our native context.
8382  Add<HCheckValue>(function, array_function);
8383  BuildInlinedCallNewArray(expr);
8384  return;
8385  }
8386 
8387  HBinaryCall* call;
8388  if (use_call_new_array) {
8389  Add<HCheckValue>(function, array_function);
8390  call = New<HCallNewArray>(function, argument_count,
8391  expr->elements_kind());
8392  } else {
8393  call = New<HCallNew>(function, argument_count);
8394  }
8395  PreProcessCall(call);
8396  return ast_context()->ReturnInstruction(call, expr->id());
8397  }
8398 }
8399 
8400 
8401 // Support for generating inlined runtime functions.
8402 
8403 // Lookup table for generators for runtime calls that are generated inline.
8404 // Elements of the table are member pointers to functions of
8405 // HOptimizedGraphBuilder.
8406 #define INLINE_FUNCTION_GENERATOR_ADDRESS(Name, argc, ressize) \
8407  &HOptimizedGraphBuilder::Generate##Name,
8408 
8413 };
8414 #undef INLINE_FUNCTION_GENERATOR_ADDRESS
8415 
8416 
// Shared initialization for JSArrayBuffer views (typed arrays and data
// views): zeroes the view's internal fields, stores byte offset and byte
// length, and either links the view into the buffer's weak view list or —
// when no buffer is given (on-heap storage) — clears the buffer field and
// sets weak-next to undefined.
// NOTE(review): original line 8418 — the function's name line, presumably
// 'void HGraphBuilder::BuildArrayBufferViewInitialization(' — is missing
// from this doxygen listing; only the template header and parameter list
// are visible here.
8417 template <class ViewClass>
8419  HValue* obj,
8420  HValue* buffer,
8421  HValue* byte_offset,
8422  HValue* byte_length) {
8423 
// Zero out all embedder/internal fields between kSize and
// kSizeWithInternalFields.
8424  for (int offset = ViewClass::kSize;
8425  offset < ViewClass::kSizeWithInternalFields;
8426  offset += kPointerSize) {
8427  Add<HStoreNamedField>(obj,
8428  HObjectAccess::ForObservableJSObjectOffset(offset),
8429  graph()->GetConstant0());
8430  }
8431 
8432  Add<HStoreNamedField>(
8433  obj,
8434  HObjectAccess::ForJSArrayBufferViewByteOffset(),
8435  byte_offset);
8436  Add<HStoreNamedField>(
8437  obj,
8438  HObjectAccess::ForJSArrayBufferViewByteLength(),
8439  byte_length);
8440 
8441  if (buffer != NULL) {
// Prepend the view to the buffer's weak first-view list so the GC can
// neuter it when the buffer dies.
8442  Add<HStoreNamedField>(
8443  obj,
8444  HObjectAccess::ForJSArrayBufferViewBuffer(), buffer);
8445  HObjectAccess weak_first_view_access =
8446  HObjectAccess::ForJSArrayBufferWeakFirstView();
8447  Add<HStoreNamedField>(obj,
8448  HObjectAccess::ForJSArrayBufferViewWeakNext(),
8449  Add<HLoadNamedField>(buffer,
8450  static_cast<HValue*>(NULL),
8451  weak_first_view_access));
8452  Add<HStoreNamedField>(buffer, weak_first_view_access, obj);
8453  } else {
8454  Add<HStoreNamedField>(
8455  obj,
8456  HObjectAccess::ForJSArrayBufferViewBuffer(),
8457  Add<HConstant>(static_cast<int32_t>(0)));
8458  Add<HStoreNamedField>(obj,
8459  HObjectAccess::ForJSArrayBufferViewWeakNext(),
8460  graph()->GetConstantUndefined());
8461  }
8462 }
8463 
8464 
8465 void HOptimizedGraphBuilder::GenerateDataViewInitialize(
8466  CallRuntime* expr) {
8467  ZoneList<Expression*>* arguments = expr->arguments();
8468 
8469  NoObservableSideEffectsScope scope(this);
8470  ASSERT(arguments->length()== 4);
8471  CHECK_ALIVE(VisitForValue(arguments->at(0)));
8472  HValue* obj = Pop();
8473 
8474  CHECK_ALIVE(VisitForValue(arguments->at(1)));
8475  HValue* buffer = Pop();
8476 
8477  CHECK_ALIVE(VisitForValue(arguments->at(2)));
8478  HValue* byte_offset = Pop();
8479 
8480  CHECK_ALIVE(VisitForValue(arguments->at(3)));
8481  HValue* byte_length = Pop();
8482 
8483  BuildArrayBufferViewInitialization<JSDataView>(
8484  obj, buffer, byte_offset, byte_length);
8485 }
8486 
8487 
// Returns the initial map of the native typed-array constructor matching
// |array_type|, transitioned to |target_kind| elements.
// NOTE(review): original line 8499 — presumably the TYPED_ARRAYS
// (TYPED_ARRAY_CASE) macro invocation that expands the cases inside the
// switch — is missing from this doxygen listing.
8488 static Handle<Map> TypedArrayMap(Isolate* isolate,
8489  ExternalArrayType array_type,
8490  ElementsKind target_kind) {
8491  Handle<Context> native_context = isolate->native_context();
8492  Handle<JSFunction> fun;
8493  switch (array_type) {
8494 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
8495  case kExternal##Type##Array: \
8496  fun = Handle<JSFunction>(native_context->type##_array_fun()); \
8497  break;
8498 
8500 #undef TYPED_ARRAY_CASE
8501  }
8502  Handle<Map> map(fun->initial_map());
8503  return Map::AsElementsKind(map, target_kind);
8504 }
8505 
8506 
8507 HValue* HOptimizedGraphBuilder::BuildAllocateExternalElements(
8508  ExternalArrayType array_type,
8509  bool is_zero_byte_offset,
8510  HValue* buffer, HValue* byte_offset, HValue* length) {
8511  Handle<Map> external_array_map(
8512  isolate()->heap()->MapForExternalArrayType(array_type));
8513  HValue* elements =
8514  Add<HAllocate>(
8515  Add<HConstant>(ExternalArray::kAlignedSize),
8516  HType::Tagged(),
8517  NOT_TENURED,
8518  external_array_map->instance_type());
8519 
8520  AddStoreMapConstant(elements, external_array_map);
8521 
8522  HValue* backing_store = Add<HLoadNamedField>(
8523  buffer, static_cast<HValue*>(NULL),
8524  HObjectAccess::ForJSArrayBufferBackingStore());
8525 
8526  HValue* typed_array_start;
8527  if (is_zero_byte_offset) {
8528  typed_array_start = backing_store;
8529  } else {
8530  HInstruction* external_pointer =
8531  AddUncasted<HAdd>(backing_store, byte_offset);
8532  // Arguments are checked prior to call to TypedArrayInitialize,
8533  // including byte_offset.
8534  external_pointer->ClearFlag(HValue::kCanOverflow);
8535  typed_array_start = external_pointer;
8536  }
8537 
8538 
8539  Add<HStoreNamedField>(elements,
8540  HObjectAccess::ForExternalArrayExternalPointer(),
8541  typed_array_start);
8542 
8543  Add<HStoreNamedField>(elements,
8544  HObjectAccess::ForFixedArrayLength(), length);
8545  return elements;
8546 }
8547 
8548 
// Allocates an on-heap FixedTypedArray backing store of |byte_length| data
// bytes plus header (rounded up to object alignment when the element size
// requires it), installs its map and length, and zero-fills all |length|
// elements.
// NOTE(review): original line 8554 — the condition of the STATIC_ASSERT
// opened at line 8553 — is missing from this doxygen listing. Also note
// the length field is stored twice (before and after the fill loop);
// verify against the full source whether both stores are intentional.
8549 HValue* HOptimizedGraphBuilder::BuildAllocateFixedTypedArray(
8550  ExternalArrayType array_type, size_t element_size,
8551  ElementsKind fixed_elements_kind,
8552  HValue* byte_length, HValue* length) {
8553  STATIC_ASSERT(
8555  HValue* total_size;
8556 
8557  // if fixed array's elements are not aligned to object's alignment,
8558  // we need to align the whole array to object alignment.
8559  if (element_size % kObjectAlignment != 0) {
8560  total_size = BuildObjectSizeAlignment(
8561  byte_length, FixedTypedArrayBase::kHeaderSize);
8562  } else {
8563  total_size = AddUncasted<HAdd>(byte_length,
8564  Add<HConstant>(FixedTypedArrayBase::kHeaderSize));
8565  total_size->ClearFlag(HValue::kCanOverflow);
8566  }
8567 
8568  Handle<Map> fixed_typed_array_map(
8569  isolate()->heap()->MapForFixedTypedArray(array_type));
8570  HValue* elements =
8571  Add<HAllocate>(total_size, HType::Tagged(),
8572  NOT_TENURED,
8573  fixed_typed_array_map->instance_type());
8574  AddStoreMapConstant(elements, fixed_typed_array_map);
8575 
8576  Add<HStoreNamedField>(elements,
8577  HObjectAccess::ForFixedArrayLength(),
8578  length);
8579  HValue* filler = Add<HConstant>(static_cast<int32_t>(0));
8580 
// Zero-initialize every element with a post-increment loop from 0 to
// length.
8581  {
8582  LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);
8583 
8584  HValue* key = builder.BeginBody(
8585  Add<HConstant>(static_cast<int32_t>(0)),
8586  length, Token::LT);
8587  Add<HStoreKeyed>(elements, key, filler, fixed_elements_kind);
8588 
8589  builder.EndBody();
8590  }
8591  Add<HStoreNamedField>(
8592  elements, HObjectAccess::ForFixedArrayLength(), length);
8593  return elements;
8594 }
8595 
8596 
// Inline code generator for the %TypedArrayInitialize intrinsic with
// arguments (obj, arrayId, buffer|null, byteOffset, byteLength). When the
// byte offset is a Smi (or statically zero) the typed array is initialized
// inline: external elements when a buffer is present, an on-heap fixed
// typed array otherwise. A non-Smi byte offset falls back to calling the
// runtime function with the original arguments.
// NOTE(review): this doxygen listing is missing original lines 8657 (the
// initializer of external_elements_kind) and 8660 (presumably the
// Runtime::ArrayIdToTypeAndSize call whose argument list follows at lines
// 8661-8664) — consult the full source before editing.
8597 void HOptimizedGraphBuilder::GenerateTypedArrayInitialize(
8598  CallRuntime* expr) {
8599  ZoneList<Expression*>* arguments = expr->arguments();
8600 
8601  NoObservableSideEffectsScope scope(this);
8602  static const int kObjectArg = 0;
8603  static const int kArrayIdArg = 1;
8604  static const int kBufferArg = 2;
8605  static const int kByteOffsetArg = 3;
8606  static const int kByteLengthArg = 4;
8607  static const int kArgsLength = 5;
8608  ASSERT(arguments->length() == kArgsLength);
8609 
8610 
8611  CHECK_ALIVE(VisitForValue(arguments->at(kObjectArg)));
8612  HValue* obj = Pop();
8613 
// The array id must be a Smi literal; it selects the element type below.
8614  ASSERT(arguments->at(kArrayIdArg)->node_type() == AstNode::kLiteral);
8615  Handle<Object> value =
8616  static_cast<Literal*>(arguments->at(kArrayIdArg))->value();
8617  ASSERT(value->IsSmi());
8618  int array_id = Smi::cast(*value)->value();
8619 
// A null buffer literal means on-heap (fixed typed array) storage.
8620  HValue* buffer;
8621  if (!arguments->at(kBufferArg)->IsNullLiteral()) {
8622  CHECK_ALIVE(VisitForValue(arguments->at(kBufferArg)));
8623  buffer = Pop();
8624  } else {
8625  buffer = NULL;
8626  }
8627 
8628  HValue* byte_offset;
8629  bool is_zero_byte_offset;
8630 
8631  if (arguments->at(kByteOffsetArg)->node_type() == AstNode::kLiteral
8632  && Smi::FromInt(0) ==
8633  *static_cast<Literal*>(arguments->at(kByteOffsetArg))->value()) {
8634  byte_offset = Add<HConstant>(static_cast<int32_t>(0));
8635  is_zero_byte_offset = true;
8636  } else {
8637  CHECK_ALIVE(VisitForValue(arguments->at(kByteOffsetArg)));
8638  byte_offset = Pop();
8639  is_zero_byte_offset = false;
8640  ASSERT(buffer != NULL);
8641  }
8642 
8643  CHECK_ALIVE(VisitForValue(arguments->at(kByteLengthArg)));
8644  HValue* byte_length = Pop();
8645 
// Guard the fast path on the byte offset being a Smi (unless it is
// statically zero, in which case no branch is emitted at all).
8646  IfBuilder byte_offset_smi(this);
8647 
8648  if (!is_zero_byte_offset) {
8649  byte_offset_smi.If<HIsSmiAndBranch>(byte_offset);
8650  byte_offset_smi.Then();
8651  }
8652 
8653  ExternalArrayType array_type =
8654  kExternalInt8Array; // Bogus initialization.
8655  size_t element_size = 1; // Bogus initialization.
8656  ElementsKind external_elements_kind = // Bogus initialization.
8658  ElementsKind fixed_elements_kind = // Bogus initialization.
8659  INT8_ELEMENTS;
8661  &array_type,
8662  &external_elements_kind,
8663  &fixed_elements_kind,
8664  &element_size);
8665 
8666 
8667  { // byte_offset is Smi.
8668  BuildArrayBufferViewInitialization<JSTypedArray>(
8669  obj, buffer, byte_offset, byte_length);
8670 
8671 
8672  HInstruction* length = AddUncasted<HDiv>(byte_length,
8673  Add<HConstant>(static_cast<int32_t>(element_size)));
8674 
8675  Add<HStoreNamedField>(obj,
8676  HObjectAccess::ForJSTypedArrayLength(),
8677  length);
8678 
8679  HValue* elements;
8680  if (buffer != NULL) {
8681  elements = BuildAllocateExternalElements(
8682  array_type, is_zero_byte_offset, buffer, byte_offset, length);
8683  Handle<Map> obj_map = TypedArrayMap(
8684  isolate(), array_type, external_elements_kind);
8685  AddStoreMapConstant(obj, obj_map);
8686  } else {
8687  ASSERT(is_zero_byte_offset);
8688  elements = BuildAllocateFixedTypedArray(
8689  array_type, element_size, fixed_elements_kind,
8690  byte_length, length);
8691  }
8692  Add<HStoreNamedField>(
8693  obj, HObjectAccess::ForElementsPointer(), elements);
8694  }
8695 
8696  if (!is_zero_byte_offset) {
8697  byte_offset_smi.Else();
8698  { // byte_offset is not Smi.
// Slow path: re-push the original arguments and call the runtime.
8699  Push(obj);
8700  CHECK_ALIVE(VisitForValue(arguments->at(kArrayIdArg)));
8701  Push(buffer);
8702  Push(byte_offset);
8703  Push(byte_length);
8704  PushArgumentsFromEnvironment(kArgsLength);
8705  Add<HCallRuntime>(expr->name(), expr->function(), kArgsLength);
8706  }
8707  }
8708  byte_offset_smi.End();
8709 }
8710 
8711 
8712 void HOptimizedGraphBuilder::GenerateMaxSmi(CallRuntime* expr) {
8713  ASSERT(expr->arguments()->length() == 0);
8714  HConstant* max_smi = New<HConstant>(static_cast<int32_t>(Smi::kMaxValue));
8715  return ast_context()->ReturnInstruction(max_smi, expr->id());
8716 }
8717 
8718 
8719 void HOptimizedGraphBuilder::GenerateTypedArrayMaxSizeInHeap(
8720  CallRuntime* expr) {
8721  ASSERT(expr->arguments()->length() == 0);
8722  HConstant* result = New<HConstant>(static_cast<int32_t>(
8723  FLAG_typed_array_max_size_in_heap));
8724  return ast_context()->ReturnInstruction(result, expr->id());
8725 }
8726 
8727 
// Graph construction for runtime calls. Calls into the JS runtime bail
// out; INLINE / INLINE_OPTIMIZED intrinsics (names starting with '_')
// dispatch through the kInlineFunctionGenerators table; everything else
// becomes an HCallRuntime with its arguments pushed from the environment.
// NOTE(review): original line 8748 — the upper bound of the table-index
// ASSERT opened at line 8747, presumably ARRAY_SIZE of the generator
// table — is missing from this doxygen listing.
8728 void HOptimizedGraphBuilder::VisitCallRuntime(CallRuntime* expr) {
8729  ASSERT(!HasStackOverflow());
8730  ASSERT(current_block() != NULL);
8731  ASSERT(current_block()->HasPredecessor());
8732  if (expr->is_jsruntime()) {
8733  return Bailout(kCallToAJavaScriptRuntimeFunction);
8734  }
8735 
8736  const Runtime::Function* function = expr->function();
8737  ASSERT(function != NULL);
8738 
8739  if (function->intrinsic_type == Runtime::INLINE ||
8740  function->intrinsic_type == Runtime::INLINE_OPTIMIZED) {
8741  ASSERT(expr->name()->length() > 0);
8742  ASSERT(expr->name()->Get(0) == '_');
8743  // Call to an inline function.
8744  int lookup_index = static_cast<int>(function->function_id) -
8745  static_cast<int>(Runtime::kFirstInlineFunction);
8746  ASSERT(lookup_index >= 0);
8747  ASSERT(static_cast<size_t>(lookup_index) <
8749  InlineFunctionGenerator generator = kInlineFunctionGenerators[lookup_index];
8750 
8751  // Call the inline code generator using the pointer-to-member.
8752  (this->*generator)(expr);
8753  } else {
8754  ASSERT(function->intrinsic_type == Runtime::RUNTIME);
8755  Handle<String> name = expr->name();
8756  int argument_count = expr->arguments()->length();
8757  CHECK_ALIVE(VisitExpressions(expr->arguments()));
8758  PushArgumentsFromEnvironment(argument_count);
8759  HCallRuntime* call = New<HCallRuntime>(name, function,
8760  argument_count);
8761  return ast_context()->ReturnInstruction(call, expr->id());
8762  }
8763 }
8764 
8765 
8766 void HOptimizedGraphBuilder::VisitUnaryOperation(UnaryOperation* expr) {
8767  ASSERT(!HasStackOverflow());
8768  ASSERT(current_block() != NULL);
8769  ASSERT(current_block()->HasPredecessor());
8770  switch (expr->op()) {
8771  case Token::DELETE: return VisitDelete(expr);
8772  case Token::VOID: return VisitVoid(expr);
8773  case Token::TYPEOF: return VisitTypeof(expr);
8774  case Token::NOT: return VisitNot(expr);
8775  default: UNREACHABLE();
8776  }
8777 }
8778 
8779 
8780 void HOptimizedGraphBuilder::VisitDelete(UnaryOperation* expr) {
8781  Property* prop = expr->expression()->AsProperty();
8782  VariableProxy* proxy = expr->expression()->AsVariableProxy();
8783  if (prop != NULL) {
8784  CHECK_ALIVE(VisitForValue(prop->obj()));
8785  CHECK_ALIVE(VisitForValue(prop->key()));
8786  HValue* key = Pop();
8787  HValue* obj = Pop();
8788  HValue* function = AddLoadJSBuiltin(Builtins::DELETE);
8789  Add<HPushArgument>(obj);
8790  Add<HPushArgument>(key);
8791  Add<HPushArgument>(Add<HConstant>(function_strict_mode()));
8792  // TODO(olivf) InvokeFunction produces a check for the parameter count,
8793  // even though we are certain to pass the correct number of arguments here.
8794  HInstruction* instr = New<HInvokeFunction>(function, 3);
8795  return ast_context()->ReturnInstruction(instr, expr->id());
8796  } else if (proxy != NULL) {
8797  Variable* var = proxy->var();
8798  if (var->IsUnallocated()) {
8799  Bailout(kDeleteWithGlobalVariable);
8800  } else if (var->IsStackAllocated() || var->IsContextSlot()) {
8801  // Result of deleting non-global variables is false. 'this' is not
8802  // really a variable, though we implement it as one. The
8803  // subexpression does not have side effects.
8804  HValue* value = var->is_this()
8805  ? graph()->GetConstantTrue()
8806  : graph()->GetConstantFalse();
8807  return ast_context()->ReturnValue(value);
8808  } else {
8809  Bailout(kDeleteWithNonGlobalVariable);
8810  }
8811  } else {
8812  // Result of deleting non-property, non-variable reference is true.
8813  // Evaluate the subexpression for side effects.
8814  CHECK_ALIVE(VisitForEffect(expr->expression()));
8815  return ast_context()->ReturnValue(graph()->GetConstantTrue());
8816  }
8817 }
8818 
8819 
8820 void HOptimizedGraphBuilder::VisitVoid(UnaryOperation* expr) {
8821  CHECK_ALIVE(VisitForEffect(expr->expression()));
8822  return ast_context()->ReturnValue(graph()->GetConstantUndefined());
8823 }
8824 
8825 
8826 void HOptimizedGraphBuilder::VisitTypeof(UnaryOperation* expr) {
8827  CHECK_ALIVE(VisitForTypeOf(expr->expression()));
8828  HValue* value = Pop();
8829  HInstruction* instr = New<HTypeof>(value);
8830  return ast_context()->ReturnInstruction(instr, expr->id());
8831 }
8832 
8833 
8834 void HOptimizedGraphBuilder::VisitNot(UnaryOperation* expr) {
8835  if (ast_context()->IsTest()) {
8836  TestContext* context = TestContext::cast(ast_context());
8837  VisitForControl(expr->expression(),
8838  context->if_false(),
8839  context->if_true());
8840  return;
8841  }
8842 
8843  if (ast_context()->IsEffect()) {
8844  VisitForEffect(expr->expression());
8845  return;
8846  }
8847 
8848  ASSERT(ast_context()->IsValue());
8849  HBasicBlock* materialize_false = graph()->CreateBasicBlock();
8850  HBasicBlock* materialize_true = graph()->CreateBasicBlock();
8851  CHECK_BAILOUT(VisitForControl(expr->expression(),
8852  materialize_false,
8853  materialize_true));
8854 
8855  if (materialize_false->HasPredecessor()) {
8856  materialize_false->SetJoinId(expr->MaterializeFalseId());
8857  set_current_block(materialize_false);
8858  Push(graph()->GetConstantFalse());
8859  } else {
8860  materialize_false = NULL;
8861  }
8862 
8863  if (materialize_true->HasPredecessor()) {
8864  materialize_true->SetJoinId(expr->MaterializeTrueId());
8865  set_current_block(materialize_true);
8866  Push(graph()->GetConstantTrue());
8867  } else {
8868  materialize_true = NULL;
8869  }
8870 
8871  HBasicBlock* join =
8872  CreateJoin(materialize_false, materialize_true, expr->id());
8873  set_current_block(join);
8874  if (join != NULL) return ast_context()->ReturnValue(Pop());
8875 }
8876 
8877 
8878 HInstruction* HOptimizedGraphBuilder::BuildIncrement(
8879  bool returns_original_input,
8880  CountOperation* expr) {
8881  // The input to the count operation is on top of the expression stack.
8882  Representation rep = Representation::FromType(expr->type());
8883  if (rep.IsNone() || rep.IsTagged()) {
8884  rep = Representation::Smi();
8885  }
8886 
8887  if (returns_original_input) {
8888  // We need an explicit HValue representing ToNumber(input). The
8889  // actual HChange instruction we need is (sometimes) added in a later
8890  // phase, so it is not available now to be used as an input to HAdd and
8891  // as the return value.
8892  HInstruction* number_input = AddUncasted<HForceRepresentation>(Pop(), rep);
8893  if (!rep.IsDouble()) {
8894  number_input->SetFlag(HInstruction::kFlexibleRepresentation);
8895  number_input->SetFlag(HInstruction::kCannotBeTagged);
8896  }
8897  Push(number_input);
8898  }
8899 
8900  // The addition has no side effects, so we do not need
8901  // to simulate the expression stack after this instruction.
8902  // Any later failures deopt to the load of the input or earlier.
8903  HConstant* delta = (expr->op() == Token::INC)
8904  ? graph()->GetConstant1()
8905  : graph()->GetConstantMinus1();
8906  HInstruction* instr = AddUncasted<HAdd>(Top(), delta);
8907  if (instr->IsAdd()) {
8908  HAdd* add = HAdd::cast(instr);
8909  add->set_observed_input_representation(1, rep);
8910  add->set_observed_input_representation(2, Representation::Smi());
8911  }
8912  instr->SetFlag(HInstruction::kCannotBeTagged);
8913  instr->ClearAllSideEffects();
8914  return instr;
8915 }
8916 
8917 
8918 void HOptimizedGraphBuilder::BuildStoreForEffect(Expression* expr,
8919  Property* prop,
8920  BailoutId ast_id,
8921  BailoutId return_id,
8922  HValue* object,
8923  HValue* key,
8924  HValue* value) {
8925  EffectContext for_effect(this);
8926  Push(object);
8927  if (key != NULL) Push(key);
8928  Push(value);
8929  BuildStore(expr, prop, ast_id, return_id);
8930 }
8931 
8932 
// Graph construction for ++/-- (prefix and postfix). For postfix in a
// non-effect context an extra stack slot mirrors the full code generator
// so ToNumber(input) can be returned. Variable targets store back
// according to the variable's location (global / local / context slot;
// LOOKUP bails out); property targets reuse the load/store machinery via
// PushLoad and BuildStore / BuildStoreForEffect. The expression-stack
// choreography (Push / Drop / SetExpressionStackAt ordering) must stay in
// sync with the full code generator's stack layout.
8933 void HOptimizedGraphBuilder::VisitCountOperation(CountOperation* expr) {
8934  ASSERT(!HasStackOverflow());
8935  ASSERT(current_block() != NULL);
8936  ASSERT(current_block()->HasPredecessor());
8937  if (!FLAG_hydrogen_track_positions) SetSourcePosition(expr->position());
8938  Expression* target = expr->expression();
8939  VariableProxy* proxy = target->AsVariableProxy();
8940  Property* prop = target->AsProperty();
8941  if (proxy == NULL && prop == NULL) {
8942  return Bailout(kInvalidLhsInCountOperation);
8943  }
8944 
8945  // Match the full code generator stack by simulating an extra stack
8946  // element for postfix operations in a non-effect context. The return
8947  // value is ToNumber(input).
8948  bool returns_original_input =
8949  expr->is_postfix() && !ast_context()->IsEffect();
8950  HValue* input = NULL; // ToNumber(original_input).
8951  HValue* after = NULL; // The result after incrementing or decrementing.
8952 
8953  if (proxy != NULL) {
8954  Variable* var = proxy->var();
8955  if (var->mode() == CONST_LEGACY) {
8956  return Bailout(kUnsupportedCountOperationWithConst);
8957  }
8958  // Argument of the count operation is a variable, not a property.
8959  ASSERT(prop == NULL);
8960  CHECK_ALIVE(VisitForValue(target));
8961 
8962  after = BuildIncrement(returns_original_input, expr);
8963  input = returns_original_input ? Top() : Pop();
8964  Push(after);
8965 
8966  switch (var->location()) {
8967  case Variable::UNALLOCATED:
8968  HandleGlobalVariableAssignment(var,
8969  after,
8970  expr->AssignmentId());
8971  break;
8972 
8973  case Variable::PARAMETER:
8974  case Variable::LOCAL:
8975  BindIfLive(var, after);
8976  break;
8977 
8978  case Variable::CONTEXT: {
8979  // Bail out if we try to mutate a parameter value in a function
8980  // using the arguments object. We do not (yet) correctly handle the
8981  // arguments property of the function.
8982  if (current_info()->scope()->arguments() != NULL) {
8983  // Parameters will rewrite to context slots. We have no direct
8984  // way to detect that the variable is a parameter so we use a
8985  // linear search of the parameter list.
8986  int count = current_info()->scope()->num_parameters();
8987  for (int i = 0; i < count; ++i) {
8988  if (var == current_info()->scope()->parameter(i)) {
8989  return Bailout(kAssignmentToParameterInArgumentsObject);
8990  }
8991  }
8992  }
8993 
8994  HValue* context = BuildContextChainWalk(var);
8995  HStoreContextSlot::Mode mode = IsLexicalVariableMode(var->mode())
8996  ? HStoreContextSlot::kCheckDeoptimize : HStoreContextSlot::kNoCheck;
8997  HStoreContextSlot* instr = Add<HStoreContextSlot>(context, var->index(),
8998  mode, after);
8999  if (instr->HasObservableSideEffects()) {
9000  Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
9001  }
9002  break;
9003  }
9004 
9005  case Variable::LOOKUP:
9006  return Bailout(kLookupVariableInCountOperation);
9007  }
9008 
// Postfix keeps both input and result on the stack; drop accordingly.
9009  Drop(returns_original_input ? 2 : 1);
9010  return ast_context()->ReturnValue(expr->is_postfix() ? input : after);
9011  }
9012 
9013  // Argument of the count operation is a property.
9014  ASSERT(prop != NULL);
9015  if (returns_original_input) Push(graph()->GetConstantUndefined());
9016 
9017  CHECK_ALIVE(VisitForValue(prop->obj()));
9018  HValue* object = Top();
9019 
9020  HValue* key = NULL;
9021  if ((!prop->IsFunctionPrototype() && !prop->key()->IsPropertyName()) ||
9022  prop->IsStringAccess()) {
9023  CHECK_ALIVE(VisitForValue(prop->key()));
9024  key = Top();
9025  }
9026 
9027  CHECK_ALIVE(PushLoad(prop, object, key));
9028 
9029  after = BuildIncrement(returns_original_input, expr);
9030 
9031  if (returns_original_input) {
9032  input = Pop();
9033  // Drop object and key to push it again in the effect context below.
9034  Drop(key == NULL ? 1 : 2);
9035  environment()->SetExpressionStackAt(0, input);
9036  CHECK_ALIVE(BuildStoreForEffect(
9037  expr, prop, expr->id(), expr->AssignmentId(), object, key, after));
9038  return ast_context()->ReturnValue(Pop());
9039  }
9040 
9041  environment()->SetExpressionStackAt(0, after);
9042  return BuildStore(expr, prop, expr->id(), expr->AssignmentId());
9043 }
9044 
9045 
9046 HInstruction* HOptimizedGraphBuilder::BuildStringCharCodeAt(
9047  HValue* string,
9048  HValue* index) {
9049  if (string->IsConstant() && index->IsConstant()) {
9050  HConstant* c_string = HConstant::cast(string);
9051  HConstant* c_index = HConstant::cast(index);
9052  if (c_string->HasStringValue() && c_index->HasNumberValue()) {
9053  int32_t i = c_index->NumberValueAsInteger32();
9054  Handle<String> s = c_string->StringValue();
9055  if (i < 0 || i >= s->length()) {
9056  return New<HConstant>(OS::nan_value());
9057  }
9058  return New<HConstant>(s->Get(i));
9059  }
9060  }
9061  string = BuildCheckString(string);
9062  index = Add<HBoundsCheck>(index, AddLoadStringLength(string));
9063  return New<HStringCharCodeAt>(string, index);
9064 }
9065 
9066 
9067 // Checks if the given shift amounts have following forms:
9068 // (N1) and (N2) with N1 + N2 = 32; (sa) and (32 - sa).
9069 static bool ShiftAmountsAllowReplaceByRotate(HValue* sa,
9070  HValue* const32_minus_sa) {
9071  if (sa->IsConstant() && const32_minus_sa->IsConstant()) {
9072  const HConstant* c1 = HConstant::cast(sa);
9073  const HConstant* c2 = HConstant::cast(const32_minus_sa);
9074  return c1->HasInteger32Value() && c2->HasInteger32Value() &&
9075  (c1->Integer32Value() + c2->Integer32Value() == 32);
9076  }
9077  if (!const32_minus_sa->IsSub()) return false;
9078  HSub* sub = HSub::cast(const32_minus_sa);
9079  return sub->left()->EqualsInteger32Constant(32) && sub->right() == sa;
9080 }
9081 
9082 
9083 // Checks if the left and the right are shift instructions with the oposite
9084 // directions that can be replaced by one rotate right instruction or not.
9085 // Returns the operand and the shift amount for the rotate instruction in the
9086 // former case.
9088  HValue* right,
9089  HValue** operand,
9090  HValue** shift_amount) {
9091  HShl* shl;
9092  HShr* shr;
9093  if (left->IsShl() && right->IsShr()) {
9094  shl = HShl::cast(left);
9095  shr = HShr::cast(right);
9096  } else if (left->IsShr() && right->IsShl()) {
9097  shl = HShl::cast(right);
9098  shr = HShr::cast(left);
9099  } else {
9100  return false;
9101  }
9102  if (shl->left() != shr->left()) return false;
9103 
9104  if (!ShiftAmountsAllowReplaceByRotate(shl->right(), shr->right()) &&
9105  !ShiftAmountsAllowReplaceByRotate(shr->right(), shl->right())) {
9106  return false;
9107  }
9108  *operand= shr->left();
9109  *shift_amount = shr->right();
9110  return true;
9111 }
9112 
9113 
9114 bool CanBeZero(HValue* right) {
9115  if (right->IsConstant()) {
9116  HConstant* right_const = HConstant::cast(right);
9117  if (right_const->HasInteger32Value() &&
9118  (right_const->Integer32Value() & 0x1f) != 0) {
9119  return false;
9120  }
9121  }
9122  return true;
9123 }
9124 
9125 
9126 HValue* HGraphBuilder::EnforceNumberType(HValue* number,
9127  Type* expected) {
9128  if (expected->Is(Type::SignedSmall())) {
9129  return AddUncasted<HForceRepresentation>(number, Representation::Smi());
9130  }
9131  if (expected->Is(Type::Signed32())) {
9132  return AddUncasted<HForceRepresentation>(number,
9134  }
9135  return number;
9136 }
9137 
9138 
// Attempts to truncate |value| to a number, refining |*expected| with the
// resulting type information. Constants are folded eagerly; otherwise the
// expected type is split into number and non-number parts to decide whether
// any conversion is needed.
HValue* HGraphBuilder::TruncateToNumber(HValue* value, Type** expected) {
  // Fold the truncation statically when the operand is a constant that can
  // be converted to a number at compile time.
  if (value->IsConstant()) {
    HConstant* constant = HConstant::cast(value);
    Maybe<HConstant*> number = constant->CopyToTruncatedNumber(zone());
    if (number.has_value) {
      *expected = Type::Number(zone());
      return AddInstruction(number.value);
    }
  }

  // We put temporary values on the stack, which don't correspond to anything
  // in baseline code. Since nothing is observable we avoid recording those
  // pushes with a NoObservableSideEffectsScope.
  NoObservableSideEffectsScope no_effects(this);

  Type* expected_type = *expected;

  // Separate the number type from the rest.
  Type* expected_obj =
      Type::Intersect(expected_type, Type::NonNumber(zone()), zone());
  Type* expected_number =
      Type::Intersect(expected_type, Type::Number(zone()), zone());

  // We expect to get a number.
  // (We need to check first, since Type::None->Is(Type::Any()) == true.
  if (expected_obj->Is(Type::None())) {
    ASSERT(!expected_number->Is(Type::None(zone())));
    return value;
  }

  if (expected_obj->Is(Type::Undefined(zone()))) {
    // This is already done by HChange.
    *expected = Type::Union(expected_number, Type::Float(zone()), zone());
    return value;
  }

  return value;
}
9177 
9178 
9179 HValue* HOptimizedGraphBuilder::BuildBinaryOperation(
9180  BinaryOperation* expr,
9181  HValue* left,
9182  HValue* right,
9183  PushBeforeSimulateBehavior push_sim_result) {
9184  Type* left_type = expr->left()->bounds().lower;
9185  Type* right_type = expr->right()->bounds().lower;
9186  Type* result_type = expr->bounds().lower;
9187  Maybe<int> fixed_right_arg = expr->fixed_right_arg();
9188  Handle<AllocationSite> allocation_site = expr->allocation_site();
9189 
9190  PretenureFlag pretenure_flag = !FLAG_allocation_site_pretenuring ?
9192 
9193  HAllocationMode allocation_mode =
9194  FLAG_allocation_site_pretenuring
9195  ? (allocation_site.is_null()
9196  ? HAllocationMode(NOT_TENURED)
9197  : HAllocationMode(allocation_site))
9198  : HAllocationMode(pretenure_flag);
9199 
9200  HValue* result = HGraphBuilder::BuildBinaryOperation(
9201  expr->op(), left, right, left_type, right_type, result_type,
9202  fixed_right_arg, allocation_mode);
9203  // Add a simulate after instructions with observable side effects, and
9204  // after phis, which are the result of BuildBinaryOperation when we
9205  // inlined some complex subgraph.
9206  if (result->HasObservableSideEffects() || result->IsPhi()) {
9207  if (push_sim_result == PUSH_BEFORE_SIMULATE) {
9208  Push(result);
9209  Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
9210  Drop(1);
9211  } else {
9212  Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
9213  }
9214  }
9215  return result;
9216 }
9217 
9218 
9220  Token::Value op,
9221  HValue* left,
9222  HValue* right,
9223  Type* left_type,
9224  Type* right_type,
9225  Type* result_type,
9226  Maybe<int> fixed_right_arg,
9227  HAllocationMode allocation_mode) {
9228 
9229  Representation left_rep = Representation::FromType(left_type);
9230  Representation right_rep = Representation::FromType(right_type);
9231 
9232  bool maybe_string_add = op == Token::ADD &&
9233  (left_type->Maybe(Type::String()) ||
9234  right_type->Maybe(Type::String()));
9235 
9236  if (left_type->Is(Type::None())) {
9237  Add<HDeoptimize>("Insufficient type feedback for LHS of binary operation",
9239  // TODO(rossberg): we should be able to get rid of non-continuous
9240  // defaults.
9241  left_type = Type::Any(zone());
9242  } else {
9243  if (!maybe_string_add) left = TruncateToNumber(left, &left_type);
9244  left_rep = Representation::FromType(left_type);
9245  }
9246 
9247  if (right_type->Is(Type::None())) {
9248  Add<HDeoptimize>("Insufficient type feedback for RHS of binary operation",
9250  right_type = Type::Any(zone());
9251  } else {
9252  if (!maybe_string_add) right = TruncateToNumber(right, &right_type);
9253  right_rep = Representation::FromType(right_type);
9254  }
9255 
9256  // Special case for string addition here.
9257  if (op == Token::ADD &&
9258  (left_type->Is(Type::String()) || right_type->Is(Type::String()))) {
9259  // Validate type feedback for left argument.
9260  if (left_type->Is(Type::String())) {
9261  left = BuildCheckString(left);
9262  }
9263 
9264  // Validate type feedback for right argument.
9265  if (right_type->Is(Type::String())) {
9266  right = BuildCheckString(right);
9267  }
9268 
9269  // Convert left argument as necessary.
9270  if (left_type->Is(Type::Number())) {
9271  ASSERT(right_type->Is(Type::String()));
9272  left = BuildNumberToString(left, left_type);
9273  } else if (!left_type->Is(Type::String())) {
9274  ASSERT(right_type->Is(Type::String()));
9275  HValue* function = AddLoadJSBuiltin(Builtins::STRING_ADD_RIGHT);
9276  Add<HPushArgument>(left);
9277  Add<HPushArgument>(right);
9278  return AddUncasted<HInvokeFunction>(function, 2);
9279  }
9280 
9281  // Convert right argument as necessary.
9282  if (right_type->Is(Type::Number())) {
9283  ASSERT(left_type->Is(Type::String()));
9284  right = BuildNumberToString(right, right_type);
9285  } else if (!right_type->Is(Type::String())) {
9286  ASSERT(left_type->Is(Type::String()));
9287  HValue* function = AddLoadJSBuiltin(Builtins::STRING_ADD_LEFT);
9288  Add<HPushArgument>(left);
9289  Add<HPushArgument>(right);
9290  return AddUncasted<HInvokeFunction>(function, 2);
9291  }
9292 
9293  // Fast path for empty constant strings.
9294  if (left->IsConstant() &&
9295  HConstant::cast(left)->HasStringValue() &&
9296  HConstant::cast(left)->StringValue()->length() == 0) {
9297  return right;
9298  }
9299  if (right->IsConstant() &&
9300  HConstant::cast(right)->HasStringValue() &&
9301  HConstant::cast(right)->StringValue()->length() == 0) {
9302  return left;
9303  }
9304 
9305  // Register the dependent code with the allocation site.
9306  if (!allocation_mode.feedback_site().is_null()) {
9307  ASSERT(!graph()->info()->IsStub());
9308  Handle<AllocationSite> site(allocation_mode.feedback_site());
9311  }
9312 
9313  // Inline the string addition into the stub when creating allocation
9314  // mementos to gather allocation site feedback, or if we can statically
9315  // infer that we're going to create a cons string.
9316  if ((graph()->info()->IsStub() &&
9317  allocation_mode.CreateAllocationMementos()) ||
9318  (left->IsConstant() &&
9319  HConstant::cast(left)->HasStringValue() &&
9320  HConstant::cast(left)->StringValue()->length() + 1 >=
9322  (right->IsConstant() &&
9323  HConstant::cast(right)->HasStringValue() &&
9324  HConstant::cast(right)->StringValue()->length() + 1 >=
9326  return BuildStringAdd(left, right, allocation_mode);
9327  }
9328 
9329  // Fallback to using the string add stub.
9330  return AddUncasted<HStringAdd>(
9331  left, right, allocation_mode.GetPretenureMode(),
9332  STRING_ADD_CHECK_NONE, allocation_mode.feedback_site());
9333  }
9334 
9335  if (graph()->info()->IsStub()) {
9336  left = EnforceNumberType(left, left_type);
9337  right = EnforceNumberType(right, right_type);
9338  }
9339 
9340  Representation result_rep = Representation::FromType(result_type);
9341 
9342  bool is_non_primitive = (left_rep.IsTagged() && !left_rep.IsSmi()) ||
9343  (right_rep.IsTagged() && !right_rep.IsSmi());
9344 
9345  HInstruction* instr = NULL;
9346  // Only the stub is allowed to call into the runtime, since otherwise we would
9347  // inline several instructions (including the two pushes) for every tagged
9348  // operation in optimized code, which is more expensive, than a stub call.
9349  if (graph()->info()->IsStub() && is_non_primitive) {
9350  HValue* function = AddLoadJSBuiltin(BinaryOpIC::TokenToJSBuiltin(op));
9351  Add<HPushArgument>(left);
9352  Add<HPushArgument>(right);
9353  instr = AddUncasted<HInvokeFunction>(function, 2);
9354  } else {
9355  switch (op) {
9356  case Token::ADD:
9357  instr = AddUncasted<HAdd>(left, right);
9358  break;
9359  case Token::SUB:
9360  instr = AddUncasted<HSub>(left, right);
9361  break;
9362  case Token::MUL:
9363  instr = AddUncasted<HMul>(left, right);
9364  break;
9365  case Token::MOD: {
9366  if (fixed_right_arg.has_value &&
9367  !right->EqualsInteger32Constant(fixed_right_arg.value)) {
9368  HConstant* fixed_right = Add<HConstant>(
9369  static_cast<int>(fixed_right_arg.value));
9370  IfBuilder if_same(this);
9371  if_same.If<HCompareNumericAndBranch>(right, fixed_right, Token::EQ);
9372  if_same.Then();
9373  if_same.ElseDeopt("Unexpected RHS of binary operation");
9374  right = fixed_right;
9375  }
9376  instr = AddUncasted<HMod>(left, right);
9377  break;
9378  }
9379  case Token::DIV:
9380  instr = AddUncasted<HDiv>(left, right);
9381  break;
9382  case Token::BIT_XOR:
9383  case Token::BIT_AND:
9384  instr = AddUncasted<HBitwise>(op, left, right);
9385  break;
9386  case Token::BIT_OR: {
9387  HValue* operand, *shift_amount;
9388  if (left_type->Is(Type::Signed32()) &&
9389  right_type->Is(Type::Signed32()) &&
9390  MatchRotateRight(left, right, &operand, &shift_amount)) {
9391  instr = AddUncasted<HRor>(operand, shift_amount);
9392  } else {
9393  instr = AddUncasted<HBitwise>(op, left, right);
9394  }
9395  break;
9396  }
9397  case Token::SAR:
9398  instr = AddUncasted<HSar>(left, right);
9399  break;
9400  case Token::SHR:
9401  instr = AddUncasted<HShr>(left, right);
9402  if (FLAG_opt_safe_uint32_operations && instr->IsShr() &&
9403  CanBeZero(right)) {
9404  graph()->RecordUint32Instruction(instr);
9405  }
9406  break;
9407  case Token::SHL:
9408  instr = AddUncasted<HShl>(left, right);
9409  break;
9410  default:
9411  UNREACHABLE();
9412  }
9413  }
9414 
9415  if (instr->IsBinaryOperation()) {
9416  HBinaryOperation* binop = HBinaryOperation::cast(instr);
9417  binop->set_observed_input_representation(1, left_rep);
9418  binop->set_observed_input_representation(2, right_rep);
9419  binop->initialize_output_representation(result_rep);
9420  if (graph()->info()->IsStub()) {
9421  // Stub should not call into stub.
9423  // And should truncate on HForceRepresentation already.
9424  if (left->IsForceRepresentation()) {
9425  left->CopyFlag(HValue::kTruncatingToSmi, instr);
9426  left->CopyFlag(HValue::kTruncatingToInt32, instr);
9427  }
9428  if (right->IsForceRepresentation()) {
9429  right->CopyFlag(HValue::kTruncatingToSmi, instr);
9430  right->CopyFlag(HValue::kTruncatingToInt32, instr);
9431  }
9432  }
9433  }
9434  return instr;
9435 }
9436 
9437 
9438 // Check for the form (%_ClassOf(foo) === 'BarClass').
9439 static bool IsClassOfTest(CompareOperation* expr) {
9440  if (expr->op() != Token::EQ_STRICT) return false;
9441  CallRuntime* call = expr->left()->AsCallRuntime();
9442  if (call == NULL) return false;
9443  Literal* literal = expr->right()->AsLiteral();
9444  if (literal == NULL) return false;
9445  if (!literal->value()->IsString()) return false;
9446  if (!call->name()->IsOneByteEqualTo(STATIC_ASCII_VECTOR("_ClassOf"))) {
9447  return false;
9448  }
9449  ASSERT(call->arguments()->length() == 1);
9450  return true;
9451 }
9452 
9453 
9454 void HOptimizedGraphBuilder::VisitBinaryOperation(BinaryOperation* expr) {
9455  ASSERT(!HasStackOverflow());
9456  ASSERT(current_block() != NULL);
9457  ASSERT(current_block()->HasPredecessor());
9458  switch (expr->op()) {
9459  case Token::COMMA:
9460  return VisitComma(expr);
9461  case Token::OR:
9462  case Token::AND:
9463  return VisitLogicalExpression(expr);
9464  default:
9465  return VisitArithmeticExpression(expr);
9466  }
9467 }
9468 
9469 
// Translates a comma expression: the left operand is evaluated only for its
// side effects, and the right operand provides the expression's result.
void HOptimizedGraphBuilder::VisitComma(BinaryOperation* expr) {
  CHECK_ALIVE(VisitForEffect(expr->left()));
  // Visit the right subexpression in the same AST context as the entire
  // expression.
  Visit(expr->right());
}
9476 
9477 
9479  bool is_logical_and = expr->op() == Token::AND;
9480  if (ast_context()->IsTest()) {
9481  TestContext* context = TestContext::cast(ast_context());
9482  // Translate left subexpression.
9483  HBasicBlock* eval_right = graph()->CreateBasicBlock();
9484  if (is_logical_and) {
9485  CHECK_BAILOUT(VisitForControl(expr->left(),
9486  eval_right,
9487  context->if_false()));
9488  } else {
9489  CHECK_BAILOUT(VisitForControl(expr->left(),
9490  context->if_true(),
9491  eval_right));
9492  }
9493 
9494  // Translate right subexpression by visiting it in the same AST
9495  // context as the entire expression.
9496  if (eval_right->HasPredecessor()) {
9497  eval_right->SetJoinId(expr->RightId());
9498  set_current_block(eval_right);
9499  Visit(expr->right());
9500  }
9501 
9502  } else if (ast_context()->IsValue()) {
9503  CHECK_ALIVE(VisitForValue(expr->left()));
9504  ASSERT(current_block() != NULL);
9505  HValue* left_value = Top();
9506 
9507  // Short-circuit left values that always evaluate to the same boolean value.
9508  if (expr->left()->ToBooleanIsTrue() || expr->left()->ToBooleanIsFalse()) {
9509  // l (evals true) && r -> r
9510  // l (evals true) || r -> l
9511  // l (evals false) && r -> l
9512  // l (evals false) || r -> r
9513  if (is_logical_and == expr->left()->ToBooleanIsTrue()) {
9514  Drop(1);
9515  CHECK_ALIVE(VisitForValue(expr->right()));
9516  }
9517  return ast_context()->ReturnValue(Pop());
9518  }
9519 
9520  // We need an extra block to maintain edge-split form.
9521  HBasicBlock* empty_block = graph()->CreateBasicBlock();
9522  HBasicBlock* eval_right = graph()->CreateBasicBlock();
9523  ToBooleanStub::Types expected(expr->left()->to_boolean_types());
9524  HBranch* test = is_logical_and
9525  ? New<HBranch>(left_value, expected, eval_right, empty_block)
9526  : New<HBranch>(left_value, expected, empty_block, eval_right);
9527  FinishCurrentBlock(test);
9528 
9529  set_current_block(eval_right);
9530  Drop(1); // Value of the left subexpression.
9531  CHECK_BAILOUT(VisitForValue(expr->right()));
9532 
9533  HBasicBlock* join_block =
9534  CreateJoin(empty_block, current_block(), expr->id());
9535  set_current_block(join_block);
9536  return ast_context()->ReturnValue(Pop());
9537 
9538  } else {
9539  ASSERT(ast_context()->IsEffect());
9540  // In an effect context, we don't need the value of the left subexpression,
9541  // only its control flow and side effects. We need an extra block to
9542  // maintain edge-split form.
9543  HBasicBlock* empty_block = graph()->CreateBasicBlock();
9544  HBasicBlock* right_block = graph()->CreateBasicBlock();
9545  if (is_logical_and) {
9546  CHECK_BAILOUT(VisitForControl(expr->left(), right_block, empty_block));
9547  } else {
9548  CHECK_BAILOUT(VisitForControl(expr->left(), empty_block, right_block));
9549  }
9550 
9551  // TODO(kmillikin): Find a way to fix this. It's ugly that there are
9552  // actually two empty blocks (one here and one inserted by
9553  // TestContext::BuildBranch, and that they both have an HSimulate though the
9554  // second one is not a merge node, and that we really have no good AST ID to
9555  // put on that first HSimulate.
9556 
9557  if (empty_block->HasPredecessor()) {
9558  empty_block->SetJoinId(expr->id());
9559  } else {
9560  empty_block = NULL;
9561  }
9562 
9563  if (right_block->HasPredecessor()) {
9564  right_block->SetJoinId(expr->RightId());
9565  set_current_block(right_block);
9566  CHECK_BAILOUT(VisitForEffect(expr->right()));
9567  right_block = current_block();
9568  } else {
9569  right_block = NULL;
9570  }
9571 
9572  HBasicBlock* join_block =
9573  CreateJoin(empty_block, right_block, expr->id());
9574  set_current_block(join_block);
9575  // We did not materialize any value in the predecessor environments,
9576  // so there is no need to handle it here.
9577  }
9578 }
9579 
9580 
9582  CHECK_ALIVE(VisitForValue(expr->left()));
9583  CHECK_ALIVE(VisitForValue(expr->right()));
9584  SetSourcePosition(expr->position());
9585  HValue* right = Pop();
9586  HValue* left = Pop();
9587  HValue* result =
9588  BuildBinaryOperation(expr, left, right,
9589  ast_context()->IsEffect() ? NO_PUSH_BEFORE_SIMULATE
9590  : PUSH_BEFORE_SIMULATE);
9591  if (FLAG_hydrogen_track_positions && result->IsBinaryOperation()) {
9592  HBinaryOperation::cast(result)->SetOperandPositions(
9593  zone(),
9594  ScriptPositionToSourcePosition(expr->left()->position()),
9595  ScriptPositionToSourcePosition(expr->right()->position()));
9596  }
9597  return ast_context()->ReturnValue(result);
9598 }
9599 
9600 
9601 void HOptimizedGraphBuilder::HandleLiteralCompareTypeof(CompareOperation* expr,
9602  Expression* sub_expr,
9603  Handle<String> check) {
9604  CHECK_ALIVE(VisitForTypeOf(sub_expr));
9605  SetSourcePosition(expr->position());
9606  HValue* value = Pop();
9607  HTypeofIsAndBranch* instr = New<HTypeofIsAndBranch>(value, check);
9608  return ast_context()->ReturnControl(instr, expr->id());
9609 }
9610 
9611 
9612 static bool IsLiteralCompareBool(Isolate* isolate,
9613  HValue* left,
9614  Token::Value op,
9615  HValue* right) {
9616  return op == Token::EQ_STRICT &&
9617  ((left->IsConstant() &&
9618  HConstant::cast(left)->handle(isolate)->IsBoolean()) ||
9619  (right->IsConstant() &&
9620  HConstant::cast(right)->handle(isolate)->IsBoolean()));
9621 }
9622 
9623 
// Lowers a comparison expression. Literal special cases (typeof-compare,
// null/undefined compare, %_ClassOf test) are peeled off first, then
// instanceof and 'in' are handled, and everything else goes through the
// generic compare builder.
void HOptimizedGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());

  if (!FLAG_hydrogen_track_positions) SetSourcePosition(expr->position());

  // Check for a few fast cases. The AST visiting behavior must be in sync
  // with the full codegen: We don't push both left and right values onto
  // the expression stack when one side is a special-case literal.
  Expression* sub_expr = NULL;
  Handle<String> check;
  if (expr->IsLiteralCompareTypeof(&sub_expr, &check)) {
    return HandleLiteralCompareTypeof(expr, sub_expr, check);
  }
  if (expr->IsLiteralCompareUndefined(&sub_expr, isolate())) {
    return HandleLiteralCompareNil(expr, sub_expr, kUndefinedValue);
  }
  if (expr->IsLiteralCompareNull(&sub_expr)) {
    return HandleLiteralCompareNil(expr, sub_expr, kNullValue);
  }

  // (%_ClassOf(x) === 'Literal') is emitted as a dedicated branch.
  if (IsClassOfTest(expr)) {
    CallRuntime* call = expr->left()->AsCallRuntime();
    ASSERT(call->arguments()->length() == 1);
    CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
    HValue* value = Pop();
    Literal* literal = expr->right()->AsLiteral();
    Handle<String> rhs = Handle<String>::cast(literal->value());
    HClassOfTestAndBranch* instr = New<HClassOfTestAndBranch>(value, rhs);
    return ast_context()->ReturnControl(instr, expr->id());
  }

  Type* left_type = expr->left()->bounds().lower;
  Type* right_type = expr->right()->bounds().lower;
  Type* combined_type = expr->combined_type();

  CHECK_ALIVE(VisitForValue(expr->left()));
  CHECK_ALIVE(VisitForValue(expr->right()));

  if (FLAG_hydrogen_track_positions) SetSourcePosition(expr->position());

  HValue* right = Pop();
  HValue* left = Pop();
  Token::Value op = expr->op();

  // Strict equality against a boolean constant is plain object identity.
  if (IsLiteralCompareBool(isolate(), left, op, right)) {
    HCompareObjectEqAndBranch* result =
        New<HCompareObjectEqAndBranch>(left, right);
    return ast_context()->ReturnControl(result, expr->id());
  }

  if (op == Token::INSTANCEOF) {
    // Check to see if the rhs of the instanceof is a global function not
    // residing in new space. If it is we assume that the function will stay the
    // same.
    Handle<JSFunction> target = Handle<JSFunction>::null();
    VariableProxy* proxy = expr->right()->AsVariableProxy();
    bool global_function = (proxy != NULL) && proxy->var()->IsUnallocated();
    if (global_function &&
        current_info()->has_global_object() &&
        !current_info()->global_object()->IsAccessCheckNeeded()) {
      Handle<String> name = proxy->name();
      Handle<GlobalObject> global(current_info()->global_object());
      LookupResult lookup(isolate());
      global->Lookup(*name, &lookup);
      if (lookup.IsNormal() && lookup.GetValue()->IsJSFunction()) {
        Handle<JSFunction> candidate(JSFunction::cast(lookup.GetValue()));
        // If the function is in new space we assume it's more likely to
        // change and thus prefer the general IC code.
        if (!isolate()->heap()->InNewSpace(*candidate)) {
          target = candidate;
        }
      }
    }

    // If the target is not null we have found a known global function that is
    // assumed to stay the same for this instanceof.
    if (target.is_null()) {
      HInstanceOf* result = New<HInstanceOf>(left, right);
      return ast_context()->ReturnInstruction(result, expr->id());
    } else {
      Add<HCheckValue>(right, target);
      HInstanceOfKnownGlobal* result =
          New<HInstanceOfKnownGlobal>(left, target);
      return ast_context()->ReturnInstruction(result, expr->id());
    }

    // Code below assumes that we don't fall through.
    UNREACHABLE();
  } else if (op == Token::IN) {
    // 'in' is delegated to the IN builtin.
    HValue* function = AddLoadJSBuiltin(Builtins::IN);
    Add<HPushArgument>(left);
    Add<HPushArgument>(right);
    // TODO(olivf) InvokeFunction produces a check for the parameter count,
    // even though we are certain to pass the correct number of arguments here.
    HInstruction* result = New<HInvokeFunction>(function, 2);
    return ast_context()->ReturnInstruction(result, expr->id());
  }

  PushBeforeSimulateBehavior push_behavior =
      ast_context()->IsEffect() ? NO_PUSH_BEFORE_SIMULATE
                                : PUSH_BEFORE_SIMULATE;
  HControlInstruction* compare = BuildCompareInstruction(
      op, left, right, left_type, right_type, combined_type,
      ScriptPositionToSourcePosition(expr->left()->position()),
      ScriptPositionToSourcePosition(expr->right()->position()),
      push_behavior, expr->id());
  if (compare == NULL) return;  // Bailed out.
  return ast_context()->ReturnControl(compare, expr->id());
}
9735 
9736 
9737 HControlInstruction* HOptimizedGraphBuilder::BuildCompareInstruction(
9738  Token::Value op,
9739  HValue* left,
9740  HValue* right,
9741  Type* left_type,
9742  Type* right_type,
9743  Type* combined_type,
9744  HSourcePosition left_position,
9745  HSourcePosition right_position,
9746  PushBeforeSimulateBehavior push_sim_result,
9747  BailoutId bailout_id) {
9748  // Cases handled below depend on collected type feedback. They should
9749  // soft deoptimize when there is no type feedback.
9750  if (combined_type->Is(Type::None())) {
9751  Add<HDeoptimize>("Insufficient type feedback for combined type "
9752  "of binary operation",
9754  combined_type = left_type = right_type = Type::Any(zone());
9755  }
9756 
9757  Representation left_rep = Representation::FromType(left_type);
9758  Representation right_rep = Representation::FromType(right_type);
9759  Representation combined_rep = Representation::FromType(combined_type);
9760 
9761  if (combined_type->Is(Type::Receiver())) {
9762  if (Token::IsEqualityOp(op)) {
9763  // Can we get away with map check and not instance type check?
9764  HValue* operand_to_check =
9765  left->block()->block_id() < right->block()->block_id() ? left : right;
9766  if (combined_type->IsClass()) {
9767  Handle<Map> map = combined_type->AsClass();
9768  AddCheckMap(operand_to_check, map);
9769  HCompareObjectEqAndBranch* result =
9770  New<HCompareObjectEqAndBranch>(left, right);
9771  if (FLAG_hydrogen_track_positions) {
9772  result->set_operand_position(zone(), 0, left_position);
9773  result->set_operand_position(zone(), 1, right_position);
9774  }
9775  return result;
9776  } else {
9777  BuildCheckHeapObject(operand_to_check);
9778  Add<HCheckInstanceType>(operand_to_check,
9779  HCheckInstanceType::IS_SPEC_OBJECT);
9780  HCompareObjectEqAndBranch* result =
9781  New<HCompareObjectEqAndBranch>(left, right);
9782  return result;
9783  }
9784  } else {
9785  Bailout(kUnsupportedNonPrimitiveCompare);
9786  return NULL;
9787  }
9788  } else if (combined_type->Is(Type::InternalizedString()) &&
9789  Token::IsEqualityOp(op)) {
9790  BuildCheckHeapObject(left);
9791  Add<HCheckInstanceType>(left, HCheckInstanceType::IS_INTERNALIZED_STRING);
9792  BuildCheckHeapObject(right);
9793  Add<HCheckInstanceType>(right, HCheckInstanceType::IS_INTERNALIZED_STRING);
9794  HCompareObjectEqAndBranch* result =
9795  New<HCompareObjectEqAndBranch>(left, right);
9796  return result;
9797  } else if (combined_type->Is(Type::String())) {
9798  BuildCheckHeapObject(left);
9799  Add<HCheckInstanceType>(left, HCheckInstanceType::IS_STRING);
9800  BuildCheckHeapObject(right);
9801  Add<HCheckInstanceType>(right, HCheckInstanceType::IS_STRING);
9802  HStringCompareAndBranch* result =
9803  New<HStringCompareAndBranch>(left, right, op);
9804  return result;
9805  } else {
9806  if (combined_rep.IsTagged() || combined_rep.IsNone()) {
9807  HCompareGeneric* result = Add<HCompareGeneric>(left, right, op);
9808  result->set_observed_input_representation(1, left_rep);
9809  result->set_observed_input_representation(2, right_rep);
9810  if (result->HasObservableSideEffects()) {
9811  if (push_sim_result == PUSH_BEFORE_SIMULATE) {
9812  Push(result);
9813  AddSimulate(bailout_id, REMOVABLE_SIMULATE);
9814  Drop(1);
9815  } else {
9816  AddSimulate(bailout_id, REMOVABLE_SIMULATE);
9817  }
9818  }
9819  // TODO(jkummerow): Can we make this more efficient?
9820  HBranch* branch = New<HBranch>(result);
9821  return branch;
9822  } else {
9823  HCompareNumericAndBranch* result =
9824  New<HCompareNumericAndBranch>(left, right, op);
9825  result->set_observed_input_representation(left_rep, right_rep);
9826  if (FLAG_hydrogen_track_positions) {
9827  result->SetOperandPositions(zone(), left_position, right_position);
9828  }
9829  return result;
9830  }
9831  }
9832 }
9833 
9834 
9835 void HOptimizedGraphBuilder::HandleLiteralCompareNil(CompareOperation* expr,
9836  Expression* sub_expr,
9837  NilValue nil) {
9838  ASSERT(!HasStackOverflow());
9839  ASSERT(current_block() != NULL);
9840  ASSERT(current_block()->HasPredecessor());
9841  ASSERT(expr->op() == Token::EQ || expr->op() == Token::EQ_STRICT);
9842  if (!FLAG_hydrogen_track_positions) SetSourcePosition(expr->position());
9843  CHECK_ALIVE(VisitForValue(sub_expr));
9844  HValue* value = Pop();
9845  if (expr->op() == Token::EQ_STRICT) {
9846  HConstant* nil_constant = nil == kNullValue
9847  ? graph()->GetConstantNull()
9848  : graph()->GetConstantUndefined();
9849  HCompareObjectEqAndBranch* instr =
9850  New<HCompareObjectEqAndBranch>(value, nil_constant);
9851  return ast_context()->ReturnControl(instr, expr->id());
9852  } else {
9853  ASSERT_EQ(Token::EQ, expr->op());
9854  Type* type = expr->combined_type()->Is(Type::None())
9855  ? Type::Any(zone()) : expr->combined_type();
9856  HIfContinuation continuation;
9857  BuildCompareNil(value, type, &continuation);
9858  return ast_context()->ReturnContinuation(&continuation, expr->id());
9859  }
9860 }
9861 
9862 
9863 HInstruction* HOptimizedGraphBuilder::BuildThisFunction() {
9864  // If we share optimized code between different closures, the
9865  // this-function is not a constant, except inside an inlined body.
9866  if (function_state()->outer() != NULL) {
9867  return New<HConstant>(
9868  function_state()->compilation_info()->closure());
9869  } else {
9870  return New<HThisFunction>();
9871  }
9872 }
9873 
9874 
9875 HInstruction* HOptimizedGraphBuilder::BuildFastLiteral(
9876  Handle<JSObject> boilerplate_object,
9877  AllocationSiteUsageContext* site_context) {
9878  NoObservableSideEffectsScope no_effects(this);
9879  InstanceType instance_type = boilerplate_object->map()->instance_type();
9880  ASSERT(instance_type == JS_ARRAY_TYPE || instance_type == JS_OBJECT_TYPE);
9881 
9882  HType type = instance_type == JS_ARRAY_TYPE
9883  ? HType::JSArray() : HType::JSObject();
9884  HValue* object_size_constant = Add<HConstant>(
9885  boilerplate_object->map()->instance_size());
9886 
9887  PretenureFlag pretenure_flag = isolate()->heap()->GetPretenureMode();
9888  if (FLAG_allocation_site_pretenuring) {
9889  pretenure_flag = site_context->current()->GetPretenureMode();
9890  Handle<AllocationSite> site(site_context->current());
9893  }
9894 
9895  HInstruction* object = Add<HAllocate>(object_size_constant, type,
9896  pretenure_flag, instance_type, site_context->current());
9897 
9898  // If allocation folding reaches Page::kMaxRegularHeapObjectSize the
9899  // elements array may not get folded into the object. Hence, we set the
9900  // elements pointer to empty fixed array and let store elimination remove
9901  // this store in the folding case.
9902  HConstant* empty_fixed_array = Add<HConstant>(
9903  isolate()->factory()->empty_fixed_array());
9904  Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
9905  empty_fixed_array, INITIALIZING_STORE);
9906 
9907  BuildEmitObjectHeader(boilerplate_object, object);
9908 
9909  Handle<FixedArrayBase> elements(boilerplate_object->elements());
9910  int elements_size = (elements->length() > 0 &&
9911  elements->map() != isolate()->heap()->fixed_cow_array_map()) ?
9912  elements->Size() : 0;
9913 
9914  if (pretenure_flag == TENURED &&
9915  elements->map() == isolate()->heap()->fixed_cow_array_map() &&
9916  isolate()->heap()->InNewSpace(*elements)) {
9917  // If we would like to pretenure a fixed cow array, we must ensure that the
9918  // array is already in old space, otherwise we'll create too many old-to-
9919  // new-space pointers (overflowing the store buffer).
9920  elements = Handle<FixedArrayBase>(
9922  Handle<FixedArray>::cast(elements)));
9923  boilerplate_object->set_elements(*elements);
9924  }
9925 
9926  HInstruction* object_elements = NULL;
9927  if (elements_size > 0) {
9928  HValue* object_elements_size = Add<HConstant>(elements_size);
9929  if (boilerplate_object->HasFastDoubleElements()) {
9930  object_elements = Add<HAllocate>(object_elements_size, HType::Tagged(),
9931  pretenure_flag, FIXED_DOUBLE_ARRAY_TYPE, site_context->current());
9932  } else {
9933  object_elements = Add<HAllocate>(object_elements_size, HType::Tagged(),
9934  pretenure_flag, FIXED_ARRAY_TYPE, site_context->current());
9935  }
9936  }
9937  BuildInitElementsInObjectHeader(boilerplate_object, object, object_elements);
9938 
9939  // Copy object elements if non-COW.
9940  if (object_elements != NULL) {
9941  BuildEmitElements(boilerplate_object, elements, object_elements,
9942  site_context);
9943  }
9944 
9945  // Copy in-object properties.
9946  if (boilerplate_object->map()->NumberOfFields() != 0) {
9947  BuildEmitInObjectProperties(boilerplate_object, object, site_context,
9948  pretenure_flag);
9949  }
9950  return object;
9951 }
9952 
9953 
9954 void HOptimizedGraphBuilder::BuildEmitObjectHeader(
9955  Handle<JSObject> boilerplate_object,
9956  HInstruction* object) {
9957  ASSERT(boilerplate_object->properties()->length() == 0);
9958 
9959  Handle<Map> boilerplate_object_map(boilerplate_object->map());
9960  AddStoreMapConstant(object, boilerplate_object_map);
9961 
9962  Handle<Object> properties_field =
9963  Handle<Object>(boilerplate_object->properties(), isolate());
9964  ASSERT(*properties_field == isolate()->heap()->empty_fixed_array());
9965  HInstruction* properties = Add<HConstant>(properties_field);
9966  HObjectAccess access = HObjectAccess::ForPropertiesPointer();
9967  Add<HStoreNamedField>(object, access, properties);
9968 
9969  if (boilerplate_object->IsJSArray()) {
9970  Handle<JSArray> boilerplate_array =
9971  Handle<JSArray>::cast(boilerplate_object);
9972  Handle<Object> length_field =
9973  Handle<Object>(boilerplate_array->length(), isolate());
9974  HInstruction* length = Add<HConstant>(length_field);
9975 
9976  ASSERT(boilerplate_array->length()->IsSmi());
9977  Add<HStoreNamedField>(object, HObjectAccess::ForArrayLength(
9978  boilerplate_array->GetElementsKind()), length);
9979  }
9980 }
9981 
9982 
9983 void HOptimizedGraphBuilder::BuildInitElementsInObjectHeader(
9984  Handle<JSObject> boilerplate_object,
9985  HInstruction* object,
9986  HInstruction* object_elements) {
9987  ASSERT(boilerplate_object->properties()->length() == 0);
9988  if (object_elements == NULL) {
9989  Handle<Object> elements_field =
9990  Handle<Object>(boilerplate_object->elements(), isolate());
9991  object_elements = Add<HConstant>(elements_field);
9992  }
9993  Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
9994  object_elements);
9995 }
9996 
9997 
// Copies the in-object FIELD properties of |boilerplate_object| into the
// freshly allocated literal |object|. Nested JSObject values recurse through
// BuildFastLiteral; remaining unused in-object slots are filled with the
// one-pointer filler map so the object is fully initialized for the GC.
void HOptimizedGraphBuilder::BuildEmitInObjectProperties(
    Handle<JSObject> boilerplate_object,
    HInstruction* object,
    AllocationSiteUsageContext* site_context,
    PretenureFlag pretenure_flag) {
  Handle<Map> boilerplate_map(boilerplate_object->map());
  Handle<DescriptorArray> descriptors(boilerplate_map->instance_descriptors());
  int limit = boilerplate_map->NumberOfOwnDescriptors();

  // Walk the own descriptors; only FIELD-type properties get a store
  // (other descriptor types are skipped).
  int copied_fields = 0;
  for (int i = 0; i < limit; i++) {
    PropertyDetails details = descriptors->GetDetails(i);
    if (details.type() != FIELD) continue;
    copied_fields++;
    int index = descriptors->GetFieldIndex(i);
    int property_offset = boilerplate_object->GetInObjectPropertyOffset(index);
    Handle<Name> name(descriptors->GetKey(i));
    Handle<Object> value =
        Handle<Object>(boilerplate_object->InObjectPropertyAt(index),
        isolate());

    // The access for the store depends on the type of the boilerplate.
    HObjectAccess access = boilerplate_object->IsJSArray() ?
        HObjectAccess::ForJSArrayOffset(property_offset) :
        HObjectAccess::ForMapAndOffset(boilerplate_map, property_offset);

    if (value->IsJSObject()) {
      // Nested literal: deep-copy it under a fresh allocation-site scope.
      Handle<JSObject> value_object = Handle<JSObject>::cast(value);
      Handle<AllocationSite> current_site = site_context->EnterNewScope();
      HInstruction* result =
          BuildFastLiteral(value_object, site_context);
      site_context->ExitScope(current_site, value_object);
      Add<HStoreNamedField>(object, access, result);
    } else {
      Representation representation = details.representation();
      HInstruction* value_instruction;

      if (representation.IsDouble()) {
        // Allocate a HeapNumber box and store the value into it.
        HValue* heap_number_constant = Add<HConstant>(HeapNumber::kSize);
        // This heap number alloc does not have a corresponding
        // AllocationSite. That is okay because
        // 1) it's a child object of another object with a valid allocation site
        // 2) we can just use the mode of the parent object for pretenuring
        HInstruction* double_box =
            Add<HAllocate>(heap_number_constant, HType::HeapNumber(),
                pretenure_flag, HEAP_NUMBER_TYPE);
        AddStoreMapConstant(double_box,
            isolate()->factory()->heap_number_map());
        Add<HStoreNamedField>(double_box, HObjectAccess::ForHeapNumberValue(),
            Add<HConstant>(value));
        value_instruction = double_box;
      } else if (representation.IsSmi()) {
        // An uninitialized (hole) smi field is materialized as 0.
        value_instruction = value->IsUninitialized()
            ? graph()->GetConstant0()
            : Add<HConstant>(value);
        // Ensure that value is stored as smi.
        access = access.WithRepresentation(representation);
      } else {
        value_instruction = Add<HConstant>(value);
      }

      Add<HStoreNamedField>(object, access, value_instruction);
    }
  }

  // Pad the remaining in-object slots with the one-pointer filler map.
  int inobject_properties = boilerplate_object->map()->inobject_properties();
  HInstruction* value_instruction =
      Add<HConstant>(isolate()->factory()->one_pointer_filler_map());
  for (int i = copied_fields; i < inobject_properties; i++) {
    ASSERT(boilerplate_object->IsJSObject());
    int property_offset = boilerplate_object->GetInObjectPropertyOffset(i);
    HObjectAccess access =
        HObjectAccess::ForMapAndOffset(boilerplate_map, property_offset);
    Add<HStoreNamedField>(object, access, value_instruction);
  }
}
10075 
10076 
10077 void HOptimizedGraphBuilder::BuildEmitElements(
10078  Handle<JSObject> boilerplate_object,
10079  Handle<FixedArrayBase> elements,
10080  HValue* object_elements,
10081  AllocationSiteUsageContext* site_context) {
10082  ElementsKind kind = boilerplate_object->map()->elements_kind();
10083  int elements_length = elements->length();
10084  HValue* object_elements_length = Add<HConstant>(elements_length);
10085  BuildInitializeElementsHeader(object_elements, kind, object_elements_length);
10086 
10087  // Copy elements backing store content.
10088  if (elements->IsFixedDoubleArray()) {
10089  BuildEmitFixedDoubleArray(elements, kind, object_elements);
10090  } else if (elements->IsFixedArray()) {
10091  BuildEmitFixedArray(elements, kind, object_elements,
10092  site_context);
10093  } else {
10094  UNREACHABLE();
10095  }
10096 }
10097 
10098 
10099 void HOptimizedGraphBuilder::BuildEmitFixedDoubleArray(
10100  Handle<FixedArrayBase> elements,
10101  ElementsKind kind,
10102  HValue* object_elements) {
10103  HInstruction* boilerplate_elements = Add<HConstant>(elements);
10104  int elements_length = elements->length();
10105  for (int i = 0; i < elements_length; i++) {
10106  HValue* key_constant = Add<HConstant>(i);
10107  HInstruction* value_instruction =
10108  Add<HLoadKeyed>(boilerplate_elements, key_constant,
10109  static_cast<HValue*>(NULL), kind,
10111  HInstruction* store = Add<HStoreKeyed>(object_elements, key_constant,
10112  value_instruction, kind);
10113  store->SetFlag(HValue::kAllowUndefinedAsNaN);
10114  }
10115 }
10116 
10117 
10118 void HOptimizedGraphBuilder::BuildEmitFixedArray(
10119  Handle<FixedArrayBase> elements,
10120  ElementsKind kind,
10121  HValue* object_elements,
10122  AllocationSiteUsageContext* site_context) {
10123  HInstruction* boilerplate_elements = Add<HConstant>(elements);
10124  int elements_length = elements->length();
10125  Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements);
10126  for (int i = 0; i < elements_length; i++) {
10127  Handle<Object> value(fast_elements->get(i), isolate());
10128  HValue* key_constant = Add<HConstant>(i);
10129  if (value->IsJSObject()) {
10130  Handle<JSObject> value_object = Handle<JSObject>::cast(value);
10131  Handle<AllocationSite> current_site = site_context->EnterNewScope();
10132  HInstruction* result =
10133  BuildFastLiteral(value_object, site_context);
10134  site_context->ExitScope(current_site, value_object);
10135  Add<HStoreKeyed>(object_elements, key_constant, result, kind);
10136  } else {
10137  HInstruction* value_instruction =
10138  Add<HLoadKeyed>(boilerplate_elements, key_constant,
10139  static_cast<HValue*>(NULL), kind,
10141  Add<HStoreKeyed>(object_elements, key_constant, value_instruction, kind);
10142  }
10143  }
10144 }
10145 
10146 
10147 void HOptimizedGraphBuilder::VisitThisFunction(ThisFunction* expr) {
10148  ASSERT(!HasStackOverflow());
10149  ASSERT(current_block() != NULL);
10150  ASSERT(current_block()->HasPredecessor());
10151  HInstruction* instr = BuildThisFunction();
10152  return ast_context()->ReturnInstruction(instr, expr->id());
10153 }
10154 
10155 
10157  ZoneList<Declaration*>* declarations) {
10158  ASSERT(globals_.is_empty());
10159  AstVisitor::VisitDeclarations(declarations);
10160  if (!globals_.is_empty()) {
10161  Handle<FixedArray> array =
10162  isolate()->factory()->NewFixedArray(globals_.length(), TENURED);
10163  for (int i = 0; i < globals_.length(); ++i) array->set(i, *globals_.at(i));
10164  int flags = DeclareGlobalsEvalFlag::encode(current_info()->is_eval()) |
10167  Add<HDeclareGlobals>(array, flags);
10168  globals_.Clear();
10169  }
10170 }
10171 
10172 
// Declares a variable. Global (UNALLOCATED) declarations are batched into
// globals_ (flushed later by VisitDeclarations); stack and context slots are
// hole-initialized when the binding mode requires it (let/const).
void HOptimizedGraphBuilder::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      // Record a name/initial-value pair for the batched declaration.
      globals_.Add(variable->name(), zone());
      globals_.Add(variable->binding_needs_init()
          ? isolate()->factory()->the_hole_value()
          : isolate()->factory()->undefined_value(), zone());
      return;
    case Variable::PARAMETER:
    case Variable::LOCAL:
      if (hole_init) {
        HValue* value = graph()->GetConstantHole();
        environment()->Bind(variable, value);
      }
      break;
    case Variable::CONTEXT:
      if (hole_init) {
        HValue* value = graph()->GetConstantHole();
        HValue* context = environment()->context();
        HStoreContextSlot* store = Add<HStoreContextSlot>(
            context, variable->index(), HStoreContextSlot::kNoCheck, value);
        // A simulate is needed after observable stores for deopt support.
        if (store->HasObservableSideEffects()) {
          Add<HSimulate>(proxy->id(), REMOVABLE_SIMULATE);
        }
      }
      break;
    case Variable::LOOKUP:
      return Bailout(kUnsupportedLookupSlotInDeclaration);
  }
}
10208 
10209 
// Declares a function binding. Global declarations record a compiled
// SharedFunctionInfo in globals_; stack/context slots are bound to the
// closure value produced by visiting the function expression.
void HOptimizedGraphBuilder::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      globals_.Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function = Compiler::BuildFunctionInfo(
          declaration->fun(), current_info()->script());
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_.Add(function, zone());
      return;
    }
    case Variable::PARAMETER:
    case Variable::LOCAL: {
      CHECK_ALIVE(VisitForValue(declaration->fun()));
      HValue* value = Pop();
      BindIfLive(variable, value);
      break;
    }
    case Variable::CONTEXT: {
      CHECK_ALIVE(VisitForValue(declaration->fun()));
      HValue* value = Pop();
      HValue* context = environment()->context();
      HStoreContextSlot* store = Add<HStoreContextSlot>(
          context, variable->index(), HStoreContextSlot::kNoCheck, value);
      // A simulate is needed after observable stores for deopt support.
      if (store->HasObservableSideEffects()) {
        Add<HSimulate>(proxy->id(), REMOVABLE_SIMULATE);
      }
      break;
    }
    case Variable::LOOKUP:
      return Bailout(kUnsupportedLookupSlotInDeclaration);
  }
}
10246 
10247 
// Module-related AST nodes are never expected to reach the optimizing
// compiler (presumably filtered out before graph building — UNREACHABLE
// enforces that assumption for each visitor below).
void HOptimizedGraphBuilder::VisitModuleDeclaration(
    ModuleDeclaration* declaration) {
  UNREACHABLE();
}


void HOptimizedGraphBuilder::VisitImportDeclaration(
    ImportDeclaration* declaration) {
  UNREACHABLE();
}


void HOptimizedGraphBuilder::VisitExportDeclaration(
    ExportDeclaration* declaration) {
  UNREACHABLE();
}


void HOptimizedGraphBuilder::VisitModuleLiteral(ModuleLiteral* module) {
  UNREACHABLE();
}


void HOptimizedGraphBuilder::VisitModuleVariable(ModuleVariable* module) {
  UNREACHABLE();
}


void HOptimizedGraphBuilder::VisitModulePath(ModulePath* module) {
  UNREACHABLE();
}


void HOptimizedGraphBuilder::VisitModuleUrl(ModuleUrl* module) {
  UNREACHABLE();
}


void HOptimizedGraphBuilder::VisitModuleStatement(ModuleStatement* stmt) {
  UNREACHABLE();
}
10289 
10290 
10291 // Generators for inline runtime functions.
10292 // Support for types.
10293 void HOptimizedGraphBuilder::GenerateIsSmi(CallRuntime* call) {
10294  ASSERT(call->arguments()->length() == 1);
10295  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10296  HValue* value = Pop();
10297  HIsSmiAndBranch* result = New<HIsSmiAndBranch>(value);
10298  return ast_context()->ReturnControl(result, call->id());
10299 }
10300 
10301 
10302 void HOptimizedGraphBuilder::GenerateIsSpecObject(CallRuntime* call) {
10303  ASSERT(call->arguments()->length() == 1);
10304  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10305  HValue* value = Pop();
10306  HHasInstanceTypeAndBranch* result =
10307  New<HHasInstanceTypeAndBranch>(value,
10310  return ast_context()->ReturnControl(result, call->id());
10311 }
10312 
10313 
10314 void HOptimizedGraphBuilder::GenerateIsFunction(CallRuntime* call) {
10315  ASSERT(call->arguments()->length() == 1);
10316  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10317  HValue* value = Pop();
10318  HHasInstanceTypeAndBranch* result =
10319  New<HHasInstanceTypeAndBranch>(value, JS_FUNCTION_TYPE);
10320  return ast_context()->ReturnControl(result, call->id());
10321 }
10322 
10323 
10324 void HOptimizedGraphBuilder::GenerateIsMinusZero(CallRuntime* call) {
10325  ASSERT(call->arguments()->length() == 1);
10326  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10327  HValue* value = Pop();
10328  HCompareMinusZeroAndBranch* result = New<HCompareMinusZeroAndBranch>(value);
10329  return ast_context()->ReturnControl(result, call->id());
10330 }
10331 
10332 
10333 void HOptimizedGraphBuilder::GenerateHasCachedArrayIndex(CallRuntime* call) {
10334  ASSERT(call->arguments()->length() == 1);
10335  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10336  HValue* value = Pop();
10337  HHasCachedArrayIndexAndBranch* result =
10338  New<HHasCachedArrayIndexAndBranch>(value);
10339  return ast_context()->ReturnControl(result, call->id());
10340 }
10341 
10342 
10343 void HOptimizedGraphBuilder::GenerateIsArray(CallRuntime* call) {
10344  ASSERT(call->arguments()->length() == 1);
10345  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10346  HValue* value = Pop();
10347  HHasInstanceTypeAndBranch* result =
10348  New<HHasInstanceTypeAndBranch>(value, JS_ARRAY_TYPE);
10349  return ast_context()->ReturnControl(result, call->id());
10350 }
10351 
10352 
10353 void HOptimizedGraphBuilder::GenerateIsRegExp(CallRuntime* call) {
10354  ASSERT(call->arguments()->length() == 1);
10355  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10356  HValue* value = Pop();
10357  HHasInstanceTypeAndBranch* result =
10358  New<HHasInstanceTypeAndBranch>(value, JS_REGEXP_TYPE);
10359  return ast_context()->ReturnControl(result, call->id());
10360 }
10361 
10362 
10363 void HOptimizedGraphBuilder::GenerateIsObject(CallRuntime* call) {
10364  ASSERT(call->arguments()->length() == 1);
10365  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10366  HValue* value = Pop();
10367  HIsObjectAndBranch* result = New<HIsObjectAndBranch>(value);
10368  return ast_context()->ReturnControl(result, call->id());
10369 }
10370 
10371 
// %_IsNonNegativeSmi has no Hydrogen implementation; functions using it
// fall back to unoptimized code.
void HOptimizedGraphBuilder::GenerateIsNonNegativeSmi(CallRuntime* call) {
  return Bailout(kInlinedRuntimeFunctionIsNonNegativeSmi);
}
10375 
10376 
10377 void HOptimizedGraphBuilder::GenerateIsUndetectableObject(CallRuntime* call) {
10378  ASSERT(call->arguments()->length() == 1);
10379  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10380  HValue* value = Pop();
10381  HIsUndetectableAndBranch* result = New<HIsUndetectableAndBranch>(value);
10382  return ast_context()->ReturnControl(result, call->id());
10383 }
10384 
10385 
// Not implemented in Hydrogen; functions using this intrinsic fall back to
// unoptimized code.
void HOptimizedGraphBuilder::GenerateIsStringWrapperSafeForDefaultValueOf(
    CallRuntime* call) {
  return Bailout(kInlinedRuntimeFunctionIsStringWrapperSafeForDefaultValueOf);
}
10390 
10391 
10392 // Support for construct call checks.
10393 void HOptimizedGraphBuilder::GenerateIsConstructCall(CallRuntime* call) {
10394  ASSERT(call->arguments()->length() == 0);
10395  if (function_state()->outer() != NULL) {
10396  // We are generating graph for inlined function.
10397  HValue* value = function_state()->inlining_kind() == CONSTRUCT_CALL_RETURN
10398  ? graph()->GetConstantTrue()
10399  : graph()->GetConstantFalse();
10400  return ast_context()->ReturnValue(value);
10401  } else {
10402  return ast_context()->ReturnControl(New<HIsConstructCallAndBranch>(),
10403  call->id());
10404  }
10405 }
10406 
10407 
10408 // Support for arguments.length and arguments[?].
10409 void HOptimizedGraphBuilder::GenerateArgumentsLength(CallRuntime* call) {
10410  // Our implementation of arguments (based on this stack frame or an
10411  // adapter below it) does not work for inlined functions. This runtime
10412  // function is blacklisted by AstNode::IsInlineable.
10413  ASSERT(function_state()->outer() == NULL);
10414  ASSERT(call->arguments()->length() == 0);
10415  HInstruction* elements = Add<HArgumentsElements>(false);
10416  HArgumentsLength* result = New<HArgumentsLength>(elements);
10417  return ast_context()->ReturnInstruction(result, call->id());
10418 }
10419 
10420 
10421 void HOptimizedGraphBuilder::GenerateArguments(CallRuntime* call) {
10422  // Our implementation of arguments (based on this stack frame or an
10423  // adapter below it) does not work for inlined functions. This runtime
10424  // function is blacklisted by AstNode::IsInlineable.
10425  ASSERT(function_state()->outer() == NULL);
10426  ASSERT(call->arguments()->length() == 1);
10427  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10428  HValue* index = Pop();
10429  HInstruction* elements = Add<HArgumentsElements>(false);
10430  HInstruction* length = Add<HArgumentsLength>(elements);
10431  HInstruction* checked_index = Add<HBoundsCheck>(index, length);
10432  HAccessArgumentsAt* result = New<HAccessArgumentsAt>(
10433  elements, length, checked_index);
10434  return ast_context()->ReturnInstruction(result, call->id());
10435 }
10436 
10437 
// Support for accessing the class and value fields of an object.
// %_ClassOf in its general form is not inlined; functions using it fall
// back to unoptimized code.
void HOptimizedGraphBuilder::GenerateClassOf(CallRuntime* call) {
  // The special form detected by IsClassOfTest is detected before we get here
  // and does not cause a bailout.
  return Bailout(kInlinedRuntimeFunctionClassOf);
}
10444 
10445 
10446 void HOptimizedGraphBuilder::GenerateValueOf(CallRuntime* call) {
10447  ASSERT(call->arguments()->length() == 1);
10448  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10449  HValue* object = Pop();
10450 
10451  IfBuilder if_objectisvalue(this);
10452  HValue* objectisvalue = if_objectisvalue.If<HHasInstanceTypeAndBranch>(
10453  object, JS_VALUE_TYPE);
10454  if_objectisvalue.Then();
10455  {
10456  // Return the actual value.
10457  Push(Add<HLoadNamedField>(
10458  object, objectisvalue,
10459  HObjectAccess::ForObservableJSObjectOffset(
10461  Add<HSimulate>(call->id(), FIXED_SIMULATE);
10462  }
10463  if_objectisvalue.Else();
10464  {
10465  // If the object is not a value return the object.
10466  Push(object);
10467  Add<HSimulate>(call->id(), FIXED_SIMULATE);
10468  }
10469  if_objectisvalue.End();
10470  return ast_context()->ReturnValue(Pop());
10471 }
10472 
10473 
10474 void HOptimizedGraphBuilder::GenerateDateField(CallRuntime* call) {
10475  ASSERT(call->arguments()->length() == 2);
10476  ASSERT_NE(NULL, call->arguments()->at(1)->AsLiteral());
10477  Smi* index = Smi::cast(*(call->arguments()->at(1)->AsLiteral()->value()));
10478  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10479  HValue* date = Pop();
10480  HDateField* result = New<HDateField>(date, index);
10481  return ast_context()->ReturnInstruction(result, call->id());
10482 }
10483 
10484 
10485 void HOptimizedGraphBuilder::GenerateOneByteSeqStringSetChar(
10486  CallRuntime* call) {
10487  ASSERT(call->arguments()->length() == 3);
10488  // We need to follow the evaluation order of full codegen.
10489  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
10490  CHECK_ALIVE(VisitForValue(call->arguments()->at(2)));
10491  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10492  HValue* string = Pop();
10493  HValue* value = Pop();
10494  HValue* index = Pop();
10495  Add<HSeqStringSetChar>(String::ONE_BYTE_ENCODING, string,
10496  index, value);
10497  Add<HSimulate>(call->id(), FIXED_SIMULATE);
10498  return ast_context()->ReturnValue(graph()->GetConstantUndefined());
10499 }
10500 
10501 
10502 void HOptimizedGraphBuilder::GenerateTwoByteSeqStringSetChar(
10503  CallRuntime* call) {
10504  ASSERT(call->arguments()->length() == 3);
10505  // We need to follow the evaluation order of full codegen.
10506  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
10507  CHECK_ALIVE(VisitForValue(call->arguments()->at(2)));
10508  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10509  HValue* string = Pop();
10510  HValue* value = Pop();
10511  HValue* index = Pop();
10512  Add<HSeqStringSetChar>(String::TWO_BYTE_ENCODING, string,
10513  index, value);
10514  Add<HSimulate>(call->id(), FIXED_SIMULATE);
10515  return ast_context()->ReturnValue(graph()->GetConstantUndefined());
10516 }
10517 
10518 
// Inline %_SetValueOf(object, value): if |object| is a JSValue wrapper,
// stores |value| into its value slot; otherwise does nothing. Always
// yields |value|.
void HOptimizedGraphBuilder::GenerateSetValueOf(CallRuntime* call) {
  ASSERT(call->arguments()->length() == 2);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
  HValue* value = Pop();
  HValue* object = Pop();

  // Check if object is a JSValue.
  IfBuilder if_objectisvalue(this);
  if_objectisvalue.If<HHasInstanceTypeAndBranch>(object, JS_VALUE_TYPE);
  if_objectisvalue.Then();
  {
    // Create in-object property store to kValueOffset.
    Add<HStoreNamedField>(object,
        HObjectAccess::ForObservableJSObjectOffset(JSValue::kValueOffset),
        value);
    // Both arms must leave the environment in the same shape: push the
    // result only when the surrounding context consumes a value.
    if (!ast_context()->IsEffect()) {
      Push(value);
    }
    Add<HSimulate>(call->id(), FIXED_SIMULATE);
  }
  if_objectisvalue.Else();
  {
    // Nothing to do in this case.
    if (!ast_context()->IsEffect()) {
      Push(value);
    }
    Add<HSimulate>(call->id(), FIXED_SIMULATE);
  }
  if_objectisvalue.End();
  // Pop the balancing push from above; |value| itself is returned.
  if (!ast_context()->IsEffect()) {
    Drop(1);
  }
  return ast_context()->ReturnValue(value);
}
10554 
10555 
10556 // Fast support for charCodeAt(n).
10557 void HOptimizedGraphBuilder::GenerateStringCharCodeAt(CallRuntime* call) {
10558  ASSERT(call->arguments()->length() == 2);
10559  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10560  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
10561  HValue* index = Pop();
10562  HValue* string = Pop();
10563  HInstruction* result = BuildStringCharCodeAt(string, index);
10564  return ast_context()->ReturnInstruction(result, call->id());
10565 }
10566 
10567 
10568 // Fast support for string.charAt(n) and string[n].
10569 void HOptimizedGraphBuilder::GenerateStringCharFromCode(CallRuntime* call) {
10570  ASSERT(call->arguments()->length() == 1);
10571  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10572  HValue* char_code = Pop();
10573  HInstruction* result = NewUncasted<HStringCharFromCode>(char_code);
10574  return ast_context()->ReturnInstruction(result, call->id());
10575 }
10576 
10577 
10578 // Fast support for string.charAt(n) and string[n].
10579 void HOptimizedGraphBuilder::GenerateStringCharAt(CallRuntime* call) {
10580  ASSERT(call->arguments()->length() == 2);
10581  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10582  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
10583  HValue* index = Pop();
10584  HValue* string = Pop();
10585  HInstruction* char_code = BuildStringCharCodeAt(string, index);
10586  AddInstruction(char_code);
10587  HInstruction* result = NewUncasted<HStringCharFromCode>(char_code);
10588  return ast_context()->ReturnInstruction(result, call->id());
10589 }
10590 
10591 
10592 // Fast support for object equality testing.
10593 void HOptimizedGraphBuilder::GenerateObjectEquals(CallRuntime* call) {
10594  ASSERT(call->arguments()->length() == 2);
10595  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10596  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
10597  HValue* right = Pop();
10598  HValue* left = Pop();
10599  HCompareObjectEqAndBranch* result =
10600  New<HCompareObjectEqAndBranch>(left, right);
10601  return ast_context()->ReturnControl(result, call->id());
10602 }
10603 
10604 
// Inline %_Log: a no-op in optimized code, yielding undefined.
void HOptimizedGraphBuilder::GenerateLog(CallRuntime* call) {
  // %_Log is ignored in optimized code.
  return ast_context()->ReturnValue(graph()->GetConstantUndefined());
}
10609 
10610 
10611 // Fast support for StringAdd.
10612 void HOptimizedGraphBuilder::GenerateStringAdd(CallRuntime* call) {
10613  ASSERT_EQ(2, call->arguments()->length());
10614  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10615  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
10616  HValue* right = Pop();
10617  HValue* left = Pop();
10618  HInstruction* result = NewUncasted<HStringAdd>(left, right);
10619  return ast_context()->ReturnInstruction(result, call->id());
10620 }
10621 
10622 
10623 // Fast support for SubString.
10624 void HOptimizedGraphBuilder::GenerateSubString(CallRuntime* call) {
10625  ASSERT_EQ(3, call->arguments()->length());
10626  CHECK_ALIVE(VisitExpressions(call->arguments()));
10627  PushArgumentsFromEnvironment(call->arguments()->length());
10628  HCallStub* result = New<HCallStub>(CodeStub::SubString, 3);
10629  return ast_context()->ReturnInstruction(result, call->id());
10630 }
10631 
10632 
10633 // Fast support for StringCompare.
10634 void HOptimizedGraphBuilder::GenerateStringCompare(CallRuntime* call) {
10635  ASSERT_EQ(2, call->arguments()->length());
10636  CHECK_ALIVE(VisitExpressions(call->arguments()));
10637  PushArgumentsFromEnvironment(call->arguments()->length());
10638  HCallStub* result = New<HCallStub>(CodeStub::StringCompare, 2);
10639  return ast_context()->ReturnInstruction(result, call->id());
10640 }
10641 
10642 
10643 // Support for direct calls from JavaScript to native RegExp code.
10644 void HOptimizedGraphBuilder::GenerateRegExpExec(CallRuntime* call) {
10645  ASSERT_EQ(4, call->arguments()->length());
10646  CHECK_ALIVE(VisitExpressions(call->arguments()));
10647  PushArgumentsFromEnvironment(call->arguments()->length());
10648  HCallStub* result = New<HCallStub>(CodeStub::RegExpExec, 4);
10649  return ast_context()->ReturnInstruction(result, call->id());
10650 }
10651 
10652 
10653 void HOptimizedGraphBuilder::GenerateDoubleLo(CallRuntime* call) {
10654  ASSERT_EQ(1, call->arguments()->length());
10655  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10656  HValue* value = Pop();
10657  HInstruction* result = NewUncasted<HDoubleBits>(value, HDoubleBits::LOW);
10658  return ast_context()->ReturnInstruction(result, call->id());
10659 }
10660 
10661 
10662 void HOptimizedGraphBuilder::GenerateDoubleHi(CallRuntime* call) {
10663  ASSERT_EQ(1, call->arguments()->length());
10664  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10665  HValue* value = Pop();
10666  HInstruction* result = NewUncasted<HDoubleBits>(value, HDoubleBits::HIGH);
10667  return ast_context()->ReturnInstruction(result, call->id());
10668 }
10669 
10670 
10671 void HOptimizedGraphBuilder::GenerateConstructDouble(CallRuntime* call) {
10672  ASSERT_EQ(2, call->arguments()->length());
10673  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10674  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
10675  HValue* lo = Pop();
10676  HValue* hi = Pop();
10677  HInstruction* result = NewUncasted<HConstructDouble>(hi, lo);
10678  return ast_context()->ReturnInstruction(result, call->id());
10679 }
10680 
10681 
10682 // Construct a RegExp exec result with two in-object properties.
10683 void HOptimizedGraphBuilder::GenerateRegExpConstructResult(CallRuntime* call) {
10684  ASSERT_EQ(3, call->arguments()->length());
10685  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10686  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
10687  CHECK_ALIVE(VisitForValue(call->arguments()->at(2)));
10688  HValue* input = Pop();
10689  HValue* index = Pop();
10690  HValue* length = Pop();
10691  HValue* result = BuildRegExpConstructResult(length, index, input);
10692  return ast_context()->ReturnValue(result);
10693 }
10694 
10695 
// Support for fast native caches.
// Not implemented in Hydrogen; functions using this intrinsic fall back to
// unoptimized code.
void HOptimizedGraphBuilder::GenerateGetFromCache(CallRuntime* call) {
  return Bailout(kInlinedRuntimeFunctionGetFromCache);
}
10700 
10701 
10702 // Fast support for number to string.
10703 void HOptimizedGraphBuilder::GenerateNumberToString(CallRuntime* call) {
10704  ASSERT_EQ(1, call->arguments()->length());
10705  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10706  HValue* number = Pop();
10707  HValue* result = BuildNumberToString(number, Type::Any(zone()));
10708  return ast_context()->ReturnValue(result);
10709 }
10710 
10711 
// Fast call for custom callbacks: %_CallFunction(receiver, ..., fn).
// Branches on whether |fn| is a real JSFunction, using the direct
// HInvokeFunction path when it is and generic HCallFunction otherwise.
void HOptimizedGraphBuilder::GenerateCallFunction(CallRuntime* call) {
  // 1 ~ The function to call is not itself an argument to the call.
  int arg_count = call->arguments()->length() - 1;
  ASSERT(arg_count >= 1); // There's always at least a receiver.

  CHECK_ALIVE(VisitExpressions(call->arguments()));
  // The function is the last argument
  HValue* function = Pop();
  // Push the arguments to the stack
  PushArgumentsFromEnvironment(arg_count);

  IfBuilder if_is_jsfunction(this);
  if_is_jsfunction.If<HHasInstanceTypeAndBranch>(function, JS_FUNCTION_TYPE);

  if_is_jsfunction.Then();
  {
    // Direct invocation path for JSFunctions. Both arms push only when the
    // context consumes a value, so the environments stay balanced.
    HInstruction* invoke_result =
        Add<HInvokeFunction>(function, arg_count);
    if (!ast_context()->IsEffect()) {
      Push(invoke_result);
    }
    Add<HSimulate>(call->id(), FIXED_SIMULATE);
  }

  if_is_jsfunction.Else();
  {
    // Generic call path for other callables.
    HInstruction* call_result =
        Add<HCallFunction>(function, arg_count);
    if (!ast_context()->IsEffect()) {
      Push(call_result);
    }
    Add<HSimulate>(call->id(), FIXED_SIMULATE);
  }
  if_is_jsfunction.End();

  if (ast_context()->IsEffect()) {
    // EffectContext::ReturnValue ignores the value, so we can just pass
    // 'undefined' (as we do not have the call result anymore).
    return ast_context()->ReturnValue(graph()->GetConstantUndefined());
  } else {
    return ast_context()->ReturnValue(Pop());
  }
}
10756 
10757 
10758 // Fast call to math functions.
10759 void HOptimizedGraphBuilder::GenerateMathPow(CallRuntime* call) {
10760  ASSERT_EQ(2, call->arguments()->length());
10761  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10762  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
10763  HValue* right = Pop();
10764  HValue* left = Pop();
10765  HInstruction* result = NewUncasted<HPower>(left, right);
10766  return ast_context()->ReturnInstruction(result, call->id());
10767 }
10768 
10769 
10770 void HOptimizedGraphBuilder::GenerateMathLog(CallRuntime* call) {
10771  ASSERT(call->arguments()->length() == 1);
10772  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10773  HValue* value = Pop();
10774  HInstruction* result = NewUncasted<HUnaryMathOperation>(value, kMathLog);
10775  return ast_context()->ReturnInstruction(result, call->id());
10776 }
10777 
10778 
10779 void HOptimizedGraphBuilder::GenerateMathSqrt(CallRuntime* call) {
10780  ASSERT(call->arguments()->length() == 1);
10781  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10782  HValue* value = Pop();
10783  HInstruction* result = NewUncasted<HUnaryMathOperation>(value, kMathSqrt);
10784  return ast_context()->ReturnInstruction(result, call->id());
10785 }
10786 
10787 
10788 void HOptimizedGraphBuilder::GenerateGetCachedArrayIndex(CallRuntime* call) {
10789  ASSERT(call->arguments()->length() == 1);
10790  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10791  HValue* value = Pop();
10792  HGetCachedArrayIndex* result = New<HGetCachedArrayIndex>(value);
10793  return ast_context()->ReturnInstruction(result, call->id());
10794 }
10795 
10796 
10797 void HOptimizedGraphBuilder::GenerateFastAsciiArrayJoin(CallRuntime* call) {
10798  return Bailout(kInlinedRuntimeFunctionFastAsciiArrayJoin);
10799 }
10800 
10801 
10802 // Support for generators.
10803 void HOptimizedGraphBuilder::GenerateGeneratorNext(CallRuntime* call) {
10804  return Bailout(kInlinedRuntimeFunctionGeneratorNext);
10805 }
10806 
10807 
10808 void HOptimizedGraphBuilder::GenerateGeneratorThrow(CallRuntime* call) {
10809  return Bailout(kInlinedRuntimeFunctionGeneratorThrow);
10810 }
10811 
10812 
10813 void HOptimizedGraphBuilder::GenerateDebugBreakInOptimizedCode(
10814  CallRuntime* call) {
10815  Add<HDebugBreak>();
10816  return ast_context()->ReturnValue(graph()->GetConstant0());
10817 }
10818 
10819 
10820 #undef CHECK_BAILOUT
10821 #undef CHECK_ALIVE
10822 
10823 
10824 HEnvironment::HEnvironment(HEnvironment* outer,
10825  Scope* scope,
10826  Handle<JSFunction> closure,
10827  Zone* zone)
10828  : closure_(closure),
10829  values_(0, zone),
10830  frame_type_(JS_FUNCTION),
10831  parameter_count_(0),
10832  specials_count_(1),
10833  local_count_(0),
10834  outer_(outer),
10835  entry_(NULL),
10836  pop_count_(0),
10837  push_count_(0),
10838  ast_id_(BailoutId::None()),
10839  zone_(zone) {
10840  Initialize(scope->num_parameters() + 1, scope->num_stack_slots(), 0);
10841 }
10842 
10843 
10844 HEnvironment::HEnvironment(Zone* zone, int parameter_count)
10845  : values_(0, zone),
10846  frame_type_(STUB),
10847  parameter_count_(parameter_count),
10848  specials_count_(1),
10849  local_count_(0),
10850  outer_(NULL),
10851  entry_(NULL),
10852  pop_count_(0),
10853  push_count_(0),
10854  ast_id_(BailoutId::None()),
10855  zone_(zone) {
10856  Initialize(parameter_count, 0, 0);
10857 }
10858 
10859 
10860 HEnvironment::HEnvironment(const HEnvironment* other, Zone* zone)
10861  : values_(0, zone),
10862  frame_type_(JS_FUNCTION),
10863  parameter_count_(0),
10864  specials_count_(0),
10865  local_count_(0),
10866  outer_(NULL),
10867  entry_(NULL),
10868  pop_count_(0),
10869  push_count_(0),
10870  ast_id_(other->ast_id()),
10871  zone_(zone) {
10872  Initialize(other);
10873 }
10874 
10875 
10876 HEnvironment::HEnvironment(HEnvironment* outer,
10877  Handle<JSFunction> closure,
10878  FrameType frame_type,
10879  int arguments,
10880  Zone* zone)
10881  : closure_(closure),
10882  values_(arguments, zone),
10883  frame_type_(frame_type),
10884  parameter_count_(arguments),
10885  specials_count_(0),
10886  local_count_(0),
10887  outer_(outer),
10888  entry_(NULL),
10889  pop_count_(0),
10890  push_count_(0),
10891  ast_id_(BailoutId::None()),
10892  zone_(zone) {
10893 }
10894 
10895 
10896 void HEnvironment::Initialize(int parameter_count,
10897  int local_count,
10898  int stack_height) {
10899  parameter_count_ = parameter_count;
10900  local_count_ = local_count;
10901 
10902  // Avoid reallocating the temporaries' backing store on the first Push.
10903  int total = parameter_count + specials_count_ + local_count + stack_height;
10904  values_.Initialize(total + 4, zone());
10905  for (int i = 0; i < total; ++i) values_.Add(NULL, zone());
10906 }
10907 
10908 
10909 void HEnvironment::Initialize(const HEnvironment* other) {
10910  closure_ = other->closure();
10911  values_.AddAll(other->values_, zone());
10912  assigned_variables_.Union(other->assigned_variables_, zone());
10913  frame_type_ = other->frame_type_;
10914  parameter_count_ = other->parameter_count_;
10915  local_count_ = other->local_count_;
10916  if (other->outer_ != NULL) outer_ = other->outer_->Copy(); // Deep copy.
10917  entry_ = other->entry_;
10918  pop_count_ = other->pop_count_;
10919  push_count_ = other->push_count_;
10920  specials_count_ = other->specials_count_;
10921  ast_id_ = other->ast_id_;
10922 }
10923 
10924 
10925 void HEnvironment::AddIncomingEdge(HBasicBlock* block, HEnvironment* other) {
10926  ASSERT(!block->IsLoopHeader());
10927  ASSERT(values_.length() == other->values_.length());
10928 
10929  int length = values_.length();
10930  for (int i = 0; i < length; ++i) {
10931  HValue* value = values_[i];
10932  if (value != NULL && value->IsPhi() && value->block() == block) {
10933  // There is already a phi for the i'th value.
10934  HPhi* phi = HPhi::cast(value);
10935  // Assert index is correct and that we haven't missed an incoming edge.
10936  ASSERT(phi->merged_index() == i || !phi->HasMergedIndex());
10937  ASSERT(phi->OperandCount() == block->predecessors()->length());
10938  phi->AddInput(other->values_[i]);
10939  } else if (values_[i] != other->values_[i]) {
10940  // There is a fresh value on the incoming edge, a phi is needed.
10941  ASSERT(values_[i] != NULL && other->values_[i] != NULL);
10942  HPhi* phi = block->AddNewPhi(i);
10943  HValue* old_value = values_[i];
10944  for (int j = 0; j < block->predecessors()->length(); j++) {
10945  phi->AddInput(old_value);
10946  }
10947  phi->AddInput(other->values_[i]);
10948  this->values_[i] = phi;
10949  }
10950  }
10951 }
10952 
10953 
10954 void HEnvironment::Bind(int index, HValue* value) {
10955  ASSERT(value != NULL);
10956  assigned_variables_.Add(index, zone());
10957  values_[index] = value;
10958 }
10959 
10960 
10961 bool HEnvironment::HasExpressionAt(int index) const {
10962  return index >= parameter_count_ + specials_count_ + local_count_;
10963 }
10964 
10965 
10966 bool HEnvironment::ExpressionStackIsEmpty() const {
10967  ASSERT(length() >= first_expression_index());
10968  return length() == first_expression_index();
10969 }
10970 
10971 
10972 void HEnvironment::SetExpressionStackAt(int index_from_top, HValue* value) {
10973  int count = index_from_top + 1;
10974  int index = values_.length() - count;
10975  ASSERT(HasExpressionAt(index));
10976  // The push count must include at least the element in question or else
10977  // the new value will not be included in this environment's history.
10978  if (push_count_ < count) {
10979  // This is the same effect as popping then re-pushing 'count' elements.
10980  pop_count_ += (count - push_count_);
10981  push_count_ = count;
10982  }
10983  values_[index] = value;
10984 }
10985 
10986 
10987 void HEnvironment::Drop(int count) {
10988  for (int i = 0; i < count; ++i) {
10989  Pop();
10990  }
10991 }
10992 
10993 
10994 HEnvironment* HEnvironment::Copy() const {
10995  return new(zone()) HEnvironment(this, zone());
10996 }
10997 
10998 
10999 HEnvironment* HEnvironment::CopyWithoutHistory() const {
11000  HEnvironment* result = Copy();
11001  result->ClearHistory();
11002  return result;
11003 }
11004 
11005 
11006 HEnvironment* HEnvironment::CopyAsLoopHeader(HBasicBlock* loop_header) const {
11007  HEnvironment* new_env = Copy();
11008  for (int i = 0; i < values_.length(); ++i) {
11009  HPhi* phi = loop_header->AddNewPhi(i);
11010  phi->AddInput(values_[i]);
11011  new_env->values_[i] = phi;
11012  }
11013  new_env->ClearHistory();
11014  return new_env;
11015 }
11016 
11017 
11018 HEnvironment* HEnvironment::CreateStubEnvironment(HEnvironment* outer,
11019  Handle<JSFunction> target,
11020  FrameType frame_type,
11021  int arguments) const {
11022  HEnvironment* new_env =
11023  new(zone()) HEnvironment(outer, target, frame_type,
11024  arguments + 1, zone());
11025  for (int i = 0; i <= arguments; ++i) { // Include receiver.
11026  new_env->Push(ExpressionStackAt(arguments - i));
11027  }
11028  new_env->ClearHistory();
11029  return new_env;
11030 }
11031 
11032 
11033 HEnvironment* HEnvironment::CopyForInlining(
11034  Handle<JSFunction> target,
11035  int arguments,
11036  FunctionLiteral* function,
11037  HConstant* undefined,
11038  InliningKind inlining_kind) const {
11039  ASSERT(frame_type() == JS_FUNCTION);
11040 
11041  // Outer environment is a copy of this one without the arguments.
11042  int arity = function->scope()->num_parameters();
11043 
11044  HEnvironment* outer = Copy();
11045  outer->Drop(arguments + 1); // Including receiver.
11046  outer->ClearHistory();
11047 
11048  if (inlining_kind == CONSTRUCT_CALL_RETURN) {
11049  // Create artificial constructor stub environment. The receiver should
11050  // actually be the constructor function, but we pass the newly allocated
11051  // object instead, DoComputeConstructStubFrame() relies on that.
11052  outer = CreateStubEnvironment(outer, target, JS_CONSTRUCT, arguments);
11053  } else if (inlining_kind == GETTER_CALL_RETURN) {
11054  // We need an additional StackFrame::INTERNAL frame for restoring the
11055  // correct context.
11056  outer = CreateStubEnvironment(outer, target, JS_GETTER, arguments);
11057  } else if (inlining_kind == SETTER_CALL_RETURN) {
11058  // We need an additional StackFrame::INTERNAL frame for temporarily saving
11059  // the argument of the setter, see StoreStubCompiler::CompileStoreViaSetter.
11060  outer = CreateStubEnvironment(outer, target, JS_SETTER, arguments);
11061  }
11062 
11063  if (arity != arguments) {
11064  // Create artificial arguments adaptation environment.
11065  outer = CreateStubEnvironment(outer, target, ARGUMENTS_ADAPTOR, arguments);
11066  }
11067 
11068  HEnvironment* inner =
11069  new(zone()) HEnvironment(outer, function->scope(), target, zone());
11070  // Get the argument values from the original environment.
11071  for (int i = 0; i <= arity; ++i) { // Include receiver.
11072  HValue* push = (i <= arguments) ?
11073  ExpressionStackAt(arguments - i) : undefined;
11074  inner->SetValueAt(i, push);
11075  }
11076  inner->SetValueAt(arity + 1, context());
11077  for (int i = arity + 2; i < inner->length(); ++i) {
11078  inner->SetValueAt(i, undefined);
11079  }
11080 
11081  inner->set_ast_id(BailoutId::FunctionEntry());
11082  return inner;
11083 }
11084 
11085 
11086 void HEnvironment::PrintTo(StringStream* stream) {
11087  for (int i = 0; i < length(); i++) {
11088  if (i == 0) stream->Add("parameters\n");
11089  if (i == parameter_count()) stream->Add("specials\n");
11090  if (i == parameter_count() + specials_count()) stream->Add("locals\n");
11091  if (i == parameter_count() + specials_count() + local_count()) {
11092  stream->Add("expressions\n");
11093  }
11094  HValue* val = values_.at(i);
11095  stream->Add("%d: ", i);
11096  if (val != NULL) {
11097  val->PrintNameTo(stream);
11098  } else {
11099  stream->Add("NULL");
11100  }
11101  stream->Add("\n");
11102  }
11103  PrintF("\n");
11104 }
11105 
11106 
11107 void HEnvironment::PrintToStd() {
11108  HeapStringAllocator string_allocator;
11109  StringStream trace(&string_allocator);
11110  PrintTo(&trace);
11111  PrintF("%s", trace.ToCString().get());
11112 }
11113 
11114 
11115 void HTracer::TraceCompilation(CompilationInfo* info) {
11116  Tag tag(this, "compilation");
11117  if (info->IsOptimizing()) {
11118  Handle<String> name = info->function()->debug_name();
11119  PrintStringProperty("name", name->ToCString().get());
11120  PrintIndent();
11121  trace_.Add("method \"%s:%d\"\n",
11122  name->ToCString().get(),
11123  info->optimization_id());
11124  } else {
11125  CodeStub::Major major_key = info->code_stub()->MajorKey();
11126  PrintStringProperty("name", CodeStub::MajorName(major_key, false));
11127  PrintStringProperty("method", "stub");
11128  }
11129  PrintLongProperty("date", static_cast<int64_t>(OS::TimeCurrentMillis()));
11130 }
11131 
11132 
11133 void HTracer::TraceLithium(const char* name, LChunk* chunk) {
11134  ASSERT(!chunk->isolate()->concurrent_recompilation_enabled());
11135  AllowHandleDereference allow_deref;
11136  AllowDeferredHandleDereference allow_deferred_deref;
11137  Trace(name, chunk->graph(), chunk);
11138 }
11139 
11140 
11141 void HTracer::TraceHydrogen(const char* name, HGraph* graph) {
11142  ASSERT(!graph->isolate()->concurrent_recompilation_enabled());
11143  AllowHandleDereference allow_deref;
11144  AllowDeferredHandleDereference allow_deferred_deref;
11145  Trace(name, graph, NULL);
11146 }
11147 
11148 
11149 void HTracer::Trace(const char* name, HGraph* graph, LChunk* chunk) {
11150  Tag tag(this, "cfg");
11151  PrintStringProperty("name", name);
11152  const ZoneList<HBasicBlock*>* blocks = graph->blocks();
11153  for (int i = 0; i < blocks->length(); i++) {
11154  HBasicBlock* current = blocks->at(i);
11155  Tag block_tag(this, "block");
11156  PrintBlockProperty("name", current->block_id());
11157  PrintIntProperty("from_bci", -1);
11158  PrintIntProperty("to_bci", -1);
11159 
11160  if (!current->predecessors()->is_empty()) {
11161  PrintIndent();
11162  trace_.Add("predecessors");
11163  for (int j = 0; j < current->predecessors()->length(); ++j) {
11164  trace_.Add(" \"B%d\"", current->predecessors()->at(j)->block_id());
11165  }
11166  trace_.Add("\n");
11167  } else {
11168  PrintEmptyProperty("predecessors");
11169  }
11170 
11171  if (current->end()->SuccessorCount() == 0) {
11172  PrintEmptyProperty("successors");
11173  } else {
11174  PrintIndent();
11175  trace_.Add("successors");
11176  for (HSuccessorIterator it(current->end()); !it.Done(); it.Advance()) {
11177  trace_.Add(" \"B%d\"", it.Current()->block_id());
11178  }
11179  trace_.Add("\n");
11180  }
11181 
11182  PrintEmptyProperty("xhandlers");
11183 
11184  {
11185  PrintIndent();
11186  trace_.Add("flags");
11187  if (current->IsLoopSuccessorDominator()) {
11188  trace_.Add(" \"dom-loop-succ\"");
11189  }
11190  if (current->IsUnreachable()) {
11191  trace_.Add(" \"dead\"");
11192  }
11193  if (current->is_osr_entry()) {
11194  trace_.Add(" \"osr\"");
11195  }
11196  trace_.Add("\n");
11197  }
11198 
11199  if (current->dominator() != NULL) {
11200  PrintBlockProperty("dominator", current->dominator()->block_id());
11201  }
11202 
11203  PrintIntProperty("loop_depth", current->LoopNestingDepth());
11204 
11205  if (chunk != NULL) {
11206  int first_index = current->first_instruction_index();
11207  int last_index = current->last_instruction_index();
11208  PrintIntProperty(
11209  "first_lir_id",
11210  LifetimePosition::FromInstructionIndex(first_index).Value());
11211  PrintIntProperty(
11212  "last_lir_id",
11213  LifetimePosition::FromInstructionIndex(last_index).Value());
11214  }
11215 
11216  {
11217  Tag states_tag(this, "states");
11218  Tag locals_tag(this, "locals");
11219  int total = current->phis()->length();
11220  PrintIntProperty("size", current->phis()->length());
11221  PrintStringProperty("method", "None");
11222  for (int j = 0; j < total; ++j) {
11223  HPhi* phi = current->phis()->at(j);
11224  PrintIndent();
11225  trace_.Add("%d ", phi->merged_index());
11226  phi->PrintNameTo(&trace_);
11227  trace_.Add(" ");
11228  phi->PrintTo(&trace_);
11229  trace_.Add("\n");
11230  }
11231  }
11232 
11233  {
11234  Tag HIR_tag(this, "HIR");
11235  for (HInstructionIterator it(current); !it.Done(); it.Advance()) {
11236  HInstruction* instruction = it.Current();
11237  int uses = instruction->UseCount();
11238  PrintIndent();
11239  trace_.Add("0 %d ", uses);
11240  instruction->PrintNameTo(&trace_);
11241  trace_.Add(" ");
11242  instruction->PrintTo(&trace_);
11243  if (FLAG_hydrogen_track_positions &&
11244  instruction->has_position() &&
11245  instruction->position().raw() != 0) {
11246  const HSourcePosition pos = instruction->position();
11247  trace_.Add(" pos:");
11248  if (pos.inlining_id() != 0) {
11249  trace_.Add("%d_", pos.inlining_id());
11250  }
11251  trace_.Add("%d", pos.position());
11252  }
11253  trace_.Add(" <|@\n");
11254  }
11255  }
11256 
11257 
11258  if (chunk != NULL) {
11259  Tag LIR_tag(this, "LIR");
11260  int first_index = current->first_instruction_index();
11261  int last_index = current->last_instruction_index();
11262  if (first_index != -1 && last_index != -1) {
11263  const ZoneList<LInstruction*>* instructions = chunk->instructions();
11264  for (int i = first_index; i <= last_index; ++i) {
11265  LInstruction* linstr = instructions->at(i);
11266  if (linstr != NULL) {
11267  PrintIndent();
11268  trace_.Add("%d ",
11270  linstr->PrintTo(&trace_);
11271  trace_.Add(" [hir:");
11272  linstr->hydrogen_value()->PrintNameTo(&trace_);
11273  trace_.Add("]");
11274  trace_.Add(" <|@\n");
11275  }
11276  }
11277  }
11278  }
11279  }
11280 }
11281 
11282 
11283 void HTracer::TraceLiveRanges(const char* name, LAllocator* allocator) {
11284  Tag tag(this, "intervals");
11285  PrintStringProperty("name", name);
11286 
11287  const Vector<LiveRange*>* fixed_d = allocator->fixed_double_live_ranges();
11288  for (int i = 0; i < fixed_d->length(); ++i) {
11289  TraceLiveRange(fixed_d->at(i), "fixed", allocator->zone());
11290  }
11291 
11292  const Vector<LiveRange*>* fixed = allocator->fixed_live_ranges();
11293  for (int i = 0; i < fixed->length(); ++i) {
11294  TraceLiveRange(fixed->at(i), "fixed", allocator->zone());
11295  }
11296 
11297  const ZoneList<LiveRange*>* live_ranges = allocator->live_ranges();
11298  for (int i = 0; i < live_ranges->length(); ++i) {
11299  TraceLiveRange(live_ranges->at(i), "object", allocator->zone());
11300  }
11301 }
11302 
11303 
11304 void HTracer::TraceLiveRange(LiveRange* range, const char* type,
11305  Zone* zone) {
11306  if (range != NULL && !range->IsEmpty()) {
11307  PrintIndent();
11308  trace_.Add("%d %s", range->id(), type);
11309  if (range->HasRegisterAssigned()) {
11310  LOperand* op = range->CreateAssignedOperand(zone);
11311  int assigned_reg = op->index();
11312  if (op->IsDoubleRegister()) {
11313  trace_.Add(" \"%s\"",
11315  } else {
11316  ASSERT(op->IsRegister());
11317  trace_.Add(" \"%s\"", Register::AllocationIndexToString(assigned_reg));
11318  }
11319  } else if (range->IsSpilled()) {
11320  LOperand* op = range->TopLevel()->GetSpillOperand();
11321  if (op->IsDoubleStackSlot()) {
11322  trace_.Add(" \"double_stack:%d\"", op->index());
11323  } else {
11324  ASSERT(op->IsStackSlot());
11325  trace_.Add(" \"stack:%d\"", op->index());
11326  }
11327  }
11328  int parent_index = -1;
11329  if (range->IsChild()) {
11330  parent_index = range->parent()->id();
11331  } else {
11332  parent_index = range->id();
11333  }
11334  LOperand* op = range->FirstHint();
11335  int hint_index = -1;
11336  if (op != NULL && op->IsUnallocated()) {
11337  hint_index = LUnallocated::cast(op)->virtual_register();
11338  }
11339  trace_.Add(" %d %d", parent_index, hint_index);
11340  UseInterval* cur_interval = range->first_interval();
11341  while (cur_interval != NULL && range->Covers(cur_interval->start())) {
11342  trace_.Add(" [%d, %d[",
11343  cur_interval->start().Value(),
11344  cur_interval->end().Value());
11345  cur_interval = cur_interval->next();
11346  }
11347 
11348  UsePosition* current_pos = range->first_pos();
11349  while (current_pos != NULL) {
11350  if (current_pos->RegisterIsBeneficial() || FLAG_trace_all_uses) {
11351  trace_.Add(" %d M", current_pos->pos().Value());
11352  }
11353  current_pos = current_pos->next();
11354  }
11355 
11356  trace_.Add(" \"\"\n");
11357  }
11358 }
11359 
11360 
11361 void HTracer::FlushToFile() {
11362  AppendChars(filename_.start(), trace_.ToCString().get(), trace_.length(),
11363  false);
11364  trace_.Reset();
11365 }
11366 
11367 
11368 void HStatistics::Initialize(CompilationInfo* info) {
11369  if (info->shared_info().is_null()) return;
11370  source_size_ += info->shared_info()->SourceSize();
11371 }
11372 
11373 
11374 void HStatistics::Print() {
11375  PrintF("Timing results:\n");
11376  TimeDelta sum;
11377  for (int i = 0; i < times_.length(); ++i) {
11378  sum += times_[i];
11379  }
11380 
11381  for (int i = 0; i < names_.length(); ++i) {
11382  PrintF("%32s", names_[i]);
11383  double ms = times_[i].InMillisecondsF();
11384  double percent = times_[i].PercentOf(sum);
11385  PrintF(" %8.3f ms / %4.1f %% ", ms, percent);
11386 
11387  unsigned size = sizes_[i];
11388  double size_percent = static_cast<double>(size) * 100 / total_size_;
11389  PrintF(" %9u bytes / %4.1f %%\n", size, size_percent);
11390  }
11391 
11392  PrintF("----------------------------------------"
11393  "---------------------------------------\n");
11394  TimeDelta total = create_graph_ + optimize_graph_ + generate_code_;
11395  PrintF("%32s %8.3f ms / %4.1f %% \n",
11396  "Create graph",
11397  create_graph_.InMillisecondsF(),
11398  create_graph_.PercentOf(total));
11399  PrintF("%32s %8.3f ms / %4.1f %% \n",
11400  "Optimize graph",
11401  optimize_graph_.InMillisecondsF(),
11402  optimize_graph_.PercentOf(total));
11403  PrintF("%32s %8.3f ms / %4.1f %% \n",
11404  "Generate and install code",
11405  generate_code_.InMillisecondsF(),
11406  generate_code_.PercentOf(total));
11407  PrintF("----------------------------------------"
11408  "---------------------------------------\n");
11409  PrintF("%32s %8.3f ms (%.1f times slower than full code gen)\n",
11410  "Total",
11411  total.InMillisecondsF(),
11412  total.TimesOf(full_code_gen_));
11413 
11414  double source_size_in_kb = static_cast<double>(source_size_) / 1024;
11415  double normalized_time = source_size_in_kb > 0
11416  ? total.InMillisecondsF() / source_size_in_kb
11417  : 0;
11418  double normalized_size_in_kb = source_size_in_kb > 0
11419  ? total_size_ / 1024 / source_size_in_kb
11420  : 0;
11421  PrintF("%32s %8.3f ms %7.3f kB allocated\n",
11422  "Average per kB source",
11423  normalized_time, normalized_size_in_kb);
11424 }
11425 
11426 
11427 void HStatistics::SaveTiming(const char* name, TimeDelta time, unsigned size) {
11428  total_size_ += size;
11429  for (int i = 0; i < names_.length(); ++i) {
11430  if (strcmp(names_[i], name) == 0) {
11431  times_[i] += time;
11432  sizes_[i] += size;
11433  return;
11434  }
11435  }
11436  names_.Add(name);
11437  times_.Add(time);
11438  sizes_.Add(size);
11439 }
11440 
11441 
11443  if (ShouldProduceTraceOutput()) {
11444  isolate()->GetHTracer()->TraceHydrogen(name(), graph_);
11445  }
11446 
11447 #ifdef DEBUG
11448  graph_->Verify(false); // No full verify.
11449 #endif
11450 }
11451 
11452 } } // namespace v8::internal
bool HasObservableSideEffects() const
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter NULL
Definition: flags.cc:269
#define INLINE_FUNCTION_LIST(F)
Definition: runtime.h:643
bool IsTest() const
Definition: hydrogen.h:778
AstContext(HOptimizedGraphBuilder *owner, Expression::Context kind)
Definition: hydrogen.cc:3641
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter NULL
HValue * TruncateToNumber(HValue *value, Type **expected)
Definition: hydrogen.cc:9139
ArgumentsAllowedFlag
Definition: hydrogen.h:764
void GotoNoSimulate(HBasicBlock *from, HBasicBlock *target)
Definition: hydrogen.h:1100
static LUnallocated * cast(LOperand *op)
Definition: lithium.h:156
static const int kUnlimitedMaxInlinedNodesCumulative
Definition: hydrogen.h:2089
Isolate * isolate() const
Definition: hydrogen.h:1073
static HSourcePosition Unknown()
Handle< FixedArray > CopyAndTenureFixedCOWArray(Handle< FixedArray > array)
Definition: factory.cc:891
#define INLINE_FUNCTION_GENERATOR_ADDRESS(Name, argc, ressize)
Definition: hydrogen.cc:8406
VariableDeclaration * function() const
Definition: scopes.h:326
bool IsExternalArrayElementsKind(ElementsKind kind)
bool IsHoleyElementsKind(ElementsKind kind)
void VisitTypeof(UnaryOperation *expr)
Definition: hydrogen.cc:8826
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size)
static const int kPrototypeOrInitialMapOffset
Definition: objects.h:7519
static const int kBuiltinsOffset
Definition: objects.h:7610
void set_ast_context(AstContext *context)
Definition: hydrogen.h:2100
bool calls_eval() const
Definition: scopes.h:299
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths true
Definition: flags.cc:208
static Representation Smi()
void VisitVoid(UnaryOperation *expr)
Definition: hydrogen.cc:8820
void CopyFlag(Flag f, HValue *other)
void PrintF(const char *format,...)
Definition: v8utils.cc:40
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf map
Definition: flags.cc:350
void PushArgumentsFromEnvironment(int count)
Definition: hydrogen.cc:4109
bool InNewSpace(Object *object)
Definition: heap-inl.h:307
static double hole_nan_as_double()
Definition: objects-inl.h:2161
static TypeFeedbackInfo * cast(Object *obj)
static String * cast(Object *obj)
T value
Definition: v8.h:923
bool IsEffect() const
Definition: hydrogen.h:776
static Smi * FromInt(int value)
Definition: objects-inl.h:1209
bool IsFastObjectElementsKind(ElementsKind kind)
static bool MakeCode(CompilationInfo *info)
static const int kEnumCacheBridgeIndicesCacheIndex
Definition: objects.h:3495
void VisitForValue(Expression *expr, ArgumentsAllowedFlag flag=ARGUMENTS_NOT_ALLOWED)
Definition: hydrogen.cc:3880
void ClearDependsOnFlag(GVNFlag f)
HBasicBlock * CreateLoop(IterationStatement *statement, HBasicBlock *loop_entry, HBasicBlock *body_exit, HBasicBlock *loop_successor, HBasicBlock *break_block)
Definition: hydrogen.cc:3030
HInstruction * AddLoadStringInstanceType(HValue *string)
Definition: hydrogen.cc:6205
void AddLeaveInlined(HBasicBlock *block, HValue *return_value, FunctionState *state)
Definition: hydrogen.h:1106
Handle< Script > script() const
Definition: compiler.h:83
static Handle< String > cast(Handle< S > that)
Definition: handles.h:75
void VisitDelete(UnaryOperation *expr)
Definition: hydrogen.cc:8780
HOsrBuilder * osr() const
Definition: hydrogen.h:2051
static Representation Integer32()
static bool Analyze(CompilationInfo *info)
Definition: scopes.cc:278
bool HasIllegalRedeclaration() const
Definition: scopes.h:220
CallInterfaceDescriptor * call_descriptor(CallDescriptorKey index)
Definition: isolate.cc:2280
void Push(HValue *value)
Definition: hydrogen.h:1079
static BailoutId StubEntry()
Definition: utils.h:1168
virtual void ReturnInstruction(HInstruction *instr, BailoutId ast_id)=0
HBasicBlock * function_return() const
Definition: hydrogen.h:2109
PretenureFlag GetPretenureMode()
Definition: heap.h:1569
kSerializedDataOffset Object
Definition: objects-inl.h:5016
HValue * BuildBinaryOperation(Token::Value op, HValue *left, HValue *right, Type *left_type, Type *right_type, Type *result_type, Maybe< int > fixed_right_arg, HAllocationMode allocation_mode)
Definition: hydrogen.cc:9219
TypeImpl< ZoneTypeConfig > Type
Builtins * builtins()
Definition: isolate.h:948
int int32_t
Definition: unicode.cc:47
void set_this_has_uses(bool has_no_uses)
Definition: compiler.h:111
void set_bailout_reason(BailoutReason reason)
Definition: compiler.h:281
KeyedAccessStoreMode
Definition: objects.h:164
Location location() const
Definition: variables.h:146
void AddSimulate(BailoutId id, RemovableSimulate removable=FIXED_SIMULATE)
Definition: hydrogen.cc:1227
bool EqualsInteger32Constant(int32_t value)
static bool enabled()
Definition: serialize.h:485
static ScopeInfo * Empty(Isolate *isolate)
Definition: scopeinfo.cc:151
static LifetimePosition FromInstructionIndex(int index)
virtual void ReturnControl(HControlInstruction *instr, BailoutId ast_id)=0
bool IsStackAllocated() const
Definition: variables.h:118
HSourcePosition source_position()
Definition: hydrogen.h:1836
AllocationSiteOverrideMode
Definition: code-stubs.h:759
static const Function * FunctionForId(FunctionId id)
Definition: runtime.cc:15154
List< Handle< Map > > MapHandleList
Definition: list.h:218
#define ASSERT(condition)
Definition: checks.h:329
HBasicBlock * current_block() const
Definition: hydrogen.h:1066
const int kPointerSizeLog2
Definition: globals.h:281
static Script * cast(Object *obj)
ExternalArrayType
Definition: v8.h:2113
Iterator< i::Map > Classes()
Definition: types.h:290
FunctionState * function_state() const
Definition: hydrogen.h:2059
static bool IsCompileTimeValue(Expression *expression)
Definition: parser.cc:3090
static int OffsetOfFunctionWithId(Builtins::JavaScript id)
Definition: objects.h:7674
void VisitLoopBody(IterationStatement *stmt, HBasicBlock *loop_entry, BreakAndContinueInfo *break_info)
Definition: hydrogen.cc:4512
const uint32_t kStringRepresentationMask
Definition: objects.h:615
static const int kUnlimitedMaxInlinedNodes
Definition: hydrogen.h:2088
bool is_this() const
Definition: variables.h:129
#define CHECK(condition)
Definition: checks.h:75
PerThreadAssertScopeDebugOnly< DEFERRED_HANDLE_DEREFERENCE_ASSERT, true > AllowDeferredHandleDereference
Definition: assert-scope.h:234
#define DEFINE_GET_CONSTANT(Name, name, htype, boolean_value)
Definition: hydrogen.cc:704
HSourcePosition ScriptPositionToSourcePosition(int position)
Definition: hydrogen.h:1830
void set_observed_input_representation(int index, Representation rep)
const intptr_t kObjectAlignmentMask
Definition: v8globals.h:45
bool Maybe(TypeImpl *that)
Definition: types.cc:339
void VisitDeclarations(ZoneList< Declaration * > *declarations)
Definition: hydrogen.cc:10156
Factory * factory()
Definition: isolate.h:995
bool IsFastElementsKind(ElementsKind kind)
bool IsSimpleMapChangeTransition(ElementsKind from_kind, ElementsKind to_kind)
static const int kMaxStorePolymorphism
Definition: hydrogen.h:2083
static Representation FromType(Type *type)
Definition: types.cc:570
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object size
Handle< String > name() const
Definition: variables.h:96
#define CHECK_ALIVE_OR_RETURN(call, value)
Definition: hydrogen.cc:3861
static Smi * cast(Object *object)
HEnvironment * environment() const
Definition: hydrogen.h:1068
static const char * AllocationIndexToString(int index)
static Handle< ScopeInfo > Create(Scope *scope, Zone *zone)
Definition: scopeinfo.cc:39
Handle< String > FlattenGetString(Handle< String > string)
Definition: handles.cc:156
HInstruction * PreProcessCall(Instruction *call)
Definition: hydrogen.cc:4122
static const int kUnlimitedMaxInlinedSourceSize
Definition: hydrogen.h:2087
HAllocate * BuildAllocate(HValue *object_size, HType type, InstanceType instance_type, HAllocationMode allocation_mode)
Definition: hydrogen.cc:1766
int ContextChainLength(Scope *scope)
Definition: scopes.cc:721
kInstanceClassNameOffset flag
Definition: objects-inl.h:5115
CompilationInfo * current_info() const
Definition: hydrogen.h:2103
static const int kSize
Definition: objects.h:10077
V8_INLINE Handle< Boolean > True(Isolate *isolate)
Definition: v8.h:6559
static Handle< Object > TryMigrateInstance(Handle< JSObject > instance)
Definition: objects.cc:3866
virtual BailoutId ContinueId() const =0
void Bailout(BailoutReason reason)
Definition: hydrogen.cc:3868
static const int kMinLength
Definition: objects.h:9170
#define IN
void Bind(Variable *var, HValue *value)
Definition: hydrogen.h:2169
virtual HValue * context()=0
void(HOptimizedGraphBuilder::* InlineFunctionGenerator)(CallRuntime *call)
Definition: hydrogen.h:2074
int virtual_register() const
Definition: lithium.h:257
#define UNREACHABLE()
Definition: checks.h:52
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes number of stack frames inspected by the profiler percentage of ICs that must have type info to allow optimization extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long mode(MIPS only)") DEFINE_string(expose_natives_as
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object size
Definition: flags.cc:211
void FinishExitWithHardDeoptimization(const char *reason)
Definition: hydrogen.cc:1257
STATIC_ASSERT(sizeof(CPURegister)==sizeof(Register))
Definition: v8.h:917
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes number of stack frames inspected by the profiler percentage of ICs that must have type info to allow optimization extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long expose natives in global object expose freeBuffer extension expose gc extension under the specified name expose externalize string extension number of stack frames to capture disable builtin natives files print name of functions for which code is generated use random jit cookie to mask large constants trace lazy optimization use adaptive optimizations always try to OSR functions trace optimize function deoptimization minimum length for automatic enable preparsing maximum number of optimization attempts before giving up cache prototype transitions trace debugging JSON request response trace out of bounds accesses to external arrays trace_js_array_abuse automatically set the debug break flag when debugger commands are in the queue abort by crashing maximum length of function source code printed in a stack trace max size of the new max size of the old max size of executable always perform global GCs print one trace line following each garbage collection do not print trace line after scavenger collection print statistics of the maximum memory committed for the heap in only print modified registers Don t break for ASM_UNIMPLEMENTED_BREAK macros print stack trace when an illegal exception is thrown randomize hashes to avoid predictable hash 
Fixed seed to use to hash property Print the time it takes to deserialize the snapshot testing_bool_flag testing_int_flag string flag tmp file in which to serialize heap Print the time it takes to lazily compile hydrogen code stubs concurrent_recompilation concurrent_sweeping Print usage including flags
Definition: flags.cc:665
bool IsFastPackedElementsKind(ElementsKind kind)
const int kDoubleSize
Definition: globals.h:266
Variable * arguments() const
Definition: scopes.h:341
AstContext * call_context() const
Definition: hydrogen.h:2106
void VisitForEffect(Expression *expr)
Definition: hydrogen.cc:3874
static const int kDontAdaptArgumentsSentinel
Definition: objects.h:7098
MUST_USE_RESULT MaybeObject * AsElementsKind(ElementsKind kind)
Definition: objects.cc:3360
static bool IsValidElementsTransition(ElementsKind from_kind, ElementsKind to_kind)
Definition: objects.cc:12848
NilValue
Definition: v8.h:133
static bool IsValid(intptr_t value)
Definition: objects-inl.h:1278
static BailoutId Declarations()
Definition: utils.h:1166
static const int kMaxFastLiteralDepth
Definition: hydrogen.h:2093
const int kPointerSize
Definition: globals.h:268
void check(i::Vector< const uint8_t > string)
void set_current_block(HBasicBlock *block)
Definition: hydrogen.h:1067
BreakAndContinueScope * break_scope() const
Definition: hydrogen.h:2044
static const InlineFunctionGenerator kInlineFunctionGenerators[]
Definition: hydrogen.h:2077
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf map
Handle< Object > GlobalConstantFor(Handle< String > name)
Definition: factory.cc:2013
HValue * LookupAndMakeLive(Variable *var)
Definition: hydrogen.h:2196
void BuildArrayBufferViewInitialization(HValue *obj, HValue *buffer, HValue *byte_offset, HValue *byte_length)
Definition: hydrogen.cc:8418
bool IsMoreGeneralElementsKindTransition(ElementsKind from_kind, ElementsKind to_kind)
static const int kMaxLoadPolymorphism
Definition: hydrogen.h:2082
HInstruction * BuildCheckPrototypeMaps(Handle< JSObject > prototype, Handle< JSObject > holder)
Definition: hydrogen.cc:6785
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes number of stack frames inspected by the profiler percentage of ICs that must have type info to allow optimization extra verbose compilation tracing generate extra code(assertions) for debugging") DEFINE_bool(code_comments
const uint32_t kOneByteDataHintMask
Definition: objects.h:638
HInstruction * AddLoadStringLength(HValue *string)
Definition: hydrogen.cc:6219
virtual void ReturnContinuation(HIfContinuation *continuation, BailoutId ast_id)=0
int index() const
Definition: variables.h:147
Handle< FixedArray > NewFixedArray(int size, PretenureFlag pretenure=NOT_TENURED)
Definition: factory.cc:53
bool IsFixedTypedArrayElementsKind(ElementsKind kind)
bool IsLexicalVariableMode(VariableMode mode)
Definition: v8globals.h:508
static const int kPropertiesOffset
Definition: objects.h:2755
bool IsContextSlot() const
Definition: variables.h:119
static double TimeCurrentMillis()
virtual void initialize_output_representation(Representation observed)
int num_parameters() const
Definition: scopes.h:338
bool IsFastSmiElementsKind(ElementsKind kind)
void set_type(HType new_type)
static int SizeFor(int length)
Definition: objects.h:3152
static const int kElementsOffset
Definition: objects.h:2756
#define COMMA
Definition: flags.h:101
void VisitLogicalExpression(BinaryOperation *expr)
Definition: hydrogen.cc:9478
static BailoutId FunctionEntry()
Definition: utils.h:1165
HValue * BuildAllocateArrayFromLength(JSArrayBuilder *array_builder, HValue *length_argument)
Definition: hydrogen.cc:2255
static bool IsEqualityOp(Value op)
Definition: token.h:228
#define STATIC_ASCII_VECTOR(x)
Definition: utils.h:570
#define BASE_EMBEDDED
Definition: allocation.h:68
V8_INLINE Handle< Primitive > Undefined(Isolate *isolate)
Definition: v8.h:6541
Handle< JSFunction > closure() const
Definition: compiler.h:81
bool IsDeclaredVariableMode(VariableMode mode)
Definition: v8globals.h:503
HOptimizedGraphBuilder(CompilationInfo *info)
Definition: hydrogen.cc:2980
BailoutId ExitId() const
Definition: ast.h:422
static BailoutId None()
Definition: utils.h:1164
HInstruction * AddUncasted()
Definition: hydrogen.h:1122
void VisitForTypeOf(Expression *expr)
Definition: hydrogen.cc:3887
void BuildCompareNil(HValue *value, Type *type, HIfContinuation *continuation)
Definition: hydrogen.cc:2661
void VisitNot(UnaryOperation *expr)
Definition: hydrogen.cc:8834
bool IsFastSmiOrObjectElementsKind(ElementsKind kind)
bool Is(TypeImpl *that)
Definition: types.h:246
#define STRING_TYPE(NAME, size, name, Name)
Definition: v8.h:2107
static const int kHeaderSize
Definition: objects.h:3016
HBasicBlock * CreateJoin(HBasicBlock *first, HBasicBlock *second, BailoutId join_id)
Definition: hydrogen.cc:3001
static TypeHandle Intersect(TypeHandle type1, TypeHandle type2, Region *reg)
HInstruction * AddElementAccess(HValue *elements, HValue *checked_key, HValue *val, HValue *dependency, ElementsKind elements_kind, PropertyAccessType access_type, LoadKeyedHoleMode load_mode=NEVER_RETURN_HOLE)
Definition: hydrogen.cc:2386
HLoopInformation * loop()
Definition: hydrogen.cc:3253
void VisitForControl(Expression *expr, HBasicBlock *true_block, HBasicBlock *false_block)
Definition: hydrogen.cc:3894
static const int kMapOffset
Definition: objects.h:1890
HValue * BuildWrapReceiver(HValue *object, HValue *function)
Definition: hydrogen.cc:1279
V8_INLINE Handle< Boolean > False(Isolate *isolate)
Definition: v8.h:6568
#define CHECK_ALIVE(call)
Definition: hydrogen.cc:3854
HValue * BuildObjectSizeAlignment(HValue *unaligned_size, int header_size)
Definition: hydrogen.cc:1907
void SetSourcePosition(int position)
Definition: hydrogen.h:1815
HValue * EnforceNumberType(HValue *number, Type *expected)
Definition: hydrogen.cc:9126
HValue * BuildRegExpConstructResult(HValue *length, HValue *index, HValue *input)
Definition: hydrogen.cc:1560
PostorderProcessor * PerformStep(Zone *zone, BitVector *visited, ZoneList< HBasicBlock * > *order)
Definition: hydrogen.cc:3263
static double nan_value()
void ChangeRepresentation(Representation r)
static const int kSize
Definition: objects.h:1979
bool is_null() const
Definition: handles.h:81
Handle< T > handle(T *t, Isolate *isolate)
Definition: handles.h:103
HBasicBlock * BuildOsrLoopEntry(IterationStatement *statement)
Definition: hydrogen-osr.cc:40
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function info
Definition: flags.cc:317
const intptr_t kObjectAlignment
Definition: v8globals.h:44
HInstruction * BuildGetNativeContext(HValue *closure)
Definition: hydrogen.cc:2745
static PostorderProcessor * CreateEntryProcessor(Zone *zone, HBasicBlock *block, BitVector *visited)
Definition: hydrogen.cc:3256
bool HasOsrEntryAt(IterationStatement *statement)
Definition: hydrogen-osr.cc:35
ElementsKind GetInitialFastElementsKind()
static const int kMaxFastLiteralProperties
Definition: hydrogen.h:2094
HValue * BuildCheckString(HValue *string)
Definition: hydrogen.cc:1268
HBasicBlock * CreateLoopHeaderBlock()
Definition: hydrogen.cc:1242
Type * ToType(Handle< Map > map)
Definition: hydrogen.h:2237
CompilationInfo * top_info()
Definition: hydrogen.h:1074
HValue * BuildCopyElementsOnWrite(HValue *object, HValue *elements, ElementsKind kind, HValue *length)
Definition: hydrogen.cc:1359
Handle< SharedFunctionInfo > shared_info() const
Definition: compiler.h:82
int AppendChars(const char *filename, const char *str, int size, bool verbose)
Definition: v8utils.cc:214
static Handle< Object > CreateArrayLiteralBoilerplate(Isolate *isolate, Handle< FixedArray > literals, Handle< FixedArray > elements)
Definition: runtime.cc:352
void VisitExpressions(ZoneList< Expression * > *exprs)
Definition: hydrogen.cc:3902
HValue * BuildNumberToString(HValue *object, Type *type)
Definition: hydrogen.cc:1627
const uint32_t kOneByteDataHintTag
Definition: objects.h:639
HBasicBlock * CreateBasicBlock(HEnvironment *env)
Definition: hydrogen.cc:1235
static AllocationSiteMode GetMode(ElementsKind boilerplate_elements_kind)
Definition: objects-inl.h:1477
void BuildInitializeElementsHeader(HValue *elements, ElementsKind kind, HValue *capacity)
Definition: hydrogen.cc:2326
bool binding_needs_init() const
Definition: variables.h:125
#define TYPED_ARRAYS(V)
Definition: objects.h:4663
static Handle< T > null()
Definition: handles.h:80
bool IsUnallocated() const
Definition: variables.h:115
virtual void VisitStatements(ZoneList< Statement * > *statements) V8_OVERRIDE
Definition: hydrogen.cc:4167
virtual BailoutId StackCheckId() const =0
#define ASSERT_EQ(v1, v2)
Definition: checks.h:330
void VisitComma(BinaryOperation *expr)
Definition: hydrogen.cc:9470
static Handle< SharedFunctionInfo > BuildFunctionInfo(FunctionLiteral *node, Handle< Script > script)
Definition: compiler.cc:996
PostorderProcessor * child()
Definition: hydrogen.cc:3251
void USE(T)
Definition: globals.h:341
kInstanceClassNameOffset kNeedsAccessCheckBit kRemovePrototypeBit kIsExpressionBit kAllowLazyCompilation kUsesArguments formal_parameter_count
Definition: objects-inl.h:5190
const uint32_t kOneByteStringTag
Definition: objects.h:611
Counters * counters()
Definition: isolate.h:859
bool CanBeZero(HValue *right)
Definition: hydrogen.cc:9114
#define ASSERT_NE(v1, v2)
Definition: checks.h:331
PerThreadAssertScopeDebugOnly< HEAP_ALLOCATION_ASSERT, false > DisallowHeapAllocation
Definition: assert-scope.h:214
static Builtins::JavaScript TokenToJSBuiltin(Token::Value op)
Definition: ic.cc:2771
bool end_
static const int kEnumCacheBridgeCacheIndex
Definition: objects.h:3494
void Print(const v8::FunctionCallbackInfo< v8::Value > &args)
HStoreNamedField * AddStoreMapConstant(HValue *object, Handle< Map > map)
Definition: hydrogen.cc:2958
PostorderProcessor * parent()
Definition: hydrogen.cc:3249
void Add(const T &element, AllocationPolicy allocator=AllocationPolicy())
static const char * AllocationIndexToString(int index)
bool IsFastHoleyElementsKind(ElementsKind kind)
static const int kMaxCallPolymorphism
Definition: hydrogen.h:2081
HInstruction * BuildUncheckedMonomorphicElementAccess(HValue *checked_object, HValue *key, HValue *val, bool is_js_array, ElementsKind elements_kind, PropertyAccessType access_type, LoadKeyedHoleMode load_mode, KeyedAccessStoreMode store_mode)
Definition: hydrogen.cc:2141
HeapObject * obj
bool has_global_object() const
Definition: compiler.h:221
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric literals(0o77, 0b11)") DEFINE_bool(harmony_strings
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes number of stack frames inspected by the profiler percentage of ICs that must have type info to allow optimization extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long mode(MIPS only)") DEFINE_string(expose_natives_as
static TypeHandle Union(TypeHandle type1, TypeHandle type2, Region *reg)
#define DEFINE_IS_CONSTANT(Name, name)
Definition: hydrogen.cc:731
HLoadNamedField * AddLoadElements(HValue *object)
Definition: hydrogen.cc:2419
static const int kValueOffset
Definition: objects.h:7701
HValue * BuildCheckMap(HValue *obj, Handle< Map > map)
Definition: hydrogen.cc:1263
bool is_function_scope() const
Definition: scopes.h:284
bool IsValue() const
Definition: hydrogen.h:777
PerThreadAssertScopeDebugOnly< HANDLE_DEREFERENCE_ASSERT, true > AllowHandleDereference
Definition: assert-scope.h:226
static Representation Tagged()
virtual bool IsJump() const
Definition: ast.h:262
#define STATIC_ASSERT(test)
Definition: checks.h:341
AstContext * ast_context() const
Definition: hydrogen.h:2099
HInstruction * BuildConstantMapCheck(Handle< JSObject > constant, CompilationInfo *info)
Definition: hydrogen.cc:6767
bool has_value
Definition: v8.h:922
void FinishCurrentBlock(HControlInstruction *last)
Definition: hydrogen.cc:1193
virtual bool BuildGraph() V8_OVERRIDE
Definition: hydrogen.cc:3910
T Min(T a, T b)
Definition: utils.h:234
bool MatchRotateRight(HValue *left, HValue *right, HValue **operand, HValue **shift_amount)
Definition: hydrogen.cc:9087
void BuildFillElementsWithHole(HValue *elements, ElementsKind elements_kind, HValue *from, HValue *to)
Definition: hydrogen.cc:2479
void BindIfLive(Variable *var, HValue *value)
Definition: hydrogen.h:2182
static const int kInitialMaxFastElementArray
Definition: objects.h:2744
Statement * body() const
Definition: ast.h:732
static void ArrayIdToTypeAndSize(int array_id, ExternalArrayType *type, ElementsKind *external_elements_kind, ElementsKind *fixed_elements_kind, size_t *element_size)
Definition: runtime.cc:928
static const int kMaxValue
Definition: objects.h:1681
static void RecordFunctionCompilation(Logger::LogEventsAndTags tag, CompilationInfo *info, Handle< SharedFunctionInfo > shared)
Definition: compiler.cc:1251
HInnerAllocatedObject * BuildJSArrayHeader(HValue *array, HValue *array_map, AllocationSiteMode mode, ElementsKind elements_kind, HValue *allocation_site_payload, HValue *length_field)
Definition: hydrogen.cc:2352
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes number of stack frames inspected by the profiler percentage of ICs that must have type info to allow optimization extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long expose natives in global object expose freeBuffer extension expose gc extension under the specified name expose externalize string extension number of stack frames to capture disable builtin natives files print name of functions for which code is generated use random jit cookie to mask large constants trace lazy optimization use adaptive optimizations always try to OSR functions trace optimize function deoptimization minimum length for automatic enable preparsing maximum number of optimization attempts before giving up cache prototype transitions trace debugging JSON request response trace out of bounds accesses to external arrays trace_js_array_abuse automatically set the debug break flag when debugger commands are in the queue abort by crashing maximum length of function source code printed in a stack trace max size of the new max size of the old max size of executable always perform global GCs print one trace line following each garbage collection do not print trace line after scavenger collection print statistics of the maximum memory committed for the heap in name
Definition: flags.cc:505
#define VOID
static HValue * cast(HValue *value)
#define CHECK_BAILOUT(call)
Definition: hydrogen.cc:3847
HValue * BuildCheckHeapObject(HValue *object)
Definition: hydrogen.cc:1251
#define ARRAY_SIZE(a)
Definition: globals.h:333
virtual void ReturnValue(HValue *value)=0
HValue * BuildStringAdd(HValue *left, HValue *right, HAllocationMode allocation_mode)
Definition: hydrogen.cc:2092
ElementsKind GetHoleyElementsKind(ElementsKind packed_kind)
V8_INLINE Handle< Primitive > Null(Isolate *isolate)
Definition: v8.h:6550
static JSObject * cast(Object *obj)
HValue * AddLoadJSBuiltin(Builtins::JavaScript builtin)
Definition: hydrogen.cc:2965
bool IsDictionaryElementsKind(ElementsKind kind)
ZoneList< Declaration * > * declarations()
Definition: scopes.h:344
HGraph * graph() const
Definition: hydrogen.h:1072
HInstruction * AddInstruction(HInstruction *instr)
Definition: hydrogen.cc:1180
static const int kAlignedSize
Definition: objects.h:4705
bool IsFastDoubleElementsKind(ElementsKind kind)
HBasicBlock * JoinContinue(IterationStatement *statement, HBasicBlock *exit_block, HBasicBlock *continue_block)
Definition: hydrogen.cc:3018
const uint32_t kStringEncodingMask
Definition: objects.h:609
void Goto(HBasicBlock *from, HBasicBlock *target, FunctionState *state=NULL, bool add_simulate=true)
Definition: hydrogen.h:1089
static Handle< JSObject > DeepWalk(Handle< JSObject > object, AllocationSiteCreationContext *site_context)
Definition: objects.cc:5850
void VisitArithmeticExpression(BinaryOperation *expr)
Definition: hydrogen.cc:9581
static void AddDependentCompilationInfo(Handle< AllocationSite > site, Reason reason, CompilationInfo *info)
Definition: objects.cc:12725
void FinishExitCurrentBlock(HControlInstruction *instruction)
Definition: hydrogen.cc:1204
TestContext * inlined_test_context() const
Definition: hydrogen.h:2112
Scope * scope() const
Definition: compiler.h:78
#define INLINE_OPTIMIZED_FUNCTION_LIST(F)
Definition: runtime.h:692
Zone * zone() const
Definition: hydrogen.h:1071
static void Run(CompilationInfo *info)
Definition: typing.cc:57
static JSFunction * cast(Object *obj)