v8 3.14.5 (node 0.10.28)
V8 is Google's open source JavaScript engine.
full-codegen.h
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_FULL_CODEGEN_H_
#define V8_FULL_CODEGEN_H_

#include "v8.h"

#include "allocation.h"
#include "ast.h"
#include "code-stubs.h"
#include "codegen.h"
#include "compiler.h"

namespace v8 {
namespace internal {

// Forward declarations.
class JumpPatchSite;

// AST node visitor which can tell whether a given statement will be breakable
// when the code is compiled by the full compiler in the debugger. This means
// that there will be an IC (load/store/call) in the code generated for the
// debugger to piggyback on.
class BreakableStatementChecker: public AstVisitor {
 public:
  BreakableStatementChecker() : is_breakable_(false) {}

  void Check(Statement* stmt);
  void Check(Expression* expr);

  bool is_breakable() { return is_breakable_; }

 private:
  // AST node visit functions.
#define DECLARE_VISIT(type) virtual void Visit##type(type* node);
  AST_NODE_LIST(DECLARE_VISIT)
#undef DECLARE_VISIT

  bool is_breakable_;

  DISALLOW_COPY_AND_ASSIGN(BreakableStatementChecker);
};


// -----------------------------------------------------------------------------
// Full code generator.

class FullCodeGenerator: public AstVisitor {
 public:
  enum State {
    NO_REGISTERS,
    TOS_REG
  };

  FullCodeGenerator(MacroAssembler* masm, CompilationInfo* info)
      : masm_(masm),
        info_(info),
        scope_(info->scope()),
        nesting_stack_(NULL),
        loop_depth_(0),
        globals_(NULL),
        context_(NULL),
        bailout_entries_(info->HasDeoptimizationSupport()
                         ? info->function()->ast_node_count() : 0,
                         info->zone()),
        stack_checks_(2, info->zone()),  // There's always at least one.
        type_feedback_cells_(info->HasDeoptimizationSupport()
                             ? info->function()->ast_node_count() : 0,
                             info->zone()),
        ic_total_count_(0),
        zone_(info->zone()) {
    Initialize();
  }

  void Initialize();

  static bool MakeCode(CompilationInfo* info);

  // Encode state and pc-offset as a BitField<type, start, size>.
  // Only use 30 bits because we encode the result as a smi.
  class StateField : public BitField<State, 0, 1> { };
  class PcField : public BitField<unsigned, 1, 30-1> { };
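
  // Editor's note: an illustrative sketch, not part of the original header.
  // Assuming the BitField interface from utils.h, a bailout entry's
  // pc_and_state value is packed and unpacked roughly like this:
  //
  //   unsigned packed = StateField::encode(TOS_REG) | PcField::encode(pc);
  //   State state = StateField::decode(packed);      // TOS_REG
  //   unsigned pc_offset = PcField::decode(packed);  // pc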

  static const char* State2String(State state) {
    switch (state) {
      case NO_REGISTERS: return "NO_REGISTERS";
      case TOS_REG: return "TOS_REG";
    }
    UNREACHABLE();
    return NULL;
  }

  Zone* zone() const { return zone_; }

  static const int kMaxBackEdgeWeight = 127;

#if V8_TARGET_ARCH_IA32
  static const int kBackEdgeDistanceUnit = 100;
#elif V8_TARGET_ARCH_X64
  static const int kBackEdgeDistanceUnit = 162;
#elif V8_TARGET_ARCH_ARM
  static const int kBackEdgeDistanceUnit = 142;
#elif V8_TARGET_ARCH_MIPS
  static const int kBackEdgeDistanceUnit = 142;
#else
#error Unsupported target architecture.
#endif
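
  // Editor's note: an illustrative sketch, not part of the original header.
  // The platform code generators (e.g. full-codegen-ia32.cc) combine these
  // constants when weighting loop back edges, roughly:
  //
  //   int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  //   int weight = Min(kMaxBackEdgeWeight,
  //                    Max(1, distance / kBackEdgeDistanceUnit));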


 private:
  class Breakable;
  class Iteration;

  class TestContext;

  class NestedStatement BASE_EMBEDDED {
   public:
    explicit NestedStatement(FullCodeGenerator* codegen) : codegen_(codegen) {
      // Link into codegen's nesting stack.
      previous_ = codegen->nesting_stack_;
      codegen->nesting_stack_ = this;
    }
    virtual ~NestedStatement() {
      // Unlink from codegen's nesting stack.
      ASSERT_EQ(this, codegen_->nesting_stack_);
      codegen_->nesting_stack_ = previous_;
    }

    virtual Breakable* AsBreakable() { return NULL; }
    virtual Iteration* AsIteration() { return NULL; }

    virtual bool IsContinueTarget(Statement* target) { return false; }
    virtual bool IsBreakTarget(Statement* target) { return false; }

    // Notify the statement that we are exiting it via break, continue, or
    // return and give it a chance to generate cleanup code. Return the
    // next outer statement in the nesting stack. We accumulate in
    // *stack_depth the amount to drop the stack and in *context_length the
    // number of context chain links to unwind as we traverse the nesting
    // stack from an exit to its target.
    virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
      return previous_;
    }

   protected:
    MacroAssembler* masm() { return codegen_->masm(); }

    FullCodeGenerator* codegen_;
    NestedStatement* previous_;

   private:
    DISALLOW_COPY_AND_ASSIGN(NestedStatement);
  };
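
  // Editor's note: an illustrative sketch, not part of the original header.
  // Statement visitors use these classes as RAII markers; a loop visitor
  // brackets its body roughly like this:
  //
  //   void FullCodeGenerator::VisitWhileStatement(WhileStatement* stmt) {
  //     Iteration loop_statement(this, stmt);  // pushes nesting_stack_
  //     ...
  //     __ bind(loop_statement.continue_label());
  //     ...
  //     __ bind(loop_statement.break_label());
  //   }  // ~Iteration pops nesting_stack_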

  // A breakable statement such as a block.
  class Breakable : public NestedStatement {
   public:
    Breakable(FullCodeGenerator* codegen, BreakableStatement* statement)
        : NestedStatement(codegen), statement_(statement) {
    }
    virtual ~Breakable() {}

    virtual Breakable* AsBreakable() { return this; }
    virtual bool IsBreakTarget(Statement* target) {
      return statement() == target;
    }

    BreakableStatement* statement() { return statement_; }
    Label* break_label() { return &break_label_; }

   private:
    BreakableStatement* statement_;
    Label break_label_;
  };

  // An iteration statement such as a while, for, or do loop.
  class Iteration : public Breakable {
   public:
    Iteration(FullCodeGenerator* codegen, IterationStatement* statement)
        : Breakable(codegen, statement) {
    }
    virtual ~Iteration() {}

    virtual Iteration* AsIteration() { return this; }
    virtual bool IsContinueTarget(Statement* target) {
      return statement() == target;
    }

    Label* continue_label() { return &continue_label_; }

   private:
    Label continue_label_;
  };

  // A nested block statement.
  class NestedBlock : public Breakable {
   public:
    NestedBlock(FullCodeGenerator* codegen, Block* block)
        : Breakable(codegen, block) {
    }
    virtual ~NestedBlock() {}

    virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
      if (statement()->AsBlock()->scope() != NULL) {
        ++(*context_length);
      }
      return previous_;
    }
  };

  // The try block of a try/catch statement.
  class TryCatch : public NestedStatement {
   public:
    explicit TryCatch(FullCodeGenerator* codegen) : NestedStatement(codegen) {
    }
    virtual ~TryCatch() {}

    virtual NestedStatement* Exit(int* stack_depth, int* context_length);
  };

  // The try block of a try/finally statement.
  class TryFinally : public NestedStatement {
   public:
    TryFinally(FullCodeGenerator* codegen, Label* finally_entry)
        : NestedStatement(codegen), finally_entry_(finally_entry) {
    }
    virtual ~TryFinally() {}

    virtual NestedStatement* Exit(int* stack_depth, int* context_length);

   private:
    Label* finally_entry_;
  };

  // The finally block of a try/finally statement.
  class Finally : public NestedStatement {
   public:
    static const int kElementCount = 5;

    explicit Finally(FullCodeGenerator* codegen) : NestedStatement(codegen) { }
    virtual ~Finally() {}

    virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
      *stack_depth += kElementCount;
      return previous_;
    }
  };

  // The body of a for/in loop.
  class ForIn : public Iteration {
   public:
    static const int kElementCount = 5;

    ForIn(FullCodeGenerator* codegen, ForInStatement* statement)
        : Iteration(codegen, statement) {
    }
    virtual ~ForIn() {}

    virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
      *stack_depth += kElementCount;
      return previous_;
    }
  };


  // The body of a with or catch.
  class WithOrCatch : public NestedStatement {
   public:
    explicit WithOrCatch(FullCodeGenerator* codegen)
        : NestedStatement(codegen) {
    }
    virtual ~WithOrCatch() {}

    virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
      ++(*context_length);
      return previous_;
    }
  };
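
  // Editor's note: an illustrative sketch, not part of the original header.
  // An exit such as 'break' unwinds the nesting stack through Exit(),
  // accumulating how many stack slots to drop and contexts to pop:
  //
  //   int stack_depth = 0;
  //   int context_length = 0;
  //   NestedStatement* current = nesting_stack_;
  //   while (!current->IsBreakTarget(stmt->target())) {
  //     current = current->Exit(&stack_depth, &context_length);
  //   }
  //   __ Drop(stack_depth);  // then pop context_length contexts and jump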

  // Type of a member function that generates inline code for a native function.
  typedef void (FullCodeGenerator::*InlineFunctionGenerator)(CallRuntime* expr);

  static const InlineFunctionGenerator kInlineFunctionGenerators[];

  // A platform-specific utility to overwrite the accumulator register
  // with a GC-safe value.
  void ClearAccumulator();

  // Determine whether or not to inline the smi case for the given
  // operation.
  bool ShouldInlineSmiCase(Token::Value op);

  // Helper function to convert a pure value into a test context. The value
  // is expected on the stack or the accumulator, depending on the platform.
  // See the platform-specific implementation for details.
  void DoTest(Expression* condition,
              Label* if_true,
              Label* if_false,
              Label* fall_through);
  void DoTest(const TestContext* context);

  // Helper function to split control flow and avoid a branch to the
  // fall-through label if it is set up.
#ifdef V8_TARGET_ARCH_MIPS
  void Split(Condition cc,
             Register lhs,
             const Operand& rhs,
             Label* if_true,
             Label* if_false,
             Label* fall_through);
#else  // All non-mips arch.
  void Split(Condition cc,
             Label* if_true,
             Label* if_false,
             Label* fall_through);
#endif  // V8_TARGET_ARCH_MIPS

  // Load the value of a known (PARAMETER, LOCAL, or CONTEXT) variable into
  // a register. Emits a context chain walk if necessary (so does
  // SetVar) so avoid calling both on the same variable.
  void GetVar(Register destination, Variable* var);

  // Assign to a known (PARAMETER, LOCAL, or CONTEXT) variable. If it's in
  // the context, the write barrier will be emitted and source, scratch0,
  // scratch1 will be clobbered. Emits a context chain walk if necessary
  // (so does GetVar) so avoid calling both on the same variable.
  void SetVar(Variable* var,
              Register source,
              Register scratch0,
              Register scratch1);

  // An operand used to read/write a stack-allocated (PARAMETER or LOCAL)
  // variable. Writing does not need the write barrier.
  MemOperand StackOperand(Variable* var);

  // An operand used to read/write a known (PARAMETER, LOCAL, or CONTEXT)
  // variable. May emit code to traverse the context chain, loading the
  // found context into the scratch register. Writing to this operand will
  // need the write barrier if location is CONTEXT.
  MemOperand VarOperand(Variable* var, Register scratch);

  void VisitForEffect(Expression* expr) {
    EffectContext context(this);
    Visit(expr);
    PrepareForBailout(expr, NO_REGISTERS);
  }

  void VisitForAccumulatorValue(Expression* expr) {
    AccumulatorValueContext context(this);
    Visit(expr);
    PrepareForBailout(expr, TOS_REG);
  }

  void VisitForStackValue(Expression* expr) {
    StackValueContext context(this);
    Visit(expr);
    PrepareForBailout(expr, NO_REGISTERS);
  }

  void VisitForControl(Expression* expr,
                       Label* if_true,
                       Label* if_false,
                       Label* fall_through) {
    TestContext context(this, expr, if_true, if_false, fall_through);
    Visit(expr);
    // For test contexts, we prepare for bailout before branching, not at
    // the end of the entire expression. This happens as part of visiting
    // the expression.
  }
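
  // Editor's note: an illustrative sketch, not part of the original header.
  // These helpers compose with the expression contexts below; for example,
  // an 'if' statement emits its condition in a test context roughly as:
  //
  //   Label then_part, else_part, done;
  //   VisitForControl(stmt->condition(), &then_part, &else_part, &then_part);
  //   __ bind(&then_part);
  //   Visit(stmt->then_statement());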

  void VisitInDuplicateContext(Expression* expr);

  void VisitDeclarations(ZoneList<Declaration*>* declarations);
  void DeclareGlobals(Handle<FixedArray> pairs);
  int DeclareGlobalsFlags();

  // Try to perform a comparison as a fast inlined literal compare if
  // the operands allow it. Returns true if the compare operation
  // has been matched and all code generated; false otherwise.
  bool TryLiteralCompare(CompareOperation* compare);

  // Platform-specific code for comparing the type of a value with
  // a given literal string.
  void EmitLiteralCompareTypeof(Expression* expr,
                                Expression* sub_expr,
                                Handle<String> check);

  // Platform-specific code for equality comparison with a nil-like value.
  void EmitLiteralCompareNil(CompareOperation* expr,
                             Expression* sub_expr,
                             NilValue nil);

  // Bailout support.
  void PrepareForBailout(Expression* node, State state);
  void PrepareForBailoutForId(BailoutId id, State state);

  // Cache cell support. This associates AST ids with global property cells
  // that will be cleared during GC and collected by the type-feedback oracle.
  void RecordTypeFeedbackCell(TypeFeedbackId id,
                              Handle<JSGlobalPropertyCell> cell);

  // Record a call's return site offset, used to rebuild the frame if the
  // called function was inlined at the site.
  void RecordJSReturnSite(Call* call);

  // Prepare for bailout before a test (or compare) and branch. If
  // should_normalize, then the following comparison will not handle the
  // canonical JS true value so we will insert a (dead) test against true at
  // the actual bailout target from the optimized code. If not
  // should_normalize, the true and false labels are ignored.
  void PrepareForBailoutBeforeSplit(Expression* expr,
                                    bool should_normalize,
                                    Label* if_true,
                                    Label* if_false);

  // If enabled, emit debug code for checking that the current context is
  // neither a with nor a catch context.
  void EmitDebugCheckDeclarationContext(Variable* variable);

  // Platform-specific code for checking the stack limit at the back edge of
  // a loop.
  // This is meant to be called at loop back edges, |back_edge_target| is
  // the jump target of the back edge and is used to approximate the amount
  // of code inside the loop.
  void EmitStackCheck(IterationStatement* stmt, Label* back_edge_target);
  // Record the OSR AST id corresponding to a stack check in the code.
  void RecordStackCheck(BailoutId osr_ast_id);
  // Emit a table of stack check ids and pcs into the code stream. Return
  // the offset of the start of the table.
  unsigned EmitStackCheckTable();

  void EmitProfilingCounterDecrement(int delta);
  void EmitProfilingCounterReset();

  // Platform-specific return sequence.
  void EmitReturnSequence();

  // Platform-specific code sequences for calls.
  void EmitCallWithStub(Call* expr, CallFunctionFlags flags);
  void EmitCallWithIC(Call* expr, Handle<Object> name, RelocInfo::Mode mode);
  void EmitKeyedCallWithIC(Call* expr, Expression* key);

  // Platform-specific code for inline runtime calls.
  InlineFunctionGenerator FindInlineFunctionGenerator(Runtime::FunctionId id);

  void EmitInlineRuntimeCall(CallRuntime* expr);

#define EMIT_INLINE_RUNTIME_CALL(name, x, y) \
  void Emit##name(CallRuntime* expr);
  INLINE_FUNCTION_LIST(EMIT_INLINE_RUNTIME_CALL)
  INLINE_RUNTIME_FUNCTION_LIST(EMIT_INLINE_RUNTIME_CALL)
#undef EMIT_INLINE_RUNTIME_CALL

  // Platform-specific code for loading variables.
  void EmitLoadGlobalCheckExtensions(Variable* var,
                                     TypeofState typeof_state,
                                     Label* slow);
  MemOperand ContextSlotOperandCheckExtensions(Variable* var, Label* slow);
  void EmitDynamicLookupFastCase(Variable* var,
                                 TypeofState typeof_state,
                                 Label* slow,
                                 Label* done);
  void EmitVariableLoad(VariableProxy* proxy);

  void EmitAccessor(Expression* expression);

  // Expects the arguments and the function already pushed.
  void EmitResolvePossiblyDirectEval(int arg_count);

  // Platform-specific support for allocating a new closure based on
  // the given function info.
  void EmitNewClosure(Handle<SharedFunctionInfo> info, bool pretenure);

  // Platform-specific support for compiling assignments.

  // Load a value from a named property.
  // The receiver is left on the stack by the IC.
  void EmitNamedPropertyLoad(Property* expr);

  // Load a value from a keyed property.
  // The receiver and the key are left on the stack by the IC.
  void EmitKeyedPropertyLoad(Property* expr);

  // Apply the compound assignment operator. Expects the left operand on top
  // of the stack and the right one in the accumulator.
  void EmitBinaryOp(BinaryOperation* expr,
                    Token::Value op,
                    OverwriteMode mode);

  // Helper functions for generating inlined smi code for certain
  // binary operations.
  void EmitInlineSmiBinaryOp(BinaryOperation* expr,
                             Token::Value op,
                             OverwriteMode mode,
                             Expression* left,
                             Expression* right);

  // Assign to the given expression as if via '='. The right-hand-side value
  // is expected in the accumulator.
  void EmitAssignment(Expression* expr);

  // Complete a variable assignment. The right-hand-side value is expected
  // in the accumulator.
  void EmitVariableAssignment(Variable* var,
                              Token::Value op);

  // Complete a named property assignment. The receiver is expected on top
  // of the stack and the right-hand-side value in the accumulator.
  void EmitNamedPropertyAssignment(Assignment* expr);

  // Complete a keyed property assignment. The receiver and key are
  // expected on top of the stack and the right-hand-side value in the
  // accumulator.
  void EmitKeyedPropertyAssignment(Assignment* expr);

  void CallIC(Handle<Code> code,
              RelocInfo::Mode rmode = RelocInfo::CODE_TARGET,
              TypeFeedbackId id = TypeFeedbackId::None());

  void SetFunctionPosition(FunctionLiteral* fun);
  void SetReturnPosition(FunctionLiteral* fun);
  void SetStatementPosition(Statement* stmt);
  void SetExpressionPosition(Expression* expr, int pos);
  void SetStatementPosition(int pos);
  void SetSourcePosition(int pos);

  // Non-local control flow support.
  void EnterFinallyBlock();
  void ExitFinallyBlock();

  // Loop nesting counter.
  int loop_depth() { return loop_depth_; }
  void increment_loop_depth() { loop_depth_++; }
  void decrement_loop_depth() {
    ASSERT(loop_depth_ > 0);
    loop_depth_--;
  }

  MacroAssembler* masm() { return masm_; }

  class ExpressionContext;
  const ExpressionContext* context() { return context_; }
  void set_new_context(const ExpressionContext* context) { context_ = context; }

  Handle<Script> script() { return info_->script(); }
  bool is_eval() { return info_->is_eval(); }
  bool is_native() { return info_->is_native(); }
  bool is_classic_mode() { return language_mode() == CLASSIC_MODE; }
  LanguageMode language_mode() { return function()->language_mode(); }
  FunctionLiteral* function() { return info_->function(); }
  Scope* scope() { return scope_; }

  static Register result_register();
  static Register context_register();

  // Set fields in the stack frame. Offsets are the frame pointer relative
  // offsets defined in, e.g., StandardFrameConstants.
  void StoreToFrameField(int frame_offset, Register value);

  // Load a value from the current context. Indices are defined as an enum
  // in v8::internal::Context.
  void LoadContextField(Register dst, int context_index);

  // Push the function argument for the runtime functions PushWithContext
  // and PushCatchContext.
  void PushFunctionArgumentForContextAllocation();

  // AST node visit functions.
#define DECLARE_VISIT(type) virtual void Visit##type(type* node);
  AST_NODE_LIST(DECLARE_VISIT)
#undef DECLARE_VISIT

  void EmitUnaryOperation(UnaryOperation* expr, const char* comment);

  void VisitComma(BinaryOperation* expr);
  void VisitLogicalExpression(BinaryOperation* expr);
  void VisitArithmeticExpression(BinaryOperation* expr);

  void VisitForTypeofValue(Expression* expr);

  void Generate();
  void PopulateDeoptimizationData(Handle<Code> code);
  void PopulateTypeFeedbackInfo(Handle<Code> code);
  void PopulateTypeFeedbackCells(Handle<Code> code);

  Handle<FixedArray> handler_table() { return handler_table_; }

  struct BailoutEntry {
    BailoutId id;
    unsigned pc_and_state;
  };

  struct TypeFeedbackCellEntry {
    TypeFeedbackId ast_id;
    Handle<JSGlobalPropertyCell> cell;
  };


  class ExpressionContext BASE_EMBEDDED {
   public:
    explicit ExpressionContext(FullCodeGenerator* codegen)
        : masm_(codegen->masm()), old_(codegen->context()), codegen_(codegen) {
      codegen->set_new_context(this);
    }

    virtual ~ExpressionContext() {
      codegen_->set_new_context(old_);
    }

    Isolate* isolate() const { return codegen_->isolate(); }

    // Convert constant control flow (true or false) to the result expected for
    // this expression context.
    virtual void Plug(bool flag) const = 0;

    // Emit code to convert a pure value (in a register, known variable
    // location, as a literal, or on top of the stack) into the result
    // expected according to this expression context.
    virtual void Plug(Register reg) const = 0;
    virtual void Plug(Variable* var) const = 0;
    virtual void Plug(Handle<Object> lit) const = 0;
    virtual void Plug(Heap::RootListIndex index) const = 0;
    virtual void PlugTOS() const = 0;

    // Emit code to convert pure control flow to a pair of unbound labels into
    // the result expected according to this expression context. The
    // implementation will bind both labels unless it's a TestContext, which
    // won't bind them at this point.
    virtual void Plug(Label* materialize_true,
                      Label* materialize_false) const = 0;

    // Emit code to discard count elements from the top of stack, then convert
    // a pure value into the result expected according to this expression
    // context.
    virtual void DropAndPlug(int count, Register reg) const = 0;

    // Set up branch labels for a test expression. The three Label** parameters
    // are output parameters.
    virtual void PrepareTest(Label* materialize_true,
                             Label* materialize_false,
                             Label** if_true,
                             Label** if_false,
                             Label** fall_through) const = 0;

    // Returns true if we are evaluating only for side effects (i.e. if the
    // result will be discarded).
    virtual bool IsEffect() const { return false; }

    // Returns true if we are evaluating for the value (in the accumulator
    // or on the stack).
    virtual bool IsAccumulatorValue() const { return false; }
    virtual bool IsStackValue() const { return false; }

    // Returns true if we are branching on the value rather than materializing
    // it. Only used for asserts.
    virtual bool IsTest() const { return false; }

   protected:
    FullCodeGenerator* codegen() const { return codegen_; }
    MacroAssembler* masm() const { return masm_; }
    MacroAssembler* masm_;

   private:
    const ExpressionContext* old_;
    FullCodeGenerator* codegen_;
  };
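
  // Editor's note: an illustrative sketch, not part of the original header.
  // A concrete context "plugs" a computed value into whatever form its
  // consumer expects; a platform visitor typically ends with something like:
  //
  //   VisitForAccumulatorValue(expr->value());
  //   context()->Plug(result_register());  // hand the value to the caller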

  class AccumulatorValueContext : public ExpressionContext {
   public:
    explicit AccumulatorValueContext(FullCodeGenerator* codegen)
        : ExpressionContext(codegen) { }

    virtual void Plug(bool flag) const;
    virtual void Plug(Register reg) const;
    virtual void Plug(Label* materialize_true, Label* materialize_false) const;
    virtual void Plug(Variable* var) const;
    virtual void Plug(Handle<Object> lit) const;
    virtual void Plug(Heap::RootListIndex) const;
    virtual void PlugTOS() const;
    virtual void DropAndPlug(int count, Register reg) const;
    virtual void PrepareTest(Label* materialize_true,
                             Label* materialize_false,
                             Label** if_true,
                             Label** if_false,
                             Label** fall_through) const;
    virtual bool IsAccumulatorValue() const { return true; }
  };

  class StackValueContext : public ExpressionContext {
   public:
    explicit StackValueContext(FullCodeGenerator* codegen)
        : ExpressionContext(codegen) { }

    virtual void Plug(bool flag) const;
    virtual void Plug(Register reg) const;
    virtual void Plug(Label* materialize_true, Label* materialize_false) const;
    virtual void Plug(Variable* var) const;
    virtual void Plug(Handle<Object> lit) const;
    virtual void Plug(Heap::RootListIndex) const;
    virtual void PlugTOS() const;
    virtual void DropAndPlug(int count, Register reg) const;
    virtual void PrepareTest(Label* materialize_true,
                             Label* materialize_false,
                             Label** if_true,
                             Label** if_false,
                             Label** fall_through) const;
    virtual bool IsStackValue() const { return true; }
  };

  class TestContext : public ExpressionContext {
   public:
    TestContext(FullCodeGenerator* codegen,
                Expression* condition,
                Label* true_label,
                Label* false_label,
                Label* fall_through)
        : ExpressionContext(codegen),
          condition_(condition),
          true_label_(true_label),
          false_label_(false_label),
          fall_through_(fall_through) { }

    static const TestContext* cast(const ExpressionContext* context) {
      ASSERT(context->IsTest());
      return reinterpret_cast<const TestContext*>(context);
    }

    Expression* condition() const { return condition_; }
    Label* true_label() const { return true_label_; }
    Label* false_label() const { return false_label_; }
    Label* fall_through() const { return fall_through_; }

    virtual void Plug(bool flag) const;
    virtual void Plug(Register reg) const;
    virtual void Plug(Label* materialize_true, Label* materialize_false) const;
    virtual void Plug(Variable* var) const;
    virtual void Plug(Handle<Object> lit) const;
    virtual void Plug(Heap::RootListIndex) const;
    virtual void PlugTOS() const;
    virtual void DropAndPlug(int count, Register reg) const;
    virtual void PrepareTest(Label* materialize_true,
                             Label* materialize_false,
                             Label** if_true,
                             Label** if_false,
                             Label** fall_through) const;
    virtual bool IsTest() const { return true; }

   private:
    Expression* condition_;
    Label* true_label_;
    Label* false_label_;
    Label* fall_through_;
  };

  class EffectContext : public ExpressionContext {
   public:
    explicit EffectContext(FullCodeGenerator* codegen)
        : ExpressionContext(codegen) { }

    virtual void Plug(bool flag) const;
    virtual void Plug(Register reg) const;
    virtual void Plug(Label* materialize_true, Label* materialize_false) const;
    virtual void Plug(Variable* var) const;
    virtual void Plug(Handle<Object> lit) const;
    virtual void Plug(Heap::RootListIndex) const;
    virtual void PlugTOS() const;
    virtual void DropAndPlug(int count, Register reg) const;
    virtual void PrepareTest(Label* materialize_true,
                             Label* materialize_false,
                             Label** if_true,
                             Label** if_false,
                             Label** fall_through) const;
    virtual bool IsEffect() const { return true; }
  };

  MacroAssembler* masm_;
  CompilationInfo* info_;
  Scope* scope_;
  Label return_label_;
  NestedStatement* nesting_stack_;
  int loop_depth_;
  ZoneList<Handle<Object> >* globals_;
  const ExpressionContext* context_;
  ZoneList<BailoutEntry> bailout_entries_;
  ZoneList<BailoutEntry> stack_checks_;
  ZoneList<TypeFeedbackCellEntry> type_feedback_cells_;
  int ic_total_count_;
  Handle<FixedArray> handler_table_;
  Handle<JSGlobalPropertyCell> profiling_counter_;
  bool generate_debug_code_;
  Zone* zone_;

  friend class NestedStatement;

  DISALLOW_COPY_AND_ASSIGN(FullCodeGenerator);
};


// A map from property names to getter/setter pairs allocated in the zone.
class AccessorTable: public TemplateHashMap<Literal,
                                            ObjectLiteral::Accessors,
                                            ZoneAllocationPolicy> {
 public:
  explicit AccessorTable(Zone* zone) :
      TemplateHashMap<Literal, ObjectLiteral::Accessors,
                      ZoneAllocationPolicy>(Literal::Match,
                                            ZoneAllocationPolicy(zone)),
      zone_(zone) { }

  Iterator lookup(Literal* literal) {
    Iterator it = find(literal, true, ZoneAllocationPolicy(zone_));
    if (it->second == NULL) it->second = new(zone_) ObjectLiteral::Accessors();
    return it;
  }

 private:
  Zone* zone_;
};
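
// Editor's note: an illustrative usage sketch, not part of the original
// header. When compiling an object literal, getters and setters for the
// same property name are grouped via lookup(); schematically:
//
//   AccessorTable accessor_table(zone());
//   accessor_table.lookup(key)->second->getter = value;  // 'get p() {...}'
//   accessor_table.lookup(key)->second->setter = value;  // 'set p(v) {...}'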


} }  // namespace v8::internal

#endif  // V8_FULL_CODEGEN_H_