v8 3.25.30 (node 0.11.13)
V8 is Google's open source JavaScript engine
full-codegen.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #include "codegen.h"
31 #include "compiler.h"
32 #include "debug.h"
33 #include "full-codegen.h"
34 #include "liveedit.h"
35 #include "macro-assembler.h"
36 #include "prettyprinter.h"
37 #include "scopes.h"
38 #include "scopeinfo.h"
39 #include "snapshot.h"
40 #include "stub-cache.h"
41 
42 namespace v8 {
43 namespace internal {
44 
45 void BreakableStatementChecker::Check(Statement* stmt) {
46  Visit(stmt);
47 }
48 
49 
50 void BreakableStatementChecker::Check(Expression* expr) {
51  Visit(expr);
52 }
53 
54 
55 void BreakableStatementChecker::VisitVariableDeclaration(
56  VariableDeclaration* decl) {
57 }
58 
59 void BreakableStatementChecker::VisitFunctionDeclaration(
60  FunctionDeclaration* decl) {
61 }
62 
63 void BreakableStatementChecker::VisitModuleDeclaration(
64  ModuleDeclaration* decl) {
65 }
66 
67 void BreakableStatementChecker::VisitImportDeclaration(
68  ImportDeclaration* decl) {
69 }
70 
71 void BreakableStatementChecker::VisitExportDeclaration(
72  ExportDeclaration* decl) {
73 }
74 
75 
76 void BreakableStatementChecker::VisitModuleLiteral(ModuleLiteral* module) {
77 }
78 
79 
80 void BreakableStatementChecker::VisitModuleVariable(ModuleVariable* module) {
81 }
82 
83 
84 void BreakableStatementChecker::VisitModulePath(ModulePath* module) {
85 }
86 
87 
88 void BreakableStatementChecker::VisitModuleUrl(ModuleUrl* module) {
89 }
90 
91 
92 void BreakableStatementChecker::VisitModuleStatement(ModuleStatement* stmt) {
93 }
94 
95 
96 void BreakableStatementChecker::VisitBlock(Block* stmt) {
97 }
98 
99 
100 void BreakableStatementChecker::VisitExpressionStatement(
101  ExpressionStatement* stmt) {
102  // Check if expression is breakable.
103  Visit(stmt->expression());
104 }
105 
106 
107 void BreakableStatementChecker::VisitEmptyStatement(EmptyStatement* stmt) {
108 }
109 
110 
111 void BreakableStatementChecker::VisitIfStatement(IfStatement* stmt) {
112  // If the condition is breakable the if statement is breakable.
113  Visit(stmt->condition());
114 }
115 
116 
117 void BreakableStatementChecker::VisitContinueStatement(
118  ContinueStatement* stmt) {
119 }
120 
121 
122 void BreakableStatementChecker::VisitBreakStatement(BreakStatement* stmt) {
123 }
124 
125 
126 void BreakableStatementChecker::VisitReturnStatement(ReturnStatement* stmt) {
127  // Return is breakable if the expression is.
128  Visit(stmt->expression());
129 }
130 
131 
132 void BreakableStatementChecker::VisitWithStatement(WithStatement* stmt) {
133  Visit(stmt->expression());
134 }
135 
136 
137 void BreakableStatementChecker::VisitSwitchStatement(SwitchStatement* stmt) {
138  // Switch statements are breakable if the tag expression is.
139  Visit(stmt->tag());
140 }
141 
142 
143 void BreakableStatementChecker::VisitDoWhileStatement(DoWhileStatement* stmt) {
144  // Mark do while as breakable to avoid adding a break slot in front of it.
145  is_breakable_ = true;
146 }
147 
148 
149 void BreakableStatementChecker::VisitWhileStatement(WhileStatement* stmt) {
150  // Mark while statements breakable if the condition expression is.
151  Visit(stmt->cond());
152 }
153 
154 
155 void BreakableStatementChecker::VisitForStatement(ForStatement* stmt) {
156  // Mark for statements breakable if the condition expression is.
157  if (stmt->cond() != NULL) {
158  Visit(stmt->cond());
159  }
160 }
161 
162 
163 void BreakableStatementChecker::VisitForInStatement(ForInStatement* stmt) {
164  // Mark for in statements breakable if the enumerable expression is.
165  Visit(stmt->enumerable());
166 }
167 
168 
169 void BreakableStatementChecker::VisitForOfStatement(ForOfStatement* stmt) {
170  // For-of is breakable because of the next() call.
171  is_breakable_ = true;
172 }
173 
174 
175 void BreakableStatementChecker::VisitTryCatchStatement(
176  TryCatchStatement* stmt) {
177  // Mark try catch as breakable to avoid adding a break slot in front of it.
178  is_breakable_ = true;
179 }
180 
181 
182 void BreakableStatementChecker::VisitTryFinallyStatement(
183  TryFinallyStatement* stmt) {
184  // Mark try finally as breakable to avoid adding a break slot in front of it.
185  is_breakable_ = true;
186 }
187 
188 
189 void BreakableStatementChecker::VisitDebuggerStatement(
190  DebuggerStatement* stmt) {
191  // The debugger statement is breakable.
192  is_breakable_ = true;
193 }
194 
195 
196 void BreakableStatementChecker::VisitCaseClause(CaseClause* clause) {
197 }
198 
199 
200 void BreakableStatementChecker::VisitFunctionLiteral(FunctionLiteral* expr) {
201 }
202 
203 
204 void BreakableStatementChecker::VisitNativeFunctionLiteral(
205  NativeFunctionLiteral* expr) {
206 }
207 
208 
209 void BreakableStatementChecker::VisitConditional(Conditional* expr) {
210 }
211 
212 
213 void BreakableStatementChecker::VisitVariableProxy(VariableProxy* expr) {
214 }
215 
216 
217 void BreakableStatementChecker::VisitLiteral(Literal* expr) {
218 }
219 
220 
221 void BreakableStatementChecker::VisitRegExpLiteral(RegExpLiteral* expr) {
222 }
223 
224 
225 void BreakableStatementChecker::VisitObjectLiteral(ObjectLiteral* expr) {
226 }
227 
228 
229 void BreakableStatementChecker::VisitArrayLiteral(ArrayLiteral* expr) {
230 }
231 
232 
233 void BreakableStatementChecker::VisitAssignment(Assignment* expr) {
234  // If assigning to a property (including a global property) the assignment is
235  // breakable.
236  VariableProxy* proxy = expr->target()->AsVariableProxy();
237  Property* prop = expr->target()->AsProperty();
238  if (prop != NULL || (proxy != NULL && proxy->var()->IsUnallocated())) {
239  is_breakable_ = true;
240  return;
241  }
242 
243  // Otherwise the assignment is breakable if the assigned value is.
244  Visit(expr->value());
245 }
246 
247 
248 void BreakableStatementChecker::VisitYield(Yield* expr) {
249  // Yield is breakable if the expression is.
250  Visit(expr->expression());
251 }
252 
253 
254 void BreakableStatementChecker::VisitThrow(Throw* expr) {
255  // Throw is breakable if the expression is.
256  Visit(expr->exception());
257 }
258 
259 
260 void BreakableStatementChecker::VisitProperty(Property* expr) {
261  // Property load is breakable.
262  is_breakable_ = true;
263 }
264 
265 
266 void BreakableStatementChecker::VisitCall(Call* expr) {
267  // Function calls, both through an IC and through the call stub, are breakable.
268  is_breakable_ = true;
269 }
270 
271 
272 void BreakableStatementChecker::VisitCallNew(CallNew* expr) {
273  // Function calls through new are breakable.
274  is_breakable_ = true;
275 }
276 
277 
278 void BreakableStatementChecker::VisitCallRuntime(CallRuntime* expr) {
279 }
280 
281 
282 void BreakableStatementChecker::VisitUnaryOperation(UnaryOperation* expr) {
283  Visit(expr->expression());
284 }
285 
286 
287 void BreakableStatementChecker::VisitCountOperation(CountOperation* expr) {
288  Visit(expr->expression());
289 }
290 
291 
292 void BreakableStatementChecker::VisitBinaryOperation(BinaryOperation* expr) {
293  Visit(expr->left());
294  if (expr->op() != Token::AND &&
295  expr->op() != Token::OR) {
296  Visit(expr->right());
297  }
298 }
299 
300 
301 void BreakableStatementChecker::VisitCompareOperation(CompareOperation* expr) {
302  Visit(expr->left());
303  Visit(expr->right());
304 }
305 
306 
307 void BreakableStatementChecker::VisitThisFunction(ThisFunction* expr) {
308 }
309 
310 
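// NOTE (editorial, not part of the original source): the
// BreakableStatementChecker above decides whether the debugger can already
// stop somewhere inside a statement. For example (illustrative JavaScript):
//
//   x = 1;      // not breakable: store of a literal to a stack-allocated local
//   o.x = f();  // breakable: property store and function call
//
// Statements that are not breakable on their own get an explicit debug
// break slot emitted in SetStatementPosition/SetExpressionPosition below.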
311 #define __ ACCESS_MASM(masm())
312 
313 bool FullCodeGenerator::MakeCode(CompilationInfo* info) {
314  Isolate* isolate = info->isolate();
315 
316  Logger::TimerEventScope timer(
317  isolate, Logger::TimerEventScope::v8_compile_full_code);
318 
319  Handle<Script> script = info->script();
320  if (!script->IsUndefined() && !script->source()->IsUndefined()) {
321  int len = String::cast(script->source())->length();
322  isolate->counters()->total_full_codegen_source_size()->Increment(len);
323  }
324  CodeGenerator::MakeCodePrologue(info, "full");
325  const int kInitialBufferSize = 4 * KB;
326  MacroAssembler masm(info->isolate(), NULL, kInitialBufferSize);
327 #ifdef ENABLE_GDB_JIT_INTERFACE
328  masm.positions_recorder()->StartGDBJITLineInfoRecording();
329 #endif
330  LOG_CODE_EVENT(isolate,
331  CodeStartLinePosInfoRecordEvent(masm.positions_recorder()));
332 
333  FullCodeGenerator cgen(&masm, info);
334  cgen.Generate();
335  if (cgen.HasStackOverflow()) {
336  ASSERT(!isolate->has_pending_exception());
337  return false;
338  }
339  unsigned table_offset = cgen.EmitBackEdgeTable();
340 
341  Code::Flags flags = Code::ComputeFlags(Code::FUNCTION);
342  Handle<Code> code = CodeGenerator::MakeCodeEpilogue(&masm, flags, info);
343  code->set_optimizable(info->IsOptimizable() &&
344  !info->function()->dont_optimize() &&
345  info->function()->scope()->AllowsLazyCompilation());
346  cgen.PopulateDeoptimizationData(code);
347  cgen.PopulateTypeFeedbackInfo(code);
348  code->set_has_deoptimization_support(info->HasDeoptimizationSupport());
349  code->set_handler_table(*cgen.handler_table());
350 #ifdef ENABLE_DEBUGGER_SUPPORT
351  code->set_compiled_optimizable(info->IsOptimizable());
352 #endif // ENABLE_DEBUGGER_SUPPORT
353  code->set_allow_osr_at_loop_nesting_level(0);
354  code->set_profiler_ticks(0);
355  code->set_back_edge_table_offset(table_offset);
356  code->set_back_edges_patched_for_osr(false);
357  CodeGenerator::PrintCode(code, info);
358  info->SetCode(code);
359 #ifdef ENABLE_GDB_JIT_INTERFACE
360  if (FLAG_gdbjit) {
361  GDBJITLineInfo* lineinfo =
362  masm.positions_recorder()->DetachGDBJITLineInfo();
363  GDBJIT(RegisterDetailedLineInfo(*code, lineinfo));
364  }
365 #endif
366  void* line_info = masm.positions_recorder()->DetachJITHandlerData();
367  LOG_CODE_EVENT(isolate, CodeEndLinePosInfoRecordEvent(*code, line_info));
368  return true;
369 }
370 
371 
372 unsigned FullCodeGenerator::EmitBackEdgeTable() {
373  // The back edge table consists of a length (in number of entries)
374  // field, and then a sequence of entries. Each entry is a triple of AST id,
375  // code-relative pc offset, and loop depth.
376  masm()->Align(kIntSize);
377  unsigned offset = masm()->pc_offset();
378  unsigned length = back_edges_.length();
379  __ dd(length);
380  for (unsigned i = 0; i < length; ++i) {
381  __ dd(back_edges_[i].id.ToInt());
382  __ dd(back_edges_[i].pc);
383  __ dd(back_edges_[i].loop_depth);
384  }
385  return offset;
386 }
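// NOTE (editorial, not part of the original source): the table emitted above
// is a flat sequence of 32-bit words, e.g. for two back edges:
//
//   [count=2][ast id 0][pc 0][depth 0][ast id 1][pc 1][depth 1]
//
// BackEdgeTable (at the end of this file) walks this layout when patching
// the interrupt checks for on-stack replacement.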
387 
388 
389 void FullCodeGenerator::InitializeFeedbackVector() {
390  int length = info_->function()->slot_count();
391  feedback_vector_ = isolate()->factory()->NewFixedArray(length, TENURED);
392  Handle<Object> sentinel = TypeFeedbackInfo::UninitializedSentinel(isolate());
393  // Ensure that it's safe to set without using a write barrier.
394  ASSERT_EQ(isolate()->heap()->uninitialized_symbol(), *sentinel);
395  for (int i = 0; i < length; i++) {
396  feedback_vector_->set(i, *sentinel, SKIP_WRITE_BARRIER);
397  }
398 }
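// NOTE (editorial, not part of the original source): every feedback slot
// starts out holding the uninitialized sentinel, which is why no write
// barrier is needed above; ICs overwrite their slot as type feedback is
// collected during execution.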
399 
400 
401 void FullCodeGenerator::PopulateDeoptimizationData(Handle<Code> code) {
402  // Fill in the deoptimization information.
403  ASSERT(info_->HasDeoptimizationSupport() || bailout_entries_.is_empty());
404  if (!info_->HasDeoptimizationSupport()) return;
405  int length = bailout_entries_.length();
406  Handle<DeoptimizationOutputData> data = isolate()->factory()->
407  NewDeoptimizationOutputData(length, TENURED);
408  for (int i = 0; i < length; i++) {
409  data->SetAstId(i, bailout_entries_[i].id);
410  data->SetPcAndState(i, Smi::FromInt(bailout_entries_[i].pc_and_state));
411  }
412  code->set_deoptimization_data(*data);
413 }
414 
415 
416 void FullCodeGenerator::PopulateTypeFeedbackInfo(Handle<Code> code) {
417  Handle<TypeFeedbackInfo> info = isolate()->factory()->NewTypeFeedbackInfo();
418  info->set_ic_total_count(ic_total_count_);
419  info->set_feedback_vector(*FeedbackVector());
420  ASSERT(!isolate()->heap()->InNewSpace(*info));
421  code->set_type_feedback_info(*info);
422 }
423 
424 
425 void FullCodeGenerator::Initialize() {
426  // The generation of debug code must match between the snapshot code and the
427  // code that is generated later. This is assumed by the debugger when it is
428  // calculating PC offsets after generating a debug version of code. Therefore
429  // we disable the production of debug code in the full compiler if we are
430  // either generating a snapshot or we booted from a snapshot.
431  generate_debug_code_ = FLAG_debug_code &&
432  !Serializer::enabled() &&
433  !Snapshot::HaveASnapshotToStartFrom();
434  masm_->set_emit_debug_code(generate_debug_code_);
435  masm_->set_predictable_code_size(true);
436  InitializeAstVisitor(info_->zone());
437 }
438 
439 
440 void FullCodeGenerator::PrepareForBailout(Expression* node, State state) {
441  PrepareForBailoutForId(node->id(), state);
442 }
443 
444 
445 void FullCodeGenerator::CallLoadIC(ContextualMode contextual_mode,
446  TypeFeedbackId id) {
447  ExtraICState extra_state = LoadIC::ComputeExtraICState(contextual_mode);
448  Handle<Code> ic = LoadIC::initialize_stub(isolate(), extra_state);
449  CallIC(ic, id);
450 }
451 
452 
453 void FullCodeGenerator::CallStoreIC(TypeFeedbackId id) {
454  Handle<Code> ic = StoreIC::initialize_stub(isolate(), strict_mode());
455  CallIC(ic, id);
456 }
457 
458 
459 void FullCodeGenerator::RecordJSReturnSite(Call* call) {
460  // We record the offset of the function return so we can rebuild the frame
461  // if the function was inlined, i.e., this is the return address in the
462  // inlined function's frame.
463  //
464  // The state is ignored. We defensively set it to TOS_REG, which is the
465  // real state of the unoptimized code at the return site.
466  PrepareForBailoutForId(call->ReturnId(), TOS_REG);
467 #ifdef DEBUG
468  // In debug builds, mark the return so we can verify that this function
469  // was called.
470  ASSERT(!call->return_is_recorded_);
471  call->return_is_recorded_ = true;
472 #endif
473 }
474 
475 
476 void FullCodeGenerator::PrepareForBailoutForId(BailoutId id, State state) {
477  // There's no need to prepare this code for bailouts from already optimized
478  // code or code that can't be optimized.
479  if (!info_->HasDeoptimizationSupport()) return;
480  unsigned pc_and_state =
481  StateField::encode(state) | PcField::encode(masm_->pc_offset());
482  ASSERT(Smi::IsValid(pc_and_state));
483  BailoutEntry entry = { id, pc_and_state };
484  ASSERT(!prepared_bailout_ids_.Contains(id.ToInt()));
485  prepared_bailout_ids_.Add(id.ToInt(), zone());
486  bailout_entries_.Add(entry, zone());
487 }
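// NOTE (editorial, not part of the original source): each bailout entry
// recorded here maps an AST id to a pc offset (packed with the expected
// accumulator state) in this unoptimized code; deoptimizing optimized code
// resumes execution at exactly that point.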
488 
489 
490 void FullCodeGenerator::RecordBackEdge(BailoutId ast_id) {
491  // The pc offset does not need to be encoded and packed together with a state.
492  ASSERT(masm_->pc_offset() > 0);
493  ASSERT(loop_depth() > 0);
494  uint8_t depth = Min(loop_depth(), Code::kMaxLoopNestingMarker);
495  BackEdgeEntry entry =
496  { ast_id, static_cast<unsigned>(masm_->pc_offset()), depth };
497  back_edges_.Add(entry, zone());
498 }
499 
500 
501 bool FullCodeGenerator::ShouldInlineSmiCase(Token::Value op) {
502  // Inline the smi case inside loops, but not division and modulo,
503  // which are too complicated and take up too much space.
504  if (op == Token::DIV || op == Token::MOD) return false;
505  if (FLAG_always_inline_smi_code) return true;
506  return loop_depth_ > 0;
507 }
508 
509 
510 void FullCodeGenerator::EffectContext::Plug(Register reg) const {
511 }
512 
513 
514 void FullCodeGenerator::AccumulatorValueContext::Plug(Register reg) const {
515  __ Move(result_register(), reg);
516 }
517 
518 
519 void FullCodeGenerator::StackValueContext::Plug(Register reg) const {
520  __ Push(reg);
521 }
522 
523 
524 void FullCodeGenerator::TestContext::Plug(Register reg) const {
525  // For simplicity we always test the accumulator register.
526  __ Move(result_register(), reg);
527  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
528  codegen()->DoTest(this);
529 }
530 
531 
532 void FullCodeGenerator::EffectContext::PlugTOS() const {
533  __ Drop(1);
534 }
535 
536 
537 void FullCodeGenerator::AccumulatorValueContext::PlugTOS() const {
538  __ Pop(result_register());
539 }
540 
541 
542 void FullCodeGenerator::StackValueContext::PlugTOS() const {
543 }
544 
545 
546 void FullCodeGenerator::TestContext::PlugTOS() const {
547  // For simplicity we always test the accumulator register.
548  __ Pop(result_register());
549  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
550  codegen()->DoTest(this);
551 }
552 
553 
554 void FullCodeGenerator::EffectContext::PrepareTest(
555  Label* materialize_true,
556  Label* materialize_false,
557  Label** if_true,
558  Label** if_false,
559  Label** fall_through) const {
560  // In an effect context, the true and the false case branch to the
561  // same label.
562  *if_true = *if_false = *fall_through = materialize_true;
563 }
564 
565 
566 void FullCodeGenerator::AccumulatorValueContext::PrepareTest(
567  Label* materialize_true,
568  Label* materialize_false,
569  Label** if_true,
570  Label** if_false,
571  Label** fall_through) const {
572  *if_true = *fall_through = materialize_true;
573  *if_false = materialize_false;
574 }
575 
576 
577 void FullCodeGenerator::StackValueContext::PrepareTest(
578  Label* materialize_true,
579  Label* materialize_false,
580  Label** if_true,
581  Label** if_false,
582  Label** fall_through) const {
583  *if_true = *fall_through = materialize_true;
584  *if_false = materialize_false;
585 }
586 
587 
588 void FullCodeGenerator::TestContext::PrepareTest(
589  Label* materialize_true,
590  Label* materialize_false,
591  Label** if_true,
592  Label** if_false,
593  Label** fall_through) const {
594  *if_true = true_label_;
595  *if_false = false_label_;
596  *fall_through = fall_through_;
597 }
598 
599 
600 void FullCodeGenerator::DoTest(const TestContext* context) {
601  DoTest(context->condition(),
602  context->true_label(),
603  context->false_label(),
604  context->fall_through());
605 }
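// NOTE (editorial, not part of the original source): full-codegen compiles
// every expression in one of four contexts: Effect (value discarded),
// AccumulatorValue (value in the result register), StackValue (value pushed
// on the stack) and Test (value only branched on). The Plug overloads above
// move a finished value into the current context, and PrepareTest picks the
// branch targets for it.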
606 
607 
608 void FullCodeGenerator::AllocateModules(ZoneList<Declaration*>* declarations) {
609  ASSERT(scope_->is_global_scope());
610 
611  for (int i = 0; i < declarations->length(); i++) {
612  ModuleDeclaration* declaration = declarations->at(i)->AsModuleDeclaration();
613  if (declaration != NULL) {
614  ModuleLiteral* module = declaration->module()->AsModuleLiteral();
615  if (module != NULL) {
616  Comment cmnt(masm_, "[ Link nested modules");
617  Scope* scope = module->body()->scope();
618  Interface* interface = scope->interface();
619  ASSERT(interface->IsModule() && interface->IsFrozen());
620 
621  interface->Allocate(scope->module_var()->index());
622 
623  // Set up module context.
624  ASSERT(scope->interface()->Index() >= 0);
625  __ Push(Smi::FromInt(scope->interface()->Index()));
626  __ Push(scope->GetScopeInfo());
627  __ CallRuntime(Runtime::kHiddenPushModuleContext, 2);
628  StoreToFrameField(StandardFrameConstants::kContextOffset,
629  context_register());
630 
631  AllocateModules(scope->declarations());
632 
633  // Pop module context.
634  LoadContextField(context_register(), Context::PREVIOUS_INDEX);
635  // Update local stack frame context field.
636  StoreToFrameField(StandardFrameConstants::kContextOffset,
637  context_register());
638  }
639  }
640  }
641 }
642 
643 
644 // Modules have their own local scope, represented by their own context.
645 // Module instance objects have an accessor for every export that forwards
646 // access to the respective slot from the module's context. (Exports that are
647 // modules themselves, however, are simple data properties.)
648 //
649 // All modules have a _hosting_ scope/context, which (currently) is the
650 // (innermost) enclosing global scope. To deal with recursion, nested modules
651 // are hosted by the same scope as global ones.
652 //
653 // For every (global or nested) module literal, the hosting context has an
654 // internal slot that points directly to the respective module context. This
655 // enables quick access to (statically resolved) module members by 2-dimensional
656 // access through the hosting context. For example,
657 //
658 // module A {
659 // let x;
660 // module B { let y; }
661 // }
662 // module C { let z; }
663 //
664 // allocates contexts as follows:
665 //
666 // [header| .A | .B | .C | A | C ] (global)
667 // | | |
668 // | | +-- [header| z ] (module)
669 // | |
670 // | +------- [header| y ] (module)
671 // |
672 // +------------ [header| x | B ] (module)
673 //
674 // Here, .A, .B, .C are the internal slots pointing to the hosted module
675 // contexts, whereas A, B, C hold the actual instance objects (note that every
676 // module context also points to the respective instance object through its
677 // extension slot in the header).
678 //
679 // To deal with arbitrary recursion and aliases between modules,
680 // they are created and initialized in several stages. Each stage applies to
681 // all modules in the hosting global scope, including nested ones.
682 //
683 // 1. Allocate: for each module _literal_, allocate the module contexts and
684 // respective instance object and wire them up. This happens in the
685 // PushModuleContext runtime function, as generated by AllocateModules
686 // (invoked by VisitDeclarations in the hosting scope).
687 //
688 // 2. Bind: for each module _declaration_ (i.e. literals as well as aliases),
689 // assign the respective instance object to respective local variables. This
690 // happens in VisitModuleDeclaration, and uses the instance objects created
691 // in the previous stage.
692 // For each module _literal_, this phase also constructs a module descriptor
693 // for the next stage. This happens in VisitModuleLiteral.
694 //
695 // 3. Populate: invoke the DeclareModules runtime function to populate each
696 // _instance_ object with accessors for its exports. This is generated by
697 // DeclareModules (invoked by VisitDeclarations in the hosting scope again),
698 // and uses the descriptors generated in the previous stage.
699 //
700 // 4. Initialize: execute the module bodies (and other code) in sequence. This
701 // happens by the separate statements generated for module bodies. To reenter
702 // the module scopes properly, the parser inserted ModuleStatements.
703 
704 void FullCodeGenerator::VisitDeclarations(
705  ZoneList<Declaration*>* declarations) {
706  Handle<FixedArray> saved_modules = modules_;
707  int saved_module_index = module_index_;
708  ZoneList<Handle<Object> >* saved_globals = globals_;
709  ZoneList<Handle<Object> > inner_globals(10, zone());
710  globals_ = &inner_globals;
711 
712  if (scope_->num_modules() != 0) {
713  // This is a scope hosting modules. Allocate a descriptor array to pass
714  // to the runtime for initialization.
715  Comment cmnt(masm_, "[ Allocate modules");
716  ASSERT(scope_->is_global_scope());
717  modules_ =
718  isolate()->factory()->NewFixedArray(scope_->num_modules(), TENURED);
719  module_index_ = 0;
720 
721  // Generate code for allocating all modules, including nested ones.
722  // The allocated contexts are stored in internal variables in this scope.
723  AllocateModules(declarations);
724  }
725 
726  AstVisitor::VisitDeclarations(declarations);
727 
728  if (scope_->num_modules() != 0) {
729  // Initialize modules from descriptor array.
730  ASSERT(module_index_ == modules_->length());
731  DeclareModules(modules_);
732  modules_ = saved_modules;
733  module_index_ = saved_module_index;
734  }
735 
736  if (!globals_->is_empty()) {
737  // Invoke the platform-dependent code generator to do the actual
738  // declaration of the global functions and variables.
739  Handle<FixedArray> array =
740  isolate()->factory()->NewFixedArray(globals_->length(), TENURED);
741  for (int i = 0; i < globals_->length(); ++i)
742  array->set(i, *globals_->at(i));
743  DeclareGlobals(array);
744  }
745 
746  globals_ = saved_globals;
747 }
748 
749 
750 void FullCodeGenerator::VisitModuleLiteral(ModuleLiteral* module) {
751  Block* block = module->body();
752  Scope* saved_scope = scope();
753  scope_ = block->scope();
754  Interface* interface = scope_->interface();
755 
756  Comment cmnt(masm_, "[ ModuleLiteral");
757  SetStatementPosition(block);
758 
759  ASSERT(!modules_.is_null());
760  ASSERT(module_index_ < modules_->length());
761  int index = module_index_++;
762 
763  // Set up module context.
764  ASSERT(interface->Index() >= 0);
765  __ Push(Smi::FromInt(interface->Index()));
766  __ Push(Smi::FromInt(0));
767  __ CallRuntime(Runtime::kHiddenPushModuleContext, 2);
768  StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
769 
770  {
771  Comment cmnt(masm_, "[ Declarations");
772  VisitDeclarations(scope_->declarations());
773  }
774 
775  // Populate the module description.
776  Handle<ModuleInfo> description =
777  ModuleInfo::Create(isolate(), interface, scope_);
778  modules_->set(index, *description);
779 
780  scope_ = saved_scope;
781  // Pop module context.
782  LoadContextField(context_register(), Context::PREVIOUS_INDEX);
783  // Update local stack frame context field.
784  StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
785 }
786 
787 
788 void FullCodeGenerator::VisitModuleVariable(ModuleVariable* module) {
789  // Nothing to do.
790  // The instance object is resolved statically through the module's interface.
791 }
792 
793 
794 void FullCodeGenerator::VisitModulePath(ModulePath* module) {
795  // Nothing to do.
796  // The instance object is resolved statically through the module's interface.
797 }
798 
799 
800 void FullCodeGenerator::VisitModuleUrl(ModuleUrl* module) {
801  // TODO(rossberg): dummy allocation for now.
802  Scope* scope = module->body()->scope();
803  Interface* interface = scope_->interface();
804 
805  ASSERT(interface->IsModule() && interface->IsFrozen());
806  ASSERT(!modules_.is_null());
807  ASSERT(module_index_ < modules_->length());
808  interface->Allocate(scope->module_var()->index());
809  int index = module_index_++;
810 
811  Handle<ModuleInfo> description =
812  ModuleInfo::Create(isolate(), interface, scope_);
813  modules_->set(index, *description);
814 }
815 
816 
817 int FullCodeGenerator::DeclareGlobalsFlags() {
818  ASSERT(DeclareGlobalsStrictMode::is_valid(strict_mode()));
819  return DeclareGlobalsEvalFlag::encode(is_eval()) |
820  DeclareGlobalsNativeFlag::encode(is_native()) |
821  DeclareGlobalsStrictMode::encode(strict_mode());
822 }
823 
824 
825 void FullCodeGenerator::SetFunctionPosition(FunctionLiteral* fun) {
826  CodeGenerator::RecordPositions(masm_, fun->start_position());
827 }
828 
829 
830 void FullCodeGenerator::SetReturnPosition(FunctionLiteral* fun) {
831  CodeGenerator::RecordPositions(masm_, fun->end_position() - 1);
832 }
833 
834 
835 void FullCodeGenerator::SetStatementPosition(Statement* stmt) {
836 #ifdef ENABLE_DEBUGGER_SUPPORT
837  if (!isolate()->debugger()->IsDebuggerActive()) {
838  CodeGenerator::RecordPositions(masm_, stmt->position());
839  } else {
840  // Check if the statement will be breakable without adding a debug break
841  // slot.
842  BreakableStatementChecker checker(zone());
843  checker.Check(stmt);
844  // Record the statement position right here if the statement is not
845  // breakable. For breakable statements the actual recording of the
846  // position will be postponed to the breakable code (typically an IC).
847  bool position_recorded = CodeGenerator::RecordPositions(
848  masm_, stmt->position(), !checker.is_breakable());
849  // If the position recording did record a new position generate a debug
850  // break slot to make the statement breakable.
851  if (position_recorded) {
852  Debug::GenerateSlot(masm_);
853  }
854  }
855 #else
856  CodeGenerator::RecordPositions(masm_, stmt->position());
857 #endif
858 }
859 
860 
861 void FullCodeGenerator::SetExpressionPosition(Expression* expr) {
862 #ifdef ENABLE_DEBUGGER_SUPPORT
863  if (!isolate()->debugger()->IsDebuggerActive()) {
864  CodeGenerator::RecordPositions(masm_, expr->position());
865  } else {
866  // Check if the expression will be breakable without adding a debug break
867  // slot.
868  BreakableStatementChecker checker(zone());
869  checker.Check(expr);
870  // Record a statement position right here if the expression is not
871  // breakable. For breakable expressions the actual recording of the
872  // position will be postponed to the breakable code (typically an IC).
873  // NOTE this will record a statement position for something which might
874  // not be a statement. As stepping in the debugger will only stop at
875  // statement positions this is used for e.g. the condition expression of
876  // a do while loop.
877  bool position_recorded = CodeGenerator::RecordPositions(
878  masm_, expr->position(), !checker.is_breakable());
879  // If the position recording did record a new position generate a debug
880  // break slot to make the statement breakable.
881  if (position_recorded) {
882  Debug::GenerateSlot(masm_);
883  }
884  }
885 #else
886  CodeGenerator::RecordPositions(masm_, expr->position());
887 #endif
888 }
889 
890 
891 void FullCodeGenerator::SetStatementPosition(int pos) {
892  CodeGenerator::RecordPositions(masm_, pos);
893 }
894 
895 
896 void FullCodeGenerator::SetSourcePosition(int pos) {
897  if (pos != RelocInfo::kNoPosition) {
898  masm_->positions_recorder()->RecordPosition(pos);
899  }
900 }
901 
902 
903 // Lookup table for code generators for special runtime calls which are
904 // generated inline.
905 #define INLINE_FUNCTION_GENERATOR_ADDRESS(Name, argc, ressize) \
906  &FullCodeGenerator::Emit##Name,
907 
908 const FullCodeGenerator::InlineFunctionGenerator
909  FullCodeGenerator::kInlineFunctionGenerators[] = {
910  INLINE_FUNCTION_LIST(INLINE_FUNCTION_GENERATOR_ADDRESS)
911  };
912 #undef INLINE_FUNCTION_GENERATOR_ADDRESS
913 
914 
915 FullCodeGenerator::InlineFunctionGenerator
916  FullCodeGenerator::FindInlineFunctionGenerator(Runtime::FunctionId id) {
917  int lookup_index =
918  static_cast<int>(id) - static_cast<int>(Runtime::kFirstInlineFunction);
919  ASSERT(lookup_index >= 0);
920  ASSERT(static_cast<size_t>(lookup_index) <
921  ARRAY_SIZE(kInlineFunctionGenerators));
922  return kInlineFunctionGenerators[lookup_index];
923 }
924 
925 
926 void FullCodeGenerator::EmitInlineRuntimeCall(CallRuntime* expr) {
927  const Runtime::Function* function = expr->function();
928  ASSERT(function != NULL);
929  ASSERT(function->intrinsic_type == Runtime::INLINE);
930  InlineFunctionGenerator generator =
931  FindInlineFunctionGenerator(function->function_id);
932  ((*this).*(generator))(expr);
933 }
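// NOTE (editorial, not part of the original source): intrinsics written as
// %_Name(...) in the JS builtins, e.g. %_IsSmi(x), are dispatched here to
// the per-architecture Emit* generators instead of becoming real runtime
// calls; see INLINE_FUNCTION_LIST in runtime.h.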
934 
935 
936 void FullCodeGenerator::EmitGeneratorNext(CallRuntime* expr) {
937  ZoneList<Expression*>* args = expr->arguments();
938  ASSERT(args->length() == 2);
939  EmitGeneratorResume(args->at(0), args->at(1), JSGeneratorObject::NEXT);
940 }
941 
942 
943 void FullCodeGenerator::EmitGeneratorThrow(CallRuntime* expr) {
944  ZoneList<Expression*>* args = expr->arguments();
945  ASSERT(args->length() == 2);
946  EmitGeneratorResume(args->at(0), args->at(1), JSGeneratorObject::THROW);
947 }
948 
949 
950 void FullCodeGenerator::EmitDebugBreakInOptimizedCode(CallRuntime* expr) {
951  context()->Plug(handle(Smi::FromInt(0), isolate()));
952 }
953 
954 
955 void FullCodeGenerator::VisitBinaryOperation(BinaryOperation* expr) {
956  switch (expr->op()) {
957  case Token::COMMA:
958  return VisitComma(expr);
959  case Token::OR:
960  case Token::AND:
961  return VisitLogicalExpression(expr);
962  default:
963  return VisitArithmeticExpression(expr);
964  }
965 }
966 
967 
968 void FullCodeGenerator::VisitInDuplicateContext(Expression* expr) {
969  if (context()->IsEffect()) {
970  VisitForEffect(expr);
971  } else if (context()->IsAccumulatorValue()) {
972  VisitForAccumulatorValue(expr);
973  } else if (context()->IsStackValue()) {
974  VisitForStackValue(expr);
975  } else if (context()->IsTest()) {
976  const TestContext* test = TestContext::cast(context());
977  VisitForControl(expr, test->true_label(), test->false_label(),
978  test->fall_through());
979  }
980 }
981 
982 
983 void FullCodeGenerator::VisitComma(BinaryOperation* expr) {
984  Comment cmnt(masm_, "[ Comma");
985  VisitForEffect(expr->left());
986  VisitInDuplicateContext(expr->right());
987 }
988 
989 
990 void FullCodeGenerator::VisitLogicalExpression(BinaryOperation* expr) {
991  bool is_logical_and = expr->op() == Token::AND;
992  Comment cmnt(masm_, is_logical_and ? "[ Logical AND" : "[ Logical OR");
993  Expression* left = expr->left();
994  Expression* right = expr->right();
995  BailoutId right_id = expr->RightId();
996  Label done;
997 
998  if (context()->IsTest()) {
999  Label eval_right;
1000  const TestContext* test = TestContext::cast(context());
1001  if (is_logical_and) {
1002  VisitForControl(left, &eval_right, test->false_label(), &eval_right);
1003  } else {
1004  VisitForControl(left, test->true_label(), &eval_right, &eval_right);
1005  }
1006  PrepareForBailoutForId(right_id, NO_REGISTERS);
1007  __ bind(&eval_right);
1008 
1009  } else if (context()->IsAccumulatorValue()) {
1010  VisitForAccumulatorValue(left);
1011  // We want the value in the accumulator for the test, and on the stack in
1012  // case we need it.
1013  __ Push(result_register());
1014  Label discard, restore;
1015  if (is_logical_and) {
1016  DoTest(left, &discard, &restore, &restore);
1017  } else {
1018  DoTest(left, &restore, &discard, &restore);
1019  }
1020  __ bind(&restore);
1021  __ Pop(result_register());
1022  __ jmp(&done);
1023  __ bind(&discard);
1024  __ Drop(1);
1025  PrepareForBailoutForId(right_id, NO_REGISTERS);
1026 
1027  } else if (context()->IsStackValue()) {
1028  VisitForAccumulatorValue(left);
1029  // We want the value in the accumulator for the test, and on the stack in
1030  // case we need it.
1031  __ Push(result_register());
1032  Label discard;
1033  if (is_logical_and) {
1034  DoTest(left, &discard, &done, &discard);
1035  } else {
1036  DoTest(left, &done, &discard, &discard);
1037  }
1038  __ bind(&discard);
1039  __ Drop(1);
1040  PrepareForBailoutForId(right_id, NO_REGISTERS);
1041 
1042  } else {
1043  ASSERT(context()->IsEffect());
1044  Label eval_right;
1045  if (is_logical_and) {
1046  VisitForControl(left, &eval_right, &done, &eval_right);
1047  } else {
1048  VisitForControl(left, &done, &eval_right, &eval_right);
1049  }
1050  PrepareForBailoutForId(right_id, NO_REGISTERS);
1051  __ bind(&eval_right);
1052  }
1053 
1054  VisitInDuplicateContext(right);
1055  __ bind(&done);
1056 }
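// NOTE (editorial, not part of the original source): for `a || b` in a
// value context the code above evaluates `a`, keeps it and jumps to `done`
// if it tests true, and otherwise drops it and evaluates `b` into the same
// context; `a && b` is the mirror image.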
1057 
1058 
1059 void FullCodeGenerator::VisitArithmeticExpression(BinaryOperation* expr) {
1060  Token::Value op = expr->op();
1061  Comment cmnt(masm_, "[ ArithmeticExpression");
1062  Expression* left = expr->left();
1063  Expression* right = expr->right();
1064  OverwriteMode mode =
1065  left->ResultOverwriteAllowed()
1066  ? OVERWRITE_LEFT
1067  : (right->ResultOverwriteAllowed() ? OVERWRITE_RIGHT : NO_OVERWRITE);
1068 
1069  VisitForStackValue(left);
1070  VisitForAccumulatorValue(right);
1071 
1072  SetSourcePosition(expr->position());
1073  if (ShouldInlineSmiCase(op)) {
1074  EmitInlineSmiBinaryOp(expr, op, mode, left, right);
1075  } else {
1076  EmitBinaryOp(expr, op, mode);
1077  }
1078 }
1079 
1080 
1081 void FullCodeGenerator::VisitBlock(Block* stmt) {
1082  Comment cmnt(masm_, "[ Block");
1083  NestedBlock nested_block(this, stmt);
1084  SetStatementPosition(stmt);
1085 
1086  Scope* saved_scope = scope();
1087  // Push a block context when entering a block with block scoped variables.
1088  if (stmt->scope() != NULL) {
1089  scope_ = stmt->scope();
1090  ASSERT(!scope_->is_module_scope());
1091  { Comment cmnt(masm_, "[ Extend block context");
1092  __ Push(scope_->GetScopeInfo());
1093  PushFunctionArgumentForContextAllocation();
1094  __ CallRuntime(Runtime::kHiddenPushBlockContext, 2);
1095 
1096  // Replace the context stored in the frame.
1097  StoreToFrameField(StandardFrameConstants::kContextOffset,
1098  context_register());
1099  }
1100  { Comment cmnt(masm_, "[ Declarations");
1101  VisitDeclarations(scope_->declarations());
1102  }
1103  }
1104 
1105  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
1106  VisitStatements(stmt->statements());
1107  scope_ = saved_scope;
1108  __ bind(nested_block.break_label());
1109  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1110 
1111  // Pop block context if necessary.
1112  if (stmt->scope() != NULL) {
1113  LoadContextField(context_register(), Context::PREVIOUS_INDEX);
1114  // Update local stack frame context field.
1115  StoreToFrameField(StandardFrameConstants::kContextOffset,
1116  context_register());
1117  }
1118 }
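// NOTE (editorial, not part of the original source): the block context is
// only pushed for blocks with block-scoped declarations, e.g.
// `{ let x = 1; ... }`; a block containing only `var` bindings has no scope
// of its own (stmt->scope() == NULL) and skips it.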
1119 
1120 
1121 void FullCodeGenerator::VisitModuleStatement(ModuleStatement* stmt) {
1122  Comment cmnt(masm_, "[ Module context");
1123 
1124  __ Push(Smi::FromInt(stmt->proxy()->interface()->Index()));
1125  __ Push(Smi::FromInt(0));
1126  __ CallRuntime(Runtime::kHiddenPushModuleContext, 2);
1127  StoreToFrameField(
1128  StandardFrameConstants::kContextOffset, context_register());
1129 
1130  Scope* saved_scope = scope_;
1131  scope_ = stmt->body()->scope();
1132  VisitStatements(stmt->body()->statements());
1133  scope_ = saved_scope;
1134  LoadContextField(context_register(), Context::PREVIOUS_INDEX);
1135  // Update local stack frame context field.
1136  StoreToFrameField(StandardFrameConstants::kContextOffset,
1137  context_register());
1138 }
1139 
1140 
1141 void FullCodeGenerator::VisitExpressionStatement(ExpressionStatement* stmt) {
1142  Comment cmnt(masm_, "[ ExpressionStatement");
1143  SetStatementPosition(stmt);
1144  VisitForEffect(stmt->expression());
1145 }
1146 
1147 
1148 void FullCodeGenerator::VisitEmptyStatement(EmptyStatement* stmt) {
1149  Comment cmnt(masm_, "[ EmptyStatement");
1150  SetStatementPosition(stmt);
1151 }
1152 
1153 
1154 void FullCodeGenerator::VisitIfStatement(IfStatement* stmt) {
1155  Comment cmnt(masm_, "[ IfStatement");
1156  SetStatementPosition(stmt);
1157  Label then_part, else_part, done;
1158 
1159  if (stmt->HasElseStatement()) {
1160  VisitForControl(stmt->condition(), &then_part, &else_part, &then_part);
1161  PrepareForBailoutForId(stmt->ThenId(), NO_REGISTERS);
1162  __ bind(&then_part);
1163  Visit(stmt->then_statement());
1164  __ jmp(&done);
1165 
1166  PrepareForBailoutForId(stmt->ElseId(), NO_REGISTERS);
1167  __ bind(&else_part);
1168  Visit(stmt->else_statement());
1169  } else {
1170  VisitForControl(stmt->condition(), &then_part, &done, &then_part);
1171  PrepareForBailoutForId(stmt->ThenId(), NO_REGISTERS);
1172  __ bind(&then_part);
1173  Visit(stmt->then_statement());
1174 
1175  PrepareForBailoutForId(stmt->ElseId(), NO_REGISTERS);
1176  }
1177  __ bind(&done);
1178  PrepareForBailoutForId(stmt->IfId(), NO_REGISTERS);
1179 }
1180 
1181 
1182 void FullCodeGenerator::VisitContinueStatement(ContinueStatement* stmt) {
1183  Comment cmnt(masm_, "[ ContinueStatement");
1184  SetStatementPosition(stmt);
1185  NestedStatement* current = nesting_stack_;
1186  int stack_depth = 0;
1187  int context_length = 0;
1188  // When continuing, we clobber the unpredictable value in the accumulator
1189  // with one that's safe for GC. If we hit an exit from the try block of
1190  // try...finally on our way out, we will unconditionally preserve the
1191  // accumulator on the stack.
1192  ClearAccumulator();
1193  while (!current->IsContinueTarget(stmt->target())) {
1194  current = current->Exit(&stack_depth, &context_length);
1195  }
1196  __ Drop(stack_depth);
1197  if (context_length > 0) {
1198  while (context_length > 0) {
1199  LoadContextField(context_register(), Context::PREVIOUS_INDEX);
1200  --context_length;
1201  }
1202  StoreToFrameField(StandardFrameConstants::kContextOffset,
1203  context_register());
1204  }
1205 
1206  __ jmp(current->AsIteration()->continue_label());
1207 }
1208 
1209 
1210 void FullCodeGenerator::VisitBreakStatement(BreakStatement* stmt) {
1211  Comment cmnt(masm_, "[ BreakStatement");
1212  SetStatementPosition(stmt);
1213  NestedStatement* current = nesting_stack_;
1214  int stack_depth = 0;
1215  int context_length = 0;
1216  // When breaking, we clobber the unpredictable value in the accumulator
1217  // with one that's safe for GC. If we hit an exit from the try block of
1218  // try...finally on our way out, we will unconditionally preserve the
1219  // accumulator on the stack.
1220  ClearAccumulator();
1221  while (!current->IsBreakTarget(stmt->target())) {
1222  current = current->Exit(&stack_depth, &context_length);
1223  }
1224  __ Drop(stack_depth);
1225  if (context_length > 0) {
1226  while (context_length > 0) {
1227  LoadContextField(context_register(), Context::PREVIOUS_INDEX);
1228  --context_length;
1229  }
1230  StoreToFrameField(StandardFrameConstants::kContextOffset,
1231  context_register());
1232  }
1233 
1234  __ jmp(current->AsBreakable()->break_label());
1235 }
1236 
1237 
1238 void FullCodeGenerator::EmitUnwindBeforeReturn() {
1239  NestedStatement* current = nesting_stack_;
1240  int stack_depth = 0;
1241  int context_length = 0;
1242  while (current != NULL) {
1243  current = current->Exit(&stack_depth, &context_length);
1244  }
1245  __ Drop(stack_depth);
1246 }
1247 
1248 
1249 void FullCodeGenerator::VisitReturnStatement(ReturnStatement* stmt) {
1250  Comment cmnt(masm_, "[ ReturnStatement");
1251  SetStatementPosition(stmt);
1252  Expression* expr = stmt->expression();
1253  VisitForAccumulatorValue(expr);
1254  EmitUnwindBeforeReturn();
1255  EmitReturnSequence();
1256 }
1257 
1258 
1259 void FullCodeGenerator::VisitWithStatement(WithStatement* stmt) {
1260  Comment cmnt(masm_, "[ WithStatement");
1261  SetStatementPosition(stmt);
1262 
1263  VisitForStackValue(stmt->expression());
1264  PushFunctionArgumentForContextAllocation();
1265  __ CallRuntime(Runtime::kHiddenPushWithContext, 2);
1266  StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
1267 
1268  Scope* saved_scope = scope();
1269  scope_ = stmt->scope();
1270  { WithOrCatch body(this);
1271  Visit(stmt->statement());
1272  }
1273  scope_ = saved_scope;
1274 
1275  // Pop context.
1276  LoadContextField(context_register(), Context::PREVIOUS_INDEX);
1277  // Update local stack frame context field.
1278  StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
1279 }
1280 
1281 
1282 void FullCodeGenerator::VisitDoWhileStatement(DoWhileStatement* stmt) {
1283  Comment cmnt(masm_, "[ DoWhileStatement");
1284  SetStatementPosition(stmt);
1285  Label body, book_keeping;
1286 
1287  Iteration loop_statement(this, stmt);
1288  increment_loop_depth();
1289 
1290  __ bind(&body);
1291  Visit(stmt->body());
1292 
1293  // Record the position of the do while condition and make sure it is
1294  // possible to break on the condition.
1295  __ bind(loop_statement.continue_label());
1296  PrepareForBailoutForId(stmt->ContinueId(), NO_REGISTERS);
1297  SetExpressionPosition(stmt->cond());
1298  VisitForControl(stmt->cond(),
1299  &book_keeping,
1300  loop_statement.break_label(),
1301  &book_keeping);
1302 
1303  // Check stack before looping.
1304  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
1305  __ bind(&book_keeping);
1306  EmitBackEdgeBookkeeping(stmt, &body);
1307  __ jmp(&body);
1308 
1309  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1310  __ bind(loop_statement.break_label());
1311  decrement_loop_depth();
1312 }
1313 
1314 
1315 void FullCodeGenerator::VisitWhileStatement(WhileStatement* stmt) {
1316  Comment cmnt(masm_, "[ WhileStatement");
1317  Label test, body;
1318 
1319  Iteration loop_statement(this, stmt);
1320  increment_loop_depth();
1321 
1322  // Emit the test at the bottom of the loop.
1323  __ jmp(&test);
1324 
1325  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1326  __ bind(&body);
1327  Visit(stmt->body());
1328 
1329  // Emit the statement position here as this is where the while
1330  // statement code starts.
1331  __ bind(loop_statement.continue_label());
1332  SetStatementPosition(stmt);
1333 
1334  // Check stack before looping.
1335  EmitBackEdgeBookkeeping(stmt, &body);
1336 
1337  __ bind(&test);
1338  VisitForControl(stmt->cond(),
1339  &body,
1340  loop_statement.break_label(),
1341  loop_statement.break_label());
1342 
1343  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1344  __ bind(loop_statement.break_label());
1345  decrement_loop_depth();
1346 }
1347 
1348 
1349 void FullCodeGenerator::VisitForStatement(ForStatement* stmt) {
1350  Comment cmnt(masm_, "[ ForStatement");
1351  Label test, body;
1352 
1353  Iteration loop_statement(this, stmt);
1354 
1355  // Set statement position for a break slot before entering the for-body.
1356  SetStatementPosition(stmt);
1357 
1358  if (stmt->init() != NULL) {
1359  Visit(stmt->init());
1360  }
1361 
1362  increment_loop_depth();
1363  // Emit the test at the bottom of the loop (even if empty).
1364  __ jmp(&test);
1365 
1366  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1367  __ bind(&body);
1368  Visit(stmt->body());
1369 
1370  PrepareForBailoutForId(stmt->ContinueId(), NO_REGISTERS);
1371  __ bind(loop_statement.continue_label());
1372  if (stmt->next() != NULL) {
1373  Visit(stmt->next());
1374  }
1375 
1376  // Emit the statement position here as this is where the for
1377  // statement code starts.
1378  SetStatementPosition(stmt);
1379 
1380  // Check stack before looping.
1381  EmitBackEdgeBookkeeping(stmt, &body);
1382 
1383  __ bind(&test);
1384  if (stmt->cond() != NULL) {
1385  VisitForControl(stmt->cond(),
1386  &body,
1387  loop_statement.break_label(),
1388  loop_statement.break_label());
1389  } else {
1390  __ jmp(&body);
1391  }
1392 
1393  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1394  __ bind(loop_statement.break_label());
1395  decrement_loop_depth();
1396 }
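// NOTE (editorial, not part of the original source): all three loop
// visitors above route their back edge through EmitBackEdgeBookkeeping,
// which emits the interrupt check that RecordBackEdge registers and that
// BackEdgeTable::Patch later rewrites for on-stack replacement.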
1397 
1398 
1399 void FullCodeGenerator::VisitTryCatchStatement(TryCatchStatement* stmt) {
1400  Comment cmnt(masm_, "[ TryCatchStatement");
1401  SetStatementPosition(stmt);
1402  // The try block adds a handler to the exception handler chain before
1403  // entering, and removes it again when exiting normally. If an exception
1404  // is thrown during execution of the try block, the handler is consumed
1405  // and control is passed to the catch block with the exception in the
1406  // result register.
1407 
1408  Label try_entry, handler_entry, exit;
1409  __ jmp(&try_entry);
1410  __ bind(&handler_entry);
1411  handler_table()->set(stmt->index(), Smi::FromInt(handler_entry.pos()));
1412  // Exception handler code, the exception is in the result register.
1413  // Extend the context before executing the catch block.
1414  { Comment cmnt(masm_, "[ Extend catch context");
1415  __ Push(stmt->variable()->name());
1416  __ Push(result_register());
1417  PushFunctionArgumentForContextAllocation();
1418  __ CallRuntime(Runtime::kHiddenPushCatchContext, 3);
1419  StoreToFrameField(StandardFrameConstants::kContextOffset,
1420  context_register());
1421  }
1422 
1423  Scope* saved_scope = scope();
1424  scope_ = stmt->scope();
1425  ASSERT(scope_->declarations()->is_empty());
1426  { WithOrCatch catch_body(this);
1427  Visit(stmt->catch_block());
1428  }
1429  // Restore the context.
1430  LoadContextField(context_register(), Context::PREVIOUS_INDEX);
1431  StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
1432  scope_ = saved_scope;
1433  __ jmp(&exit);
1434 
1435  // Try block code. Sets up the exception handler chain.
1436  __ bind(&try_entry);
1437  __ PushTryHandler(StackHandler::CATCH, stmt->index());
1438  { TryCatch try_body(this);
1439  Visit(stmt->try_block());
1440  }
1441  __ PopTryHandler();
1442  __ bind(&exit);
1443 }
1444 
1445 
1446 void FullCodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
1447  Comment cmnt(masm_, "[ TryFinallyStatement");
1448  SetStatementPosition(stmt);
1449  // Try finally is compiled by setting up a try-handler on the stack while
1450  // executing the try body, and removing it again afterwards.
1451  //
1452  // The try-finally construct can enter the finally block in three ways:
1453  // 1. By exiting the try-block normally. This removes the try-handler and
1454  // calls the finally block code before continuing.
1455  // 2. By exiting the try-block with a function-local control flow transfer
1456  // (break/continue/return). The site of the, e.g., break removes the
1457  // try handler and calls the finally block code before continuing
1458  // its outward control transfer.
1459  // 3. By exiting the try-block with a thrown exception.
1460  // This can happen in nested function calls. It traverses the try-handler
1461  // chain and consumes the try-handler entry before jumping to the
1462  // handler code. The handler code then calls the finally-block before
1463  // rethrowing the exception.
1464  //
1465  // The finally block must assume a return address on top of the stack
1466  // (or in the link register on ARM chips) and a value (return value or
1467  // exception) in the result register (rax/eax/r0), both of which must
1468  // be preserved. The return address isn't GC-safe, so it should be
1469  // cooked before GC.
1470  Label try_entry, handler_entry, finally_entry;
1471 
1472  // Jump to try-handler setup and try-block code.
1473  __ jmp(&try_entry);
1474  __ bind(&handler_entry);
1475  handler_table()->set(stmt->index(), Smi::FromInt(handler_entry.pos()));
1476  // Exception handler code. This code is only executed when an exception
1477  // is thrown. The exception is in the result register, and must be
1478  // preserved by the finally block. Call the finally block and then
1479  // rethrow the exception if it returns.
1480  __ Call(&finally_entry);
1481  __ Push(result_register());
1482  __ CallRuntime(Runtime::kHiddenReThrow, 1);
1483 
1484  // Finally block implementation.
1485  __ bind(&finally_entry);
1486  EnterFinallyBlock();
1487  { Finally finally_body(this);
1488  Visit(stmt->finally_block());
1489  }
1490  ExitFinallyBlock(); // Return to the calling code.
1491 
1492  // Set up try handler.
1493  __ bind(&try_entry);
1494  __ PushTryHandler(StackHandler::FINALLY, stmt->index());
1495  { TryFinally try_body(this, &finally_entry);
1496  Visit(stmt->try_block());
1497  }
1498  __ PopTryHandler();
1499  // Execute the finally block on the way out. Clobber the unpredictable
1500  // value in the result register with one that's safe for GC because the
1501  // finally block will unconditionally preserve the result register on the
1502  // stack.
1503  ClearAccumulator();
1504  __ Call(&finally_entry);
1505 }
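// NOTE (editorial, not part of the original source): the three ways into
// the finally block correspond to JavaScript like
//
//   try {
//     if (a) return 1;  // 2. local control transfer, unwinds via Exit()
//     g();              // 3. g() may throw into the handler above
//   } finally {         // 1. normal exit falls through to the finally call
//     cleanup();
//   }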
1506 
1507 
1508 void FullCodeGenerator::VisitDebuggerStatement(DebuggerStatement* stmt) {
1509 #ifdef ENABLE_DEBUGGER_SUPPORT
1510  Comment cmnt(masm_, "[ DebuggerStatement");
1511  SetStatementPosition(stmt);
1512 
1513  __ DebugBreak();
1514  // Ignore the return value.
1515 #endif
1516 }
1517 
1518 
1519 void FullCodeGenerator::VisitCaseClause(CaseClause* clause) {
1520  UNREACHABLE();
1521 }
1522 
1523 
1524 void FullCodeGenerator::VisitConditional(Conditional* expr) {
1525  Comment cmnt(masm_, "[ Conditional");
1526  Label true_case, false_case, done;
1527  VisitForControl(expr->condition(), &true_case, &false_case, &true_case);
1528 
1529  PrepareForBailoutForId(expr->ThenId(), NO_REGISTERS);
1530  __ bind(&true_case);
1531  SetExpressionPosition(expr->then_expression());
1532  if (context()->IsTest()) {
1533  const TestContext* for_test = TestContext::cast(context());
1534  VisitForControl(expr->then_expression(),
1535  for_test->true_label(),
1536  for_test->false_label(),
1537  NULL);
1538  } else {
1539  VisitInDuplicateContext(expr->then_expression());
1540  __ jmp(&done);
1541  }
1542 
1543  PrepareForBailoutForId(expr->ElseId(), NO_REGISTERS);
1544  __ bind(&false_case);
1545  SetExpressionPosition(expr->else_expression());
1546  VisitInDuplicateContext(expr->else_expression());
1547  // If control flow falls through Visit, merge it with the true case here.
1548  if (!context()->IsTest()) {
1549  __ bind(&done);
1550  }
1551 }
1552 
1553 
1554 void FullCodeGenerator::VisitLiteral(Literal* expr) {
1555  Comment cmnt(masm_, "[ Literal");
1556  context()->Plug(expr->value());
1557 }
1558 
1559 
1560 void FullCodeGenerator::VisitFunctionLiteral(FunctionLiteral* expr) {
1561  Comment cmnt(masm_, "[ FunctionLiteral");
1562 
1563  // Build the function boilerplate and instantiate it.
1564  Handle<SharedFunctionInfo> function_info =
1565  Compiler::BuildFunctionInfo(expr, script());
1566  if (function_info.is_null()) {
1567  SetStackOverflow();
1568  return;
1569  }
1570  EmitNewClosure(function_info, expr->pretenure());
1571 }
1572 
1573 
1574 void FullCodeGenerator::VisitNativeFunctionLiteral(
1575  NativeFunctionLiteral* expr) {
1576  Comment cmnt(masm_, "[ NativeFunctionLiteral");
1577 
1578  // Compute the function template for the native function.
1579  Handle<String> name = expr->name();
1580  v8::Handle<v8::FunctionTemplate> fun_template =
1581  expr->extension()->GetNativeFunctionTemplate(
1582  reinterpret_cast<v8::Isolate*>(isolate()), v8::Utils::ToLocal(name));
1583  ASSERT(!fun_template.IsEmpty());
1584 
1585  // Instantiate the function and create a shared function info from it.
1586  Handle<JSFunction> fun = Utils::OpenHandle(*fun_template->GetFunction());
1587  const int literals = fun->NumberOfLiterals();
1588  Handle<Code> code = Handle<Code>(fun->shared()->code());
1589  Handle<Code> construct_stub = Handle<Code>(fun->shared()->construct_stub());
1590  bool is_generator = false;
1591  Handle<SharedFunctionInfo> shared =
1592  isolate()->factory()->NewSharedFunctionInfo(name, literals, is_generator,
1593  code, Handle<ScopeInfo>(fun->shared()->scope_info()));
1594  shared->set_construct_stub(*construct_stub);
1595 
1596  // Copy the function data to the shared function info.
1597  shared->set_function_data(fun->shared()->function_data());
1598  int parameters = fun->shared()->formal_parameter_count();
1599  shared->set_formal_parameter_count(parameters);
1600 
1601  EmitNewClosure(shared, false);
1602 }
1603 
1604 
1605 void FullCodeGenerator::VisitThrow(Throw* expr) {
1606  Comment cmnt(masm_, "[ Throw");
1607  VisitForStackValue(expr->exception());
1608  __ CallRuntime(Runtime::kHiddenThrow, 1);
1609  // Never returns here.
1610 }
1611 
1612 
1613 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryCatch::Exit(
1614  int* stack_depth,
1615  int* context_length) {
1616  // The macros used here must preserve the result register.
1617  __ Drop(*stack_depth);
1618  __ PopTryHandler();
1619  *stack_depth = 0;
1620  return previous_;
1621 }
1622 
1623 
1624 bool FullCodeGenerator::TryLiteralCompare(CompareOperation* expr) {
1625  Expression* sub_expr;
1626  Handle<String> check;
1627  if (expr->IsLiteralCompareTypeof(&sub_expr, &check)) {
1628  EmitLiteralCompareTypeof(expr, sub_expr, check);
1629  return true;
1630  }
1631 
1632  if (expr->IsLiteralCompareUndefined(&sub_expr, isolate())) {
1633  EmitLiteralCompareNil(expr, sub_expr, kUndefinedValue);
1634  return true;
1635  }
1636 
1637  if (expr->IsLiteralCompareNull(&sub_expr)) {
1638  EmitLiteralCompareNil(expr, sub_expr, kNullValue);
1639  return true;
1640  }
1641 
1642  return false;
1643 }
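// NOTE (editorial, not part of the original source): this fast path covers
// literal comparisons such as `typeof x == "string"`, `x === undefined` and
// `x === null`, which can be emitted without a full compare IC.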
1644 
1645 
1646 void BackEdgeTable::Patch(Isolate* isolate, Code* unoptimized) {
1647  DisallowHeapAllocation no_gc;
1648  Code* patch = isolate->builtins()->builtin(Builtins::kOnStackReplacement);
1649 
1650  // Iterate over the back edge table and patch every interrupt
1651  // call to an unconditional call to the replacement code.
1652  int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level();
1653 
1654  BackEdgeTable back_edges(unoptimized, &no_gc);
1655  for (uint32_t i = 0; i < back_edges.length(); i++) {
1656  if (static_cast<int>(back_edges.loop_depth(i)) == loop_nesting_level) {
1657  ASSERT_EQ(INTERRUPT, GetBackEdgeState(isolate,
1658  unoptimized,
1659  back_edges.pc(i)));
1660  PatchAt(unoptimized, back_edges.pc(i), ON_STACK_REPLACEMENT, patch);
1661  }
1662  }
1663 
1664  unoptimized->set_back_edges_patched_for_osr(true);
1665  ASSERT(Verify(isolate, unoptimized, loop_nesting_level));
1666 }
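
// A hedged usage sketch (hypothetical caller, not from this file): the
// runtime profiler would arm a hot function's back edges like this, turning
// every interrupt check at the allowed nesting level into an OSR entry:
//
//   void ArmBackEdgesForOsr(Isolate* isolate, Code* unoptimized) {
//     if (!unoptimized->back_edges_patched_for_osr()) {
//       BackEdgeTable::Patch(isolate, unoptimized);
//     }
//   }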
1667 
1668 
1669 void BackEdgeTable::Revert(Isolate* isolate, Code* unoptimized) {
1670  DisallowHeapAllocation no_gc;
1671  Code* patch = isolate->builtins()->builtin(Builtins::kInterruptCheck);
1672 
1673  // Iterate over the back edge table and revert the patched interrupt calls.
1674  ASSERT(unoptimized->back_edges_patched_for_osr());
1675  int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level();
1676 
1677  BackEdgeTable back_edges(unoptimized, &no_gc);
1678  for (uint32_t i = 0; i < back_edges.length(); i++) {
1679  if (static_cast<int>(back_edges.loop_depth(i)) <= loop_nesting_level) {
1680  ASSERT_NE(INTERRUPT, GetBackEdgeState(isolate,
1681  unoptimized,
1682  back_edges.pc(i)));
1683  PatchAt(unoptimized, back_edges.pc(i), INTERRUPT, patch);
1684  }
1685  }
1686 
1687  unoptimized->set_back_edges_patched_for_osr(false);
1688  unoptimized->set_allow_osr_at_loop_nesting_level(0);
1689  // Assert that none of the back edges are patched anymore.
1690  ASSERT(Verify(isolate, unoptimized, -1));
1691 }
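
// A hedged counterpart to the sketch after Patch() (hypothetical caller):
// when OSR is abandoned, every armed back edge is restored to a plain
// interrupt check before the nesting level is reset:
//
//   if (unoptimized->back_edges_patched_for_osr()) {
//     BackEdgeTable::Revert(isolate, unoptimized);
//   }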
1692 
1693 
1694 void BackEdgeTable::AddStackCheck(Handle<Code> code, uint32_t pc_offset) {
1695  DisallowHeapAllocation no_gc;
1696  Isolate* isolate = code->GetIsolate();
1697  Address pc = code->instruction_start() + pc_offset;
1698  Code* patch = isolate->builtins()->builtin(Builtins::kOsrAfterStackCheck);
1699  PatchAt(*code, pc, OSR_AFTER_STACK_CHECK, patch);
1700 }
1701 
1702 
1703 void BackEdgeTable::RemoveStackCheck(Handle<Code> code, uint32_t pc_offset) {
1704  DisallowHeapAllocation no_gc;
1705  Isolate* isolate = code->GetIsolate();
1706  Address pc = code->instruction_start() + pc_offset;
1707 
1708  if (OSR_AFTER_STACK_CHECK == GetBackEdgeState(isolate, *code, pc)) {
1709  Code* patch = isolate->builtins()->builtin(Builtins::kOnStackReplacement);
1710  PatchAt(*code, pc, ON_STACK_REPLACEMENT, patch);
1711  }
1712 }
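
// A hedged summary, not in the original file: taken together, the four
// functions above move each back edge between three states, each a call to
// a different builtin at the same patch site:
//
//   INTERRUPT              Builtins::kInterruptCheck       (unpatched)
//   ON_STACK_REPLACEMENT   Builtins::kOnStackReplacement   (Patch)
//   OSR_AFTER_STACK_CHECK  Builtins::kOsrAfterStackCheck   (AddStackCheck)
//
// Note that RemoveStackCheck only downgrades OSR_AFTER_STACK_CHECK back to
// ON_STACK_REPLACEMENT; restoring INTERRUPT is Revert()'s job.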
1713 
1714 
1715 #ifdef DEBUG
1716 bool BackEdgeTable::Verify(Isolate* isolate,
1717  Code* unoptimized,
1718  int loop_nesting_level) {
1719  DisallowHeapAllocation no_gc;
1720  BackEdgeTable back_edges(unoptimized, &no_gc);
1721  for (uint32_t i = 0; i < back_edges.length(); i++) {
1722  uint32_t loop_depth = back_edges.loop_depth(i);
1723  CHECK_LE(static_cast<int>(loop_depth), Code::kMaxLoopNestingMarker);
1724  // Assert that all back edges for shallower loops (and only those)
1725  // have already been patched.
1726  CHECK_EQ((static_cast<int>(loop_depth) <= loop_nesting_level),
1727  GetBackEdgeState(isolate,
1728  unoptimized,
1729  back_edges.pc(i)) != INTERRUPT);
1730  }
1731  return true;
1732 }
1733 #endif // DEBUG
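
// A hedged restatement of the invariant Verify() checks in debug builds:
// exactly the back edges whose loop_depth is <= loop_nesting_level may be in
// a patched (non-INTERRUPT) state. Passing -1, as Revert() does above, thus
// asserts that no back edge is armed at all:
//
//   ASSERT(Verify(isolate, unoptimized, -1));   // no armed edges remain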
1734 
1735 
1736 #undef __
1737 
1738 
1739 } } // namespace v8::internal