v8 3.14.5 (node 0.10.28)
V8 is Google's open source JavaScript engine
full-codegen.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #include "codegen.h"
31 #include "compiler.h"
32 #include "debug.h"
33 #include "full-codegen.h"
34 #include "liveedit.h"
35 #include "macro-assembler.h"
36 #include "prettyprinter.h"
37 #include "scopes.h"
38 #include "scopeinfo.h"
39 #include "snapshot.h"
40 #include "stub-cache.h"
41 
42 namespace v8 {
43 namespace internal {
44 
45 void BreakableStatementChecker::Check(Statement* stmt) {
46  Visit(stmt);
47 }
48 
49 
50 void BreakableStatementChecker::Check(Expression* expr) {
51  Visit(expr);
52 }
53 
54 
55 void BreakableStatementChecker::VisitVariableDeclaration(
56  VariableDeclaration* decl) {
57 }
58 
59 void BreakableStatementChecker::VisitFunctionDeclaration(
60  FunctionDeclaration* decl) {
61 }
62 
63 void BreakableStatementChecker::VisitModuleDeclaration(
64  ModuleDeclaration* decl) {
65 }
66 
67 void BreakableStatementChecker::VisitImportDeclaration(
68  ImportDeclaration* decl) {
69 }
70 
71 void BreakableStatementChecker::VisitExportDeclaration(
72  ExportDeclaration* decl) {
73 }
74 
75 
76 void BreakableStatementChecker::VisitModuleLiteral(ModuleLiteral* module) {
77 }
78 
79 void BreakableStatementChecker::VisitModuleVariable(ModuleVariable* module) {
80 }
81 
82 void BreakableStatementChecker::VisitModulePath(ModulePath* module) {
83 }
84 
85 void BreakableStatementChecker::VisitModuleUrl(ModuleUrl* module) {
86 }
87 
88 
89 void BreakableStatementChecker::VisitBlock(Block* stmt) {
90 }
91 
92 
93 void BreakableStatementChecker::VisitExpressionStatement(
94  ExpressionStatement* stmt) {
95  // Check if expression is breakable.
96  Visit(stmt->expression());
97 }
98 
99 
100 void BreakableStatementChecker::VisitEmptyStatement(EmptyStatement* stmt) {
101 }
102 
103 
104 void BreakableStatementChecker::VisitIfStatement(IfStatement* stmt) {
105  // If the condition is breakable the if statement is breakable.
106  Visit(stmt->condition());
107 }
108 
109 
110 void BreakableStatementChecker::VisitContinueStatement(
111  ContinueStatement* stmt) {
112 }
113 
114 
115 void BreakableStatementChecker::VisitBreakStatement(BreakStatement* stmt) {
116 }
117 
118 
119 void BreakableStatementChecker::VisitReturnStatement(ReturnStatement* stmt) {
120  // Return is breakable if the expression is.
121  Visit(stmt->expression());
122 }
123 
124 
125 void BreakableStatementChecker::VisitWithStatement(WithStatement* stmt) {
126  Visit(stmt->expression());
127 }
128 
129 
130 void BreakableStatementChecker::VisitSwitchStatement(SwitchStatement* stmt) {
131  // Switch statements are breakable if the tag expression is.
132  Visit(stmt->tag());
133 }
134 
135 
136 void BreakableStatementChecker::VisitDoWhileStatement(DoWhileStatement* stmt) {
137  // Mark do while as breakable to avoid adding a break slot in front of it.
138  is_breakable_ = true;
139 }
140 
141 
142 void BreakableStatementChecker::VisitWhileStatement(WhileStatement* stmt) {
143  // Mark while statements breakable if the condition expression is.
144  Visit(stmt->cond());
145 }
146 
147 
148 void BreakableStatementChecker::VisitForStatement(ForStatement* stmt) {
149  // Mark for statements breakable if the condition expression is.
150  if (stmt->cond() != NULL) {
151  Visit(stmt->cond());
152  }
153 }
154 
155 
156 void BreakableStatementChecker::VisitForInStatement(ForInStatement* stmt) {
157  // Mark for in statements breakable if the enumerable expression is.
158  Visit(stmt->enumerable());
159 }
160 
161 
162 void BreakableStatementChecker::VisitTryCatchStatement(
163  TryCatchStatement* stmt) {
164  // Mark try catch as breakable to avoid adding a break slot in front of it.
165  is_breakable_ = true;
166 }
167 
168 
169 void BreakableStatementChecker::VisitTryFinallyStatement(
170  TryFinallyStatement* stmt) {
171  // Mark try finally as breakable to avoid adding a break slot in front of it.
172  is_breakable_ = true;
173 }
174 
175 
176 void BreakableStatementChecker::VisitDebuggerStatement(
177  DebuggerStatement* stmt) {
178  // The debugger statement is breakable.
179  is_breakable_ = true;
180 }
181 
182 
183 void BreakableStatementChecker::VisitFunctionLiteral(FunctionLiteral* expr) {
184 }
185 
186 
187 void BreakableStatementChecker::VisitSharedFunctionInfoLiteral(
188  SharedFunctionInfoLiteral* expr) {
189 }
190 
191 
192 void BreakableStatementChecker::VisitConditional(Conditional* expr) {
193 }
194 
195 
196 void BreakableStatementChecker::VisitVariableProxy(VariableProxy* expr) {
197 }
198 
199 
200 void BreakableStatementChecker::VisitLiteral(Literal* expr) {
201 }
202 
203 
204 void BreakableStatementChecker::VisitRegExpLiteral(RegExpLiteral* expr) {
205 }
206 
207 
208 void BreakableStatementChecker::VisitObjectLiteral(ObjectLiteral* expr) {
209 }
210 
211 
212 void BreakableStatementChecker::VisitArrayLiteral(ArrayLiteral* expr) {
213 }
214 
215 
216 void BreakableStatementChecker::VisitAssignment(Assignment* expr) {
217  // If assigning to a property (including a global property) the assignment is
218  // breakable.
219  VariableProxy* proxy = expr->target()->AsVariableProxy();
220  Property* prop = expr->target()->AsProperty();
221  if (prop != NULL || (proxy != NULL && proxy->var()->IsUnallocated())) {
222  is_breakable_ = true;
223  return;
224  }
225 
226  // Otherwise the assignment is breakable if the assigned value is.
227  Visit(expr->value());
228 }
229 
230 
231 void BreakableStatementChecker::VisitThrow(Throw* expr) {
232  // Throw is breakable if the expression is.
233  Visit(expr->exception());
234 }
235 
236 
237 void BreakableStatementChecker::VisitProperty(Property* expr) {
238  // Property load is breakable.
239  is_breakable_ = true;
240 }
241 
242 
243 void BreakableStatementChecker::VisitCall(Call* expr) {
244  // Function calls both through IC and call stub are breakable.
245  is_breakable_ = true;
246 }
247 
248 
249 void BreakableStatementChecker::VisitCallNew(CallNew* expr) {
250  // Function calls through new are breakable.
251  is_breakable_ = true;
252 }
253 
254 
255 void BreakableStatementChecker::VisitCallRuntime(CallRuntime* expr) {
256 }
257 
258 
259 void BreakableStatementChecker::VisitUnaryOperation(UnaryOperation* expr) {
260  Visit(expr->expression());
261 }
262 
263 
264 void BreakableStatementChecker::VisitCountOperation(CountOperation* expr) {
265  Visit(expr->expression());
266 }
267 
268 
269 void BreakableStatementChecker::VisitBinaryOperation(BinaryOperation* expr) {
270  Visit(expr->left());
271  if (expr->op() != Token::AND &&
272  expr->op() != Token::OR) {
273  Visit(expr->right());
274  }
275 }
276 
277 
278 void BreakableStatementChecker::VisitCompareOperation(CompareOperation* expr) {
279  Visit(expr->left());
280  Visit(expr->right());
281 }
282 
283 
284 void BreakableStatementChecker::VisitThisFunction(ThisFunction* expr) {
285 }
286 
287 
288 #define __ ACCESS_MASM(masm())
289 
290 bool FullCodeGenerator::MakeCode(CompilationInfo* info) {
291  Isolate* isolate = info->isolate();
292  Handle<Script> script = info->script();
293  if (!script->IsUndefined() && !script->source()->IsUndefined()) {
294  int len = String::cast(script->source())->length();
295  isolate->counters()->total_full_codegen_source_size()->Increment(len);
296  }
297  if (FLAG_trace_codegen) {
298  PrintF("Full Compiler - ");
299  }
300  CodeGenerator::MakeCodePrologue(info);
301  const int kInitialBufferSize = 4 * KB;
302  MacroAssembler masm(info->isolate(), NULL, kInitialBufferSize);
303 #ifdef ENABLE_GDB_JIT_INTERFACE
304  masm.positions_recorder()->StartGDBJITLineInfoRecording();
305 #endif
306 
307  FullCodeGenerator cgen(&masm, info);
308  cgen.Generate();
309  if (cgen.HasStackOverflow()) {
310  ASSERT(!isolate->has_pending_exception());
311  return false;
312  }
313  unsigned table_offset = cgen.EmitStackCheckTable();
314 
315  Code::Flags flags = Code::ComputeFlags(Code::FUNCTION);
316  Handle<Code> code = CodeGenerator::MakeCodeEpilogue(&masm, flags, info);
317  code->set_optimizable(info->IsOptimizable() &&
318  !info->function()->flags()->Contains(kDontOptimize) &&
319  info->function()->scope()->AllowsLazyCompilation());
320  cgen.PopulateDeoptimizationData(code);
321  cgen.PopulateTypeFeedbackInfo(code);
322  cgen.PopulateTypeFeedbackCells(code);
323  code->set_has_deoptimization_support(info->HasDeoptimizationSupport());
324  code->set_handler_table(*cgen.handler_table());
325 #ifdef ENABLE_DEBUGGER_SUPPORT
326  code->set_has_debug_break_slots(
327  info->isolate()->debugger()->IsDebuggerActive());
328  code->set_compiled_optimizable(info->IsOptimizable());
329 #endif // ENABLE_DEBUGGER_SUPPORT
330  code->set_allow_osr_at_loop_nesting_level(0);
331  code->set_profiler_ticks(0);
332  code->set_stack_check_table_offset(table_offset);
333  CodeGenerator::PrintCode(code, info);
334  info->SetCode(code); // May be an empty handle.
335 #ifdef ENABLE_GDB_JIT_INTERFACE
336  if (FLAG_gdbjit && !code.is_null()) {
337  GDBJITLineInfo* lineinfo =
338  masm.positions_recorder()->DetachGDBJITLineInfo();
339 
340  GDBJIT(RegisterDetailedLineInfo(*code, lineinfo));
341  }
342 #endif
343  return !code.is_null();
344 }
345 
346 
347 unsigned FullCodeGenerator::EmitStackCheckTable() {
348  // The stack check table consists of a length (in number of entries)
349  // field, and then a sequence of entries. Each entry is a pair of AST id
350  // and code-relative pc offset.
351  masm()->Align(kIntSize);
352  unsigned offset = masm()->pc_offset();
353  unsigned length = stack_checks_.length();
354  __ dd(length);
355  for (unsigned i = 0; i < length; ++i) {
356  __ dd(stack_checks_[i].id.ToInt());
357  __ dd(stack_checks_[i].pc_and_state);
358  }
359  return offset;
360 }
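The table layout described in the comment above -- a length word followed by (AST id, pc offset) pairs -- is simple enough to reproduce standalone. A minimal sketch, assuming 32-bit words and a plain vector in place of the macro assembler (StackCheckEntry and EmitTable are illustrative names, not V8 API):

#include <cstdint>
#include <vector>

struct StackCheckEntry {
  uint32_t ast_id;     // id of the back-edge AST node
  uint32_t pc_offset;  // code-relative pc of the stack check
};

// Appends the length-prefixed table and returns the word offset at which
// it starts, mirroring the layout emitted by EmitStackCheckTable() above.
uint32_t EmitTable(std::vector<uint32_t>* code,
                   const std::vector<StackCheckEntry>& checks) {
  uint32_t offset = static_cast<uint32_t>(code->size());
  code->push_back(static_cast<uint32_t>(checks.size()));  // length field
  for (const StackCheckEntry& e : checks) {
    code->push_back(e.ast_id);
    code->push_back(e.pc_offset);
  }
  return offset;
}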
361 
362 
363 void FullCodeGenerator::PopulateDeoptimizationData(Handle<Code> code) {
364  // Fill in the deoptimization information.
365  ASSERT(info_->HasDeoptimizationSupport() || bailout_entries_.is_empty());
366  if (!info_->HasDeoptimizationSupport()) return;
367  int length = bailout_entries_.length();
368  Handle<DeoptimizationOutputData> data = isolate()->factory()->
369  NewDeoptimizationOutputData(length, TENURED);
370  for (int i = 0; i < length; i++) {
371  data->SetAstId(i, bailout_entries_[i].id);
372  data->SetPcAndState(i, Smi::FromInt(bailout_entries_[i].pc_and_state));
373  }
374  code->set_deoptimization_data(*data);
375 }
376 
377 
378 void FullCodeGenerator::PopulateTypeFeedbackInfo(Handle<Code> code) {
379  Handle<TypeFeedbackInfo> info = isolate()->factory()->NewTypeFeedbackInfo();
380  info->set_ic_total_count(ic_total_count_);
381  ASSERT(!isolate()->heap()->InNewSpace(*info));
382  code->set_type_feedback_info(*info);
383 }
384 
385 
386 void FullCodeGenerator::Initialize() {
387  // The generation of debug code must match between the snapshot code and the
388  // code that is generated later. This is assumed by the debugger when it is
389  // calculating PC offsets after generating a debug version of code. Therefore
390  // we disable the production of debug code in the full compiler if we are
391  // either generating a snapshot or we booted from a snapshot.
392  generate_debug_code_ = FLAG_debug_code &&
393  !Serializer::enabled() &&
394  !Snapshot::HaveASnapshotToStartFrom();
395  masm_->set_emit_debug_code(generate_debug_code_);
396  masm_->set_predictable_code_size(true);
397 }
398 
399 
400 void FullCodeGenerator::PopulateTypeFeedbackCells(Handle<Code> code) {
401  if (type_feedback_cells_.is_empty()) return;
402  int length = type_feedback_cells_.length();
403  int array_size = TypeFeedbackCells::LengthOfFixedArray(length);
404  Handle<TypeFeedbackCells> cache = Handle<TypeFeedbackCells>::cast(
405  isolate()->factory()->NewFixedArray(array_size, TENURED));
406  for (int i = 0; i < length; i++) {
407  cache->SetAstId(i, type_feedback_cells_[i].ast_id);
408  cache->SetCell(i, *type_feedback_cells_[i].cell);
409  }
410  TypeFeedbackInfo::cast(code->type_feedback_info())->set_type_feedback_cells(
411  *cache);
412 }
413 
414 
415 
416 void FullCodeGenerator::PrepareForBailout(Expression* node, State state) {
417  PrepareForBailoutForId(node->id(), state);
418 }
419 
420 
421 void FullCodeGenerator::RecordJSReturnSite(Call* call) {
422  // We record the offset of the function return so we can rebuild the frame
423  // if the function was inlined, i.e., this is the return address in the
424  // inlined function's frame.
425  //
426  // The state is ignored. We defensively set it to TOS_REG, which is the
427  // real state of the unoptimized code at the return site.
428  PrepareForBailoutForId(call->ReturnId(), TOS_REG);
429 #ifdef DEBUG
430  // In debug builds, mark the return so we can verify that this function
431  // was called.
432  ASSERT(!call->return_is_recorded_);
433  call->return_is_recorded_ = true;
434 #endif
435 }
436 
437 
438 void FullCodeGenerator::PrepareForBailoutForId(BailoutId id, State state) {
439  // There's no need to prepare this code for bailouts from already optimized
440  // code or code that can't be optimized.
441  if (!info_->HasDeoptimizationSupport()) return;
442  unsigned pc_and_state =
443  StateField::encode(state) | PcField::encode(masm_->pc_offset());
444  ASSERT(Smi::IsValid(pc_and_state));
445  BailoutEntry entry = { id, pc_and_state };
446 #ifdef DEBUG
447  if (FLAG_enable_slow_asserts) {
448  // Assert that we don't have multiple bailout entries for the same node.
449  for (int i = 0; i < bailout_entries_.length(); i++) {
450  if (bailout_entries_.at(i).id == entry.id) {
451  AstPrinter printer;
452  PrintF("%s", printer.PrintProgram(info_->function()));
453  UNREACHABLE();
454  }
455  }
456  }
457 #endif // DEBUG
458  bailout_entries_.Add(entry, zone());
459 }
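StateField and PcField pack the bailout state and the current pc offset into a single unsigned value so that the pair fits in a Smi. A standalone sketch of that packing; the 8-bit state width here is an assumption for illustration, not necessarily V8's actual field layout:

#include <cassert>
#include <cstdint>

enum State { NO_REGISTERS = 0, TOS_REG = 1 };

const uint32_t kStateBits = 8;  // assumed width of the state field

uint32_t EncodePcAndState(State state, uint32_t pc_offset) {
  assert(pc_offset < (1u << (32 - kStateBits)));  // pc must fit beside state
  return static_cast<uint32_t>(state) | (pc_offset << kStateBits);
}

State DecodeState(uint32_t packed) {
  return static_cast<State>(packed & ((1u << kStateBits) - 1));
}

uint32_t DecodePc(uint32_t packed) { return packed >> kStateBits; }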
460 
461 
462 void FullCodeGenerator::RecordTypeFeedbackCell(
463  TypeFeedbackId id, Handle<JSGlobalPropertyCell> cell) {
464  TypeFeedbackCellEntry entry = { id, cell };
465  type_feedback_cells_.Add(entry, zone());
466 }
467 
468 
469 void FullCodeGenerator::RecordStackCheck(BailoutId ast_id) {
470  // The pc offset does not need to be encoded and packed together with a
471  // state.
472  ASSERT(masm_->pc_offset() > 0);
473  BailoutEntry entry = { ast_id, static_cast<unsigned>(masm_->pc_offset()) };
474  stack_checks_.Add(entry, zone());
475 }
476 
477 
478 bool FullCodeGenerator::ShouldInlineSmiCase(Token::Value op) {
479  // Inline smi case inside loops, but not division and modulo which
480  // are too complicated and take up too much space.
481  if (op == Token::DIV || op == Token::MOD) return false;
482  if (FLAG_always_inline_smi_code) return true;
483  return loop_depth_ > 0;
484 }
485 
486 
487 void FullCodeGenerator::EffectContext::Plug(Register reg) const {
488 }
489 
490 
491 void FullCodeGenerator::AccumulatorValueContext::Plug(Register reg) const {
492  __ Move(result_register(), reg);
493 }
494 
495 
496 void FullCodeGenerator::StackValueContext::Plug(Register reg) const {
497  __ push(reg);
498 }
499 
500 
501 void FullCodeGenerator::TestContext::Plug(Register reg) const {
502  // For simplicity we always test the accumulator register.
503  __ Move(result_register(), reg);
504  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
505  codegen()->DoTest(this);
506 }
507 
508 
509 void FullCodeGenerator::EffectContext::PlugTOS() const {
510  __ Drop(1);
511 }
512 
513 
514 void FullCodeGenerator::AccumulatorValueContext::PlugTOS() const {
515  __ pop(result_register());
516 }
517 
518 
519 void FullCodeGenerator::StackValueContext::PlugTOS() const {
520 }
521 
522 
523 void FullCodeGenerator::TestContext::PlugTOS() const {
524  // For simplicity we always test the accumulator register.
525  __ pop(result_register());
526  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
527  codegen()->DoTest(this);
528 }
529 
530 
531 void FullCodeGenerator::EffectContext::PrepareTest(
532  Label* materialize_true,
533  Label* materialize_false,
534  Label** if_true,
535  Label** if_false,
536  Label** fall_through) const {
537  // In an effect context, the true and the false case branch to the
538  // same label.
539  *if_true = *if_false = *fall_through = materialize_true;
540 }
541 
542 
543 void FullCodeGenerator::AccumulatorValueContext::PrepareTest(
544  Label* materialize_true,
545  Label* materialize_false,
546  Label** if_true,
547  Label** if_false,
548  Label** fall_through) const {
549  *if_true = *fall_through = materialize_true;
550  *if_false = materialize_false;
551 }
552 
553 
554 void FullCodeGenerator::StackValueContext::PrepareTest(
555  Label* materialize_true,
556  Label* materialize_false,
557  Label** if_true,
558  Label** if_false,
559  Label** fall_through) const {
560  *if_true = *fall_through = materialize_true;
561  *if_false = materialize_false;
562 }
563 
564 
565 void FullCodeGenerator::TestContext::PrepareTest(
566  Label* materialize_true,
567  Label* materialize_false,
568  Label** if_true,
569  Label** if_false,
570  Label** fall_through) const {
571  *if_true = true_label_;
572  *if_false = false_label_;
573  *fall_through = fall_through_;
574 }
575 
576 
577 void FullCodeGenerator::DoTest(const TestContext* context) {
578  DoTest(context->condition(),
579  context->true_label(),
580  context->false_label(),
581  context->fall_through());
582 }
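The Plug/PrepareTest/DoTest machinery above implements one strategy per expression context: an effect context drops the value, an accumulator context leaves it in the result register, a stack context pushes it, and a test context branches on it. A reduced model of that dispatch, with a plain enum standing in for the context class hierarchy and printf standing in for the macro-assembler calls:

#include <cstdio>

enum ContextKind { EFFECT, ACCUMULATOR_VALUE, STACK_VALUE, TEST };

// What Plug(reg) amounts to in each context (illustrative only).
void PlugRegister(ContextKind context) {
  switch (context) {
    case EFFECT:
      break;                                // value unused: emit nothing
    case ACCUMULATOR_VALUE:
      printf("move reg -> accumulator\n");  // keep value in result register
      break;
    case STACK_VALUE:
      printf("push reg\n");                 // keep value on the stack
      break;
    case TEST:
      printf("move reg -> accumulator\n");  // always test the accumulator
      printf("branch on accumulator\n");
      break;
  }
}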
583 
584 
585 void FullCodeGenerator::VisitDeclarations(
586  ZoneList<Declaration*>* declarations) {
587  ZoneList<Handle<Object> >* saved_globals = globals_;
588  ZoneList<Handle<Object> > inner_globals(10, zone());
589  globals_ = &inner_globals;
590 
591  AstVisitor::VisitDeclarations(declarations);
592  if (!globals_->is_empty()) {
593  // Invoke the platform-dependent code generator to do the actual
594  // declaration of the global functions and variables.
595  Handle<FixedArray> array =
596  isolate()->factory()->NewFixedArray(globals_->length(), TENURED);
597  for (int i = 0; i < globals_->length(); ++i)
598  array->set(i, *globals_->at(i));
599  DeclareGlobals(array);
600  }
601 
602  globals_ = saved_globals;
603 }
604 
605 
606 void FullCodeGenerator::VisitModuleLiteral(ModuleLiteral* module) {
607  // Allocate a module context statically.
608  Block* block = module->body();
609  Scope* saved_scope = scope();
610  scope_ = block->scope();
611  Interface* interface = module->interface();
612  Handle<JSModule> instance = interface->Instance();
613 
614  Comment cmnt(masm_, "[ ModuleLiteral");
615  SetStatementPosition(block);
616 
617  // Set up module context.
618  __ Push(instance);
619  __ CallRuntime(Runtime::kPushModuleContext, 1);
620  StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
621 
622  {
623  Comment cmnt(masm_, "[ Declarations");
624  VisitDeclarations(scope_->declarations());
625  }
626 
627  scope_ = saved_scope;
628  // Pop module context.
629  LoadContextField(context_register(), Context::PREVIOUS_INDEX);
630  // Update local stack frame context field.
631  StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
632 }
633 
634 
635 void FullCodeGenerator::VisitModuleVariable(ModuleVariable* module) {
636  // Nothing to do.
637  // The instance object is resolved statically through the module's interface.
638 }
639 
640 
641 void FullCodeGenerator::VisitModulePath(ModulePath* module) {
642  // Nothing to do.
643  // The instance object is resolved statically through the module's interface.
644 }
645 
646 
647 void FullCodeGenerator::VisitModuleUrl(ModuleUrl* decl) {
648  // TODO(rossberg)
649 }
650 
651 
652 int FullCodeGenerator::DeclareGlobalsFlags() {
653  ASSERT(DeclareGlobalsLanguageMode::is_valid(language_mode()));
654  return DeclareGlobalsEvalFlag::encode(is_eval()) |
655  DeclareGlobalsNativeFlag::encode(is_native()) |
656  DeclareGlobalsLanguageMode::encode(language_mode());
657 }
658 
659 
660 void FullCodeGenerator::SetFunctionPosition(FunctionLiteral* fun) {
661  CodeGenerator::RecordPositions(masm_, fun->start_position());
662 }
663 
664 
665 void FullCodeGenerator::SetReturnPosition(FunctionLiteral* fun) {
666  CodeGenerator::RecordPositions(masm_, fun->end_position() - 1);
667 }
668 
669 
670 void FullCodeGenerator::SetStatementPosition(Statement* stmt) {
671 #ifdef ENABLE_DEBUGGER_SUPPORT
672  if (!isolate()->debugger()->IsDebuggerActive()) {
673  CodeGenerator::RecordPositions(masm_, stmt->statement_pos());
674  } else {
675  // Check if the statement will be breakable without adding a debug break
676  // slot.
677  BreakableStatementChecker checker;
678  checker.Check(stmt);
679  // Record the statement position right here if the statement is not
680  // breakable. For breakable statements the actual recording of the
681  // position will be postponed to the breakable code (typically an IC).
682  bool position_recorded = CodeGenerator::RecordPositions(
683  masm_, stmt->statement_pos(), !checker.is_breakable());
684  // If the position recording did record a new position generate a debug
685  // break slot to make the statement breakable.
686  if (position_recorded) {
687  Debug::GenerateSlot(masm_);
688  }
689  }
690 #else
691  CodeGenerator::RecordPositions(masm_, stmt->statement_pos());
692 #endif
693 }
694 
695 
696 void FullCodeGenerator::SetExpressionPosition(Expression* expr, int pos) {
697 #ifdef ENABLE_DEBUGGER_SUPPORT
698  if (!isolate()->debugger()->IsDebuggerActive()) {
699  CodeGenerator::RecordPositions(masm_, pos);
700  } else {
701  // Check if the expression will be breakable without adding a debug break
702  // slot.
703  BreakableStatementChecker checker;
704  checker.Check(expr);
705  // Record a statement position right here if the expression is not
706  // breakable. For breakable expressions the actual recording of the
707  // position will be postponed to the breakable code (typically an IC).
708  // NOTE this will record a statement position for something which might
709  // not be a statement. As stepping in the debugger will only stop at
710  // statement positions this is used for e.g. the condition expression of
711  // a do while loop.
712  bool position_recorded = CodeGenerator::RecordPositions(
713  masm_, pos, !checker.is_breakable());
714  // If the position recording did record a new position generate a debug
715  // break slot to make the statement breakable.
716  if (position_recorded) {
717  Debug::GenerateSlot(masm_);
718  }
719  }
720 #else
721  CodeGenerator::RecordPositions(masm_, pos);
722 #endif
723 }
724 
725 
726 void FullCodeGenerator::SetStatementPosition(int pos) {
727  CodeGenerator::RecordPositions(masm_, pos);
728 }
729 
730 
731 void FullCodeGenerator::SetSourcePosition(int pos) {
732  if (pos != RelocInfo::kNoPosition) {
733  masm_->positions_recorder()->RecordPosition(pos);
734  }
735 }
736 
737 
738 // Lookup table for code generators for special runtime calls which are
739 // generated inline.
740 #define INLINE_FUNCTION_GENERATOR_ADDRESS(Name, argc, ressize) \
741  &FullCodeGenerator::Emit##Name,
742 
743 const FullCodeGenerator::InlineFunctionGenerator
744  FullCodeGenerator::kInlineFunctionGenerators[] = {
745  INLINE_FUNCTION_LIST(INLINE_FUNCTION_GENERATOR_ADDRESS)
746  INLINE_RUNTIME_FUNCTION_LIST(INLINE_FUNCTION_GENERATOR_ADDRESS)
747  };
748 #undef INLINE_FUNCTION_GENERATOR_ADDRESS
749 
750 
751 FullCodeGenerator::InlineFunctionGenerator
752  FullCodeGenerator::FindInlineFunctionGenerator(Runtime::FunctionId id) {
753  int lookup_index =
754  static_cast<int>(id) - static_cast<int>(Runtime::kFirstInlineFunction);
755  ASSERT(lookup_index >= 0);
756  ASSERT(static_cast<size_t>(lookup_index) <
757  ARRAY_SIZE(kInlineFunctionGenerators));
758  return kInlineFunctionGenerators[lookup_index];
759 }
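kInlineFunctionGenerators is a contiguous table of member-function pointers, so finding the generator for a runtime function id is a bounds-checked array lookup plus an indirect member call. A self-contained sketch of the same pattern with two made-up generators:

#include <cassert>
#include <cstddef>
#include <cstdio>

class Gen {
 public:
  void EmitIsSmi()   { printf("inline IsSmi code\n"); }
  void EmitIsArray() { printf("inline IsArray code\n"); }

  typedef void (Gen::*Generator)();

  void Dispatch(int id) {
    static const Generator kGenerators[] = {
      &Gen::EmitIsSmi,    // id 0
      &Gen::EmitIsArray,  // id 1
    };
    assert(id >= 0 && static_cast<size_t>(id) <
                          sizeof(kGenerators) / sizeof(kGenerators[0]));
    (this->*kGenerators[id])();  // indirect member-function call
  }
};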
760 
761 
762 void FullCodeGenerator::EmitInlineRuntimeCall(CallRuntime* expr) {
763  const Runtime::Function* function = expr->function();
764  ASSERT(function != NULL);
765  ASSERT(function->intrinsic_type == Runtime::INLINE);
766  InlineFunctionGenerator generator =
767  FindInlineFunctionGenerator(function->function_id);
768  ((*this).*(generator))(expr);
769 }
770 
771 
772 void FullCodeGenerator::VisitBinaryOperation(BinaryOperation* expr) {
773  switch (expr->op()) {
774  case Token::COMMA:
775  return VisitComma(expr);
776  case Token::OR:
777  case Token::AND:
778  return VisitLogicalExpression(expr);
779  default:
780  return VisitArithmeticExpression(expr);
781  }
782 }
783 
784 
785 void FullCodeGenerator::VisitInDuplicateContext(Expression* expr) {
786  if (context()->IsEffect()) {
787  VisitForEffect(expr);
788  } else if (context()->IsAccumulatorValue()) {
789  VisitForAccumulatorValue(expr);
790  } else if (context()->IsStackValue()) {
791  VisitForStackValue(expr);
792  } else if (context()->IsTest()) {
793  const TestContext* test = TestContext::cast(context());
794  VisitForControl(expr, test->true_label(), test->false_label(),
795  test->fall_through());
796  }
797 }
798 
799 
800 void FullCodeGenerator::VisitComma(BinaryOperation* expr) {
801  Comment cmnt(masm_, "[ Comma");
802  VisitForEffect(expr->left());
803  VisitInDuplicateContext(expr->right());
804 }
805 
806 
807 void FullCodeGenerator::VisitLogicalExpression(BinaryOperation* expr) {
808  bool is_logical_and = expr->op() == Token::AND;
809  Comment cmnt(masm_, is_logical_and ? "[ Logical AND" : "[ Logical OR");
810  Expression* left = expr->left();
811  Expression* right = expr->right();
812  BailoutId right_id = expr->RightId();
813  Label done;
814 
815  if (context()->IsTest()) {
816  Label eval_right;
817  const TestContext* test = TestContext::cast(context());
818  if (is_logical_and) {
819  VisitForControl(left, &eval_right, test->false_label(), &eval_right);
820  } else {
821  VisitForControl(left, test->true_label(), &eval_right, &eval_right);
822  }
823  PrepareForBailoutForId(right_id, NO_REGISTERS);
824  __ bind(&eval_right);
825 
826  } else if (context()->IsAccumulatorValue()) {
827  VisitForAccumulatorValue(left);
828  // We want the value in the accumulator for the test, and on the stack in
829  // case we need it.
830  __ push(result_register());
831  Label discard, restore;
832  if (is_logical_and) {
833  DoTest(left, &discard, &restore, &restore);
834  } else {
835  DoTest(left, &restore, &discard, &restore);
836  }
837  __ bind(&restore);
838  __ pop(result_register());
839  __ jmp(&done);
840  __ bind(&discard);
841  __ Drop(1);
842  PrepareForBailoutForId(right_id, NO_REGISTERS);
843 
844  } else if (context()->IsStackValue()) {
845  VisitForAccumulatorValue(left);
846  // We want the value in the accumulator for the test, and on the stack in
847  // case we need it.
848  __ push(result_register());
849  Label discard;
850  if (is_logical_and) {
851  DoTest(left, &discard, &done, &discard);
852  } else {
853  DoTest(left, &done, &discard, &discard);
854  }
855  __ bind(&discard);
856  __ Drop(1);
857  PrepareForBailoutForId(right_id, NO_REGISTERS);
858 
859  } else {
860  ASSERT(context()->IsEffect());
861  Label eval_right;
862  if (is_logical_and) {
863  VisitForControl(left, &eval_right, &done, &eval_right);
864  } else {
865  VisitForControl(left, &done, &eval_right, &eval_right);
866  }
867  PrepareForBailoutForId(right_id, NO_REGISTERS);
868  __ bind(&eval_right);
869  }
870 
871  VisitInDuplicateContext(right);
872  __ bind(&done);
873 }
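In a test context the logical operators never materialize a boolean: the left operand branches directly to the right operand's code or past it. A plain C++ stand-in for the control-flow shape of a && b with explicit true/false targets (hypothetical helper, not generated code):

// Test-context shape of a && b: a false left operand jumps straight
// to the false label; only a true left operand evaluates the right.
void AndInTestContext(bool (*a)(), bool (*b)(),
                      void (*if_true)(), void (*if_false)()) {
  if (!a()) {
    if_false();  // left decided the result; right never runs
    return;
  }
  if (b()) if_true(); else if_false();
}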
874 
875 
876 void FullCodeGenerator::VisitArithmeticExpression(BinaryOperation* expr) {
877  Token::Value op = expr->op();
878  Comment cmnt(masm_, "[ ArithmeticExpression");
879  Expression* left = expr->left();
880  Expression* right = expr->right();
881  OverwriteMode mode =
882  left->ResultOverwriteAllowed()
883  ? OVERWRITE_LEFT
884  : (right->ResultOverwriteAllowed() ? OVERWRITE_RIGHT : NO_OVERWRITE);
885 
886  VisitForStackValue(left);
887  VisitForAccumulatorValue(right);
888 
889  SetSourcePosition(expr->position());
890  if (ShouldInlineSmiCase(op)) {
891  EmitInlineSmiBinaryOp(expr, op, mode, left, right);
892  } else {
893  EmitBinaryOp(expr, op, mode);
894  }
895 }
896 
897 
898 void FullCodeGenerator::VisitBlock(Block* stmt) {
899  Comment cmnt(masm_, "[ Block");
900  NestedBlock nested_block(this, stmt);
901  SetStatementPosition(stmt);
902 
903  Scope* saved_scope = scope();
904  // Push a block context when entering a block with block scoped variables.
905  if (stmt->scope() != NULL) {
906  scope_ = stmt->scope();
907  if (scope_->is_module_scope()) {
908  // If this block is a module body, then we have already allocated and
909  // initialized the declarations earlier. Just push the context.
910  ASSERT(!scope_->interface()->Instance().is_null());
911  __ Push(scope_->interface()->Instance());
912  __ CallRuntime(Runtime::kPushModuleContext, 1);
913  StoreToFrameField(
914  StandardFrameConstants::kContextOffset, context_register());
915  } else {
916  { Comment cmnt(masm_, "[ Extend block context");
917  Handle<ScopeInfo> scope_info = scope_->GetScopeInfo();
918  int heap_slots =
919  scope_info->ContextLength() - Context::MIN_CONTEXT_SLOTS;
920  __ Push(scope_info);
921  PushFunctionArgumentForContextAllocation();
922  if (heap_slots <= FastNewBlockContextStub::kMaximumSlots) {
923  FastNewBlockContextStub stub(heap_slots);
924  __ CallStub(&stub);
925  } else {
926  __ CallRuntime(Runtime::kPushBlockContext, 2);
927  }
928 
929  // Replace the context stored in the frame.
930  StoreToFrameField(StandardFrameConstants::kContextOffset,
931  context_register());
932  }
933  { Comment cmnt(masm_, "[ Declarations");
934  VisitDeclarations(scope_->declarations());
935  }
936  }
937  }
938  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
939  VisitStatements(stmt->statements());
940  scope_ = saved_scope;
941  __ bind(nested_block.break_label());
942  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
943 
944  // Pop block context if necessary.
945  if (stmt->scope() != NULL) {
946  LoadContextField(context_register(), Context::PREVIOUS_INDEX);
947  // Update local stack frame context field.
948  StoreToFrameField(StandardFrameConstants::kContextOffset,
949  context_register());
950  }
951 }
952 
953 
954 void FullCodeGenerator::VisitExpressionStatement(ExpressionStatement* stmt) {
955  Comment cmnt(masm_, "[ ExpressionStatement");
956  SetStatementPosition(stmt);
957  VisitForEffect(stmt->expression());
958 }
959 
960 
961 void FullCodeGenerator::VisitEmptyStatement(EmptyStatement* stmt) {
962  Comment cmnt(masm_, "[ EmptyStatement");
963  SetStatementPosition(stmt);
964 }
965 
966 
967 void FullCodeGenerator::VisitIfStatement(IfStatement* stmt) {
968  Comment cmnt(masm_, "[ IfStatement");
969  SetStatementPosition(stmt);
970  Label then_part, else_part, done;
971 
972  if (stmt->HasElseStatement()) {
973  VisitForControl(stmt->condition(), &then_part, &else_part, &then_part);
974  PrepareForBailoutForId(stmt->ThenId(), NO_REGISTERS);
975  __ bind(&then_part);
976  Visit(stmt->then_statement());
977  __ jmp(&done);
978 
979  PrepareForBailoutForId(stmt->ElseId(), NO_REGISTERS);
980  __ bind(&else_part);
981  Visit(stmt->else_statement());
982  } else {
983  VisitForControl(stmt->condition(), &then_part, &done, &then_part);
984  PrepareForBailoutForId(stmt->ThenId(), NO_REGISTERS);
985  __ bind(&then_part);
986  Visit(stmt->then_statement());
987 
988  PrepareForBailoutForId(stmt->ElseId(), NO_REGISTERS);
989  }
990  __ bind(&done);
991  PrepareForBailoutForId(stmt->IfId(), NO_REGISTERS);
992 }
993 
994 
995 void FullCodeGenerator::VisitContinueStatement(ContinueStatement* stmt) {
996  Comment cmnt(masm_, "[ ContinueStatement");
997  SetStatementPosition(stmt);
998  NestedStatement* current = nesting_stack_;
999  int stack_depth = 0;
1000  int context_length = 0;
1001  // When continuing, we clobber the unpredictable value in the accumulator
1002  // with one that's safe for GC. If we hit an exit from the try block of
1003  // try...finally on our way out, we will unconditionally preserve the
1004  // accumulator on the stack.
1005  ClearAccumulator();
1006  while (!current->IsContinueTarget(stmt->target())) {
1007  current = current->Exit(&stack_depth, &context_length);
1008  }
1009  __ Drop(stack_depth);
1010  if (context_length > 0) {
1011  while (context_length > 0) {
1012  LoadContextField(context_register(), Context::PREVIOUS_INDEX);
1013  --context_length;
1014  }
1015  StoreToFrameField(StandardFrameConstants::kContextOffset,
1016  context_register());
1017  }
1018 
1019  __ jmp(current->AsIteration()->continue_label());
1020 }
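Break and continue unwind by walking outward along the chain of enclosing constructs, summing the stack slots and contexts each one contributes, and then dropping them all at once before the jump. A reduced model of that walk:

#include <cstdio>

struct Nested {
  Nested* previous;  // next-outer construct
  int stack_slots;   // values this construct keeps on the stack
  int contexts;      // contexts this construct pushed
  bool is_target;    // true for the loop/block being jumped to
};

// Mirrors the Exit() loop above: accumulate what must be unwound
// between the jump site and its target.
void Unwind(Nested* innermost) {
  int stack_depth = 0;
  int context_length = 0;
  for (Nested* n = innermost; n != NULL && !n->is_target; n = n->previous) {
    stack_depth += n->stack_slots;
    context_length += n->contexts;
  }
  printf("drop %d stack slots, pop %d contexts\n",
         stack_depth, context_length);
}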
1021 
1022 
1023 void FullCodeGenerator::VisitBreakStatement(BreakStatement* stmt) {
1024  Comment cmnt(masm_, "[ BreakStatement");
1025  SetStatementPosition(stmt);
1026  NestedStatement* current = nesting_stack_;
1027  int stack_depth = 0;
1028  int context_length = 0;
1029  // When breaking, we clobber the unpredictable value in the accumulator
1030  // with one that's safe for GC. If we hit an exit from the try block of
1031  // try...finally on our way out, we will unconditionally preserve the
1032  // accumulator on the stack.
1033  ClearAccumulator();
1034  while (!current->IsBreakTarget(stmt->target())) {
1035  current = current->Exit(&stack_depth, &context_length);
1036  }
1037  __ Drop(stack_depth);
1038  if (context_length > 0) {
1039  while (context_length > 0) {
1040  LoadContextField(context_register(), Context::PREVIOUS_INDEX);
1041  --context_length;
1042  }
1043  StoreToFrameField(StandardFrameConstants::kContextOffset,
1044  context_register());
1045  }
1046 
1047  __ jmp(current->AsBreakable()->break_label());
1048 }
1049 
1050 
1051 void FullCodeGenerator::VisitReturnStatement(ReturnStatement* stmt) {
1052  Comment cmnt(masm_, "[ ReturnStatement");
1053  SetStatementPosition(stmt);
1054  Expression* expr = stmt->expression();
1055  VisitForAccumulatorValue(expr);
1056 
1057  // Exit all nested statements.
1058  NestedStatement* current = nesting_stack_;
1059  int stack_depth = 0;
1060  int context_length = 0;
1061  while (current != NULL) {
1062  current = current->Exit(&stack_depth, &context_length);
1063  }
1064  __ Drop(stack_depth);
1065 
1066  EmitReturnSequence();
1067 }
1068 
1069 
1070 void FullCodeGenerator::VisitWithStatement(WithStatement* stmt) {
1071  Comment cmnt(masm_, "[ WithStatement");
1072  SetStatementPosition(stmt);
1073 
1074  VisitForStackValue(stmt->expression());
1075  PushFunctionArgumentForContextAllocation();
1076  __ CallRuntime(Runtime::kPushWithContext, 2);
1077  StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
1078 
1079  { WithOrCatch body(this);
1080  Visit(stmt->statement());
1081  }
1082 
1083  // Pop context.
1084  LoadContextField(context_register(), Context::PREVIOUS_INDEX);
1085  // Update local stack frame context field.
1086  StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
1087 }
1088 
1089 
1090 void FullCodeGenerator::VisitDoWhileStatement(DoWhileStatement* stmt) {
1091  Comment cmnt(masm_, "[ DoWhileStatement");
1092  SetStatementPosition(stmt);
1093  Label body, stack_check;
1094 
1095  Iteration loop_statement(this, stmt);
1096  increment_loop_depth();
1097 
1098  __ bind(&body);
1099  Visit(stmt->body());
1100 
1101  // Record the position of the do while condition and make sure it is
1102  // possible to break on the condition.
1103  __ bind(loop_statement.continue_label());
1104  PrepareForBailoutForId(stmt->ContinueId(), NO_REGISTERS);
1105  SetExpressionPosition(stmt->cond(), stmt->condition_position());
1106  VisitForControl(stmt->cond(),
1107  &stack_check,
1108  loop_statement.break_label(),
1109  &stack_check);
1110 
1111  // Check stack before looping.
1112  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
1113  __ bind(&stack_check);
1114  EmitStackCheck(stmt, &body);
1115  __ jmp(&body);
1116 
1117  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1118  __ bind(loop_statement.break_label());
1119  decrement_loop_depth();
1120 }
1121 
1122 
1123 void FullCodeGenerator::VisitWhileStatement(WhileStatement* stmt) {
1124  Comment cmnt(masm_, "[ WhileStatement");
1125  Label test, body;
1126 
1127  Iteration loop_statement(this, stmt);
1128  increment_loop_depth();
1129 
1130  // Emit the test at the bottom of the loop.
1131  __ jmp(&test);
1132 
1133  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1134  __ bind(&body);
1135  Visit(stmt->body());
1136 
1137  // Emit the statement position here as this is where the while
1138  // statement code starts.
1139  __ bind(loop_statement.continue_label());
1140  SetStatementPosition(stmt);
1141 
1142  // Check stack before looping.
1143  EmitStackCheck(stmt, &body);
1144 
1145  __ bind(&test);
1146  VisitForControl(stmt->cond(),
1147  &body,
1148  loop_statement.break_label(),
1149  loop_statement.break_label());
1150 
1151  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1152  __ bind(loop_statement.break_label());
1153  decrement_loop_depth();
1154 }
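Placing the condition test at the bottom means each iteration executes exactly one conditional branch, instead of a conditional exit at the top plus an unconditional jump at the bottom. The emitted shape, rendered with gotos (illustrative only):

// Shape of the generated while loop; the stack check sits on the
// back edge so long-running loops can be interrupted.
void WhileShape(bool (*cond)(), void (*body)()) {
  goto test;  // one-time jump over the body
 loop:
  body();
  /* stack check here */
 test:
  if (cond()) goto loop;  // single branch per iteration
}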
1155 
1156 
1157 void FullCodeGenerator::VisitForStatement(ForStatement* stmt) {
1158  Comment cmnt(masm_, "[ ForStatement");
1159  Label test, body;
1160 
1161  Iteration loop_statement(this, stmt);
1162 
1163  // Set statement position for a break slot before entering the for-body.
1164  SetStatementPosition(stmt);
1165 
1166  if (stmt->init() != NULL) {
1167  Visit(stmt->init());
1168  }
1169 
1170  increment_loop_depth();
1171  // Emit the test at the bottom of the loop (even if empty).
1172  __ jmp(&test);
1173 
1174  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1175  __ bind(&body);
1176  Visit(stmt->body());
1177 
1178  PrepareForBailoutForId(stmt->ContinueId(), NO_REGISTERS);
1179  __ bind(loop_statement.continue_label());
1180  if (stmt->next() != NULL) {
1181  Visit(stmt->next());
1182  }
1183 
1184  // Emit the statement position here as this is where the for
1185  // statement code starts.
1186  SetStatementPosition(stmt);
1187 
1188  // Check stack before looping.
1189  EmitStackCheck(stmt, &body);
1190 
1191  __ bind(&test);
1192  if (stmt->cond() != NULL) {
1193  VisitForControl(stmt->cond(),
1194  &body,
1195  loop_statement.break_label(),
1196  loop_statement.break_label());
1197  } else {
1198  __ jmp(&body);
1199  }
1200 
1201  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1202  __ bind(loop_statement.break_label());
1203  decrement_loop_depth();
1204 }
1205 
1206 
1207 void FullCodeGenerator::VisitTryCatchStatement(TryCatchStatement* stmt) {
1208  Comment cmnt(masm_, "[ TryCatchStatement");
1209  SetStatementPosition(stmt);
1210  // The try block adds a handler to the exception handler chain before
1211  // entering, and removes it again when exiting normally. If an exception
1212  // is thrown during execution of the try block, the handler is consumed
1213  // and control is passed to the catch block with the exception in the
1214  // result register.
1215 
1216  Label try_entry, handler_entry, exit;
1217  __ jmp(&try_entry);
1218  __ bind(&handler_entry);
1219  handler_table()->set(stmt->index(), Smi::FromInt(handler_entry.pos()));
1220  // Exception handler code, the exception is in the result register.
1221  // Extend the context before executing the catch block.
1222  { Comment cmnt(masm_, "[ Extend catch context");
1223  __ Push(stmt->variable()->name());
1224  __ push(result_register());
1225  PushFunctionArgumentForContextAllocation();
1226  __ CallRuntime(Runtime::kPushCatchContext, 3);
1227  StoreToFrameField(StandardFrameConstants::kContextOffset,
1228  context_register());
1229  }
1230 
1231  Scope* saved_scope = scope();
1232  scope_ = stmt->scope();
1233  ASSERT(scope_->declarations()->is_empty());
1234  { WithOrCatch catch_body(this);
1235  Visit(stmt->catch_block());
1236  }
1237  // Restore the context.
1238  LoadContextField(context_register(), Context::PREVIOUS_INDEX);
1239  StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
1240  scope_ = saved_scope;
1241  __ jmp(&exit);
1242 
1243  // Try block code. Sets up the exception handler chain.
1244  __ bind(&try_entry);
1245  __ PushTryHandler(StackHandler::CATCH, stmt->index());
1246  { TryCatch try_body(this);
1247  Visit(stmt->try_block());
1248  }
1249  __ PopTryHandler();
1250  __ bind(&exit);
1251 }
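handler_table() is a per-function array indexed by the statement's handler index; each slot records the code offset of the corresponding handler so the runtime can locate the catch code while unwinding. A minimal stand-in using a plain vector instead of a Smi-filled FixedArray:

#include <vector>

// Slot stmt_index holds the code offset of that try statement's handler.
void RecordHandler(std::vector<int>* handler_table,
                   int stmt_index, int handler_pc_offset) {
  if (static_cast<size_t>(stmt_index) >= handler_table->size()) {
    handler_table->resize(stmt_index + 1);
  }
  (*handler_table)[stmt_index] = handler_pc_offset;
}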
1252 
1253 
1254 void FullCodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
1255  Comment cmnt(masm_, "[ TryFinallyStatement");
1256  SetStatementPosition(stmt);
1257  // Try finally is compiled by setting up a try-handler on the stack while
1258  // executing the try body, and removing it again afterwards.
1259  //
1260  // The try-finally construct can enter the finally block in three ways:
1261  // 1. By exiting the try-block normally. This removes the try-handler and
1262  // calls the finally block code before continuing.
1263  // 2. By exiting the try-block with a function-local control flow transfer
1264  // (break/continue/return). The site of the, e.g., break removes the
1265  // try handler and calls the finally block code before continuing
1266  // its outward control transfer.
1267  // 3. By exiting the try-block with a thrown exception.
1268  // This can happen in nested function calls. It traverses the try-handler
1269  // chain and consumes the try-handler entry before jumping to the
1270  // handler code. The handler code then calls the finally-block before
1271  // rethrowing the exception.
1272  //
1273  // The finally block must assume a return address on top of the stack
1274  // (or in the link register on ARM chips) and a value (return value or
1275  // exception) in the result register (rax/eax/r0), both of which must
1276  // be preserved. The return address isn't GC-safe, so it should be
1277  // cooked before GC.
1278  Label try_entry, handler_entry, finally_entry;
1279 
1280  // Jump to try-handler setup and try-block code.
1281  __ jmp(&try_entry);
1282  __ bind(&handler_entry);
1283  handler_table()->set(stmt->index(), Smi::FromInt(handler_entry.pos()));
1284  // Exception handler code. This code is only executed when an exception
1285  // is thrown. The exception is in the result register, and must be
1286  // preserved by the finally block. Call the finally block and then
1287  // rethrow the exception if it returns.
1288  __ Call(&finally_entry);
1289  __ push(result_register());
1290  __ CallRuntime(Runtime::kReThrow, 1);
1291 
1292  // Finally block implementation.
1293  __ bind(&finally_entry);
1294  EnterFinallyBlock();
1295  { Finally finally_body(this);
1296  Visit(stmt->finally_block());
1297  }
1298  ExitFinallyBlock(); // Return to the calling code.
1299 
1300  // Set up try handler.
1301  __ bind(&try_entry);
1302  __ PushTryHandler(StackHandler::FINALLY, stmt->index());
1303  { TryFinally try_body(this, &finally_entry);
1304  Visit(stmt->try_block());
1305  }
1306  __ PopTryHandler();
1307  // Execute the finally block on the way out. Clobber the unpredictable
1308  // value in the result register with one that's safe for GC because the
1309  // finally block will unconditionally preserve the result register on the
1310  // stack.
1311  ClearAccumulator();
1312  __ Call(&finally_entry);
1313 }
1314 
1315 
1316 void FullCodeGenerator::VisitDebuggerStatement(DebuggerStatement* stmt) {
1317 #ifdef ENABLE_DEBUGGER_SUPPORT
1318  Comment cmnt(masm_, "[ DebuggerStatement");
1319  SetStatementPosition(stmt);
1320 
1321  __ DebugBreak();
1322  // Ignore the return value.
1323 #endif
1324 }
1325 
1326 
1327 void FullCodeGenerator::VisitConditional(Conditional* expr) {
1328  Comment cmnt(masm_, "[ Conditional");
1329  Label true_case, false_case, done;
1330  VisitForControl(expr->condition(), &true_case, &false_case, &true_case);
1331 
1332  PrepareForBailoutForId(expr->ThenId(), NO_REGISTERS);
1333  __ bind(&true_case);
1334  SetExpressionPosition(expr->then_expression(),
1335  expr->then_expression_position());
1336  if (context()->IsTest()) {
1337  const TestContext* for_test = TestContext::cast(context());
1338  VisitForControl(expr->then_expression(),
1339  for_test->true_label(),
1340  for_test->false_label(),
1341  NULL);
1342  } else {
1343  VisitInDuplicateContext(expr->then_expression());
1344  __ jmp(&done);
1345  }
1346 
1347  PrepareForBailoutForId(expr->ElseId(), NO_REGISTERS);
1348  __ bind(&false_case);
1349  SetExpressionPosition(expr->else_expression(),
1350  expr->else_expression_position());
1351  VisitInDuplicateContext(expr->else_expression());
1352  // If control flow falls through Visit, merge it with true case here.
1353  if (!context()->IsTest()) {
1354  __ bind(&done);
1355  }
1356 }
1357 
1358 
1359 void FullCodeGenerator::VisitLiteral(Literal* expr) {
1360  Comment cmnt(masm_, "[ Literal");
1361  context()->Plug(expr->handle());
1362 }
1363 
1364 
1365 void FullCodeGenerator::VisitFunctionLiteral(FunctionLiteral* expr) {
1366  Comment cmnt(masm_, "[ FunctionLiteral");
1367 
1368  // Build the function boilerplate and instantiate it.
1369  Handle<SharedFunctionInfo> function_info =
1370  Compiler::BuildFunctionInfo(expr, script());
1371  if (function_info.is_null()) {
1372  SetStackOverflow();
1373  return;
1374  }
1375  EmitNewClosure(function_info, expr->pretenure());
1376 }
1377 
1378 
1379 void FullCodeGenerator::VisitSharedFunctionInfoLiteral(
1380  SharedFunctionInfoLiteral* expr) {
1381  Comment cmnt(masm_, "[ SharedFunctionInfoLiteral");
1382  EmitNewClosure(expr->shared_function_info(), false);
1383 }
1384 
1385 
1386 void FullCodeGenerator::VisitThrow(Throw* expr) {
1387  Comment cmnt(masm_, "[ Throw");
1388  VisitForStackValue(expr->exception());
1389  __ CallRuntime(Runtime::kThrow, 1);
1390  // Never returns here.
1391 }
1392 
1393 
1394 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryCatch::Exit(
1395  int* stack_depth,
1396  int* context_length) {
1397  // The macros used here must preserve the result register.
1398  __ Drop(*stack_depth);
1399  __ PopTryHandler();
1400  *stack_depth = 0;
1401  return previous_;
1402 }
1403 
1404 
1405 bool FullCodeGenerator::TryLiteralCompare(CompareOperation* expr) {
1406  Expression* sub_expr;
1407  Handle<String> check;
1408  if (expr->IsLiteralCompareTypeof(&sub_expr, &check)) {
1409  EmitLiteralCompareTypeof(expr, sub_expr, check);
1410  return true;
1411  }
1412 
1413  if (expr->IsLiteralCompareUndefined(&sub_expr)) {
1414  EmitLiteralCompareNil(expr, sub_expr, kUndefinedValue);
1415  return true;
1416  }
1417 
1418  if (expr->IsLiteralCompareNull(&sub_expr)) {
1419  EmitLiteralCompareNil(expr, sub_expr, kNullValue);
1420  return true;
1421  }
1422 
1423  return false;
1424 }
1425 
1426 
1427 #undef __
1428 
1429 
1430 } } // namespace v8::internal
bool FLAG_enable_slow_asserts
#define INLINE_FUNCTION_LIST(F)
Definition: runtime.h:503
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 enable use of VFP2 instructions if available enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of MIPS FPU instructions if expose natives in global object expose gc extension number of stack frames to capture disable builtin natives files print a stack trace if an assertion failure occurs use random jit cookie to mask large constants trace lazy optimization use adaptive optimizations prepare for turning on always opt minimum length for automatic enable preparsing maximum number of optimization attempts before giving up cache prototype transitions automatically set the debug break flag when debugger commands are in the queue always cause a debug break before aborting maximum length of function source code printed in a stack trace max size of the new max size of the old max size of executable always perform global GCs print one trace line following each garbage collection do not print trace line after scavenger collection print more details following each garbage collection print amount of external allocated memory after each time it is adjusted flush code that we expect not to use again before full gc do incremental marking steps track object counts and memory usage use caching Perform compaction on every full GC Never perform compaction on full GC testing only Compact code space on full incremental collections Default seed for initializing random allows verbose printing trace parsing and preparsing Check icache flushes in ARM and MIPS simulator Stack alingment in bytes in print stack trace when throwing exceptions randomize hashes to avoid predictable hash Fixed seed to use to hash property activate a timer that switches between V8 threads testing_bool_flag float flag Seed used for threading test randomness A filename with extra code to be included in 
the Print usage including on console Map counters to a file Enable debugger compile events enable GDBJIT interface(disables compacting GC)") DEFINE_bool(gdbjit_full
void PrintF(const char *format,...)
Definition: v8utils.cc:40
bool HasDeoptimizationSupport() const
Definition: compiler.h:159
static TypeFeedbackInfo * cast(Object *obj)
static String * cast(Object *obj)
Handle< ScopeInfo > GetScopeInfo()
Definition: scopes.cc:754
void SetCode(Handle< Code > code)
Definition: compiler.h:114
static uint32_t encode(Statevalue)
Definition: utils.h:262
static Smi * FromInt(int value)
Definition: objects-inl.h:981
static bool MakeCode(CompilationInfo *info)
const int KB
Definition: globals.h:207
Handle< Script > script() const
Definition: compiler.h:72
static Handle< T > cast(Handle< S > that)
Definition: handles.h:81
T & at(int i) const
Definition: list.h:90
static bool enabled()
Definition: serialize.h:481
#define ASSERT(condition)
Definition: checks.h:270
void set_predictable_code_size(bool value)
bool IsOptimizable() const
Definition: compiler.h:151
const int kIntSize
Definition: globals.h:217
static void PrintCode(Handle< Code > code, CompilationInfo *info)
Definition: codegen.cc:115
bool AllowsLazyCompilation() const
Definition: scopes.cc:725
#define UNREACHABLE()
Definition: checks.h:50
static bool IsValid(intptr_t value)
Definition: objects-inl.h:1059
FunctionLiteral * function() const
Definition: compiler.h:66
static TestContext * cast(AstContext *context)
Definition: hydrogen.h:721
void set_emit_debug_code(bool value)
static Flags ComputeFlags(Kind kind, InlineCacheState ic_state=UNINITIALIZED, ExtraICState extra_ic_state=kNoExtraICState, StubType type=NORMAL, int argc=-1, InlineCacheHolderFlag holder=OWN_MAP)
Definition: objects-inl.h:3491
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 enable use of VFP2 instructions if available enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of MIPS FPU instructions if expose natives in global object expose gc extension number of stack frames to capture disable builtin natives files print a stack trace if an assertion failure occurs use random jit cookie to mask large constants trace lazy optimization use adaptive optimizations prepare for turning on always opt minimum length for automatic enable preparsing maximum number of optimization attempts before giving up cache prototype transitions automatically set the debug break flag when debugger commands are in the queue always cause a debug break before aborting maximum length of function source code printed in a stack trace max size of the new max size of the old max size of executable always perform global GCs print one trace line following each garbage collection do not print trace line after scavenger collection print more details following each garbage collection print amount of external allocated memory after each time it is adjusted flush code that we expect not to use again before full gc do incremental marking steps track object counts and memory usage use caching Perform compaction on every full GC Never perform compaction on full GC testing only Compact code space on full incremental collections Default seed for initializing random generator(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer
bool has_pending_exception()
Definition: isolate.h:561
#define GDBJIT(action)
Definition: gdb-jit.h:141
Scope * scope() const
Definition: ast.h:1953
Interface * interface() const
Definition: scopes.h:351
static bool HaveASnapshotToStartFrom()
#define INLINE_FUNCTION_GENERATOR_ADDRESS(Name, argc, ressize)
bool is_null() const
Definition: handles.h:87
static bool RecordPositions(MacroAssembler *masm, int pos, bool right_here=false)
Definition: codegen.cc:168
Handle< JSModule > Instance()
Definition: interface.h:141
static Handle< SharedFunctionInfo > BuildFunctionInfo(FunctionLiteral *node, Handle< Script > script)
Definition: compiler.cc:926
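BuildFunctionInfo compiles the shared function info for a nested function literal and returns a null handle on failure, which is where is_null() above comes in. A hedged sketch of the caller-side guard (SetStackOverflow() is an assumed bail-out helper):

// Compile info for a nested literal; a null handle signals failure and
// code generation for the enclosing function must bail out.
Handle<SharedFunctionInfo> function_info =
    Compiler::BuildFunctionInfo(expr, script());
if (function_info.is_null()) {
  SetStackOverflow();  // assumed helper: mark bailout, emit nothing
  return;
}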
Counters * counters()
Definition: isolate.h:819
AstProperties::Flags * flags()
Definition: ast.h:2011
#define __
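The bare __ macro is the conventional V8 assembler shorthand: full-codegen defines it through ACCESS_MASM so emission sites read like an assembly listing. A sketch of the expansion:

// ACCESS_MASM(masm) resolves to masm->, so __ becomes masm_->
#define ACCESS_MASM(masm) masm->
#define __ ACCESS_MASM(masm_)

// At an emission site:
//   __ push(eax);   // expands to masm_->push(eax);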
static int LengthOfFixedArray(int cell_count)
Definition: objects.h:4143
PositionsRecorder * positions_recorder()
void Add(const T &element, AllocationPolicy allocator=AllocationPolicy())
Definition: list-inl.h:38
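Add appends an element to a List, growing the backing store through the supplied allocation policy; ZoneList<T> instantiates it with a zone-based policy. A hedged fragment in the shape declaration handling in this file uses to accumulate global declarations (globals_ and zone() are assumed members of the code generator):

// Append a (name, value) pair describing one global declaration; any
// reallocation of the backing store goes through the zone policy.
globals_->Add(variable->name(), zone());
globals_->Add(function, zone());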
NULL
Definition: flags.cc:301
static bool is_valid(LanguageMode value)
Definition: utils.h:257
DEFINE_bool(code_comments)
#define INLINE_RUNTIME_FUNCTION_LIST(F)
Definition: runtime.h:541
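INLINE_RUNTIME_FUNCTION_LIST(F) is an X-macro: it applies F to every (Name, argc, ressize) tuple in the list, and full-codegen instantiates it with INLINE_FUNCTION_GENERATOR_ADDRESS (above) to stamp out its table of inline-function generators. A self-contained sketch of the pattern with illustrative names (DEMO_FUNCTION_LIST, ENTRY_NAME, and kNames are invented for the example):

#include <cstdio>
#include <cstddef>

// The list macro applies F to each tuple; different definitions of F
// generate enums, tables, or declarations from the same single source.
#define DEMO_FUNCTION_LIST(F) \
  F(IsSmi, 1, 1)              \
  F(IsArray, 1, 1)            \
  F(ClassOf, 1, 1)

// One instantiation: a name table with one entry per tuple.
#define ENTRY_NAME(Name, argc, ressize) #Name,
static const char* kNames[] = { DEMO_FUNCTION_LIST(ENTRY_NAME) };
#undef ENTRY_NAME

int main() {
  for (std::size_t i = 0; i < sizeof(kNames) / sizeof(kNames[0]); ++i)
    std::printf("%s\n", kNames[i]);
  return 0;
}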
static Handle< Code > MakeCodeEpilogue(MacroAssembler *masm, Code::Flags flags, CompilationInfo *info)
Definition: codegen.cc:96
Print usage including flags
Definition: flags.cc:495
bool is_module_scope() const
Definition: scopes.h:277
void check(i::Vector< const char > string)
static void MakeCodePrologue(CompilationInfo *info)
Definition: codegen.cc:61
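MakeCodePrologue and MakeCodeEpilogue (codegen.cc) bracket the actual generation: the prologue runs pre-generation tracing hooks, and the epilogue packages the assembler buffer into a Code object. A condensed, hedged sketch of the MakeCode shape tying several of these entries together (details assumed, not verbatim source):

bool FullCodeGenerator::MakeCode(CompilationInfo* info) {
  Isolate* isolate = info->isolate();
  MacroAssembler masm(isolate, NULL, kInitialBufferSize);

  MakeCodePrologue(info);               // tracing / printing hooks

  FullCodeGenerator cgen(&masm, info);
  cgen.Generate();                      // walk the AST, emit unoptimized code
  if (cgen.HasStackOverflow()) {
    ASSERT(!isolate->has_pending_exception());
    return false;                       // bailout: no exception is pending
  }

  Code::Flags flags = Code::ComputeFlags(Code::FUNCTION);
  Handle<Code> code =
      CodeGenerator::MakeCodeEpilogue(&masm, flags, info);
  info->SetCode(code);                  // hand the finished Code back
  return !code.is_null();
}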
#define ARRAY_SIZE(a)
Definition: globals.h:281
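ARRAY_SIZE(a) computes the element count of a true C array; V8-era code bases define it with a guard that turns some pointer-instead-of-array mistakes into a compile-time error. A self-contained sketch of that common definition (the shape is typical of globals.h, not quoted from it):

#include <cstdio>
#include <cstddef>

// The second factor becomes 0 (making the division ill-formed) whenever
// sizeof(a) is not an exact multiple of the element size, which catches
// some, though not all, pointer arguments.
#define ARRAY_SIZE(a)                           \
  ((sizeof(a) / sizeof(*(a))) /                 \
   static_cast<std::size_t>(!(sizeof(a) % sizeof(*(a)))))

int main() {
  int table[7] = {0};
  std::printf("%u\n", static_cast<unsigned>(ARRAY_SIZE(table)));  // prints 7
  return 0;
}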
ZoneList< Declaration * > * declarations()
Definition: scopes.h:342
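declarations() hands back the scope's parser-collected Declaration nodes as a ZoneList, and declaration processing walks it by index. A hedged fragment of that loop (assumed context inside an AST visitor):

// Visit every declaration the parser gathered for this scope.
ZoneList<Declaration*>* decls = scope()->declarations();
for (int i = 0; i < decls->length(); i++) {
  Visit(decls->at(i));
}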