void BreakableStatementChecker::VisitVariableDeclaration(
    VariableDeclaration* decl) {
}

void BreakableStatementChecker::VisitFunctionDeclaration(
    FunctionDeclaration* decl) {
}

void BreakableStatementChecker::VisitModuleDeclaration(
    ModuleDeclaration* decl) {
}

void BreakableStatementChecker::VisitImportDeclaration(
    ImportDeclaration* decl) {
}

void BreakableStatementChecker::VisitExportDeclaration(
    ExportDeclaration* decl) {
}

void BreakableStatementChecker::VisitModuleLiteral(ModuleLiteral* module) {
}

void BreakableStatementChecker::VisitModuleVariable(ModuleVariable* module) {
}

void BreakableStatementChecker::VisitModulePath(ModulePath* module) {
}

void BreakableStatementChecker::VisitModuleUrl(ModuleUrl* module) {
}

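// Declarations and module constructs are never breakable.  The visitors
// below mark a statement as breakable either by setting is_breakable_
// directly or by visiting a subexpression that may itself be breakable.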
void BreakableStatementChecker::VisitBlock(Block* stmt) {
}

void BreakableStatementChecker::VisitExpressionStatement(
    ExpressionStatement* stmt) {
  // Check if expression is breakable.
  Visit(stmt->expression());
}

void BreakableStatementChecker::VisitEmptyStatement(EmptyStatement* stmt) {
}

void BreakableStatementChecker::VisitIfStatement(IfStatement* stmt) {
  // If the condition is breakable the if statement is breakable.
  Visit(stmt->condition());
}

void BreakableStatementChecker::VisitContinueStatement(
    ContinueStatement* stmt) {
}

void BreakableStatementChecker::VisitBreakStatement(BreakStatement* stmt) {
}

void BreakableStatementChecker::VisitReturnStatement(ReturnStatement* stmt) {
  // Return is breakable if the expression is.
  Visit(stmt->expression());
}

void BreakableStatementChecker::VisitWithStatement(WithStatement* stmt) {
  Visit(stmt->expression());
}

void BreakableStatementChecker::VisitSwitchStatement(SwitchStatement* stmt) {
  // Switch statements breakable if the tag expression is.
  Visit(stmt->tag());
}

void BreakableStatementChecker::VisitDoWhileStatement(DoWhileStatement* stmt) {
  // Mark do-while statements breakable to avoid adding a break slot in front
  // of them.
  is_breakable_ = true;
}

void BreakableStatementChecker::VisitWhileStatement(WhileStatement* stmt) {
  // Mark while statements breakable if the condition expression is.
  Visit(stmt->cond());
}

void BreakableStatementChecker::VisitForStatement(ForStatement* stmt) {
  // Mark for statements breakable if the condition expression is.
  if (stmt->cond() != NULL) {
    Visit(stmt->cond());
  }
}

void BreakableStatementChecker::VisitForInStatement(ForInStatement* stmt) {
  // Mark for-in statements breakable if the enumerable expression is.
  Visit(stmt->enumerable());
}

void BreakableStatementChecker::VisitTryCatchStatement(
    TryCatchStatement* stmt) {
  // Mark try-catch as breakable to avoid adding a break slot in front of it.
  is_breakable_ = true;
}

void BreakableStatementChecker::VisitTryFinallyStatement(
    TryFinallyStatement* stmt) {
  // Mark try-finally as breakable to avoid adding a break slot in front of
  // it.
  is_breakable_ = true;
}

void BreakableStatementChecker::VisitDebuggerStatement(
    DebuggerStatement* stmt) {
  // The debugger statement is breakable.
  is_breakable_ = true;
}

void BreakableStatementChecker::VisitFunctionLiteral(FunctionLiteral* expr) {
}

void BreakableStatementChecker::VisitSharedFunctionInfoLiteral(
    SharedFunctionInfoLiteral* expr) {
}

void BreakableStatementChecker::VisitConditional(Conditional* expr) {
}

void BreakableStatementChecker::VisitVariableProxy(VariableProxy* expr) {
}

void BreakableStatementChecker::VisitLiteral(Literal* expr) {
}

void BreakableStatementChecker::VisitRegExpLiteral(RegExpLiteral* expr) {
}

void BreakableStatementChecker::VisitObjectLiteral(ObjectLiteral* expr) {
}

void BreakableStatementChecker::VisitArrayLiteral(ArrayLiteral* expr) {
}

void BreakableStatementChecker::VisitAssignment(Assignment* expr) {
  // If assigning to a property (including a global property) the assignment
  // is breakable.
  VariableProxy* proxy = expr->target()->AsVariableProxy();
  Property* prop = expr->target()->AsProperty();
  if (prop != NULL || (proxy != NULL && proxy->var()->IsUnallocated())) {
    is_breakable_ = true;
    return;
  }

  // Otherwise the assignment is breakable if the assigned value is.
  Visit(expr->value());
}

void BreakableStatementChecker::VisitThrow(Throw* expr) {
  // Throw is breakable if the expression is.
  Visit(expr->exception());
}

void BreakableStatementChecker::VisitProperty(Property* expr) {
  // Property load is breakable.
  is_breakable_ = true;
}

void BreakableStatementChecker::VisitCall(Call* expr) {
  // Function calls both through IC and call stub are breakable.
  is_breakable_ = true;
}

void BreakableStatementChecker::VisitCallNew(CallNew* expr) {
  // Function calls through new are breakable.
  is_breakable_ = true;
}

void BreakableStatementChecker::VisitCallRuntime(CallRuntime* expr) {
}

void BreakableStatementChecker::VisitUnaryOperation(UnaryOperation* expr) {
  Visit(expr->expression());
}

void BreakableStatementChecker::VisitCountOperation(CountOperation* expr) {
  Visit(expr->expression());
}

void BreakableStatementChecker::VisitBinaryOperation(BinaryOperation* expr) {
  Visit(expr->left());
  if (expr->op() != Token::AND && expr->op() != Token::OR) {
    Visit(expr->right());
  }
}

void BreakableStatementChecker::VisitCompareOperation(CompareOperation* expr) {
  Visit(expr->left());
  Visit(expr->right());
}

void BreakableStatementChecker::VisitThisFunction(ThisFunction* expr) {
}

#define __ ACCESS_MASM(masm())

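// All code emission below goes through the "__" macro, which expands to
// ACCESS_MASM(masm()) and so targets the code generator's current
// MacroAssembler.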
bool FullCodeGenerator::MakeCode(CompilationInfo* info) {
  Isolate* isolate = info->isolate();
  Handle<Script> script = info->script();
  if (!script->IsUndefined() && !script->source()->IsUndefined()) {
    int len = String::cast(script->source())->length();
    isolate->counters()->total_full_codegen_source_size()->Increment(len);
  }
  if (FLAG_trace_codegen) {
    PrintF("Full Compiler - ");
  }
  CodeGenerator::MakeCodePrologue(info);
  const int kInitialBufferSize = 4 * KB;
  MacroAssembler masm(info->isolate(), NULL, kInitialBufferSize);
#ifdef ENABLE_GDB_JIT_INTERFACE
  masm.positions_recorder()->StartGDBJITLineInfoRecording();
#endif

  FullCodeGenerator cgen(&masm, info);
  cgen.Generate();
  if (cgen.HasStackOverflow()) {
    ASSERT(!isolate->has_pending_exception());
    return false;
  }
  unsigned table_offset = cgen.EmitStackCheckTable();

  Code::Flags flags = Code::ComputeFlags(Code::FUNCTION);
  Handle<Code> code = CodeGenerator::MakeCodeEpilogue(&masm, flags, info);
  code->set_optimizable(info->IsOptimizable() &&
                        !info->function()->flags()->Contains(kDontOptimize) &&
                        info->function()->scope()->AllowsLazyCompilation());
  cgen.PopulateDeoptimizationData(code);
  cgen.PopulateTypeFeedbackInfo(code);
  cgen.PopulateTypeFeedbackCells(code);
  code->set_has_deoptimization_support(info->HasDeoptimizationSupport());
  code->set_handler_table(*cgen.handler_table());
#ifdef ENABLE_DEBUGGER_SUPPORT
  code->set_has_debug_break_slots(
      info->isolate()->debugger()->IsDebuggerActive());
  code->set_compiled_optimizable(info->IsOptimizable());
#endif  // ENABLE_DEBUGGER_SUPPORT
  code->set_allow_osr_at_loop_nesting_level(0);
  code->set_profiler_ticks(0);
  code->set_stack_check_table_offset(table_offset);
  CodeGenerator::PrintCode(code, info);
  info->SetCode(code);  // May be an empty handle.
#ifdef ENABLE_GDB_JIT_INTERFACE
  if (FLAG_gdbjit && !code.is_null()) {
    GDBJITLineInfo* lineinfo =
        masm.positions_recorder()->DetachGDBJITLineInfo();
    GDBJIT(RegisterDetailedLineInfo(*code, lineinfo));
  }
#endif
  return !code.is_null();
}

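// MakeCode above is the entry point for the full (non-optimizing) compiler:
// it generates the unoptimized code and then attaches the side tables (the
// stack-check table, the deoptimization data, and the type feedback info
// and cells) that the debugger and the optimizing compiler consume later.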
unsigned FullCodeGenerator::EmitStackCheckTable() {
  // The stack check table consists of a length (in number of entries) field,
  // followed by a sequence of entries.  Each entry is a pair of AST id and
  // code-relative pc offset.
  masm()->Align(kIntSize);

  unsigned offset = masm()->pc_offset();
  unsigned length = stack_checks_.length();
  __ dd(length);
  for (unsigned i = 0; i < length; ++i) {
    __ dd(stack_checks_[i].id.ToInt());
    __ dd(stack_checks_[i].pc_and_state);
  }
  return offset;
}

void FullCodeGenerator::PopulateDeoptimizationData(Handle<Code> code) {
  // Fill in the deoptimization information.
  ASSERT(info_->HasDeoptimizationSupport() || bailout_entries_.is_empty());
  if (!info_->HasDeoptimizationSupport()) return;
  int length = bailout_entries_.length();
  Handle<DeoptimizationOutputData> data = isolate()->factory()->
      NewDeoptimizationOutputData(length, TENURED);
  for (int i = 0; i < length; i++) {
    data->SetAstId(i, bailout_entries_[i].id);
    data->SetPcAndState(i, Smi::FromInt(bailout_entries_[i].pc_and_state));
  }
  code->set_deoptimization_data(*data);
}

void FullCodeGenerator::PopulateTypeFeedbackInfo(Handle<Code> code) {
  Handle<TypeFeedbackInfo> info = isolate()->factory()->NewTypeFeedbackInfo();
  info->set_ic_total_count(ic_total_count_);
  ASSERT(!isolate()->heap()->InNewSpace(*info));
  code->set_type_feedback_info(*info);
}

void FullCodeGenerator::Initialize() {
  // The generation of debug code must match between the snapshot code and
  // the code that is generated later.  This is assumed by the debugger when
  // it calculates PC offsets after generating a debug version of code.
  generate_debug_code_ = FLAG_debug_code &&
                         !Serializer::enabled() &&
                         !Snapshot::HaveASnapshotToStartFrom();
  masm_->set_emit_debug_code(generate_debug_code_);
  masm_->set_predictable_code_size(true);
}

void FullCodeGenerator::PopulateTypeFeedbackCells(Handle<Code> code) {
  if (type_feedback_cells_.is_empty()) return;
  int length = type_feedback_cells_.length();
  int array_size = TypeFeedbackCells::LengthOfFixedArray(length);
  Handle<TypeFeedbackCells> cache = Handle<TypeFeedbackCells>::cast(
      isolate()->factory()->NewFixedArray(array_size, TENURED));
  for (int i = 0; i < length; i++) {
    cache->SetAstId(i, type_feedback_cells_[i].ast_id);
    cache->SetCell(i, *type_feedback_cells_[i].cell);
  }
  TypeFeedbackInfo::cast(code->type_feedback_info())->
      set_type_feedback_cells(*cache);
}

void FullCodeGenerator::PrepareForBailout(Expression* node, State state) {
  PrepareForBailoutForId(node->id(), state);
}

void FullCodeGenerator::RecordJSReturnSite(Call* call) {
  // We record the offset of the function return so we can rebuild the frame
  // if the function was inlined, i.e., this is the return address in the
  // inlined function's frame.
  //
  // The state is ignored.  We defensively set it to TOS_REG, which is the
  // real state of the unoptimized code at the return site.
  PrepareForBailoutForId(call->ReturnId(), TOS_REG);
#ifdef DEBUG
  // In debug builds, mark the return so we can verify that this function
  // was called.
  ASSERT(!call->return_is_recorded_);
  call->return_is_recorded_ = true;
#endif
}

void FullCodeGenerator::PrepareForBailoutForId(BailoutId id, State state) {
  // There's no need to prepare this code for bailouts from already optimized
  // code or code that can't be optimized.
  if (!info_->HasDeoptimizationSupport()) return;
  unsigned pc_and_state =
      StateField::encode(state) | PcField::encode(masm_->pc_offset());
  ASSERT(Smi::IsValid(pc_and_state));
  BailoutEntry entry = { id, pc_and_state };
#ifdef DEBUG
  if (FLAG_enable_slow_asserts) {
    // Assert that we don't have multiple bailout entries for the same node.
    for (int i = 0; i < bailout_entries_.length(); i++) {
      if (bailout_entries_.at(i).id == entry.id) {
        AstPrinter printer;
        PrintF("%s", printer.PrintProgram(info_->function()));
        UNREACHABLE();
      }
    }
  }
#endif  // DEBUG
  bailout_entries_.Add(entry, zone());
}

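// Each bailout entry packs the codegen State and the current pc offset into
// one unsigned value via StateField and PcField.  The Smi::IsValid check
// matters because the packed value is later stored as a Smi in the
// DeoptimizationOutputData table built above.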
void FullCodeGenerator::RecordTypeFeedbackCell(
    TypeFeedbackId id, Handle<JSGlobalPropertyCell> cell) {
  TypeFeedbackCellEntry entry = { id, cell };
  type_feedback_cells_.Add(entry, zone());
}

void FullCodeGenerator::RecordStackCheck(BailoutId ast_id) {
  // The pc offset does not need to be encoded and packed together with a
  // state.
  ASSERT(masm_->pc_offset() > 0);
  BailoutEntry entry = { ast_id, static_cast<unsigned>(masm_->pc_offset()) };
  stack_checks_.Add(entry, zone());
}

bool FullCodeGenerator::ShouldInlineSmiCase(Token::Value op) {
  // Inline the smi case inside loops, but not division and modulo, which
  // are too complicated and take up too much space.
  if (op == Token::DIV || op == Token::MOD) return false;
  if (FLAG_always_inline_smi_code) return true;
  return loop_depth_ > 0;
}

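// Everything below implements the four expression contexts.  An expression
// is compiled for effect (value discarded), for its value in the accumulator
// register, for its value on the stack, or for control flow (a test).  For
// example, 'x + 1' in 'f(x + 1)' is compiled as a stack value (it becomes an
// argument), while the same expression in 'if (x + 1)' is compiled as a
// test.  Plug() is how a computed value is fed into the current context.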
void FullCodeGenerator::EffectContext::Plug(Register reg) const {
}

void FullCodeGenerator::AccumulatorValueContext::Plug(Register reg) const {
  __ Move(result_register(), reg);
}

void FullCodeGenerator::StackValueContext::Plug(Register reg) const {
  __ push(reg);
}

void FullCodeGenerator::TestContext::Plug(Register reg) const {
  // For simplicity we always test the accumulator register.
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}

void FullCodeGenerator::EffectContext::PlugTOS() const {
  __ Drop(1);
}

void FullCodeGenerator::AccumulatorValueContext::PlugTOS() const {
  __ pop(result_register());
}

void FullCodeGenerator::StackValueContext::PlugTOS() const {
}

void FullCodeGenerator::TestContext::PlugTOS() const {
  // For simplicity we always test the accumulator register.
  __ pop(result_register());
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}

void FullCodeGenerator::EffectContext::PrepareTest(
    Label* materialize_true,
    Label* materialize_false,
    Label** if_true,
    Label** if_false,
    Label** fall_through) const {
  // In an effect context, the true and the false case branch to the
  // same label.
  *if_true = *if_false = *fall_through = materialize_true;
}

void FullCodeGenerator::AccumulatorValueContext::PrepareTest(
    Label* materialize_true,
    Label* materialize_false,
    Label** if_true,
    Label** if_false,
    Label** fall_through) const {
  *if_true = *fall_through = materialize_true;
  *if_false = materialize_false;
}

void FullCodeGenerator::StackValueContext::PrepareTest(
    Label* materialize_true,
    Label* materialize_false,
    Label** if_true,
    Label** if_false,
    Label** fall_through) const {
  *if_true = *fall_through = materialize_true;
  *if_false = materialize_false;
}

void FullCodeGenerator::TestContext::PrepareTest(
    Label* materialize_true,
    Label* materialize_false,
    Label** if_true,
    Label** if_false,
    Label** fall_through) const {
  *if_true = true_label_;
  *if_false = false_label_;
  *fall_through = fall_through_;
}

void FullCodeGenerator::DoTest(const TestContext* context) {
  DoTest(context->condition(),
         context->true_label(),
         context->false_label(),
         context->fall_through());
}

void FullCodeGenerator::VisitDeclarations(
    ZoneList<Declaration*>* declarations) {
  ZoneList<Handle<Object> >* saved_globals = globals_;
  ZoneList<Handle<Object> > inner_globals(10, zone());
  globals_ = &inner_globals;

  AstVisitor::VisitDeclarations(declarations);
  if (!globals_->is_empty()) {
    // Invoke the platform-dependent code generator to do the actual
    // declaration of the global functions and variables.
    Handle<FixedArray> array =
        isolate()->factory()->NewFixedArray(globals_->length(), TENURED);
    for (int i = 0; i < globals_->length(); ++i)
      array->set(i, *globals_->at(i));
    DeclareGlobals(array);
  }

  globals_ = saved_globals;
}

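// Note that global declarations are batched: they accumulate in globals_
// while the list is visited and are then declared in one DeclareGlobals
// call, rather than one runtime call per declaration.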
void FullCodeGenerator::VisitModuleLiteral(ModuleLiteral* module) {
  Block* block = module->body();
  Scope* saved_scope = scope();
  scope_ = block->scope();
  Interface* interface = module->interface();
  Handle<JSModule> instance = interface->Instance();

  Comment cmnt(masm_, "[ ModuleLiteral");
  SetStatementPosition(block);

  // Set up module context.
  __ Push(instance);
  __ CallRuntime(Runtime::kPushModuleContext, 1);
  StoreToFrameField(StandardFrameConstants::kContextOffset,
                    context_register());

  {
    Comment cmnt(masm_, "[ Declarations");
    VisitDeclarations(scope_->declarations());
  }

  scope_ = saved_scope;
  // Pop module context.
  LoadContextField(context_register(), Context::PREVIOUS_INDEX);
  // Update local stack frame context field.
  StoreToFrameField(StandardFrameConstants::kContextOffset,
                    context_register());
}

void FullCodeGenerator::VisitModuleVariable(ModuleVariable* module) {
  // Nothing to do; the instance object is resolved statically.
}

void FullCodeGenerator::VisitModulePath(ModulePath* module) {
  // Nothing to do; the instance object is resolved statically.
}

void FullCodeGenerator::VisitModuleUrl(ModuleUrl* decl) {
}

int FullCodeGenerator::DeclareGlobalsFlags() {
  ASSERT(DeclareGlobalsLanguageMode::is_valid(language_mode()));
  return DeclareGlobalsEvalFlag::encode(is_eval()) |
      DeclareGlobalsNativeFlag::encode(is_native()) |
      DeclareGlobalsLanguageMode::encode(language_mode());
}

void FullCodeGenerator::SetFunctionPosition(FunctionLiteral* fun) {
  CodeGenerator::RecordPositions(masm_, fun->start_position());
}

void FullCodeGenerator::SetReturnPosition(FunctionLiteral* fun) {
  CodeGenerator::RecordPositions(masm_, fun->end_position() - 1);
}

void FullCodeGenerator::SetStatementPosition(Statement* stmt) {
#ifdef ENABLE_DEBUGGER_SUPPORT
  if (!isolate()->debugger()->IsDebuggerActive()) {
    CodeGenerator::RecordPositions(masm_, stmt->statement_pos());
  } else {
    // Check if the statement will be breakable without adding a debug break
    // slot.
    BreakableStatementChecker checker;
    checker.Check(stmt);
    // Record the statement position right here if the statement is not
    // breakable; for breakable statements the actual recording of the
    // position is postponed to the breakable code (typically an IC).
    bool position_recorded = CodeGenerator::RecordPositions(
        masm_, stmt->statement_pos(), !checker.is_breakable());
    // If the position recording did record a new position, generate a debug
    // break slot to make the statement breakable.
    if (position_recorded) {
      Debug::GenerateSlot(masm_);
    }
  }
#else
  CodeGenerator::RecordPositions(masm_, stmt->statement_pos());
#endif
}

void FullCodeGenerator::SetExpressionPosition(Expression* expr, int pos) {
#ifdef ENABLE_DEBUGGER_SUPPORT
  if (!isolate()->debugger()->IsDebuggerActive()) {
    CodeGenerator::RecordPositions(masm_, pos);
  } else {
    // Check if the expression will be breakable without adding a debug break
    // slot.
    BreakableStatementChecker checker;
    checker.Check(expr);
    // Record the expression position right here if the expression is not
    // breakable; otherwise postpone the recording to the breakable code.
    bool position_recorded = CodeGenerator::RecordPositions(
        masm_, pos, !checker.is_breakable());
    // If the position recording did record a new position, generate a debug
    // break slot to make the expression breakable.
    if (position_recorded) {
      Debug::GenerateSlot(masm_);
    }
  }
#else
  CodeGenerator::RecordPositions(masm_, pos);
#endif
}

void FullCodeGenerator::SetStatementPosition(int pos) {
  CodeGenerator::RecordPositions(masm_, pos);
}

void FullCodeGenerator::SetSourcePosition(int pos) {
  if (pos != RelocInfo::kNoPosition) {
    masm_->positions_recorder()->RecordPosition(pos);
  }
}

// Lookup table for code generators for special runtime calls which are
// generated inline.
#define INLINE_FUNCTION_GENERATOR_ADDRESS(Name, argc, ressize)          \
    &FullCodeGenerator::Emit##Name,

const FullCodeGenerator::InlineFunctionGenerator
    FullCodeGenerator::kInlineFunctionGenerators[] = {
      INLINE_FUNCTION_LIST(INLINE_FUNCTION_GENERATOR_ADDRESS)
      INLINE_RUNTIME_FUNCTION_LIST(INLINE_FUNCTION_GENERATOR_ADDRESS)
    };
#undef INLINE_FUNCTION_GENERATOR_ADDRESS

FullCodeGenerator::InlineFunctionGenerator
    FullCodeGenerator::FindInlineFunctionGenerator(Runtime::FunctionId id) {
  int lookup_index =
      static_cast<int>(id) - static_cast<int>(Runtime::kFirstInlineFunction);
  ASSERT(lookup_index >= 0);
  ASSERT(static_cast<size_t>(lookup_index) <
         ARRAY_SIZE(kInlineFunctionGenerators));
  return kInlineFunctionGenerators[lookup_index];
}

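// Calls to inline intrinsics (written %_Name(...) in natives source) are
// dispatched through this table: the runtime function id, offset by the
// first inline function id, indexes the Emit##Name generator directly.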
void FullCodeGenerator::EmitInlineRuntimeCall(CallRuntime* expr) {
  const Runtime::Function* function = expr->function();
  ASSERT(function != NULL);
  ASSERT(function->intrinsic_type == Runtime::INLINE);
  InlineFunctionGenerator generator =
      FindInlineFunctionGenerator(function->function_id);
  ((*this).*(generator))(expr);
}

void FullCodeGenerator::VisitBinaryOperation(BinaryOperation* expr) {
  switch (expr->op()) {
    case Token::COMMA:
      return VisitComma(expr);
    case Token::OR:
    case Token::AND:
      return VisitLogicalExpression(expr);
    default:
      return VisitArithmeticExpression(expr);
  }
}

void FullCodeGenerator::VisitInDuplicateContext(Expression* expr) {
  if (context()->IsEffect()) {
    VisitForEffect(expr);
  } else if (context()->IsAccumulatorValue()) {
    VisitForAccumulatorValue(expr);
  } else if (context()->IsStackValue()) {
    VisitForStackValue(expr);
  } else if (context()->IsTest()) {
    const TestContext* test = TestContext::cast(context());
    VisitForControl(expr, test->true_label(), test->false_label(),
                    test->fall_through());
  }
}

void FullCodeGenerator::VisitComma(BinaryOperation* expr) {
  Comment cmnt(masm_, "[ Comma");
  VisitForEffect(expr->left());
  VisitInDuplicateContext(expr->right());
}

void FullCodeGenerator::VisitLogicalExpression(BinaryOperation* expr) {
  bool is_logical_and = expr->op() == Token::AND;
  Comment cmnt(masm_, is_logical_and ? "[ Logical AND" : "[ Logical OR");
  Expression* left = expr->left();
  Expression* right = expr->right();
  BailoutId right_id = expr->RightId();
  Label done;

  if (context()->IsTest()) {
    Label eval_right;
    const TestContext* test = TestContext::cast(context());
    if (is_logical_and) {
      VisitForControl(left, &eval_right, test->false_label(), &eval_right);
    } else {
      VisitForControl(left, test->true_label(), &eval_right, &eval_right);
    }
    PrepareForBailoutForId(right_id, NO_REGISTERS);
    __ bind(&eval_right);

  } else if (context()->IsAccumulatorValue()) {
    VisitForAccumulatorValue(left);
    // We want the value in the accumulator for the test, and on the stack in
    // case we need it.
    __ push(result_register());
    Label discard, restore;
    if (is_logical_and) {
      DoTest(left, &discard, &restore, &restore);
    } else {
      DoTest(left, &restore, &discard, &restore);
    }
    __ bind(&restore);
    __ pop(result_register());
    __ jmp(&done);
    __ bind(&discard);
    __ Drop(1);
    PrepareForBailoutForId(right_id, NO_REGISTERS);

  } else if (context()->IsStackValue()) {
    VisitForAccumulatorValue(left);
    // We want the value in the accumulator for the test, and on the stack in
    // case we need it.
    __ push(result_register());
    Label discard;
    if (is_logical_and) {
      DoTest(left, &discard, &done, &discard);
    } else {
      DoTest(left, &done, &discard, &discard);
    }
    __ bind(&discard);
    __ Drop(1);
    PrepareForBailoutForId(right_id, NO_REGISTERS);

  } else {
    ASSERT(context()->IsEffect());
    Label eval_right;
    if (is_logical_and) {
      VisitForControl(left, &eval_right, &done, &eval_right);
    } else {
      VisitForControl(left, &done, &eval_right, &eval_right);
    }
    PrepareForBailoutForId(right_id, NO_REGISTERS);
    __ bind(&eval_right);
  }

  VisitInDuplicateContext(right);
  __ bind(&done);
}

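// Short-circuit evaluation is shaped entirely by the expression context: in
// a test context the left operand branches straight to the right operand or
// to the target labels, while in value contexts the left value is kept on
// the stack and is either restored as the result or discarded before the
// right operand is evaluated in a duplicate of the original context.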
void FullCodeGenerator::VisitArithmeticExpression(BinaryOperation* expr) {
  Token::Value op = expr->op();
  Comment cmnt(masm_, "[ ArithmeticExpression");
  Expression* left = expr->left();
  Expression* right = expr->right();
  OverwriteMode mode =
      left->ResultOverwriteAllowed()
      ? OVERWRITE_LEFT
      : (right->ResultOverwriteAllowed() ? OVERWRITE_RIGHT : NO_OVERWRITE);

  VisitForStackValue(left);
  VisitForAccumulatorValue(right);

  SetSourcePosition(expr->position());
  if (ShouldInlineSmiCase(op)) {
    EmitInlineSmiBinaryOp(expr, op, mode, left, right);
  } else {
    EmitBinaryOp(expr, op, mode);
  }
}

void FullCodeGenerator::VisitBlock(Block* stmt) {
  Comment cmnt(masm_, "[ Block");
  NestedBlock nested_block(this, stmt);
  SetStatementPosition(stmt);

  Scope* saved_scope = scope();
  // Push a block context when entering a block with block scoped variables.
  if (stmt->scope() != NULL) {
    scope_ = stmt->scope();
    if (scope_->is_module_scope()) {
      // If this block is a module body, push the module context.
      __ Push(scope_->interface()->Instance());
      __ CallRuntime(Runtime::kPushModuleContext, 1);
      StoreToFrameField(StandardFrameConstants::kContextOffset,
                        context_register());
    } else {
      { Comment cmnt(masm_, "[ Extend block context");
        Handle<ScopeInfo> scope_info = scope_->GetScopeInfo();
        int heap_slots =
            scope_info->ContextLength() - Context::MIN_CONTEXT_SLOTS;
        __ Push(scope_info);
        PushFunctionArgumentForContextAllocation();
        if (heap_slots <= FastNewBlockContextStub::kMaximumSlots) {
          FastNewBlockContextStub stub(heap_slots);
          __ CallStub(&stub);
        } else {
          __ CallRuntime(Runtime::kPushBlockContext, 2);
        }
        // Replace the context stored in the frame.
        StoreToFrameField(StandardFrameConstants::kContextOffset,
                          context_register());
      }
      { Comment cmnt(masm_, "[ Declarations");
        VisitDeclarations(scope_->declarations());
      }
    }
  }
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  VisitStatements(stmt->statements());
  scope_ = saved_scope;
  __ bind(nested_block.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);

  // Pop block context if necessary.
  if (stmt->scope() != NULL) {
    LoadContextField(context_register(), Context::PREVIOUS_INDEX);
    // Update local stack frame context field.
    StoreToFrameField(StandardFrameConstants::kContextOffset,
                      context_register());
  }
}

void FullCodeGenerator::VisitExpressionStatement(ExpressionStatement* stmt) {
  Comment cmnt(masm_, "[ ExpressionStatement");
  SetStatementPosition(stmt);
  VisitForEffect(stmt->expression());
}

void FullCodeGenerator::VisitEmptyStatement(EmptyStatement* stmt) {
  Comment cmnt(masm_, "[ EmptyStatement");
  SetStatementPosition(stmt);
}

void FullCodeGenerator::VisitIfStatement(IfStatement* stmt) {
  Comment cmnt(masm_, "[ IfStatement");
  SetStatementPosition(stmt);
  Label then_part, else_part, done;

  if (stmt->HasElseStatement()) {
    VisitForControl(stmt->condition(), &then_part, &else_part, &then_part);
    PrepareForBailoutForId(stmt->ThenId(), NO_REGISTERS);
    __ bind(&then_part);
    Visit(stmt->then_statement());
    __ jmp(&done);
    PrepareForBailoutForId(stmt->ElseId(), NO_REGISTERS);
    __ bind(&else_part);
    Visit(stmt->else_statement());
  } else {
    VisitForControl(stmt->condition(), &then_part, &done, &then_part);
    PrepareForBailoutForId(stmt->ThenId(), NO_REGISTERS);
    __ bind(&then_part);
    Visit(stmt->then_statement());
    PrepareForBailoutForId(stmt->ElseId(), NO_REGISTERS);
  }
  __ bind(&done);
  PrepareForBailoutForId(stmt->IfId(), NO_REGISTERS);
}

void FullCodeGenerator::VisitContinueStatement(ContinueStatement* stmt) {
  Comment cmnt(masm_, "[ ContinueStatement");
  SetStatementPosition(stmt);
  NestedStatement* current = nesting_stack_;
  int stack_depth = 0;
  int context_length = 0;
  // When continuing, we clobber the unpredictable value in the accumulator
  // with one that's safe for GC.
  ClearAccumulator();
  while (!current->IsContinueTarget(stmt->target())) {
    current = current->Exit(&stack_depth, &context_length);
  }
  __ Drop(stack_depth);
  if (context_length > 0) {
    while (context_length > 0) {
      LoadContextField(context_register(), Context::PREVIOUS_INDEX);
      --context_length;
    }
    StoreToFrameField(StandardFrameConstants::kContextOffset,
                      context_register());
  }

  __ jmp(current->AsIteration()->continue_label());
}

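// Exiting a statement via continue (and via break and return below) unwinds
// the static nesting stack: each NestedStatement::Exit call accumulates how
// many stack slots to drop and how many contexts to pop before control can
// jump to the target label.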
void FullCodeGenerator::VisitBreakStatement(BreakStatement* stmt) {
  Comment cmnt(masm_, "[ BreakStatement");
  SetStatementPosition(stmt);
  NestedStatement* current = nesting_stack_;
  int stack_depth = 0;
  int context_length = 0;
  // When breaking, we clobber the unpredictable value in the accumulator
  // with one that's safe for GC.
  ClearAccumulator();
  while (!current->IsBreakTarget(stmt->target())) {
    current = current->Exit(&stack_depth, &context_length);
  }
  __ Drop(stack_depth);
  if (context_length > 0) {
    while (context_length > 0) {
      LoadContextField(context_register(), Context::PREVIOUS_INDEX);
      --context_length;
    }
    StoreToFrameField(StandardFrameConstants::kContextOffset,
                      context_register());
  }

  __ jmp(current->AsBreakable()->break_label());
}

void FullCodeGenerator::VisitReturnStatement(ReturnStatement* stmt) {
  Comment cmnt(masm_, "[ ReturnStatement");
  SetStatementPosition(stmt);
  Expression* expr = stmt->expression();
  VisitForAccumulatorValue(expr);

  // Exit all nested statements.
  NestedStatement* current = nesting_stack_;
  int stack_depth = 0;
  int context_length = 0;
  while (current != NULL) {
    current = current->Exit(&stack_depth, &context_length);
  }
  __ Drop(stack_depth);

  EmitReturnSequence();
}

void FullCodeGenerator::VisitWithStatement(WithStatement* stmt) {
  Comment cmnt(masm_, "[ WithStatement");
  SetStatementPosition(stmt);

  VisitForStackValue(stmt->expression());
  PushFunctionArgumentForContextAllocation();
  __ CallRuntime(Runtime::kPushWithContext, 2);
  StoreToFrameField(StandardFrameConstants::kContextOffset,
                    context_register());

  { WithOrCatch body(this);
    Visit(stmt->statement());
  }

  // Pop context.
  LoadContextField(context_register(), Context::PREVIOUS_INDEX);
  // Update local stack frame context field.
  StoreToFrameField(StandardFrameConstants::kContextOffset,
                    context_register());
}

void FullCodeGenerator::VisitDoWhileStatement(DoWhileStatement* stmt) {
  Comment cmnt(masm_, "[ DoWhileStatement");
  SetStatementPosition(stmt);
  Label body, stack_check;

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  __ bind(&body);
  Visit(stmt->body());

  // Record the position of the do-while condition and make sure it is
  // possible to break on the condition.
  __ bind(loop_statement.continue_label());
  PrepareForBailoutForId(stmt->ContinueId(), NO_REGISTERS);
  SetExpressionPosition(stmt->cond(), stmt->condition_position());
  VisitForControl(stmt->cond(),
                  &stack_check,
                  loop_statement.break_label(),
                  &stack_check);

  // Check stack before looping.
  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
  __ bind(&stack_check);
  EmitStackCheck(stmt, &body);
  __ jmp(&body);

  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}

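// All three loop forms place the condition test and the stack check at the
// bottom of the loop, on the back edge.  The stack check doubles as the
// interrupt point where a long-running loop can be paused, and where
// on-stack replacement by optimized code can occur (see
// set_allow_osr_at_loop_nesting_level in MakeCode above).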
void FullCodeGenerator::VisitWhileStatement(WhileStatement* stmt) {
  Comment cmnt(masm_, "[ WhileStatement");
  Label test, body;

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // Emit the test at the bottom of the loop.
  __ jmp(&test);

  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&body);
  Visit(stmt->body());

  // Emit the statement position here as this is where the while
  // statement code starts.
  __ bind(loop_statement.continue_label());
  SetStatementPosition(stmt);

  // Check stack before looping.
  EmitStackCheck(stmt, &body);

  __ bind(&test);
  VisitForControl(stmt->cond(),
                  &body,
                  loop_statement.break_label(),
                  loop_statement.break_label());

  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}

void FullCodeGenerator::VisitForStatement(ForStatement* stmt) {
  Comment cmnt(masm_, "[ ForStatement");
  Label test, body;

  Iteration loop_statement(this, stmt);

  // Set statement position for a break slot before entering the for-body.
  SetStatementPosition(stmt);

  if (stmt->init() != NULL) {
    Visit(stmt->init());
  }

  increment_loop_depth();
  // Emit the test at the bottom of the loop (even if empty).
  __ jmp(&test);

  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&body);
  Visit(stmt->body());

  PrepareForBailoutForId(stmt->ContinueId(), NO_REGISTERS);
  __ bind(loop_statement.continue_label());
  if (stmt->next() != NULL) {
    Visit(stmt->next());
  }

  // Emit the statement position here as this is where the for
  // statement code starts.
  SetStatementPosition(stmt);

  // Check stack before looping.
  EmitStackCheck(stmt, &body);

  __ bind(&test);
  if (stmt->cond() != NULL) {
    VisitForControl(stmt->cond(),
                    &body,
                    loop_statement.break_label(),
                    loop_statement.break_label());
  } else {
    __ jmp(&body);
  }

  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}

void FullCodeGenerator::VisitTryCatchStatement(TryCatchStatement* stmt) {
  Comment cmnt(masm_, "[ TryCatchStatement");
  SetStatementPosition(stmt);
  // The try block adds a handler to the exception handler chain before
  // entering, and removes it again when exiting normally.  If an exception
  // is thrown during execution of the try block, control is passed to the
  // catch block with the exception in the result register.

  Label try_entry, handler_entry, exit;
  __ jmp(&try_entry);
  __ bind(&handler_entry);
  handler_table()->set(stmt->index(), Smi::FromInt(handler_entry.pos()));
  // Exception handler code; the exception is in the result register.
  // Extend the context before executing the catch block.
  { Comment cmnt(masm_, "[ Extend catch context");
    __ Push(stmt->variable()->name());
    __ push(result_register());
    PushFunctionArgumentForContextAllocation();
    __ CallRuntime(Runtime::kPushCatchContext, 3);
    StoreToFrameField(StandardFrameConstants::kContextOffset,
                      context_register());
  }

  Scope* saved_scope = scope();
  scope_ = stmt->scope();
  ASSERT(scope_->declarations()->is_empty());
  { WithOrCatch catch_body(this);
    Visit(stmt->catch_block());
  }
  // Restore the context.
  LoadContextField(context_register(), Context::PREVIOUS_INDEX);
  StoreToFrameField(StandardFrameConstants::kContextOffset,
                    context_register());
  scope_ = saved_scope;
  __ jmp(&exit);

  // Try block code.  Sets up the exception handler chain.
  __ bind(&try_entry);
  __ PushTryHandler(StackHandler::CATCH, stmt->index());
  { TryCatch try_body(this);
    Visit(stmt->try_block());
  }
  __ PopTryHandler();
  __ bind(&exit);
}

void FullCodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
  Comment cmnt(masm_, "[ TryFinallyStatement");
  SetStatementPosition(stmt);
  // The finally block must run whether the try block exits normally, by an
  // exception, or by break, continue, or return, and it must afterwards
  // resume whatever brought control there.  It is therefore compiled as a
  // subroutine.

  Label try_entry, handler_entry, finally_entry;

  // Jump to try-handler setup and try-block code.
  __ jmp(&try_entry);
  __ bind(&handler_entry);
  handler_table()->set(stmt->index(), Smi::FromInt(handler_entry.pos()));
  // Exception handler code.  This code is only executed when an exception
  // is thrown.  The exception is in the result register, and must be
  // preserved by the finally block.  Call the finally block and then
  // rethrow the exception if it returns.
  __ Call(&finally_entry);
  __ push(result_register());
  __ CallRuntime(Runtime::kReThrow, 1);

  // Finally block implementation.
  __ bind(&finally_entry);
  EnterFinallyBlock();
  { Finally finally_body(this);
    Visit(stmt->finally_block());
  }
  ExitFinallyBlock();  // Return to the calling code.

  // Set up try handler.
  __ bind(&try_entry);
  __ PushTryHandler(StackHandler::FINALLY, stmt->index());
  { TryFinally try_body(this, &finally_entry);
    Visit(stmt->try_block());
  }
  __ PopTryHandler();
  // Execute the finally block on the way out.  Clobber the unpredictable
  // value in the result register with one that's safe for GC because the
  // finally block will unconditionally preserve the result register on the
  // stack.
  ClearAccumulator();
  __ Call(&finally_entry);
}

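// Note the subroutine protocol above: the normal path reaches the finally
// block via __ Call(&finally_entry) and falls through afterwards, while the
// handler path calls the same subroutine with the pending exception saved
// on the stack and rethrows it when the subroutine returns.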
void FullCodeGenerator::VisitDebuggerStatement(DebuggerStatement* stmt) {
#ifdef ENABLE_DEBUGGER_SUPPORT
  Comment cmnt(masm_, "[ DebuggerStatement");
  SetStatementPosition(stmt);

  __ DebugBreak();
  // Ignore the return value.
#endif
}

void FullCodeGenerator::VisitConditional(Conditional* expr) {
  Comment cmnt(masm_, "[ Conditional");
  Label true_case, false_case, done;
  VisitForControl(expr->condition(), &true_case, &false_case, &true_case);

  PrepareForBailoutForId(expr->ThenId(), NO_REGISTERS);
  __ bind(&true_case);
  SetExpressionPosition(expr->then_expression(),
                        expr->then_expression_position());
  if (context()->IsTest()) {
    const TestContext* for_test = TestContext::cast(context());
    VisitForControl(expr->then_expression(),
                    for_test->true_label(),
                    for_test->false_label(),
                    NULL);
  } else {
    VisitInDuplicateContext(expr->then_expression());
    __ jmp(&done);
  }

  PrepareForBailoutForId(expr->ElseId(), NO_REGISTERS);
  __ bind(&false_case);
  SetExpressionPosition(expr->else_expression(),
                        expr->else_expression_position());
  VisitInDuplicateContext(expr->else_expression());
  // If control flow falls through Visit, jump to done.
  if (!context()->IsTest()) {
    __ bind(&done);
  }
}

void FullCodeGenerator::VisitLiteral(Literal* expr) {
  Comment cmnt(masm_, "[ Literal");
  context()->Plug(expr->handle());
}

void FullCodeGenerator::VisitFunctionLiteral(FunctionLiteral* expr) {
  Comment cmnt(masm_, "[ FunctionLiteral");

  // Build the function boilerplate and instantiate it.
  Handle<SharedFunctionInfo> function_info =
      Compiler::BuildFunctionInfo(expr, script());
  if (function_info.is_null()) {
    SetStackOverflow();
    return;
  }
  EmitNewClosure(function_info, expr->pretenure());
}

void FullCodeGenerator::VisitSharedFunctionInfoLiteral(
    SharedFunctionInfoLiteral* expr) {
  Comment cmnt(masm_, "[ SharedFunctionInfoLiteral");
  EmitNewClosure(expr->shared_function_info(), false);
}

void FullCodeGenerator::VisitThrow(Throw* expr) {
  Comment cmnt(masm_, "[ Throw");
  VisitForStackValue(expr->exception());
  __ CallRuntime(Runtime::kThrow, 1);
  // Never returns here.
}

FullCodeGenerator::NestedStatement* FullCodeGenerator::TryCatch::Exit(
    int* stack_depth,
    int* context_length) {
  // The macros used here must preserve the result register.
  __ Drop(*stack_depth);
  __ PopTryHandler();
  *stack_depth = 0;
  return previous_;
}

bool FullCodeGenerator::TryLiteralCompare(CompareOperation* expr) {
  Expression* sub_expr;
  Handle<String> check;
  if (expr->IsLiteralCompareTypeof(&sub_expr, &check)) {
    EmitLiteralCompareTypeof(expr, sub_expr, check);
    return true;
  }

  if (expr->IsLiteralCompareUndefined(&sub_expr)) {
    EmitLiteralCompareNil(expr, sub_expr, kUndefinedValue);
    return true;
  }

  if (expr->IsLiteralCompareNull(&sub_expr)) {
    EmitLiteralCompareNil(expr, sub_expr, kNullValue);
    return true;
  }

  return false;
}

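// Compares against literals ('typeof x == "..."', 'x == undefined',
// 'x == null') are special-cased here so they can be emitted without the
// generic compare machinery.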