void BreakableStatementChecker::VisitVariableDeclaration(
    VariableDeclaration* decl) {
}


void BreakableStatementChecker::VisitFunctionDeclaration(
    FunctionDeclaration* decl) {
}


void BreakableStatementChecker::VisitModuleDeclaration(
    ModuleDeclaration* decl) {
}


void BreakableStatementChecker::VisitImportDeclaration(
    ImportDeclaration* decl) {
}


void BreakableStatementChecker::VisitExportDeclaration(
    ExportDeclaration* decl) {
}


void BreakableStatementChecker::VisitModuleLiteral(ModuleLiteral* module) {
}


void BreakableStatementChecker::VisitModuleVariable(ModuleVariable* module) {
}


void BreakableStatementChecker::VisitModulePath(ModulePath* module) {
}


void BreakableStatementChecker::VisitModuleUrl(ModuleUrl* module) {
}


void BreakableStatementChecker::VisitModuleStatement(ModuleStatement* stmt) {
}


void BreakableStatementChecker::VisitBlock(Block* stmt) {
}
void BreakableStatementChecker::VisitExpressionStatement(
    ExpressionStatement* stmt) {
  // Check if expression is breakable.
  Visit(stmt->expression());
}


void BreakableStatementChecker::VisitEmptyStatement(EmptyStatement* stmt) {
}
void BreakableStatementChecker::VisitIfStatement(IfStatement* stmt) {
  // If the condition is breakable or has side effects, the if statement is
  // breakable.
  Visit(stmt->condition());
}
void BreakableStatementChecker::VisitContinueStatement(
    ContinueStatement* stmt) {
}


void BreakableStatementChecker::VisitBreakStatement(BreakStatement* stmt) {
}
void BreakableStatementChecker::VisitReturnStatement(ReturnStatement* stmt) {
  // Return is breakable if the expression is.
  Visit(stmt->expression());
}
void BreakableStatementChecker::VisitWithStatement(WithStatement* stmt) {
  Visit(stmt->expression());
}
void BreakableStatementChecker::VisitSwitchStatement(SwitchStatement* stmt) {
  // Switch statements are breakable if the tag expression is.
  Visit(stmt->tag());
}
void BreakableStatementChecker::VisitDoWhileStatement(DoWhileStatement* stmt) {
  // Mark do-while statements breakable to avoid adding a break slot in front
  // of them.
  is_breakable_ = true;
}
void BreakableStatementChecker::VisitWhileStatement(WhileStatement* stmt) {
  // Mark while statements breakable if the condition expression is.
  Visit(stmt->cond());
}
void BreakableStatementChecker::VisitForStatement(ForStatement* stmt) {
  // Mark for statements breakable if the condition expression is.
  if (stmt->cond() != NULL) {
    Visit(stmt->cond());
  }
}
void BreakableStatementChecker::VisitForInStatement(ForInStatement* stmt) {
  // Mark for-in statements breakable if the enumerable expression is.
  Visit(stmt->enumerable());
}
void BreakableStatementChecker::VisitForOfStatement(ForOfStatement* stmt) {
  // For-of is breakable because of the next() call.
  is_breakable_ = true;
}
void BreakableStatementChecker::VisitTryCatchStatement(
    TryCatchStatement* stmt) {
  // Mark try-catch as breakable to avoid adding a break slot in front of it.
  is_breakable_ = true;
}
void BreakableStatementChecker::VisitTryFinallyStatement(
    TryFinallyStatement* stmt) {
  // Mark try-finally as breakable to avoid adding a break slot in front of
  // it.
  is_breakable_ = true;
}
void BreakableStatementChecker::VisitDebuggerStatement(
    DebuggerStatement* stmt) {
  // The debugger statement is breakable.
  is_breakable_ = true;
}
void BreakableStatementChecker::VisitCaseClause(CaseClause* clause) {
}
void BreakableStatementChecker::VisitFunctionLiteral(FunctionLiteral* expr) {
}


void BreakableStatementChecker::VisitNativeFunctionLiteral(
    NativeFunctionLiteral* expr) {
}


void BreakableStatementChecker::VisitConditional(Conditional* expr) {
}


void BreakableStatementChecker::VisitVariableProxy(VariableProxy* expr) {
}


void BreakableStatementChecker::VisitLiteral(Literal* expr) {
}


void BreakableStatementChecker::VisitRegExpLiteral(RegExpLiteral* expr) {
}


void BreakableStatementChecker::VisitObjectLiteral(ObjectLiteral* expr) {
}


void BreakableStatementChecker::VisitArrayLiteral(ArrayLiteral* expr) {
}
void BreakableStatementChecker::VisitAssignment(Assignment* expr) {
  // If assigning to a property (including a global property) the assignment
  // is breakable.
  VariableProxy* proxy = expr->target()->AsVariableProxy();
  Property* prop = expr->target()->AsProperty();
  if (prop != NULL || (proxy != NULL && proxy->var()->IsUnallocated())) {
    is_breakable_ = true;
    return;
  }

  // Otherwise the assignment is breakable if the assigned value is.
  Visit(expr->value());
}
void BreakableStatementChecker::VisitYield(Yield* expr) {
  // Yield is breakable if the expression is.
  Visit(expr->expression());
}
void BreakableStatementChecker::VisitThrow(Throw* expr) {
  // Throw is breakable if the exception expression is.
  Visit(expr->exception());
}
void BreakableStatementChecker::VisitProperty(Property* expr) {
  // Property load is breakable.
  is_breakable_ = true;
}
void BreakableStatementChecker::VisitCall(Call* expr) {
  // Function calls both through IC and call stub are breakable.
  is_breakable_ = true;
}
void BreakableStatementChecker::VisitCallNew(CallNew* expr) {
  // Function calls through new are breakable.
  is_breakable_ = true;
}
void BreakableStatementChecker::VisitCallRuntime(CallRuntime* expr) {
}
void BreakableStatementChecker::VisitUnaryOperation(UnaryOperation* expr) {
  Visit(expr->expression());
}
void BreakableStatementChecker::VisitCountOperation(CountOperation* expr) {
  Visit(expr->expression());
}
void BreakableStatementChecker::VisitBinaryOperation(BinaryOperation* expr) {
  Visit(expr->left());
  if (expr->op() != Token::AND && expr->op() != Token::OR) {
    Visit(expr->right());
  }
}
void BreakableStatementChecker::VisitCompareOperation(CompareOperation* expr) {
  Visit(expr->left());
  Visit(expr->right());
}
void BreakableStatementChecker::VisitThisFunction(ThisFunction* expr) {
}
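// Taken together, the visitors above implement a simple static analysis: a
// statement or expression is considered breakable if its generated code
// contains a site at which the debugger can break (a property access, a
// call, or a debugger statement), or if one of its visited subexpressions
// is itself breakable.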
#define __ ACCESS_MASM(masm())
bool FullCodeGenerator::MakeCode(CompilationInfo* info) {
  Isolate* isolate = info->isolate();

  Handle<Script> script = info->script();
  if (!script->IsUndefined() && !script->source()->IsUndefined()) {
    int len = String::cast(script->source())->length();
    isolate->counters()->total_full_codegen_source_size()->Increment(len);
  }
  CodeGenerator::MakeCodePrologue(info, "full");
  const int kInitialBufferSize = 4 * KB;
  MacroAssembler masm(info->isolate(), NULL, kInitialBufferSize);
#ifdef ENABLE_GDB_JIT_INTERFACE
  masm.positions_recorder()->StartGDBJITLineInfoRecording();
#endif

  FullCodeGenerator cgen(&masm, info);
  cgen.Generate();
  if (cgen.HasStackOverflow()) {
    ASSERT(!isolate->has_pending_exception());
    return false;
  }
  unsigned table_offset = cgen.EmitBackEdgeTable();

  Code::Flags flags = Code::ComputeFlags(Code::FUNCTION);
  Handle<Code> code = CodeGenerator::MakeCodeEpilogue(&masm, flags, info);
  code->set_optimizable(info->IsOptimizable() &&
                        !info->function()->dont_optimize() &&
                        info->function()->scope()->AllowsLazyCompilation());
  cgen.PopulateDeoptimizationData(code);
  cgen.PopulateTypeFeedbackInfo(code);
  code->set_handler_table(*cgen.handler_table());
#ifdef ENABLE_DEBUGGER_SUPPORT
  code->set_compiled_optimizable(info->IsOptimizable());
#endif  // ENABLE_DEBUGGER_SUPPORT
  code->set_allow_osr_at_loop_nesting_level(0);
  code->set_profiler_ticks(0);
  code->set_back_edge_table_offset(table_offset);
  code->set_back_edges_patched_for_osr(false);
  CodeGenerator::PrintCode(code, info);
  info->SetCode(code);
#ifdef ENABLE_GDB_JIT_INTERFACE
  if (FLAG_gdbjit) {
    GDBJITLineInfo* lineinfo =
        masm.positions_recorder()->DetachGDBJITLineInfo();
    GDBJIT(RegisterDetailedLineInfo(*code, lineinfo));
  }
#endif
  void* line_info = masm.positions_recorder()->DetachJITHandlerData();
  LOG_CODE_EVENT(isolate, CodeEndLinePosInfoRecordEvent(*code, line_info));
  return true;
}
unsigned FullCodeGenerator::EmitBackEdgeTable() {
  // The back edge table consists of a length (in number of entries) field,
  // and then a sequence of entries.  Each entry is a triple of AST id,
  // code-relative pc offset, and loop depth.
  masm()->Align(kIntSize);
  unsigned offset = masm()->pc_offset();
  unsigned length = back_edges_.length();
  __ dd(length);
  for (unsigned i = 0; i < length; ++i) {
    __ dd(back_edges_[i].id.ToInt());
    __ dd(back_edges_[i].pc);
    __ dd(back_edges_[i].loop_depth);
  }
  return offset;
}
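// The blob emitted above is read back by BackEdgeTable (see Patch/Revert
// near the end of this file) through its length(), loop_depth(i) and pc(i)
// accessors; the returned offset is stored on the Code object via
// set_back_edge_table_offset() in MakeCode so the table can be located
// later for OSR patching.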
void FullCodeGenerator::InitializeFeedbackVector() {
  int length = info_->function()->slot_count();
  feedback_vector_ = isolate()->factory()->NewFixedArray(length, TENURED);
  Handle<Object> sentinel = TypeFeedbackInfo::UninitializedSentinel(isolate());
  // Ensure that it's safe to set without using a write barrier.
  ASSERT_EQ(isolate()->heap()->uninitialized_symbol(), *sentinel);
  for (int i = 0; i < length; i++) {
    feedback_vector_->set(i, *sentinel, SKIP_WRITE_BARRIER);
  }
}
void FullCodeGenerator::PopulateDeoptimizationData(Handle<Code> code) {
  // Fill in the deoptimization information.
  ASSERT(info_->HasDeoptimizationSupport() || bailout_entries_.is_empty());
  if (!info_->HasDeoptimizationSupport()) return;
  int length = bailout_entries_.length();
  Handle<DeoptimizationOutputData> data = isolate()->factory()->
      NewDeoptimizationOutputData(length, TENURED);
  for (int i = 0; i < length; i++) {
    data->SetAstId(i, bailout_entries_[i].id);
    data->SetPcAndState(i, Smi::FromInt(bailout_entries_[i].pc_and_state));
  }
  code->set_deoptimization_data(*data);
}
void FullCodeGenerator::PopulateTypeFeedbackInfo(Handle<Code> code) {
  Handle<TypeFeedbackInfo> info = isolate()->factory()->NewTypeFeedbackInfo();
  info->set_ic_total_count(ic_total_count_);
  info->set_feedback_vector(*FeedbackVector());
  ASSERT(!isolate()->heap()->InNewSpace(*info));
  code->set_type_feedback_info(*info);
}
void FullCodeGenerator::Initialize() {
  // The generation of debug code must match between the snapshot code and
  // the code that is generated later.  This is assumed by the debugger when
  // it is calculating PC offsets after generating a debug version of code.
  // Therefore we disable the production of debug code in the full compiler
  // if we are debugging.
  generate_debug_code_ = FLAG_debug_code &&
                         !Serializer::enabled() &&
                         !Snapshot::HaveASnapshotToStartFrom();
  masm_->set_emit_debug_code(generate_debug_code_);
  masm_->set_predictable_code_size(true);
  InitializeAstVisitor(info_->zone());
}
void FullCodeGenerator::PrepareForBailout(Expression* node, State state) {
  PrepareForBailoutForId(node->id(), state);
}
void FullCodeGenerator::CallLoadIC(ContextualMode contextual_mode,
                                   TypeFeedbackId id) {
  ExtraICState extra_state = LoadIC::ComputeExtraICState(contextual_mode);
  Handle<Code> ic = LoadIC::initialize_stub(isolate(), extra_state);
  CallIC(ic, id);
}
void FullCodeGenerator::CallStoreIC(TypeFeedbackId id) {
  Handle<Code> ic = StoreIC::initialize_stub(isolate(), strict_mode());
  CallIC(ic, id);
}
void FullCodeGenerator::RecordJSReturnSite(Call* call) {
  // We record the offset of the function return so we can rebuild the frame
  // if the function was inlined and deoptimized after the return.  The state
  // is ignored.  We defensively set it to TOS_REG, which is the real state
  // of the unoptimized code at the return site.
  PrepareForBailoutForId(call->ReturnId(), TOS_REG);
#ifdef DEBUG
  // In debug builds, mark the return so we can verify that this function
  // is only called once.
  ASSERT(!call->return_is_recorded_);
  call->return_is_recorded_ = true;
#endif
}
void FullCodeGenerator::PrepareForBailoutForId(BailoutId id, State state) {
  // There's no need to prepare this code for bailouts from already optimized
  // code or code that can't be optimized.
  if (!info_->HasDeoptimizationSupport()) return;
  unsigned pc_and_state =
      StateField::encode(state) | PcField::encode(masm_->pc_offset());
  ASSERT(Smi::IsValid(pc_and_state));
  BailoutEntry entry = { id, pc_and_state };
#ifdef DEBUG
  // Assert that we don't have multiple bailout entries for the same node.
  ASSERT(!prepared_bailout_ids_.Contains(id.ToInt()));
  prepared_bailout_ids_.Add(id.ToInt(), zone());
#endif  // DEBUG
  bailout_entries_.Add(entry, zone());
}
void FullCodeGenerator::RecordBackEdge(BailoutId ast_id) {
  // The pc offset does not need to be encoded and packed together with a
  // state.
  ASSERT(masm_->pc_offset() > 0);
  ASSERT(loop_depth() > 0);
  uint8_t depth = Min(loop_depth(), Code::kMaxLoopNestingMarker);
  BackEdgeEntry entry =
      { ast_id, static_cast<unsigned>(masm_->pc_offset()), depth };
  back_edges_.Add(entry, zone());
}
bool FullCodeGenerator::ShouldInlineSmiCase(Token::Value op) {
  // Inline the smi case inside loops, but not division and modulo which
  // are too complicated and take up too much space.
  if (op == Token::DIV || op == Token::MOD) return false;
  if (FLAG_always_inline_smi_code) return true;
  return loop_depth_ > 0;
}
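// The Plug/PlugTOS overloads below realize the four expression contexts:
// an effect context discards the value, an accumulator-value context moves
// it into the result register, a stack-value context keeps it on the
// operand stack, and a test context converts the value into control flow
// by testing the accumulator and branching.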
void FullCodeGenerator::EffectContext::Plug(Register reg) const {
}
void FullCodeGenerator::AccumulatorValueContext::Plug(Register reg) const {
  __ Move(result_register(), reg);
}
void FullCodeGenerator::StackValueContext::Plug(Register reg) const {
  __ Push(reg);
}
void FullCodeGenerator::TestContext::Plug(Register reg) const {
  // For simplicity we always test the accumulator register.
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}
void FullCodeGenerator::EffectContext::PlugTOS() const {
  __ Drop(1);
}
void FullCodeGenerator::AccumulatorValueContext::PlugTOS() const {
  __ Pop(result_register());
}
void FullCodeGenerator::StackValueContext::PlugTOS() const {
}
void FullCodeGenerator::TestContext::PlugTOS() const {
  // For simplicity we always test the accumulator register.
  __ Pop(result_register());
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}
void FullCodeGenerator::EffectContext::PrepareTest(
    Label* materialize_true,
    Label* materialize_false,
    Label** if_true,
    Label** if_false,
    Label** fall_through) const {
  // In an effect context, the true and the false case branch to the
  // same label.
  *if_true = *if_false = *fall_through = materialize_true;
}
void FullCodeGenerator::AccumulatorValueContext::PrepareTest(
    Label* materialize_true,
    Label* materialize_false,
    Label** if_true,
    Label** if_false,
    Label** fall_through) const {
  *if_true = *fall_through = materialize_true;
  *if_false = materialize_false;
}
void FullCodeGenerator::StackValueContext::PrepareTest(
    Label* materialize_true,
    Label* materialize_false,
    Label** if_true,
    Label** if_false,
    Label** fall_through) const {
  *if_true = *fall_through = materialize_true;
  *if_false = materialize_false;
}
void FullCodeGenerator::TestContext::PrepareTest(
    Label* materialize_true,
    Label* materialize_false,
    Label** if_true,
    Label** if_false,
    Label** fall_through) const {
  *if_true = true_label_;
  *if_false = false_label_;
  *fall_through = fall_through_;
}
void FullCodeGenerator::DoTest(const TestContext* context) {
  DoTest(context->condition(),
         context->true_label(),
         context->false_label(),
         context->fall_through());
}
void FullCodeGenerator::AllocateModules(ZoneList<Declaration*>* declarations) {
  ASSERT(scope_->is_global_scope());

  for (int i = 0; i < declarations->length(); i++) {
    ModuleDeclaration* declaration = declarations->at(i)->AsModuleDeclaration();
    if (declaration != NULL) {
      ModuleLiteral* module = declaration->module()->AsModuleLiteral();
      if (module != NULL) {
        Comment cmnt(masm_, "[ Link nested modules");
        Scope* scope = module->body()->scope();
        Interface* interface = scope->interface();

        interface->Allocate(scope->module_var()->index());

        // Set up module context.
        ASSERT(scope->interface()->Index() >= 0);
        __ Push(Smi::FromInt(scope->interface()->Index()));
        __ Push(scope->GetScopeInfo());
        __ CallRuntime(Runtime::kHiddenPushModuleContext, 2);
        StoreToFrameField(StandardFrameConstants::kContextOffset,
                          context_register());

        AllocateModules(scope->declarations());

        // Pop module context.
        LoadContextField(context_register(), Context::PREVIOUS_INDEX);
        // Update local stack frame.
        StoreToFrameField(StandardFrameConstants::kContextOffset,
                          context_register());
      }
    }
  }
}
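// Module handling is a two-pass scheme: AllocateModules above walks all
// (nested) module declarations and assigns each module a context slot up
// front; the module visitors below then fill in one ModuleInfo descriptor
// per module, and VisitDeclarations hands the completed descriptor array
// to the runtime in a single DeclareModules call.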
void FullCodeGenerator::VisitDeclarations(
    ZoneList<Declaration*>* declarations) {
  Handle<FixedArray> saved_modules = modules_;
  int saved_module_index = module_index_;
  ZoneList<Handle<Object> >* saved_globals = globals_;
  ZoneList<Handle<Object> > inner_globals(10, zone());
  globals_ = &inner_globals;

  if (scope_->num_modules() != 0) {
    // This is a scope hosting modules.  Allocate a descriptor array to pass
    // to the runtime for initialization.
    Comment cmnt(masm_, "[ Allocate modules");
    ASSERT(scope_->is_global_scope());
    modules_ =
        isolate()->factory()->NewFixedArray(scope_->num_modules(), TENURED);
    module_index_ = 0;

    // Generate code for allocating all modules, including nested ones.
    // The allocated contexts are stored in internal variables in this scope.
    AllocateModules(declarations);
  }

  AstVisitor::VisitDeclarations(declarations);

  if (scope_->num_modules() != 0) {
    // Initialize modules from descriptor array.
    ASSERT(module_index_ == modules_->length());
    DeclareModules(modules_);
    modules_ = saved_modules;
    module_index_ = saved_module_index;
  }

  if (!globals_->is_empty()) {
    // Invoke the platform-dependent code generator to do the actual
    // declaration of the global functions and variables.
    Handle<FixedArray> array =
        isolate()->factory()->NewFixedArray(globals_->length(), TENURED);
    for (int i = 0; i < globals_->length(); ++i)
      array->set(i, *globals_->at(i));
    DeclareGlobals(array);
  }

  globals_ = saved_globals;
}
void FullCodeGenerator::VisitModuleLiteral(ModuleLiteral* module) {
  Block* block = module->body();
  Scope* saved_scope = scope();
  scope_ = block->scope();
  Interface* interface = scope_->interface();

  Comment cmnt(masm_, "[ ModuleLiteral");
  SetStatementPosition(block);

  ASSERT(!modules_.is_null());
  ASSERT(module_index_ < modules_->length());
  int index = module_index_++;

  // Set up module context.
  ASSERT(interface->Index() >= 0);
  __ Push(Smi::FromInt(interface->Index()));
  __ Push(Smi::FromInt(0));
  __ CallRuntime(Runtime::kHiddenPushModuleContext, 2);
  StoreToFrameField(
      StandardFrameConstants::kContextOffset, context_register());

  {
    Comment cmnt(masm_, "[ Declarations");
    VisitDeclarations(scope_->declarations());
  }

  // Populate the module description.
  Handle<ModuleInfo> description =
      ModuleInfo::Create(isolate(), interface, scope_);
  modules_->set(index, *description);

  scope_ = saved_scope;
  // Pop module context.
  LoadContextField(context_register(), Context::PREVIOUS_INDEX);
  // Update local stack frame.
  StoreToFrameField(
      StandardFrameConstants::kContextOffset, context_register());
}
void FullCodeGenerator::VisitModuleVariable(ModuleVariable* module) {
  // Nothing to do.
  // The instance object is resolved statically through the module's
  // interface.
}
void FullCodeGenerator::VisitModulePath(ModulePath* module) {
  // Nothing to do.
  // The instance object is resolved statically through the module's
  // interface.
}
void FullCodeGenerator::VisitModuleUrl(ModuleUrl* module) {
  // TODO(rossberg): dummy allocation for now.
  Scope* scope = module->body()->scope();
  Interface* interface = scope_->interface();

  ASSERT(!modules_.is_null());
  ASSERT(module_index_ < modules_->length());
  interface->Allocate(scope->module_var()->index());
  int index = module_index_++;

  Handle<ModuleInfo> description =
      ModuleInfo::Create(isolate(), interface, scope_);
  modules_->set(index, *description);
}
int FullCodeGenerator::DeclareGlobalsFlags() {
  ASSERT(DeclareGlobalsStrictMode::is_valid(strict_mode()));
  return DeclareGlobalsEvalFlag::encode(is_eval()) |
         DeclareGlobalsNativeFlag::encode(is_native()) |
         DeclareGlobalsStrictMode::encode(strict_mode());
}
void FullCodeGenerator::SetFunctionPosition(FunctionLiteral* fun) {
  CodeGenerator::RecordPositions(masm_, fun->start_position());
}
void FullCodeGenerator::SetReturnPosition(FunctionLiteral* fun) {
  CodeGenerator::RecordPositions(masm_, fun->end_position() - 1);
}
void FullCodeGenerator::SetStatementPosition(Statement* stmt) {
#ifdef ENABLE_DEBUGGER_SUPPORT
  if (!isolate()->debugger()->IsDebuggerActive()) {
    CodeGenerator::RecordPositions(masm_, stmt->position());
  } else {
    // Check if the statement will be breakable without adding a debug break
    // slot.
    BreakableStatementChecker checker(zone());
    checker.Check(stmt);
    // Record the position only if the statement is not breakable; for
    // breakable statements the recording is postponed to the breakable code
    // (typically an IC).
    bool position_recorded = CodeGenerator::RecordPositions(
        masm_, stmt->position(), !checker.is_breakable());
    // If a new position was recorded, generate a debug break slot to make
    // the statement breakable.
    if (position_recorded) {
      Debug::GenerateSlot(masm_);
    }
  }
#else
  CodeGenerator::RecordPositions(masm_, stmt->position());
#endif
}
void FullCodeGenerator::SetExpressionPosition(Expression* expr) {
#ifdef ENABLE_DEBUGGER_SUPPORT
  if (!isolate()->debugger()->IsDebuggerActive()) {
    CodeGenerator::RecordPositions(masm_, expr->position());
  } else {
    // Check if the expression will be breakable without adding a debug break
    // slot.
    BreakableStatementChecker checker(zone());
    checker.Check(expr);
    // Record the position only if the expression is not breakable; for
    // breakable expressions the recording is postponed to the breakable code
    // (typically an IC).
    bool position_recorded = CodeGenerator::RecordPositions(
        masm_, expr->position(), !checker.is_breakable());
    // If a new position was recorded, generate a debug break slot to make
    // the expression breakable.
    if (position_recorded) {
      Debug::GenerateSlot(masm_);
    }
  }
#else
  CodeGenerator::RecordPositions(masm_, expr->position());
#endif
}
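// Position tracking thus depends on the debugger state: positions are
// recorded eagerly while the debugger is inactive, but with an active
// debugger the BreakableStatementChecker decides whether an explicit debug
// break slot must be emitted to keep the statement or expression breakable.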
void FullCodeGenerator::SetStatementPosition(int pos) {
  CodeGenerator::RecordPositions(masm_, pos);
}
void FullCodeGenerator::SetSourcePosition(int pos) {
  if (pos != RelocInfo::kNoPosition) {
    masm_->positions_recorder()->RecordPosition(pos);
  }
}
// Lookup table for code generators for special runtime calls which are
// generated inline.
#define INLINE_FUNCTION_GENERATOR_ADDRESS(Name, argc, ressize) \
    &FullCodeGenerator::Emit##Name,

const FullCodeGenerator::InlineFunctionGenerator
    FullCodeGenerator::kInlineFunctionGenerators[] = {
  INLINE_FUNCTION_LIST(INLINE_FUNCTION_GENERATOR_ADDRESS)
};
#undef INLINE_FUNCTION_GENERATOR_ADDRESS
FullCodeGenerator::InlineFunctionGenerator
  FullCodeGenerator::FindInlineFunctionGenerator(Runtime::FunctionId id) {
  int lookup_index =
      static_cast<int>(id) - static_cast<int>(Runtime::kFirstInlineFunction);
  ASSERT(lookup_index >= 0);
  ASSERT(static_cast<size_t>(lookup_index) <
         ARRAY_SIZE(kInlineFunctionGenerators));
  return kInlineFunctionGenerators[lookup_index];
}
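// Together, the table and lookup above implement inline runtime call
// dispatch: a Runtime::FunctionId is translated (offset by
// Runtime::kFirstInlineFunction) into a pointer-to-member generator, which
// EmitInlineRuntimeCall below invokes on this code generator.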
void FullCodeGenerator::EmitInlineRuntimeCall(CallRuntime* expr) {
  const Runtime::Function* function = expr->function();
  ASSERT(function != NULL);
  InlineFunctionGenerator generator =
      FindInlineFunctionGenerator(function->function_id);
  ((*this).*(generator))(expr);
}
void FullCodeGenerator::EmitGeneratorNext(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  EmitGeneratorResume(args->at(0), args->at(1), JSGeneratorObject::NEXT);
}
void FullCodeGenerator::EmitGeneratorThrow(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  EmitGeneratorResume(args->at(0), args->at(1), JSGeneratorObject::THROW);
}
void FullCodeGenerator::EmitDebugBreakInOptimizedCode(CallRuntime* expr) {
  context()->Plug(handle(Smi::FromInt(0), isolate()));
}
void FullCodeGenerator::VisitBinaryOperation(BinaryOperation* expr) {
  switch (expr->op()) {
    case Token::COMMA:
      return VisitComma(expr);
    case Token::OR:
    case Token::AND:
      return VisitLogicalExpression(expr);
    default:
      return VisitArithmeticExpression(expr);
  }
}
void FullCodeGenerator::VisitInDuplicateContext(Expression* expr) {
  if (context()->IsEffect()) {
    VisitForEffect(expr);
  } else if (context()->IsAccumulatorValue()) {
    VisitForAccumulatorValue(expr);
  } else if (context()->IsStackValue()) {
    VisitForStackValue(expr);
  } else if (context()->IsTest()) {
    const TestContext* test = TestContext::cast(context());
    VisitForControl(expr, test->true_label(), test->false_label(),
                    test->fall_through());
  }
}
void FullCodeGenerator::VisitComma(BinaryOperation* expr) {
  Comment cmnt(masm_, "[ Comma");
  VisitForEffect(expr->left());
  VisitInDuplicateContext(expr->right());
}
void FullCodeGenerator::VisitLogicalExpression(BinaryOperation* expr) {
  bool is_logical_and = expr->op() == Token::AND;
  Comment cmnt(masm_, is_logical_and ? "[ Logical AND" : "[ Logical OR");
  Expression* left = expr->left();
  Expression* right = expr->right();
  BailoutId right_id = expr->RightId();
  Label done;

  if (context()->IsTest()) {
    Label eval_right;
    const TestContext* test = TestContext::cast(context());
    if (is_logical_and) {
      VisitForControl(left, &eval_right, test->false_label(), &eval_right);
    } else {
      VisitForControl(left, test->true_label(), &eval_right, &eval_right);
    }
    PrepareForBailoutForId(right_id, NO_REGISTERS);
    __ bind(&eval_right);

  } else if (context()->IsAccumulatorValue()) {
    VisitForAccumulatorValue(left);
    // We want the value in the accumulator for the test, and on the stack in
    // case we need it.
    __ Push(result_register());
    Label discard, restore;
    if (is_logical_and) {
      DoTest(left, &discard, &restore, &restore);
    } else {
      DoTest(left, &restore, &discard, &restore);
    }
    __ bind(&restore);
    __ Pop(result_register());
    __ jmp(&done);
    __ bind(&discard);
    __ Drop(1);
    PrepareForBailoutForId(right_id, NO_REGISTERS);

  } else if (context()->IsStackValue()) {
    VisitForAccumulatorValue(left);
    // We want the value in the accumulator for the test, and on the stack in
    // case we need it.
    __ Push(result_register());
    Label discard;
    if (is_logical_and) {
      DoTest(left, &discard, &done, &discard);
    } else {
      DoTest(left, &done, &discard, &discard);
    }
    __ bind(&discard);
    __ Drop(1);
    PrepareForBailoutForId(right_id, NO_REGISTERS);

  } else {
    ASSERT(context()->IsEffect());
    Label eval_right;
    if (is_logical_and) {
      VisitForControl(left, &eval_right, &done, &eval_right);
    } else {
      VisitForControl(left, &done, &eval_right, &eval_right);
    }
    PrepareForBailoutForId(right_id, NO_REGISTERS);
    __ bind(&eval_right);
  }

  VisitInDuplicateContext(right);
  __ bind(&done);
}
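// Note that in every context case above the left operand is evaluated
// exactly once and the right operand only when the short-circuit test falls
// through; the bailout recorded at right_id allows the optimizing compiler
// to re-enter unoptimized code at the point where only the right-hand side
// remains to be evaluated.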
void FullCodeGenerator::VisitArithmeticExpression(BinaryOperation* expr) {
  Token::Value op = expr->op();
  Comment cmnt(masm_, "[ ArithmeticExpression");
  Expression* left = expr->left();
  Expression* right = expr->right();
  OverwriteMode mode =
      left->ResultOverwriteAllowed()
      ? OVERWRITE_LEFT
      : (right->ResultOverwriteAllowed() ? OVERWRITE_RIGHT : NO_OVERWRITE);

  VisitForStackValue(left);
  VisitForAccumulatorValue(right);

  SetSourcePosition(expr->position());
  if (ShouldInlineSmiCase(op)) {
    EmitInlineSmiBinaryOp(expr, op, mode, left, right);
  } else {
    EmitBinaryOp(expr, op, mode);
  }
}
void FullCodeGenerator::VisitBlock(Block* stmt) {
  Comment cmnt(masm_, "[ Block");
  NestedBlock nested_block(this, stmt);
  SetStatementPosition(stmt);

  Scope* saved_scope = scope();
  // Push a block context when entering a scope with block scoped variables.
  if (stmt->scope() != NULL) {
    scope_ = stmt->scope();
    ASSERT(!scope_->is_module_scope());
    { Comment cmnt(masm_, "[ Extend block context");
      __ Push(scope_->GetScopeInfo());
      PushFunctionArgumentForContextAllocation();
      __ CallRuntime(Runtime::kHiddenPushBlockContext, 2);

      // Replace the context stored in the frame.
      StoreToFrameField(StandardFrameConstants::kContextOffset,
                        context_register());
    }
    { Comment cmnt(masm_, "[ Declarations");
      VisitDeclarations(scope_->declarations());
    }
  }

  VisitStatements(stmt->statements());
  scope_ = saved_scope;
  __ bind(nested_block.break_label());

  // Pop block context if necessary.
  if (stmt->scope() != NULL) {
    LoadContextField(context_register(), Context::PREVIOUS_INDEX);
    // Update local stack frame.
    StoreToFrameField(StandardFrameConstants::kContextOffset,
                      context_register());
  }
}
void FullCodeGenerator::VisitModuleStatement(ModuleStatement* stmt) {
  Comment cmnt(masm_, "[ Module context");

  __ Push(Smi::FromInt(stmt->proxy()->interface()->Index()));
  __ Push(Smi::FromInt(0));
  __ CallRuntime(Runtime::kHiddenPushModuleContext, 2);
  StoreToFrameField(
      StandardFrameConstants::kContextOffset, context_register());

  Scope* saved_scope = scope_;
  scope_ = stmt->body()->scope();
  VisitStatements(stmt->body()->statements());
  scope_ = saved_scope;
  // Pop module context.
  LoadContextField(context_register(), Context::PREVIOUS_INDEX);
  // Update local stack frame.
  StoreToFrameField(
      StandardFrameConstants::kContextOffset, context_register());
}
void FullCodeGenerator::VisitExpressionStatement(ExpressionStatement* stmt) {
  Comment cmnt(masm_, "[ ExpressionStatement");
  SetStatementPosition(stmt);
  VisitForEffect(stmt->expression());
}
void FullCodeGenerator::VisitEmptyStatement(EmptyStatement* stmt) {
  Comment cmnt(masm_, "[ EmptyStatement");
  SetStatementPosition(stmt);
}
void FullCodeGenerator::VisitIfStatement(IfStatement* stmt) {
  Comment cmnt(masm_, "[ IfStatement");
  SetStatementPosition(stmt);
  Label then_part, else_part, done;

  if (stmt->HasElseStatement()) {
    VisitForControl(stmt->condition(), &then_part, &else_part, &then_part);
    __ bind(&then_part);
    Visit(stmt->then_statement());
    __ jmp(&done);

    __ bind(&else_part);
    Visit(stmt->else_statement());
  } else {
    VisitForControl(stmt->condition(), &then_part, &done, &then_part);
    __ bind(&then_part);
    Visit(stmt->then_statement());
  }
  __ bind(&done);
  PrepareForBailoutForId(stmt->IfId(), NO_REGISTERS);
}
void FullCodeGenerator::VisitContinueStatement(ContinueStatement* stmt) {
  Comment cmnt(masm_, "[ ContinueStatement");
  SetStatementPosition(stmt);
  NestedStatement* current = nesting_stack_;
  int stack_depth = 0;
  int context_length = 0;
  // When continuing, we clobber the unpredictable value in the accumulator
  // with one that's safe for GC.
  ClearAccumulator();
  while (!current->IsContinueTarget(stmt->target())) {
    current = current->Exit(&stack_depth, &context_length);
  }
  __ Drop(stack_depth);
  if (context_length > 0) {
    while (context_length > 0) {
      LoadContextField(context_register(), Context::PREVIOUS_INDEX);
      --context_length;
    }
    StoreToFrameField(StandardFrameConstants::kContextOffset,
                      context_register());
  }

  __ jmp(current->AsIteration()->continue_label());
}
void FullCodeGenerator::VisitBreakStatement(BreakStatement* stmt) {
  Comment cmnt(masm_, "[ BreakStatement");
  SetStatementPosition(stmt);
  NestedStatement* current = nesting_stack_;
  int stack_depth = 0;
  int context_length = 0;
  // When breaking, we clobber the unpredictable value in the accumulator
  // with one that's safe for GC.
  ClearAccumulator();
  while (!current->IsBreakTarget(stmt->target())) {
    current = current->Exit(&stack_depth, &context_length);
  }
  __ Drop(stack_depth);
  if (context_length > 0) {
    while (context_length > 0) {
      LoadContextField(context_register(), Context::PREVIOUS_INDEX);
      --context_length;
    }
    StoreToFrameField(StandardFrameConstants::kContextOffset,
                      context_register());
  }

  __ jmp(current->AsBreakable()->break_label());
}
void FullCodeGenerator::EmitUnwindBeforeReturn() {
  NestedStatement* current = nesting_stack_;
  int stack_depth = 0;
  int context_length = 0;
  while (current != NULL) {
    current = current->Exit(&stack_depth, &context_length);
  }
  __ Drop(stack_depth);
}
void FullCodeGenerator::VisitReturnStatement(ReturnStatement* stmt) {
  Comment cmnt(masm_, "[ ReturnStatement");
  SetStatementPosition(stmt);
  Expression* expr = stmt->expression();
  VisitForAccumulatorValue(expr);
  EmitUnwindBeforeReturn();
  EmitReturnSequence();
}
void FullCodeGenerator::VisitWithStatement(WithStatement* stmt) {
  Comment cmnt(masm_, "[ WithStatement");
  SetStatementPosition(stmt);

  VisitForStackValue(stmt->expression());
  PushFunctionArgumentForContextAllocation();
  __ CallRuntime(Runtime::kHiddenPushWithContext, 2);
  StoreToFrameField(
      StandardFrameConstants::kContextOffset, context_register());

  Scope* saved_scope = scope();
  scope_ = stmt->scope();
  { WithOrCatch body(this);
    Visit(stmt->statement());
  }
  scope_ = saved_scope;

  // Pop context.
  LoadContextField(context_register(), Context::PREVIOUS_INDEX);
  // Update local stack frame.
  StoreToFrameField(
      StandardFrameConstants::kContextOffset, context_register());
}
void FullCodeGenerator::VisitDoWhileStatement(DoWhileStatement* stmt) {
  Comment cmnt(masm_, "[ DoWhileStatement");
  SetStatementPosition(stmt);
  Label body, book_keeping;

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  __ bind(&body);
  Visit(stmt->body());

  // Record the position of the do-while condition and make sure it is
  // possible to break on the condition.
  __ bind(loop_statement.continue_label());
  PrepareForBailoutForId(stmt->ContinueId(), NO_REGISTERS);
  SetExpressionPosition(stmt->cond());
  VisitForControl(stmt->cond(),
                  &book_keeping,
                  loop_statement.break_label(),
                  &book_keeping);

  // Check stack before looping.
  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
  __ bind(&book_keeping);
  EmitBackEdgeBookkeeping(stmt, &body);
  __ jmp(&body);

  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}
void FullCodeGenerator::VisitWhileStatement(WhileStatement* stmt) {
  Comment cmnt(masm_, "[ WhileStatement");
  Label test, body;

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // Emit the test at the bottom of the loop.
  __ jmp(&test);

  __ bind(&body);
  Visit(stmt->body());

  // Emit the statement position here as this is where the while statement
  // code starts.
  __ bind(loop_statement.continue_label());
  SetStatementPosition(stmt);

  // Check stack before looping.
  EmitBackEdgeBookkeeping(stmt, &body);

  __ bind(&test);
  VisitForControl(stmt->cond(),
                  &body,
                  loop_statement.break_label(),
                  loop_statement.break_label());

  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}
void FullCodeGenerator::VisitForStatement(ForStatement* stmt) {
  Comment cmnt(masm_, "[ ForStatement");
  Label test, body;

  Iteration loop_statement(this, stmt);

  // Set statement position for a break slot before entering the for-body.
  SetStatementPosition(stmt);

  if (stmt->init() != NULL) {
    Visit(stmt->init());
  }

  increment_loop_depth();
  // Emit the test at the bottom of the loop (even if empty).
  __ jmp(&test);

  __ bind(&body);
  Visit(stmt->body());

  PrepareForBailoutForId(stmt->ContinueId(), NO_REGISTERS);
  __ bind(loop_statement.continue_label());
  if (stmt->next() != NULL) {
    Visit(stmt->next());
  }

  // Emit the statement position here as this is where the for statement
  // code starts.
  SetStatementPosition(stmt);

  // Check stack before looping.
  EmitBackEdgeBookkeeping(stmt, &body);

  __ bind(&test);
  if (stmt->cond() != NULL) {
    VisitForControl(stmt->cond(),
                    &body,
                    loop_statement.break_label(),
                    loop_statement.break_label());
  } else {
    __ jmp(&body);
  }

  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}
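// The loop visitors above share one shape: the condition test is emitted at
// the bottom of the loop, EmitBackEdgeBookkeeping sits on the back edge so
// the interrupt check (and later OSR patching, see BackEdgeTable below) runs
// once per iteration, and increment/decrement_loop_depth() tracking lets
// ShouldInlineSmiCase favor code inside hot loop bodies.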
void FullCodeGenerator::VisitTryCatchStatement(TryCatchStatement* stmt) {
  Comment cmnt(masm_, "[ TryCatchStatement");
  SetStatementPosition(stmt);
  // The try block adds a handler to the exception handler chain before
  // entering, and removes it again when exiting normally.  If an exception
  // is thrown during execution of the try block, the handler is consumed
  // and control is passed to the catch block with the exception in the
  // result register.

  Label try_entry, handler_entry, exit;
  __ jmp(&try_entry);
  __ bind(&handler_entry);
  handler_table()->set(stmt->index(), Smi::FromInt(handler_entry.pos()));
  // Exception handler code, the exception is in the result register.
  // Extend the context before executing the catch block.
  { Comment cmnt(masm_, "[ Extend catch context");
    __ Push(stmt->variable()->name());
    __ Push(result_register());
    PushFunctionArgumentForContextAllocation();
    __ CallRuntime(Runtime::kHiddenPushCatchContext, 3);
    StoreToFrameField(StandardFrameConstants::kContextOffset,
                      context_register());
  }

  Scope* saved_scope = scope();
  scope_ = stmt->scope();
  ASSERT(scope_->declarations()->is_empty());
  { WithOrCatch catch_body(this);
    Visit(stmt->catch_block());
  }
  // Restore the context.
  LoadContextField(context_register(), Context::PREVIOUS_INDEX);
  StoreToFrameField(StandardFrameConstants::kContextOffset,
                    context_register());
  scope_ = saved_scope;
  __ jmp(&exit);

  // Try block code.  Sets up the exception handler chain.
  __ bind(&try_entry);
  __ PushTryHandler(StackHandler::CATCH, stmt->index());
  { TryCatch try_body(this);
    Visit(stmt->try_block());
  }
  __ PopTryHandler();
  __ bind(&exit);
}
void FullCodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
  Comment cmnt(masm_, "[ TryFinallyStatement");
  SetStatementPosition(stmt);
  // The finally block is reached either by falling out of the try block
  // normally, or by an explicit call from the code that unwinds for throw,
  // return, break, or continue.

  Label try_entry, handler_entry, finally_entry;

  // Jump to try-handler setup and try-block code.
  __ jmp(&try_entry);
  __ bind(&handler_entry);
  handler_table()->set(stmt->index(), Smi::FromInt(handler_entry.pos()));
  // Exception handler code.  This code is only executed when an exception
  // is thrown.  The exception is in the result register, and must be
  // preserved by the finally block.  Call the finally block and then
  // rethrow the exception if it returns.
  __ Call(&finally_entry);
  __ Push(result_register());
  __ CallRuntime(Runtime::kHiddenReThrow, 1);

  // Finally block implementation.
  __ bind(&finally_entry);
  EnterFinallyBlock();
  { Finally finally_body(this);
    Visit(stmt->finally_block());
  }
  ExitFinallyBlock();  // Return to the calling code.

  // Set up try handler.
  __ bind(&try_entry);
  __ PushTryHandler(StackHandler::FINALLY, stmt->index());
  { TryFinally try_body(this, &finally_entry);
    Visit(stmt->try_block());
  }
  __ PopTryHandler();
  // Execute the finally block on the way out.  Clobber the unpredictable
  // value in the result register with one that's safe for GC because the
  // finally block will unconditionally preserve the result register on the
  // stack.
  ClearAccumulator();
  __ Call(&finally_entry);
}
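// The finally block is entered like a subroutine via __ Call from three
// places: normal fall-through of the try block, the exception handler, and
// the unwinding code for return/break/continue.  EnterFinallyBlock and
// ExitFinallyBlock (architecture-specific) save and restore the state the
// finally block must preserve, so control can return to whichever caller
// invoked it.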
void FullCodeGenerator::VisitDebuggerStatement(DebuggerStatement* stmt) {
#ifdef ENABLE_DEBUGGER_SUPPORT
  Comment cmnt(masm_, "[ DebuggerStatement");
  SetStatementPosition(stmt);

  __ DebugBreak();
  // Ignore the return value.
#endif
}
void FullCodeGenerator::VisitCaseClause(CaseClause* clause) {
  UNREACHABLE();
}
void FullCodeGenerator::VisitConditional(Conditional* expr) {
  Comment cmnt(masm_, "[ Conditional");
  Label true_case, false_case, done;
  VisitForControl(expr->condition(), &true_case, &false_case, &true_case);

  __ bind(&true_case);
  SetExpressionPosition(expr->then_expression());
  if (context()->IsTest()) {
    const TestContext* for_test = TestContext::cast(context());
    VisitForControl(expr->then_expression(),
                    for_test->true_label(),
                    for_test->false_label(),
                    NULL);
  } else {
    VisitInDuplicateContext(expr->then_expression());
    __ jmp(&done);
  }

  __ bind(&false_case);
  SetExpressionPosition(expr->else_expression());
  VisitInDuplicateContext(expr->else_expression());
  // If control flow falls through Visit, merge it with true case here.
  if (!context()->IsTest()) {
    __ bind(&done);
  }
}
void FullCodeGenerator::VisitLiteral(Literal* expr) {
  Comment cmnt(masm_, "[ Literal");
  context()->Plug(expr->value());
}
void FullCodeGenerator::VisitFunctionLiteral(FunctionLiteral* expr) {
  Comment cmnt(masm_, "[ FunctionLiteral");

  // Build the function boilerplate and instantiate it.
  Handle<SharedFunctionInfo> function_info =
      Compiler::BuildFunctionInfo(expr, script());
  if (function_info.is_null()) {
    SetStackOverflow();
    return;
  }
  EmitNewClosure(function_info, expr->pretenure());
}
void FullCodeGenerator::VisitNativeFunctionLiteral(
    NativeFunctionLiteral* expr) {
  Comment cmnt(masm_, "[ NativeFunctionLiteral");

  // Compute the function template for the native function.
  Handle<String> name = expr->name();
  v8::Handle<v8::FunctionTemplate> fun_template =
      expr->extension()->GetNativeFunctionTemplate(
          reinterpret_cast<v8::Isolate*>(isolate()), v8::Utils::ToLocal(name));
  ASSERT(!fun_template.IsEmpty());

  // Instantiate the function and create a shared function info from it.
  Handle<JSFunction> fun = Utils::OpenHandle(*fun_template->GetFunction());
  const int literals = fun->NumberOfLiterals();
  Handle<Code> code = Handle<Code>(fun->shared()->code());
  Handle<Code> construct_stub = Handle<Code>(fun->shared()->construct_stub());
  bool is_generator = false;
  Handle<SharedFunctionInfo> shared =
      isolate()->factory()->NewSharedFunctionInfo(name, literals, is_generator,
          code, Handle<ScopeInfo>(fun->shared()->scope_info()));
  shared->set_construct_stub(*construct_stub);

  // Copy the function data to the shared function info.
  shared->set_function_data(fun->shared()->function_data());
  int parameters = fun->shared()->formal_parameter_count();
  shared->set_formal_parameter_count(parameters);

  EmitNewClosure(shared, false);
}
void FullCodeGenerator::VisitThrow(Throw* expr) {
  Comment cmnt(masm_, "[ Throw");
  VisitForStackValue(expr->exception());
  __ CallRuntime(Runtime::kHiddenThrow, 1);
  // Never returns here.
}
FullCodeGenerator::NestedStatement* FullCodeGenerator::TryCatch::Exit(
    int* stack_depth,
    int* context_length) {
  // The macros used here must preserve the result register.
  __ Drop(*stack_depth);
  __ PopTryHandler();
  *stack_depth = 0;
  return previous_;
}
bool FullCodeGenerator::TryLiteralCompare(CompareOperation* expr) {
  Expression* sub_expr;
  Handle<String> check;
  if (expr->IsLiteralCompareTypeof(&sub_expr, &check)) {
    EmitLiteralCompareTypeof(expr, sub_expr, check);
    return true;
  }

  if (expr->IsLiteralCompareUndefined(&sub_expr, isolate())) {
    EmitLiteralCompareNil(expr, sub_expr, kUndefinedValue);
    return true;
  }

  if (expr->IsLiteralCompareNull(&sub_expr)) {
    EmitLiteralCompareNil(expr, sub_expr, kNullValue);
    return true;
  }

  return false;
}
void BackEdgeTable::Patch(Isolate* isolate, Code* unoptimized) {
  DisallowHeapAllocation no_gc;
  Code* patch = isolate->builtins()->builtin(Builtins::kOnStackReplacement);

  // Iterate over the back edge table and patch every interrupt call at the
  // current loop nesting level to an unconditional call to the replacement
  // code.
  int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level();

  BackEdgeTable back_edges(unoptimized, &no_gc);
  for (uint32_t i = 0; i < back_edges.length(); i++) {
    if (static_cast<int>(back_edges.loop_depth(i)) == loop_nesting_level) {
      PatchAt(unoptimized, back_edges.pc(i), ON_STACK_REPLACEMENT, patch);
    }
  }

  unoptimized->set_back_edges_patched_for_osr(true);
  ASSERT(Verify(isolate, unoptimized, loop_nesting_level));
}
void BackEdgeTable::Revert(Isolate* isolate, Code* unoptimized) {
  DisallowHeapAllocation no_gc;
  Code* patch = isolate->builtins()->builtin(Builtins::kInterruptCheck);

  // Iterate over the back edge table and revert the patched interrupt calls.
  int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level();

  BackEdgeTable back_edges(unoptimized, &no_gc);
  for (uint32_t i = 0; i < back_edges.length(); i++) {
    if (static_cast<int>(back_edges.loop_depth(i)) <= loop_nesting_level) {
      PatchAt(unoptimized, back_edges.pc(i), INTERRUPT, patch);
    }
  }

  unoptimized->set_back_edges_patched_for_osr(false);
  unoptimized->set_allow_osr_at_loop_nesting_level(0);
  // Assert that none of the back edges are patched anymore.
  ASSERT(Verify(isolate, unoptimized, -1));
}
void BackEdgeTable::AddStackCheck(Handle<Code> code, uint32_t pc_offset) {
  DisallowHeapAllocation no_gc;
  Isolate* isolate = code->GetIsolate();
  Address pc = code->instruction_start() + pc_offset;
  Code* patch = isolate->builtins()->builtin(Builtins::kOsrAfterStackCheck);
  PatchAt(*code, pc, OSR_AFTER_STACK_CHECK, patch);
}
void BackEdgeTable::RemoveStackCheck(Handle<Code> code, uint32_t pc_offset) {
  DisallowHeapAllocation no_gc;
  Isolate* isolate = code->GetIsolate();
  Address pc = code->instruction_start() + pc_offset;

  if (OSR_AFTER_STACK_CHECK == GetBackEdgeState(isolate, *code, pc)) {
    Code* patch = isolate->builtins()->builtin(Builtins::kOnStackReplacement);
    PatchAt(*code, pc, ON_STACK_REPLACEMENT, patch);
  }
}
#ifdef DEBUG
// Verify that back edges of the given loop nesting level (and only those)
// have been patched.
bool BackEdgeTable::Verify(Isolate* isolate,
                           Code* unoptimized,
                           int loop_nesting_level) {
  DisallowHeapAllocation no_gc;
  BackEdgeTable back_edges(unoptimized, &no_gc);
  for (uint32_t i = 0; i < back_edges.length(); i++) {
    uint32_t loop_depth = back_edges.loop_depth(i);
    CHECK_EQ((static_cast<int>(loop_depth) <= loop_nesting_level),
             GetBackEdgeState(isolate,
                              unoptimized,
                              back_edges.pc(i)) != INTERRUPT);
  }
  return true;
}
#endif  // DEBUG
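// Back edge patching is a small state machine over the calls emitted at back
// edges: freshly compiled code contains INTERRUPT calls; Patch rewrites those
// at the current OSR nesting level to ON_STACK_REPLACEMENT, AddStackCheck
// temporarily swaps in OSR_AFTER_STACK_CHECK, and Revert restores the plain
// interrupt calls and resets the nesting level.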