39 AstTyper::AstTyper(CompilationInfo*
info)
42 Handle<Code>(info->closure()->shared()->
code()),
43 Handle<Context>(info->closure()->context()->native_context()),
45 store_(info->zone()) {
46 InitializeAstVisitor(info->zone());
// Guard a recursive visit against stack overflow: bail out of the enclosing
// function as soon as the visitor reports overflow.  The `call;` line was
// elided in the extraction and is restored; do/while(false) makes the macro
// statement-safe in if/else bodies.
#define RECURSE(call)                         \
  do {                                        \
    ASSERT(!visitor->HasStackOverflow());     \
    call;                                     \
    if (visitor->HasStackOverflow()) return;  \
  } while (false)
82 #endif // OBJECT_PRINT
85 Effect AstTyper::ObservedOnStack(
Object* value) {
86 Type* lower = Type::OfCurrently(
handle(value, isolate()), zone());
87 return Effect(
Bounds(lower, Type::Any(zone())));
91 void AstTyper::ObserveTypesAtOsrEntry(IterationStatement* stmt) {
92 if (stmt->OsrEntryId() != info_->osr_ast_id())
return;
95 JavaScriptFrameIterator it(isolate());
96 JavaScriptFrame* frame = it.frame();
97 Scope* scope = info_->scope();
100 ASSERT_EQ(*info_->closure(), frame->function());
102 int params = scope->num_parameters();
103 int locals = scope->StackLocalCount();
107 store_.Seq(parameter_index(-1), ObservedOnStack(frame->receiver()));
108 for (
int i = 0; i < params; i++) {
109 store_.Seq(parameter_index(i), ObservedOnStack(frame->GetParameter(i)));
112 for (
int i = 0; i < locals; i++) {
113 store_.Seq(stack_local_index(i), ObservedOnStack(frame->GetExpression(i)));
117 if (FLAG_trace_osr && FLAG_print_scopes) {
118 PrintObserved(scope->receiver(),
120 store_.LookupBounds(parameter_index(-1)).lower);
122 for (
int i = 0; i < params; i++) {
123 PrintObserved(scope->parameter(i),
124 frame->GetParameter(i),
125 store_.LookupBounds(parameter_index(i)).lower);
128 ZoneList<Variable*> local_vars(locals, zone());
129 ZoneList<Variable*> context_vars(scope->ContextLocalCount(), zone());
130 scope->CollectStackAndContextLocals(&local_vars, &context_vars);
131 for (
int i = 0; i < locals; i++) {
132 PrintObserved(local_vars.at(i),
133 frame->GetExpression(i),
134 store_.LookupBounds(stack_local_index(i)).lower);
137 #endif // OBJECT_PRINT
// Second definition of RECURSE for use inside AstTyper member functions,
// where the overflow flag lives on `this` rather than on a `visitor` local.
#define RECURSE(call)                \
  do {                               \
    ASSERT(!HasStackOverflow());     \
    call;                            \
    if (HasStackOverflow()) return;  \
  } while (false)
149 void AstTyper::VisitStatements(ZoneList<Statement*>* stmts) {
150 for (
int i = 0; i < stmts->length(); ++i) {
151 Statement* stmt = stmts->at(i);
153 if (stmt->IsJump())
break;
158 void AstTyper::VisitBlock(
Block* stmt) {
159 RECURSE(VisitStatements(stmt->statements()));
160 if (stmt->labels() !=
NULL) {
166 void AstTyper::VisitExpressionStatement(ExpressionStatement* stmt) {
167 RECURSE(Visit(stmt->expression()));
171 void AstTyper::VisitEmptyStatement(EmptyStatement* stmt) {
175 void AstTyper::VisitIfStatement(IfStatement* stmt) {
177 if (!stmt->condition()->ToBooleanIsTrue() &&
178 !stmt->condition()->ToBooleanIsFalse()) {
179 stmt->condition()->RecordToBooleanTypeFeedback(oracle());
182 RECURSE(Visit(stmt->condition()));
183 Effects then_effects = EnterEffects();
184 RECURSE(Visit(stmt->then_statement()));
186 Effects else_effects = EnterEffects();
187 RECURSE(Visit(stmt->else_statement()));
189 then_effects.Alt(else_effects);
190 store_.Seq(then_effects);
194 void AstTyper::VisitContinueStatement(ContinueStatement* stmt) {
199 void AstTyper::VisitBreakStatement(BreakStatement* stmt) {
204 void AstTyper::VisitReturnStatement(ReturnStatement* stmt) {
207 stmt->expression()->RecordToBooleanTypeFeedback(oracle());
209 RECURSE(Visit(stmt->expression()));
214 void AstTyper::VisitWithStatement(WithStatement* stmt) {
220 void AstTyper::VisitSwitchStatement(SwitchStatement* stmt) {
223 ZoneList<CaseClause*>* clauses = stmt->cases();
224 Effects local_effects(zone());
225 bool complex_effects =
false;
227 for (
int i = 0; i < clauses->length(); ++i) {
228 CaseClause* clause = clauses->at(i);
230 Effects clause_effects = EnterEffects();
232 if (!clause->is_default()) {
233 Expression* label = clause->label();
238 oracle()->CompareType(clause->CompareId(),
239 &tag_type, &label_type, &combined_type);
240 NarrowLowerType(stmt->tag(), tag_type);
241 NarrowLowerType(label, label_type);
242 clause->set_compare_type(combined_type);
245 if (!clause_effects.IsEmpty()) complex_effects =
true;
248 ZoneList<Statement*>* stmts = clause->statements();
249 RECURSE(VisitStatements(stmts));
251 if (stmts->is_empty() || stmts->last()->IsJump()) {
252 local_effects.Alt(clause_effects);
254 complex_effects =
true;
258 if (complex_effects) {
261 store_.Seq(local_effects);
266 void AstTyper::VisitCaseClause(CaseClause* clause) {
271 void AstTyper::VisitDoWhileStatement(DoWhileStatement* stmt) {
273 if (!stmt->cond()->ToBooleanIsTrue()) {
274 stmt->cond()->RecordToBooleanTypeFeedback(oracle());
281 ObserveTypesAtOsrEntry(stmt);
288 void AstTyper::VisitWhileStatement(WhileStatement* stmt) {
290 if (!stmt->cond()->ToBooleanIsTrue()) {
291 stmt->cond()->RecordToBooleanTypeFeedback(oracle());
296 ObserveTypesAtOsrEntry(stmt);
302 void AstTyper::VisitForStatement(ForStatement* stmt) {
303 if (stmt->init() !=
NULL) {
307 if (stmt->cond() !=
NULL) {
309 stmt->cond()->RecordToBooleanTypeFeedback(oracle());
313 ObserveTypesAtOsrEntry(stmt);
315 if (stmt->next() !=
NULL) {
323 void AstTyper::VisitForInStatement(ForInStatement* stmt) {
325 stmt->set_for_in_type(static_cast<ForInStatement::ForInType>(
326 oracle()->ForInType(stmt->ForInFeedbackSlot())));
328 RECURSE(Visit(stmt->enumerable()));
330 ObserveTypesAtOsrEntry(stmt);
336 void AstTyper::VisitForOfStatement(ForOfStatement* stmt) {
337 RECURSE(Visit(stmt->iterable()));
344 void AstTyper::VisitTryCatchStatement(TryCatchStatement* stmt) {
345 Effects try_effects = EnterEffects();
346 RECURSE(Visit(stmt->try_block()));
348 Effects catch_effects = EnterEffects();
350 RECURSE(Visit(stmt->catch_block()));
352 try_effects.Alt(catch_effects);
353 store_.Seq(try_effects);
359 void AstTyper::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
360 RECURSE(Visit(stmt->try_block()));
362 RECURSE(Visit(stmt->finally_block()));
366 void AstTyper::VisitDebuggerStatement(DebuggerStatement* stmt) {
371 void AstTyper::VisitFunctionLiteral(FunctionLiteral* expr) {
372 expr->InitializeSharedInfo(Handle<Code>(info_->closure()->shared()->code()));
376 void AstTyper::VisitNativeFunctionLiteral(NativeFunctionLiteral* expr) {
380 void AstTyper::VisitConditional(Conditional* expr) {
382 expr->condition()->RecordToBooleanTypeFeedback(oracle());
384 RECURSE(Visit(expr->condition()));
385 Effects then_effects = EnterEffects();
386 RECURSE(Visit(expr->then_expression()));
388 Effects else_effects = EnterEffects();
389 RECURSE(Visit(expr->else_expression()));
391 then_effects.Alt(else_effects);
392 store_.Seq(then_effects);
394 NarrowType(expr, Bounds::Either(
395 expr->then_expression()->bounds(),
396 expr->else_expression()->bounds(), zone()));
400 void AstTyper::VisitVariableProxy(VariableProxy* expr) {
401 Variable* var = expr->var();
402 if (var->IsStackAllocated()) {
403 NarrowType(expr, store_.LookupBounds(variable_index(var)));
408 void AstTyper::VisitLiteral(Literal* expr) {
409 Type* type = Type::Constant(expr->value(), zone());
410 NarrowType(expr,
Bounds(type));
414 void AstTyper::VisitRegExpLiteral(RegExpLiteral* expr) {
415 NarrowType(expr,
Bounds(Type::RegExp(zone())));
419 void AstTyper::VisitObjectLiteral(ObjectLiteral* expr) {
420 ZoneList<ObjectLiteral::Property*>* properties = expr->properties();
421 for (
int i = 0; i < properties->length(); ++i) {
422 ObjectLiteral::Property* prop = properties->at(i);
425 if ((prop->kind() == ObjectLiteral::Property::MATERIALIZED_LITERAL &&
426 !CompileTimeValue::IsCompileTimeValue(prop->value())) ||
427 prop->kind() == ObjectLiteral::Property::COMPUTED) {
428 if (prop->key()->value()->IsInternalizedString() && prop->emit_store()) {
429 prop->RecordTypeFeedback(oracle());
440 void AstTyper::VisitArrayLiteral(ArrayLiteral* expr) {
441 ZoneList<Expression*>* values = expr->values();
442 for (
int i = 0; i < values->length(); ++i) {
443 Expression* value = values->at(i);
447 NarrowType(expr,
Bounds(Type::Array(zone())));
451 void AstTyper::VisitAssignment(Assignment* expr) {
453 Property* prop = expr->target()->AsProperty();
455 TypeFeedbackId
id = expr->AssignmentFeedbackId();
456 expr->set_is_uninitialized(oracle()->StoreIsUninitialized(
id));
457 if (!expr->IsUninitialized()) {
458 if (prop->key()->IsPropertyName()) {
459 Literal* lit_key = prop->key()->AsLiteral();
462 oracle()->AssignmentReceiverTypes(
id, name, expr->GetReceiverTypes());
465 oracle()->KeyedAssignmentReceiverTypes(
466 id, expr->GetReceiverTypes(), &store_mode);
467 expr->set_store_mode(store_mode);
473 expr->is_compound() ? expr->binary_operation() : expr->value();
474 RECURSE(Visit(expr->target()));
476 NarrowType(expr, rhs->bounds());
478 VariableProxy* proxy = expr->target()->AsVariableProxy();
479 if (proxy !=
NULL && proxy->var()->IsStackAllocated()) {
480 store_.Seq(variable_index(proxy->var()), Effect(expr->bounds()));
485 void AstTyper::VisitYield(Yield* expr) {
486 RECURSE(Visit(expr->generator_object()));
487 RECURSE(Visit(expr->expression()));
493 void AstTyper::VisitThrow(Throw* expr) {
494 RECURSE(Visit(expr->exception()));
501 void AstTyper::VisitProperty(Property* expr) {
503 TypeFeedbackId
id = expr->PropertyFeedbackId();
504 expr->set_is_uninitialized(oracle()->LoadIsUninitialized(
id));
505 if (!expr->IsUninitialized()) {
506 if (expr->key()->IsPropertyName()) {
507 Literal* lit_key = expr->key()->AsLiteral();
511 oracle()->PropertyReceiverTypes(
512 id, name, expr->GetReceiverTypes(), &is_prototype);
513 expr->set_is_function_prototype(is_prototype);
516 oracle()->KeyedPropertyReceiverTypes(
517 id, expr->GetReceiverTypes(), &is_string);
518 expr->set_is_string_access(is_string);
529 void AstTyper::VisitCall(Call* expr) {
531 RECURSE(Visit(expr->expression()));
532 if (!expr->expression()->IsProperty() &&
533 expr->HasCallFeedbackSlot() &&
534 oracle()->CallIsMonomorphic(expr->CallFeedbackSlot())) {
535 expr->set_target(oracle()->GetCallTarget(expr->CallFeedbackSlot()));
538 ZoneList<Expression*>* args = expr->arguments();
539 for (
int i = 0; i < args->length(); ++i) {
540 Expression* arg = args->at(i);
544 VariableProxy* proxy = expr->expression()->AsVariableProxy();
545 if (proxy !=
NULL && proxy->var()->is_possibly_eval(isolate())) {
553 void AstTyper::VisitCallNew(CallNew* expr) {
555 expr->RecordTypeFeedback(oracle());
557 RECURSE(Visit(expr->expression()));
558 ZoneList<Expression*>* args = expr->arguments();
559 for (
int i = 0; i < args->length(); ++i) {
560 Expression* arg = args->at(i);
568 void AstTyper::VisitCallRuntime(CallRuntime* expr) {
569 ZoneList<Expression*>* args = expr->arguments();
570 for (
int i = 0; i < args->length(); ++i) {
571 Expression* arg = args->at(i);
579 void AstTyper::VisitUnaryOperation(UnaryOperation* expr) {
583 expr->expression()->RecordToBooleanTypeFeedback(oracle());
586 RECURSE(Visit(expr->expression()));
588 switch (expr->op()) {
591 NarrowType(expr,
Bounds(Type::Boolean(zone())));
597 NarrowType(expr,
Bounds(Type::InternalizedString(zone())));
605 void AstTyper::VisitCountOperation(CountOperation* expr) {
607 TypeFeedbackId store_id = expr->CountStoreFeedbackId();
608 expr->set_store_mode(oracle()->GetStoreMode(store_id));
609 oracle()->CountReceiverTypes(store_id, expr->GetReceiverTypes());
610 expr->set_type(oracle()->CountType(expr->CountBinOpFeedbackId()));
613 RECURSE(Visit(expr->expression()));
615 NarrowType(expr,
Bounds(Type::SignedSmall(zone()), Type::Number(zone())));
617 VariableProxy* proxy = expr->expression()->AsVariableProxy();
618 if (proxy !=
NULL && proxy->var()->IsStackAllocated()) {
619 store_.Seq(variable_index(proxy->var()), Effect(expr->bounds()));
624 void AstTyper::VisitBinaryOperation(BinaryOperation* expr) {
629 Maybe<int> fixed_right_arg;
630 Handle<AllocationSite> allocation_site;
631 oracle()->BinaryType(expr->BinaryOperationFeedbackId(),
632 &left_type, &right_type, &type, &fixed_right_arg,
633 &allocation_site, expr->op());
634 NarrowLowerType(expr, type);
635 NarrowLowerType(expr->left(), left_type);
636 NarrowLowerType(expr->right(), right_type);
637 expr->set_allocation_site(allocation_site);
638 expr->set_fixed_right_arg(fixed_right_arg);
640 expr->left()->RecordToBooleanTypeFeedback(oracle());
643 switch (expr->op()) {
647 NarrowType(expr, expr->right()->bounds());
651 Effects left_effects = EnterEffects();
654 Effects right_effects = EnterEffects();
657 left_effects.Alt(right_effects);
658 store_.Seq(left_effects);
660 NarrowType(expr, Bounds::Either(
661 expr->left()->bounds(), expr->right()->bounds(), zone()));
665 case Token::BIT_AND: {
668 Type* upper = Type::Union(
669 expr->left()->bounds().upper, expr->right()->bounds().upper, zone());
670 if (!upper->Is(Type::Signed32())) upper = Type::Signed32(zone());
671 Type* lower = Type::Intersect(Type::SignedSmall(zone()), upper, zone());
672 NarrowType(expr,
Bounds(lower, upper));
681 Bounds(Type::SignedSmall(zone()), Type::Signed32(zone())));
689 NarrowType(expr,
Bounds(Type::SignedSmall(zone()), Type::Number(zone())));
694 Bounds l = expr->left()->bounds();
695 Bounds r = expr->right()->bounds();
699 l.lower->
Is(
Type::String()) || r.lower->
Is(
Type::String()) ?
700 Type::String(zone()) :
701 l.lower->
Is(
Type::Number()) && r.lower->
Is(
Type::Number()) ?
704 l.upper->
Is(Type::String()) || r.upper->Is(Type::String()) ?
705 Type::String(zone()) :
706 l.upper->
Is(
Type::Number()) && r.upper->
Is(
Type::Number()) ?
707 Type::Number(zone()) :
Type::NumberOrString(zone());
708 NarrowType(expr,
Bounds(lower, upper));
717 NarrowType(expr,
Bounds(Type::SignedSmall(zone()), Type::Number(zone())));
725 void AstTyper::VisitCompareOperation(CompareOperation* expr) {
730 oracle()->CompareType(expr->CompareOperationFeedbackId(),
731 &left_type, &right_type, &combined_type);
732 NarrowLowerType(expr->left(), left_type);
733 NarrowLowerType(expr->right(), right_type);
734 expr->set_combined_type(combined_type);
739 NarrowType(expr,
Bounds(Type::Boolean(zone())));
743 void AstTyper::VisitThisFunction(ThisFunction* expr) {
747 void AstTyper::VisitDeclarations(ZoneList<Declaration*>* decls) {
748 for (
int i = 0; i < decls->length(); ++i) {
749 Declaration* decl = decls->at(i);
755 void AstTyper::VisitVariableDeclaration(VariableDeclaration* declaration) {
759 void AstTyper::VisitFunctionDeclaration(FunctionDeclaration* declaration) {
760 RECURSE(Visit(declaration->fun()));
764 void AstTyper::VisitModuleDeclaration(ModuleDeclaration* declaration) {
765 RECURSE(Visit(declaration->module()));
769 void AstTyper::VisitImportDeclaration(ImportDeclaration* declaration) {
770 RECURSE(Visit(declaration->module()));
774 void AstTyper::VisitExportDeclaration(ExportDeclaration* declaration) {
778 void AstTyper::VisitModuleLiteral(ModuleLiteral* module) {
779 RECURSE(Visit(module->body()));
783 void AstTyper::VisitModuleVariable(ModuleVariable* module) {
787 void AstTyper::VisitModulePath(ModulePath* module) {
788 RECURSE(Visit(module->module()));
792 void AstTyper::VisitModuleUrl(ModuleUrl* module) {
796 void AstTyper::VisitModuleStatement(ModuleStatement* stmt) {
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter NULL
VariableDeclaration * function() const
void PrintF(const char *format,...)
static Handle< String > cast(Handle< S > that)
kSerializedDataOffset Object
BoundsImpl< ZoneTypeConfig > Bounds
TypeImpl< ZoneTypeConfig > Type
#define ASSERT(condition)
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes number of stack frames inspected by the profiler percentage of ICs that must have type info to allow optimization extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long expose natives in global object expose freeBuffer extension expose gc extension under the specified name expose externalize string extension number of stack frames to capture disable builtin natives files print name of functions for which code is generated use random jit cookie to mask large constants trace lazy optimization use adaptive optimizations always try to OSR functions trace optimize function deoptimization minimum length for automatic enable preparsing maximum number of optimization attempts before giving up cache prototype transitions trace debugging JSON request response trace out of bounds accesses to external arrays trace_js_array_abuse automatically set the debug break flag when debugger commands are in the queue abort by crashing maximum length of function source code printed in a stack trace max size of the new max size of the old max size of executable always perform global GCs print one trace line following each garbage collection do not print trace line after scavenger collection print statistics of the maximum memory committed for the heap in name
Handle< String > name() const
FunctionLiteral * function() const
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes number of stack frames inspected by the profiler percentage of ICs that must have type info to allow optimization extra verbose compilation tracing generate extra code(assertions) for debugging") DEFINE_bool(code_comments
V8_INLINE Handle< Primitive > Undefined(Isolate *isolate)
V8_INLINE bool IsString() const
Handle< T > handle(T *t, Isolate *isolate)
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function info
void ShortPrint(FILE *out=stdout)
#define ASSERT_EQ(v1, v2)
PerThreadAssertScopeDebugOnly< HEAP_ALLOCATION_ASSERT, false > DisallowHeapAllocation
bool is_function_scope() const
ZoneList< Declaration * > * declarations()