// Number of table generations each sub-cache keeps before entries age
// out. These values are passed as the 'generations' constructor
// argument of the corresponding CompilationSubCache members (see the
// CompilationCache constructor: script_, eval_global_,
// eval_contextual_, reg_exp_).
41 static const int kScriptGenerations = 5;
42 static const int kEvalGlobalGenerations = 2;
43 static const int kEvalContextualGenerations = 2;
44 static const int kRegExpGenerations = 2;
// Initial capacity used by AllocateTable when a generation's table is
// still undefined and must be created on first use.
47 static const int kInitialCacheSize = 64;
50 CompilationCache::CompilationCache(Isolate* isolate)
52 script_(isolate, kScriptGenerations),
53 eval_global_(isolate, kEvalGlobalGenerations),
54 eval_contextual_(isolate, kEvalContextualGenerations),
55 reg_exp_(isolate, kRegExpGenerations),
57 CompilationSubCache* subcaches[kSubCacheCount] =
58 {&script_, &eval_global_, &eval_contextual_, &reg_exp_};
59 for (
int i = 0; i < kSubCacheCount; ++i) {
60 subcaches_[i] = subcaches[i];
// Trivial destructor: the sub-caches are value members torn down
// automatically, and the cache tables they point at are heap objects
// whose lifetime is not managed here.
65 CompilationCache::~CompilationCache() {}
68 static Handle<CompilationCacheTable> AllocateTable(Isolate* isolate,
int size) {
71 CompilationCacheTable);
76 ASSERT(generation < generations_);
78 if (tables_[generation]->IsUndefined()) {
79 result = AllocateTable(
isolate(), kInitialCacheSize);
91 for (
int i = generations_ - 1; i > 0; i--) {
92 tables_[i] = tables_[i - 1];
102 for (
int i = 0; i < generations_; i++) {
103 if (tables_[i] != undefined) {
111 v->VisitPointers(&tables_[0], &tables_[generations_]);
126 table->Remove(*function_info);
135 script_histogram_(
NULL),
136 script_histogram_initialized_(
false) { }
142 bool CompilationCacheScript::HasOrigin(
152 return script->name()->IsUndefined();
155 if (line_offset != script->line_offset()->value())
return false;
156 if (column_offset != script->column_offset()->value())
return false;
158 if (!name->IsString() || !script->name()->IsString())
return false;
180 for (generation = 0; generation <
generations(); generation++) {
183 if (probe->IsSharedFunctionInfo()) {
188 if (HasOrigin(function_info, name, line_offset, column_offset)) {
189 result = *function_info;
196 if (!script_histogram_initialized_) {
201 kScriptGenerations + 1);
202 script_histogram_initialized_ =
true;
205 if (script_histogram_ !=
NULL) {
213 if (result !=
NULL) {
216 ASSERT(HasOrigin(shared, name, line_offset, column_offset));
219 if (generation != 0)
Put(source, context, shared);
229 MaybeObject* CompilationCacheScript::TryTablePut(
234 return table->Put(*source, *context, *function_info);
243 TryTablePut(source, context, function_info),
244 CompilationCacheTable);
260 int scope_position) {
267 for (generation = 0; generation <
generations(); generation++) {
269 result = table->LookupEval(
270 *source, *context, language_mode, scope_position);
271 if (result->IsSharedFunctionInfo()) {
276 if (result->IsSharedFunctionInfo()) {
279 if (generation != 0) {
280 Put(source, context, function_info, scope_position);
283 return function_info;
291 MaybeObject* CompilationCacheEval::TryTablePut(
295 int scope_position) {
297 return table->PutEval(*source, *context, *function_info, scope_position);
305 int scope_position) {
308 source, context, function_info, scope_position),
309 CompilationCacheTable);
316 int scope_position) {
318 SetFirstTable(TablePut(source, context, function_info, scope_position));
330 for (generation = 0; generation <
generations(); generation++) {
332 result = table->LookupRegExp(*source, flags);
333 if (result->IsFixedArray()) {
338 if (result->IsFixedArray()) {
340 if (generation != 0) {
341 Put(source, flags, data);
352 MaybeObject* CompilationCacheRegExp::TryTablePut(
357 return table->PutRegExp(*source, flags, *data);
363 JSRegExp::Flags flags,
366 TryTablePut(source, flags, data),
367 CompilationCacheTable);
380 if (!IsEnabled())
return;
382 eval_global_.
Remove(function_info);
383 eval_contextual_.
Remove(function_info);
384 script_.
Remove(function_info);
398 return script_.
Lookup(source, name, line_offset, column_offset, context);
407 int scope_position) {
414 result = eval_global_.
Lookup(
415 source, context, language_mode, scope_position);
417 ASSERT(scope_position != RelocInfo::kNoPosition);
418 result = eval_contextual_.
Lookup(
419 source, context, language_mode, scope_position);
431 return reg_exp_.
Lookup(source, flags);
442 script_.
Put(source, context, function_info);
450 int scope_position) {
457 eval_global_.
Put(source, context, function_info, scope_position);
459 ASSERT(scope_position != RelocInfo::kNoPosition);
460 eval_contextual_.
Put(source, context, function_info, scope_position);
473 reg_exp_.
Put(source, flags, data);
478 for (
int i = 0; i < kSubCacheCount; i++) {
479 subcaches_[i]->
Clear();
485 for (
int i = 0; i < kSubCacheCount; i++) {
492 for (
int i = 0; i < kSubCacheCount; i++) {
499 for (
int i = 0; i < kSubCacheCount; i++) {
500 subcaches_[i]->
Age();
void Put(Handle< String > source, Handle< Context > context, Handle< SharedFunctionInfo > function_info, int scope_position)
void Put(Handle< String > source, Handle< Context > context, Handle< SharedFunctionInfo > function_info)
static String * cast(Object *obj)
StatsTable * stats_table()
static CompilationCacheTable * cast(Object *obj)
static Handle< T > cast(Handle< S > that)
Handle< FixedArray > LookupRegExp(Handle< String > source, JSRegExp::Flags flags)
#define ASSERT(condition)
void PutEval(Handle< String > source, Handle< Context > context, bool is_global, Handle< SharedFunctionInfo > function_info, int scope_position)
static Script * cast(Object *obj)
static SharedFunctionInfo * cast(Object *obj)
void SetFirstTable(Handle< CompilationCacheTable > value)
Handle< CompilationCacheTable > GetTable(int generation)
void PutScript(Handle< String > source, Handle< Context > context, Handle< SharedFunctionInfo > function_info)
bool Equals(String *other)
Handle< SharedFunctionInfo > LookupEval(Handle< String > source, Handle< Context > context, bool is_global, LanguageMode language_mode, int scope_position)
void Iterate(ObjectVisitor *v)
#define CALL_HEAP_FUNCTION(ISOLATE, FUNCTION_CALL, TYPE)
void MarkCompactPrologue()
Handle< SharedFunctionInfo > Lookup(Handle< String > source, Handle< Object > name, int line_offset, int column_offset, Handle< Context > context)
void Iterate(ObjectVisitor *v)
activate correct semantics for inheriting readonliness false
void PutRegExp(Handle< String > source, JSRegExp::Flags flags, Handle< FixedArray > data)
Handle< FixedArray > Lookup(Handle< String > source, JSRegExp::Flags flags)
Handle< CompilationCacheTable > GetFirstTable()
static Handle< T > null()
void MemsetPointer(T **dest, U *value, int counter)
Handle< SharedFunctionInfo > LookupScript(Handle< String > source, Handle< Object > name, int line_offset, int column_offset, Handle< Context > context)
void IterateFunctions(ObjectVisitor *v)
static FixedArray * cast(Object *obj)
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 enable use of VFP2 instructions if available enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of MIPS FPU instructions if expose natives in global object expose gc extension number of stack frames to capture disable builtin 
natives files print a stack trace if an assertion failure occurs use random jit cookie to mask large constants trace lazy optimization use adaptive optimizations prepare for turning on always opt minimum length for automatic enable preparsing maximum number of optimization attempts before giving up cache prototype transitions automatically set the debug break flag when debugger commands are in the queue always cause a debug break before aborting maximum length of function source code printed in a stack trace max size of the new generation(in kBytes)") DEFINE_int(max_old_space_size
CompilationCacheScript(Isolate *isolate, int generations)
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 enable use of VFP2 instructions if available enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of MIPS FPU instructions if NULL
void Remove(Handle< SharedFunctionInfo > function_info)
void * CreateHistogram(const char *name, int min, int max, size_t buckets)
Handle< SharedFunctionInfo > Lookup(Handle< String > source, Handle< Context > context, LanguageMode language_mode, int scope_position)
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 enable use of VFP2 instructions if available enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of MIPS FPU instructions if expose natives in global object expose gc extension number of stack frames to capture disable builtin 
natives files print a stack trace if an assertion failure occurs use random jit cookie to mask large constants trace lazy optimization use adaptive optimizations prepare for turning on always opt minimum length for automatic enable preparsing maximum number of optimization attempts before giving up cache prototype transitions automatically set the debug break flag when debugger commands are in the queue always cause a debug break before aborting maximum length of function source code printed in a stack trace max size of the new max size of the old max size of executable always perform global GCs print one trace line following each garbage collection do not print trace line after scavenger collection print more details following each garbage collection print amount of external allocated memory after each time it is adjusted flush code that we expect not to use again before full gc do incremental marking steps track object counts and memory usage use caching Perform compaction on every full GC Never perform compaction on full GC testing only Compact code space on full incremental collections Default seed for initializing random allows verbose printing trace parsing and preparsing Check icache flushes in ARM and MIPS simulator Stack alingment in bytes in print stack trace when throwing exceptions randomize hashes to avoid predictable hash Fixed seed to use to hash property activate a timer that switches between V8 threads testing_bool_flag float flag Seed used for threading test randomness A filename with extra code to be included in the Print usage including flags
void AddHistogramSample(void *histogram, int sample)
void Remove(Handle< SharedFunctionInfo > function_info)
void IterateFunctions(ObjectVisitor *v)
static MUST_USE_RESULT MaybeObject * Allocate(int at_least_space_for, MinimumCapacity capacity_option=USE_DEFAULT_MINIMUM_CAPACITY, PretenureFlag pretenure=NOT_TENURED)
void Put(Handle< String > source, JSRegExp::Flags flags, Handle< FixedArray > data)