static const int kSamplerFrameCount = 2;

// Constants for the legacy statistical (sample-window) profiler.
static const int kSamplerFrameWeight[kSamplerFrameCount] = { 2, 1 };

static const int kSamplerTicksBetweenThresholdAdjustment = 32;

static const int kSamplerThresholdInit = 3;
static const int kSamplerThresholdMin = 1;
static const int kSamplerThresholdDelta = 1;

static const int kSamplerThresholdSizeFactorInit = 3;

// Functions whose source exceeds this size get a higher sampler threshold.
static const int kSizeLimit = 1500;

// Constants for the counter-based profiler.
// Ticks a function has to be seen on the stack before it is optimized.
static const int kProfilerTicksBeforeOptimization = 2;
// Ticks before retrying a function whose optimization was disabled by deopts.
static const int kProfilerTicksBeforeReenablingOptimization = 250;
// Ticks after which a very hot function is optimized despite sparse type info.
static const int kTicksWhenNotEnoughTypeInfo = 100;
// Only one byte is available to store the tick count.
STATIC_ASSERT(kProfilerTicksBeforeReenablingOptimization < 256);

// Maximum size in bytes of generated code for a function to be optimized
// the very first time it is seen on the stack.
static const int kMaxSizeEarlyOpt =
    5 * FullCodeGenerator::kBackEdgeDistanceUnit;
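
// Annotation: the constants above serve two different strategies. The
// kSampler* values drive the legacy sample-window profiler (used when
// FLAG_watch_ic_patching is off), while the kProfilerTicks* thresholds and
// kMaxSizeEarlyOpt drive the counter-based heuristics in
// RuntimeProfiler::OptimizeNow().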

Atomic32 RuntimeProfiler::state_ = 0;

// TODO(isolates): Clean up the semaphore when it is no longer required.
static LazySemaphore<0>::type semaphore = LAZY_SEMAPHORE_INITIALIZER;

bool RuntimeProfiler::has_been_globally_set_up_ = false;
bool RuntimeProfiler::enabled_ = false;

RuntimeProfiler::RuntimeProfiler(Isolate* isolate)
    : isolate_(isolate),
      sampler_threshold_(kSamplerThresholdInit),
      sampler_threshold_size_factor_(kSamplerThresholdSizeFactorInit),
      sampler_ticks_until_threshold_adjustment_(
          kSamplerTicksBetweenThresholdAdjustment),
      sampler_window_position_(0),
      any_ic_changed_(false),
      code_generated_(false) {
  ClearSampleBuffer();
}

void RuntimeProfiler::GlobalSetUp() {
  ASSERT(!has_been_globally_set_up_);
  enabled_ = V8::UseCrankshaft() && FLAG_opt;
  has_been_globally_set_up_ = true;
}

static void GetICCounts(JSFunction* function,
                        int* ic_with_type_info_count,
                        int* ic_total_count,
                        int* percentage) {
  *ic_total_count = 0;
  *ic_with_type_info_count = 0;
  Object* raw_info = function->shared()->code()->type_feedback_info();
  if (raw_info->IsTypeFeedbackInfo()) {
    TypeFeedbackInfo* info = TypeFeedbackInfo::cast(raw_info);
    *ic_with_type_info_count = info->ic_with_type_info_count();
    *ic_total_count = info->ic_total_count();
  }
  *percentage = *ic_total_count > 0
      ? 100 * *ic_with_type_info_count / *ic_total_count
      : 100;
}
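
// Worked example: with 3 of 4 ICs carrying type information GetICCounts()
// reports 75%, which OptimizeNow() compares against FLAG_type_info_threshold
// before treating the function as "hot and stable".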

void RuntimeProfiler::Optimize(JSFunction* function, const char* reason) {
  ASSERT(function->IsOptimizable());
  if (FLAG_trace_opt) {
    PrintF("[marking ");
    function->PrintName();
    PrintF(" 0x%" V8PRIxPTR, reinterpret_cast<intptr_t>(function->address()));
    PrintF(" for recompilation, reason: %s", reason);
    if (FLAG_type_info_threshold > 0) {
      int typeinfo, total, percentage;
      GetICCounts(function, &typeinfo, &total, &percentage);
      PrintF(", ICs with typeinfo: %d/%d (%d%%)", typeinfo, total, percentage);
    }
    PrintF("]\n");
  }

  if (FLAG_parallel_recompilation) {
    function->MarkForParallelRecompilation();
  } else {
    // The next call to the function will trigger optimization.
    function->MarkForLazyRecompilation();
  }
}
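
// Annotation: marking only sets a flag on the function; the actual
// recompilation happens later, either when the function is next called
// (lazy recompilation) or on the dedicated recompilation thread when
// FLAG_parallel_recompilation is on.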

void RuntimeProfiler::AttemptOnStackReplacement(JSFunction* function) {
  ASSERT(function->IsMarkedForLazyRecompilation() ||
         function->IsMarkedForParallelRecompilation());
  if (!FLAG_use_osr ||
      isolate_->DebuggerHasBreakPoints() ||
      function->IsBuiltin()) {
    return;
  }

  SharedFunctionInfo* shared = function->shared();
  // If the code is not optimizable, don't try OSR.
  if (!shared->code()->optimizable()) return;

  // We are not prepared to do OSR for a function that already has an
  // allocated arguments object.  The optimized code would bypass it for
  // arguments accesses, which is unsound.  Don't try OSR.
  if (shared->uses_arguments()) return;

  // Patch the unoptimized code so that any back edge in any unoptimized
  // frame will trigger on-stack replacement for that frame.
  if (FLAG_trace_osr) {
    PrintF("[patching stack checks in ");
    function->PrintName();
    PrintF(" for on-stack replacement]\n");
  }

  // Get the stack check stub code object to match against.
  bool found_code = false;
  Code* stack_check_code = NULL;
  if (FLAG_count_based_interrupts) {
    InterruptStub interrupt_stub;
    found_code = interrupt_stub.FindCodeInCache(&stack_check_code);
  } else {
    StackCheckStub check_stub;
    found_code = check_stub.FindCodeInCache(&stack_check_code);
  }
  if (found_code) {
    Code* replacement_code =
        isolate_->builtins()->builtin(Builtins::kOnStackReplacement);
    Code* unoptimized_code = shared->code();
    Deoptimizer::PatchStackCheckCode(unoptimized_code,
                                     stack_check_code,
                                     replacement_code);
  }
}
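
// Annotation: on-stack replacement works by patching the unoptimized code:
// the stack check (or interrupt) calls at loop back edges are redirected to
// the OnStackReplacement builtin, so a function stuck in a long-running loop
// can switch to optimized code without returning first.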

void RuntimeProfiler::ClearSampleBuffer() {
  memset(sampler_window_, 0, sizeof(sampler_window_));
  memset(sampler_window_weight_, 0, sizeof(sampler_window_weight_));
}

int RuntimeProfiler::LookupSample(JSFunction* function) {
  int weight = 0;
  for (int i = 0; i < kSamplerWindowSize; i++) {
    Object* sample = sampler_window_[i];
    if (sample != NULL) {
      bool fits = FLAG_lookup_sample_by_shared
          ? (function->shared() == JSFunction::cast(sample)->shared())
          : (function == JSFunction::cast(sample));
      if (fits) {
        weight += sampler_window_weight_[i];
      }
    }
  }
  return weight;
}

void RuntimeProfiler::AddSample(JSFunction* function, int weight) {
  ASSERT(IsPowerOf2(kSamplerWindowSize));
  sampler_window_[sampler_window_position_] = function;
  sampler_window_weight_[sampler_window_position_] = weight;
  sampler_window_position_ = (sampler_window_position_ + 1) &
      (kSamplerWindowSize - 1);
}
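
// Annotation: the sampler window is a fixed-size ring buffer; advancing the
// position with "& (kSamplerWindowSize - 1)" relies on the window size being
// a power of two, e.g. with a 16-entry window position 15 wraps back to 0.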

void RuntimeProfiler::OptimizeNow() {
  HandleScope scope;

  // Run through the JavaScript frames and collect them. If we already
  // have a sample of the function, we mark it for optimizations
  // (eagerly or lazily).
  JSFunction* samples[kSamplerFrameCount];
  int sample_count = 0;
  int frame_count = 0;
  int frame_count_limit = FLAG_watch_ic_patching ? FLAG_frame_count
                                                 : kSamplerFrameCount;
  for (JavaScriptFrameIterator it(isolate_);
       frame_count++ < frame_count_limit && !it.done();
       it.Advance()) {
    JavaScriptFrame* frame = it.frame();
    JSFunction* function = JSFunction::cast(frame->function());

    if (!FLAG_watch_ic_patching) {
      // Adjust threshold each time we have processed
      // a certain number of ticks.
      if (sampler_ticks_until_threshold_adjustment_ > 0) {
        sampler_ticks_until_threshold_adjustment_--;
        if (sampler_ticks_until_threshold_adjustment_ <= 0) {
          // If the threshold is not already at the minimum,
          // modify it and reset the ticks until next adjustment.
          if (sampler_threshold_ > kSamplerThresholdMin) {
            sampler_threshold_ -= kSamplerThresholdDelta;
            sampler_ticks_until_threshold_adjustment_ =
                kSamplerTicksBetweenThresholdAdjustment;
          }
        }
      }
    }

    SharedFunctionInfo* shared = function->shared();
    Code* shared_code = shared->code();

    if (shared_code->kind() != Code::FUNCTION) continue;

    if (function->IsMarkedForLazyRecompilation() ||
        function->IsMarkedForParallelRecompilation()) {
      // Raise the loop nesting level at which on-stack replacement is
      // allowed, and attempt it if we are still running unoptimized code.
      int nesting = shared_code->allow_osr_at_loop_nesting_level();
      if (nesting == 0) AttemptOnStackReplacement(function);
      int new_nesting = Min(nesting + 1, Code::kMaxLoopNestingMarker);
      shared_code->set_allow_osr_at_loop_nesting_level(new_nesting);
    }

    // Only record top-level code on top of the execution stack and
    // avoid optimizing excessively large scripts since top-level code
    // will be executed only once.
    const int kMaxToplevelSourceSize = 10 * 1024;
    if (shared->is_toplevel() &&
        (frame_count > 1 || shared->SourceSize() > kMaxToplevelSourceSize)) {
      continue;
    }

    // Do not record non-optimizable functions.
    if (shared->optimization_disabled()) {
      // If optimization was disabled due to many deoptimizations, check
      // whether the function has become hot enough to try reenabling it.
      int ticks = shared_code->profiler_ticks();
      if (ticks >= kProfilerTicksBeforeReenablingOptimization) {
        shared_code->set_profiler_ticks(0);
        shared->TryReenableOptimization();
      } else {
        shared_code->set_profiler_ticks(ticks + 1);
      }
      continue;
    }
    if (!function->IsOptimizable()) continue;

    if (FLAG_watch_ic_patching) {
      int ticks = shared_code->profiler_ticks();

      if (ticks >= kProfilerTicksBeforeOptimization) {
        int typeinfo, total, percentage;
        GetICCounts(function, &typeinfo, &total, &percentage);
        if (percentage >= FLAG_type_info_threshold) {
          // The function is hot and has enough type info: optimize it now.
          Optimize(function, "hot and stable");
        } else if (ticks >= kTicksWhenNotEnoughTypeInfo) {
          Optimize(function, "not much type info but very hot");
        } else {
          shared_code->set_profiler_ticks(ticks + 1);
          if (FLAG_trace_opt_verbose) {
            PrintF("[not yet optimizing ");
            function->PrintName();
            PrintF(", not enough type info: %d/%d (%d%%)]\n",
                   typeinfo, total, percentage);
          }
        }
      } else if (!any_ic_changed_ &&
                 shared_code->instruction_size() < kMaxSizeEarlyOpt) {
        // If no IC was patched since the last tick and this function is very
        // small, optimistically optimize it now.
        Optimize(function, "small function");
      } else {
        shared_code->set_profiler_ticks(ticks + 1);
      }
    } else {  // !FLAG_watch_ic_patching
      samples[sample_count++] = function;

      int function_size = function->shared()->SourceSize();
      int threshold_size_factor = (function_size > kSizeLimit)
          ? sampler_threshold_size_factor_
          : 1;

      int threshold = sampler_threshold_ * threshold_size_factor;

      if (LookupSample(function) >= threshold) {
        Optimize(function, "sampler window lookup");
      }
    }
  }
  if (FLAG_watch_ic_patching) {
    any_ic_changed_ = false;
  } else {  // !FLAG_watch_ic_patching
    // Add the collected functions as samples. It's important not to do
    // this as part of collecting them because this will interfere with
    // the sample lookup in case of recursive functions.
    for (int i = 0; i < sample_count; i++) {
      AddSample(samples[i], kSamplerFrameWeight[i]);
    }
  }
}
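
// Annotation: a function that is seen on the stack but not yet optimized
// simply has its tick count on the unoptimized Code object bumped; once the
// count crosses one of the thresholds above (and enough ICs carry type
// information), the function is marked for recompilation.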

void RuntimeProfiler::NotifyTick() {
  if (FLAG_count_based_interrupts) return;
  isolate_->stack_guard()->RequestRuntimeProfilerTick();
}

void RuntimeProfiler::SetUp() {
  ASSERT(has_been_globally_set_up_);
  if (!FLAG_watch_ic_patching) {
    ClearSampleBuffer();
  }
}

void RuntimeProfiler::Reset() {
  if (!FLAG_watch_ic_patching) {
    sampler_threshold_ = kSamplerThresholdInit;
    sampler_threshold_size_factor_ = kSamplerThresholdSizeFactorInit;
    sampler_ticks_until_threshold_adjustment_ =
        kSamplerTicksBetweenThresholdAdjustment;
  }
}

int RuntimeProfiler::SamplerWindowSize() {
  return kSamplerWindowSize;
}
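
// Annotation: sampler_window_ holds raw object pointers, so the GC has to be
// told about them explicitly: UpdateSamplesAfterScavenge() follows forwarding
// pointers, RemoveDeadSamples() drops entries whose mark bit is clear, and
// UpdateSamplesAfterCompact() lets the GC visit the slots directly.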

void RuntimeProfiler::UpdateSamplesAfterScavenge() {
  for (int i = 0; i < kSamplerWindowSize; i++) {
    Object* function = sampler_window_[i];
    if (function != NULL && isolate_->heap()->InNewSpace(function)) {
      MapWord map_word = HeapObject::cast(function)->map_word();
      if (map_word.IsForwardingAddress()) {
        sampler_window_[i] = map_word.ToForwardingAddress();
      } else {
        sampler_window_[i] = NULL;
      }
    }
  }
}
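
// Annotation: the static members below coordinate the profiler thread with
// the isolates. Roughly, state_ counts isolates currently executing
// JavaScript (-1 while the profiler thread is parked on the semaphore), and
// the semaphore is signalled whenever an isolate re-enters JavaScript.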

void RuntimeProfiler::HandleWakeUp(Isolate* isolate) {
  // The profiler thread must still be waiting.
  ASSERT(NoBarrier_Load(&state_) >= 0);
  // Increment again to get the right count of active isolates.
  NoBarrier_AtomicIncrement(&state_, 1);
  semaphore.Pointer()->Signal();
}

bool RuntimeProfiler::IsSomeIsolateInJS() {
  return NoBarrier_Load(&state_) > 0;
}


bool RuntimeProfiler::WaitForSomeIsolateToEnterJS() {
  Atomic32 old_state = NoBarrier_CompareAndSwap(&state_, 0, -1);
  ASSERT(old_state >= -1);
  if (old_state != 0) return false;
  semaphore.Pointer()->Wait();
  return true;
}

void RuntimeProfiler::StopRuntimeProfilerThreadBeforeShutdown(Thread* thread) {
  // Do a fake increment so the profiler thread does not start waiting.
  Atomic32 new_state = NoBarrier_AtomicIncrement(&state_, 1);
  ASSERT(new_state >= 0);
  if (new_state == 0) {
    // The profiler thread was waiting on the semaphore; wake it up.
    semaphore.Pointer()->Signal();
  }
  thread->Join();
  // Undo the fake increment in case the profiler thread was not waiting.
  if (new_state != 0) {
    NoBarrier_AtomicIncrement(&state_, -1);
  }
}

void RuntimeProfiler::RemoveDeadSamples() {
  for (int i = 0; i < kSamplerWindowSize; i++) {
    Object* function = sampler_window_[i];
    if (function != NULL &&
        !Marking::MarkBitFrom(HeapObject::cast(function)).Get()) {
      sampler_window_[i] = NULL;
    }
  }
}

void RuntimeProfiler::UpdateSamplesAfterCompact(ObjectVisitor* visitor) {
  for (int i = 0; i < kSamplerWindowSize; i++) {
    visitor->VisitPointer(&sampler_window_[i]);
  }
}

bool RuntimeProfilerRateLimiter::SuspendIfNecessary() {
  if (!RuntimeProfiler::IsSomeIsolateInJS()) {
    return RuntimeProfiler::WaitForSomeIsolateToEnterJS();
  }
  return false;
}