// Optimization sampler constants.
static const int kSamplerFrameCount = 2;

// Constants for statistical profiler.
static const int kSamplerFrameWeight[kSamplerFrameCount] = { 2, 1 };

static const int kSamplerTicksBetweenThresholdAdjustment = 32;

static const int kSamplerThresholdInit = 3;
static const int kSamplerThresholdMin = 1;
static const int kSamplerThresholdDelta = 1;

static const int kSamplerThresholdSizeFactorInit = 3;

static const int kSizeLimit = 1500;

// Constants for the counter-based profiler.

// Number of times a function has to be seen on the stack before it is
// optimized.
static const int kProfilerTicksBeforeOptimization = 2;
// If optimization was disabled due to a high deoptimization count, but the
// function is hot and has been seen on the stack this many times, we try to
// reenable optimization for it.
static const int kProfilerTicksBeforeReenablingOptimization = 250;
// If a function does not have enough type info (according to
// FLAG_type_info_threshold) but has seen a huge number of ticks,
// optimize it as it is.
static const int kTicksWhenNotEnoughTypeInfo = 100;
// We only have one byte to store the number of ticks.
STATIC_ASSERT(kProfilerTicksBeforeReenablingOptimization < 256);

// Maximum size in bytes of generated code for a function to be optimized
// the very first time it is seen on the stack.
static const int kMaxSizeEarlyOpt = 500;
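
// A worked example of how the sampler constants combine (see OptimizeNow
// below): a function whose source exceeds kSizeLimit characters must
// accumulate a weight of sampler_threshold_ * sampler_threshold_size_factor_
// (initially 3 * 3 == 9) in the sampler window before it is optimized, while
// smaller functions initially need only 3; every
// kSamplerTicksBetweenThresholdAdjustment ticks the threshold decays by
// kSamplerThresholdDelta until it reaches kSamplerThresholdMin.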


Atomic32 RuntimeProfiler::state_ = 0;

// TODO(isolates): Clean up the semaphore when it is no longer required.
static LazySemaphore<0>::type semaphore = LAZY_SEMAPHORE_INITIALIZER;

#ifdef DEBUG
bool RuntimeProfiler::has_been_globally_set_up_ = false;
#endif
bool RuntimeProfiler::enabled_ = false;


RuntimeProfiler::RuntimeProfiler(Isolate* isolate)
    : isolate_(isolate),
      sampler_threshold_(kSamplerThresholdInit),
      sampler_threshold_size_factor_(kSamplerThresholdSizeFactorInit),
      sampler_ticks_until_threshold_adjustment_(
          kSamplerTicksBetweenThresholdAdjustment),
      sampler_window_position_(0),
      any_ic_changed_(false),
      code_generated_(false) {
  ClearSampleBuffer();
}


void RuntimeProfiler::GlobalSetUp() {
  ASSERT(!has_been_globally_set_up_);
  enabled_ = V8::UseCrankshaft() && FLAG_opt;
#ifdef DEBUG
  has_been_globally_set_up_ = true;
#endif
}


static void GetICCounts(JSFunction* function,
                        int* ic_with_type_info_count,
                        int* ic_total_count,
                        int* percentage) {
  *ic_total_count = 0;
  *ic_with_type_info_count = 0;
  Object* raw_info = function->shared()->code()->type_feedback_info();
  if (raw_info->IsTypeFeedbackInfo()) {
    TypeFeedbackInfo* info = TypeFeedbackInfo::cast(raw_info);
    *ic_with_type_info_count = info->ic_with_type_info_count();
    *ic_total_count = info->ic_total_count();
  }
  *percentage = *ic_total_count > 0
      ? 100 * *ic_with_type_info_count / *ic_total_count
      : 100;
}
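
// Example: a function with 3 of 4 ICs carrying type feedback reports
// 100 * 3 / 4 == 75 percent. A function with no ICs at all reports 100
// percent, so it is never held back for type feedback it could not have
// gathered.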


void RuntimeProfiler::Optimize(JSFunction* function, const char* reason) {
  ASSERT(function->IsOptimizable());
  if (FLAG_trace_opt) {
    PrintF("[marking ");
    function->PrintName();
    PrintF(" 0x%" V8PRIxPTR, reinterpret_cast<intptr_t>(function->address()));
    PrintF(" for recompilation, reason: %s", reason);
    if (FLAG_type_info_threshold > 0) {
      int typeinfo, total, percentage;
      GetICCounts(function, &typeinfo, &total, &percentage);
      PrintF(", ICs with typeinfo: %d/%d (%d%%)", typeinfo, total, percentage);
    }
    PrintF("]\n");
  }

  // The next call to the function will trigger optimization.
  function->MarkForLazyRecompilation();
}
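
// With --trace-opt this prints lines such as
//   [marking <name> 0x... for recompilation, reason: hot and stable,
//    ICs with typeinfo: 3/4 (75%)]
// Note that MarkForLazyRecompilation only sets a flag; the actual
// recompilation happens on the function's next invocation.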


void RuntimeProfiler::AttemptOnStackReplacement(JSFunction* function) {
  ASSERT(function->IsMarkedForLazyRecompilation());
  if (!FLAG_use_osr ||
      isolate_->DebuggerHasBreakPoints() ||
      function->IsBuiltin()) {
    return;
  }

  SharedFunctionInfo* shared = function->shared();
  // If the code is not optimizable, don't try OSR.
  if (!shared->code()->optimizable()) return;
  // We are not prepared to do OSR for a function that already has an
  // allocated arguments object.  The optimized code would bypass it.
  if (shared->uses_arguments()) return;

  // Patch the unoptimized code so that any back edge in any unoptimized
  // frame will trigger on-stack replacement for that frame.
  if (FLAG_trace_osr) {
    PrintF("[patching stack checks in ");
    function->PrintName();
    PrintF(" for on-stack replacement]\n");
  }

  // Get the stack check stub code object to match against.
  bool found_code = false;
  Code* stack_check_code = NULL;
  if (FLAG_count_based_interrupts) {
    InterruptStub interrupt_stub;
    found_code = interrupt_stub.FindCodeInCache(&stack_check_code);
  } else {
    StackCheckStub check_stub;
    found_code = check_stub.FindCodeInCache(&stack_check_code);
  }
  if (found_code) {
    Code* replacement_code =
        isolate_->builtins()->builtin(Builtins::kOnStackReplacement);
    Code* unoptimized_code = shared->code();
    Deoptimizer::PatchStackCheckCode(unoptimized_code,
                                     stack_check_code,
                                     replacement_code);
  }
}
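
// Once patched, every back edge in the function's unoptimized frames hits
// the on-stack replacement builtin instead of the plain stack check, which
// is what allows a long-running loop to switch to optimized code without
// first returning to its caller.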


void RuntimeProfiler::ClearSampleBuffer() {
  memset(sampler_window_, 0, sizeof(sampler_window_));
  memset(sampler_window_weight_, 0, sizeof(sampler_window_weight_));
}


int RuntimeProfiler::LookupSample(JSFunction* function) {
  int weight = 0;
  for (int i = 0; i < kSamplerWindowSize; i++) {
    Object* sample = sampler_window_[i];
    if (sample != NULL) {
      if (function == sample) {
        weight += sampler_window_weight_[i];
      }
    }
  }
  return weight;
}
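
// Example: with kSamplerFrameWeight == { 2, 1 }, a function sampled at the
// top of the stack on two consecutive ticks accumulates a weight of 4,
// which already exceeds the initial threshold of 3.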


void RuntimeProfiler::AddSample(JSFunction* function, int weight) {
  ASSERT(IsPowerOf2(kSamplerWindowSize));
  sampler_window_[sampler_window_position_] = function;
  sampler_window_weight_[sampler_window_position_] = weight;
  sampler_window_position_ = (sampler_window_position_ + 1) &
      (kSamplerWindowSize - 1);
}
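
// The sampler window is a fixed-size ring buffer: the bitwise AND used to
// advance the write position only wraps correctly because
// kSamplerWindowSize is a power of two, which the ASSERT above checks.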


void RuntimeProfiler::OptimizeNow() {
  HandleScope scope(isolate_);

  // Run through the JavaScript frames and collect them. If we already
  // have a sample of the function, we mark it for optimization
  // (eagerly or lazily).
  JSFunction* samples[kSamplerFrameCount];
  int sample_count = 0;
  int frame_count = 0;
  int frame_count_limit = FLAG_watch_ic_patching ? FLAG_frame_count
                                                 : kSamplerFrameCount;
  for (JavaScriptFrameIterator it(isolate_);
       frame_count++ < frame_count_limit && !it.done();
       it.Advance()) {
    JavaScriptFrame* frame = it.frame();
    JSFunction* function = JSFunction::cast(frame->function());

    if (!FLAG_watch_ic_patching) {
      // Adjust threshold each time we have processed
      // a certain number of ticks.
      if (sampler_ticks_until_threshold_adjustment_ > 0) {
        sampler_ticks_until_threshold_adjustment_--;
        if (sampler_ticks_until_threshold_adjustment_ <= 0) {
          // If the threshold is not already at the minimum,
          // decrement it and reset the ticks until the next adjustment.
          if (sampler_threshold_ > kSamplerThresholdMin) {
            sampler_threshold_ -= kSamplerThresholdDelta;
            sampler_ticks_until_threshold_adjustment_ =
                kSamplerTicksBetweenThresholdAdjustment;
          }
        }
      }
    }

    SharedFunctionInfo* shared = function->shared();
    Code* shared_code = shared->code();

    if (function->IsMarkedForLazyRecompilation()) {
      // The function is already marked; attempt OSR and raise the loop
      // nesting level at which back edges may trigger it.
      int nesting = shared_code->allow_osr_at_loop_nesting_level();
      if (nesting == 0) AttemptOnStackReplacement(function);
      int new_nesting = Min(nesting + 1, Code::kMaxLoopNestingMarker);
      shared_code->set_allow_osr_at_loop_nesting_level(new_nesting);
    }

    // Only record top-level code on top of the execution stack and
    // avoid optimizing excessively large scripts since top-level code
    // will be executed only once.
    const int kMaxToplevelSourceSize = 10 * 1024;
    if (shared->is_toplevel() &&
        (frame_count > 1 || shared->SourceSize() > kMaxToplevelSourceSize)) {
      continue;
    }

    // Do not record non-optimizable functions.
    if (shared->optimization_disabled()) {
      if (shared->deopt_count() >= Compiler::kDefaultMaxOptCount) {
        // If optimization was disabled due to many deoptimizations,
        // check if the function is hot and try to reenable optimization.
        int ticks = shared_code->profiler_ticks();
        if (ticks >= kProfilerTicksBeforeReenablingOptimization) {
          shared_code->set_profiler_ticks(0);
          shared->TryReenableOptimization();
        } else {
          shared_code->set_profiler_ticks(ticks + 1);
        }
      }
      continue;
    }
    if (!function->IsOptimizable()) continue;

    if (FLAG_watch_ic_patching) {
      int ticks = shared_code->profiler_ticks();

      if (ticks >= kProfilerTicksBeforeOptimization) {
        int typeinfo, total, percentage;
        GetICCounts(function, &typeinfo, &total, &percentage);
        if (percentage >= FLAG_type_info_threshold) {
          // If this particular function hasn't had any ICs patched for
          // enough ticks, optimize it now.
          Optimize(function, "hot and stable");
        } else if (ticks >= kTicksWhenNotEnoughTypeInfo) {
          Optimize(function, "not much type info but very hot");
        } else {
          shared_code->set_profiler_ticks(ticks + 1);
          if (FLAG_trace_opt_verbose) {
            PrintF("[not yet optimizing ");
            function->PrintName();
            PrintF(", not enough type info: %d/%d (%d%%)]\n",
                   typeinfo, total, percentage);
          }
        }
      } else if (!any_ic_changed_ &&
                 shared_code->instruction_size() < kMaxSizeEarlyOpt) {
        // If no IC was patched since the last tick and this function is
        // very small, optimistically optimize it now.
        Optimize(function, "small function");
      } else {
        shared_code->set_profiler_ticks(ticks + 1);
      }
    } else {  // !FLAG_watch_ic_patching
      samples[sample_count++] = function;

      int function_size = function->shared()->SourceSize();
      int threshold_size_factor = (function_size > kSizeLimit)
          ? sampler_threshold_size_factor_
          : 1;
      int threshold = sampler_threshold_ * threshold_size_factor;

      if (LookupSample(function) >= threshold) {
        Optimize(function, "sampler window lookup");
      }
    }
  }
  if (FLAG_watch_ic_patching) {
    any_ic_changed_ = false;
  } else {  // !FLAG_watch_ic_patching
    // Add the collected functions as samples. It's important not to do
    // this as part of collecting them because this will interfere with
    // the sample lookup in case of recursive functions.
    for (int i = 0; i < sample_count; i++) {
      AddSample(samples[i], kSamplerFrameWeight[i]);
    }
  }
}
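
// Summary of the counter-based policy above: a function is optimized once
// it has seen kProfilerTicksBeforeOptimization ticks and enough type
// feedback ("hot and stable"), after kTicksWhenNotEnoughTypeInfo ticks
// regardless of feedback ("not much type info but very hot"), or right away
// if its code is smaller than kMaxSizeEarlyOpt and no IC has changed since
// the last tick ("small function").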


void RuntimeProfiler::NotifyTick() {
  if (FLAG_count_based_interrupts) return;
  isolate_->stack_guard()->RequestRuntimeProfilerTick();
}


void RuntimeProfiler::SetUp() {
  ASSERT(has_been_globally_set_up_);
  if (!FLAG_watch_ic_patching) {
    ClearSampleBuffer();
  }
  // If the ticker hasn't already started, make sure to do so to get
  // the ticks for the runtime profiler.
  if (IsEnabled()) isolate_->logger()->EnsureTickerStarted();
}


void RuntimeProfiler::Reset() {
  if (!FLAG_watch_ic_patching) {
    sampler_threshold_ = kSamplerThresholdInit;
    sampler_threshold_size_factor_ = kSamplerThresholdSizeFactorInit;
    sampler_ticks_until_threshold_adjustment_ =
        kSamplerTicksBetweenThresholdAdjustment;
  }
}


void RuntimeProfiler::TearDown() {
  // Nothing to do.
}


int RuntimeProfiler::SamplerWindowSize() {
  return kSamplerWindowSize;
}


// Update the pointers in the sampler window after a scavenge GC.
void RuntimeProfiler::UpdateSamplesAfterScavenge() {
  for (int i = 0; i < kSamplerWindowSize; i++) {
    Object* function = sampler_window_[i];
    if (function != NULL && isolate_->heap()->InNewSpace(function)) {
      MapWord map_word = HeapObject::cast(function)->map_word();
      if (map_word.IsForwardingAddress()) {
        sampler_window_[i] = map_word.ToForwardingAddress();
      } else {
        sampler_window_[i] = NULL;
      }
    }
  }
}
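
// During a scavenge, surviving objects are evacuated and leave a forwarding
// address in their old map word; window entries whose map word holds no
// forwarding address did not survive the collection and are dropped.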


void RuntimeProfiler::HandleWakeUp(Isolate* isolate) {
  // The profiler thread must still be waiting.
  ASSERT(NoBarrier_Load(&state_) >= 0);
  // In IsolateEnteredJS we have to signal the profiler thread if new
  // threads enter JS code.
  semaphore.Pointer()->Signal();
}


bool RuntimeProfiler::IsSomeIsolateInJS() {
  return NoBarrier_Load(&state_) > 0;
}


bool RuntimeProfiler::WaitForSomeIsolateToEnterJS() {
  Atomic32 old_state = NoBarrier_CompareAndSwap(&state_, 0, -1);
  ASSERT(old_state >= -1);
  if (old_state != 0) return false;
  semaphore.Pointer()->Wait();
  return true;
}


void RuntimeProfiler::StopRuntimeProfilerThreadBeforeShutdown(Thread* thread) {
  // Do a fake increment. If the profiler is waiting on the semaphore, the
  // returned state is 0, which can be left as an initial state in case
  // profiling is restarted later. If the profiler is not waiting, the
  // increment prevents it from waiting, but has to be undone after the
  // profiler is stopped.
  Atomic32 new_state = NoBarrier_AtomicIncrement(&state_, 1);
  ASSERT(new_state >= 0);
  if (new_state == 0) {
    // The profiler thread is waiting. Wake it up. It must check for a stop
    // condition before attempting to wait again.
    semaphore.Pointer()->Signal();
  }
  thread->Join();
  // The profiler thread is now stopped. Undo the increment in case it was
  // not waiting.
  if (new_state != 0) {
    NoBarrier_AtomicIncrement(&state_, -1);
  }
}
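
// Reading of the synchronization protocol above: state_ counts the isolates
// currently executing JavaScript. The profiler thread parks itself by
// compare-and-swapping 0 -> -1 and waiting on the semaphore, and
// HandleWakeUp signals it as soon as some isolate enters JS again.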


void RuntimeProfiler::RemoveDeadSamples() {
  for (int i = 0; i < kSamplerWindowSize; i++) {
    Object* function = sampler_window_[i];
    if (function != NULL &&
        !Marking::MarkBitFrom(HeapObject::cast(function)).Get()) {
      sampler_window_[i] = NULL;
    }
  }
}
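
// Called after marking: a cleared mark bit means the collector never
// reached the function, so the stale pointer must not be kept alive in the
// sampler window.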


void RuntimeProfiler::UpdateSamplesAfterCompact(ObjectVisitor* visitor) {
  for (int i = 0; i < kSamplerWindowSize; i++) {
    visitor->VisitPointer(&sampler_window_[i]);
  }
}


bool RuntimeProfilerRateLimiter::SuspendIfNecessary() {
  if (!RuntimeProfiler::IsSomeIsolateInJS()) {
    return RuntimeProfiler::WaitForSomeIsolateToEnterJS();
  }
  return false;
}
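
// The rate limiter keeps the profiler thread from spinning while no isolate
// executes JavaScript: if nothing is in JS it blocks until some isolate
// enters JS and returns true so the caller restarts its tick loop.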