v8 3.25.30 (node 0.11.13)
V8 is Google's open source JavaScript engine.
runtime-profiler.cc
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "runtime-profiler.h"

#include "assembler.h"
#include "bootstrapper.h"
#include "code-stubs.h"
#include "compilation-cache.h"
#include "execution.h"
#include "full-codegen.h"
#include "global-handles.h"
#include "isolate-inl.h"
#include "mark-compact.h"
#include "platform.h"
#include "scopeinfo.h"

namespace v8 {
namespace internal {

// Number of times a function has to be seen on the stack before it is
// optimized.
static const int kProfilerTicksBeforeOptimization = 2;
// If the function optimization was disabled due to high deoptimization count,
// but the function is hot and has been seen on the stack this number of times,
// then we try to reenable optimization for this function.
static const int kProfilerTicksBeforeReenablingOptimization = 250;
// If a function does not have enough type info (according to
// FLAG_type_info_threshold), but has seen a huge number of ticks,
// optimize it as it is.
static const int kTicksWhenNotEnoughTypeInfo = 100;
// We only have one byte to store the number of ticks.
STATIC_ASSERT(kProfilerTicksBeforeOptimization < 256);
STATIC_ASSERT(kProfilerTicksBeforeReenablingOptimization < 256);
STATIC_ASSERT(kTicksWhenNotEnoughTypeInfo < 256);

// Maximum size in bytes of generated code for a function to allow OSR.
static const int kOSRCodeSizeAllowanceBase =
    100 * FullCodeGenerator::kCodeSizeMultiplier;

static const int kOSRCodeSizeAllowancePerTick =
    4 * FullCodeGenerator::kCodeSizeMultiplier;

// Maximum size in bytes of generated code for a function to be optimized
// the very first time it is seen on the stack.
static const int kMaxSizeEarlyOpt =
    5 * FullCodeGenerator::kCodeSizeMultiplier;

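// Illustrative sketch, not part of the original file: the two OSR
// allowance constants above form a linear budget that OptimizeNow()
// checks before bumping the OSR nesting level. With the values above it
// works out to (100 + 4 * ticks) * FullCodeGenerator::kCodeSizeMultiplier,
// so every profiler tick a still-unoptimized function accumulates buys it
// a little more code-size headroom before OSR is refused as too costly.
static inline int OSRCodeSizeAllowance(int profiler_ticks) {
  return kOSRCodeSizeAllowanceBase +
         profiler_ticks * kOSRCodeSizeAllowancePerTick;
}
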
RuntimeProfiler::RuntimeProfiler(Isolate* isolate)
    : isolate_(isolate),
      any_ic_changed_(false) {
}


static void GetICCounts(Code* shared_code,
                        int* ic_with_type_info_count,
                        int* ic_total_count,
                        int* percentage) {
  *ic_total_count = 0;
  *ic_with_type_info_count = 0;
  Object* raw_info = shared_code->type_feedback_info();
  if (raw_info->IsTypeFeedbackInfo()) {
    TypeFeedbackInfo* info = TypeFeedbackInfo::cast(raw_info);
    *ic_with_type_info_count = info->ic_with_type_info_count();
    *ic_total_count = info->ic_total_count();
  }
  *percentage = *ic_total_count > 0
      ? 100 * *ic_with_type_info_count / *ic_total_count
      : 100;
}


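// Worked example, not part of the original file: for a function whose
// feedback records 3 type-fed ICs out of 4 total, GetICCounts() yields
// 100 * 3 / 4 == 75 percent. Edge case worth noting: a function with no
// ICs at all reports 100%, i.e. "no evidence of instability" is treated
// the same as fully typed.
//
//   int typeinfo, total, percentage;
//   GetICCounts(shared_code, &typeinfo, &total, &percentage);
//   // typeinfo == 3, total == 4  =>  percentage == 75
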
void RuntimeProfiler::Optimize(JSFunction* function, const char* reason) {
  ASSERT(function->IsOptimizable());

  if (FLAG_trace_opt && function->PassesFilter(FLAG_hydrogen_filter)) {
    PrintF("[marking ");
    function->ShortPrint();
    PrintF(" for recompilation, reason: %s", reason);
    if (FLAG_type_info_threshold > 0) {
      int typeinfo, total, percentage;
      GetICCounts(function->shared()->code(), &typeinfo, &total, &percentage);
      PrintF(", ICs with typeinfo: %d/%d (%d%%)", typeinfo, total, percentage);
    }
    PrintF("]\n");
  }


  if (isolate_->concurrent_recompilation_enabled() &&
      !isolate_->bootstrapper()->IsActive()) {
    if (isolate_->concurrent_osr_enabled() &&
        isolate_->optimizing_compiler_thread()->IsQueuedForOSR(function)) {
      // Do not attempt regular recompilation if we already queued this for OSR.
      // TODO(yangguo): This is necessary so that we don't install optimized
      // code on a function that is already optimized, since OSR and regular
      // recompilation race. This goes away as soon as OSR becomes one-shot.
      return;
    }
    ASSERT(!function->IsInOptimizationQueue());
    function->MarkForConcurrentOptimization();
  } else {
    // The next call to the function will trigger optimization.
    function->MarkForOptimization();
  }
}


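// Illustrative note, not part of the original file: with --trace-opt
// (FLAG_trace_opt) enabled, the PrintF calls above produce one line per
// marked function, shaped roughly like this (name and counts hypothetical):
//
//   [marking <JS Function fib> for recompilation, reason: hot and stable,
//    ICs with typeinfo: 4/5 (80%)]
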
void RuntimeProfiler::AttemptOnStackReplacement(JSFunction* function) {
  // See AlwaysFullCompiler (in compiler.cc) comment on why we need
  // Debug::has_break_points().
  if (!FLAG_use_osr ||
      isolate_->DebuggerHasBreakPoints() ||
      function->IsBuiltin()) {
    return;
  }

  SharedFunctionInfo* shared = function->shared();
  // If the code is not optimizable, don't try OSR.
  if (!shared->code()->optimizable()) return;

  // We are not prepared to do OSR for a function that already has an
  // allocated arguments object. The optimized code would bypass it for
  // arguments accesses, which is unsound. Don't try OSR.
  if (shared->uses_arguments()) return;

  // We're using on-stack replacement: patch the unoptimized code so that
  // any back edge in any unoptimized frame will trigger on-stack
  // replacement for that frame.
  if (FLAG_trace_osr) {
    PrintF("[OSR - patching back edges in ");
    function->PrintName();
    PrintF("]\n");
  }

  BackEdgeTable::Patch(isolate_, shared->code());
}


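// Sketch of the patching step, with assumed shapes rather than the actual
// implementation (BackEdgeTable::Patch lives outside this file): back
// edges recorded at the currently allowed loop nesting level get their
// interrupt check rewritten into a call to the OnStackReplacement builtin,
// so merely taking such a back edge in an unoptimized frame enters OSR.
//
//   for (uint32_t i = 0; i < back_edges.length(); i++) {
//     if (static_cast<int>(back_edges.loop_depth(i)) <= loop_nesting_level) {
//       // Rewrite the interrupt check at back_edges.pc(i) to call the
//       // OnStackReplacement builtin.
//     }
//   }
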
void RuntimeProfiler::OptimizeNow() {
  HandleScope scope(isolate_);

  if (isolate_->DebuggerHasBreakPoints()) return;

  DisallowHeapAllocation no_gc;

  // Run through the JavaScript frames and collect them. If we already
  // have a sample of the function, we mark it for optimizations
  // (eagerly or lazily).
  int frame_count = 0;
  int frame_count_limit = FLAG_frame_count;
  for (JavaScriptFrameIterator it(isolate_);
       frame_count++ < frame_count_limit && !it.done();
       it.Advance()) {
    JavaScriptFrame* frame = it.frame();
    JSFunction* function = frame->function();

    SharedFunctionInfo* shared = function->shared();
    Code* shared_code = shared->code();

    if (shared_code->kind() != Code::FUNCTION) continue;
    if (function->IsInOptimizationQueue()) continue;

    if (FLAG_always_osr &&
        shared_code->allow_osr_at_loop_nesting_level() == 0) {
      // Testing mode: always try an OSR compile for every function.
      for (int i = 0; i < Code::kMaxLoopNestingMarker; i++) {
        // TODO(titzer): fix AttemptOnStackReplacement to avoid this dumb loop.
        shared_code->set_allow_osr_at_loop_nesting_level(i);
        AttemptOnStackReplacement(function);
      }
      // Fall through and do a normal optimized compile as well.
    } else if (!frame->is_optimized() &&
               (function->IsMarkedForOptimization() ||
                function->IsMarkedForConcurrentOptimization() ||
                function->IsOptimized())) {
      // Attempt OSR if we are still running unoptimized code even though
      // the function has long been marked or even already been optimized.
      int ticks = shared_code->profiler_ticks();
      int allowance = kOSRCodeSizeAllowanceBase +
                      ticks * kOSRCodeSizeAllowancePerTick;
      if (shared_code->CodeSize() > allowance) {
        if (ticks < 255) shared_code->set_profiler_ticks(ticks + 1);
      } else {
        int nesting = shared_code->allow_osr_at_loop_nesting_level();
        if (nesting < Code::kMaxLoopNestingMarker) {
          int new_nesting = nesting + 1;
          shared_code->set_allow_osr_at_loop_nesting_level(new_nesting);
          AttemptOnStackReplacement(function);
        }
      }
      continue;
    }

    // Only record top-level code on top of the execution stack and
    // avoid optimizing excessively large scripts since top-level code
    // will be executed only once.
    const int kMaxToplevelSourceSize = 10 * 1024;
    if (shared->is_toplevel() &&
        (frame_count > 1 || shared->SourceSize() > kMaxToplevelSourceSize)) {
      continue;
    }

    // Do not record non-optimizable functions.
    if (shared->optimization_disabled()) {
      if (shared->deopt_count() >= FLAG_max_opt_count) {
        // If optimization was disabled due to many deoptimizations,
        // then check if the function is hot and try to reenable optimization.
        int ticks = shared_code->profiler_ticks();
        if (ticks >= kProfilerTicksBeforeReenablingOptimization) {
          shared_code->set_profiler_ticks(0);
          shared->TryReenableOptimization();
        } else {
          shared_code->set_profiler_ticks(ticks + 1);
        }
      }
      continue;
    }
    if (!function->IsOptimizable()) continue;

    int ticks = shared_code->profiler_ticks();

    if (ticks >= kProfilerTicksBeforeOptimization) {
      int typeinfo, total, percentage;
      GetICCounts(shared_code, &typeinfo, &total, &percentage);
      if (percentage >= FLAG_type_info_threshold) {
        // If this particular function hasn't had any ICs patched for enough
        // ticks, optimize it now.
        Optimize(function, "hot and stable");
      } else if (ticks >= kTicksWhenNotEnoughTypeInfo) {
        Optimize(function, "not much type info but very hot");
      } else {
        shared_code->set_profiler_ticks(ticks + 1);
        if (FLAG_trace_opt_verbose) {
          PrintF("[not yet optimizing ");
          function->PrintName();
          PrintF(", not enough type info: %d/%d (%d%%)]\n",
                 typeinfo, total, percentage);
        }
      }
    } else if (!any_ic_changed_ &&
               shared_code->instruction_size() < kMaxSizeEarlyOpt) {
      // If no IC was patched since the last tick and this function is very
      // small, optimistically optimize it now.
      Optimize(function, "small function");
    } else {
      shared_code->set_profiler_ticks(ticks + 1);
    }
  }
  any_ic_changed_ = false;
}
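

// Illustrative summary, not part of the original file: the tick-based
// decision ladder above, restated as a standalone predicate over the
// per-frame values OptimizeNow() reads. FLAG_type_info_threshold is passed
// in as a parameter so the sketch stays self-contained.
static inline bool ShouldMarkForOptimization(int ticks,
                                             int type_info_percentage,
                                             int instruction_size,
                                             bool any_ic_changed,
                                             int type_info_threshold) {
  if (ticks >= kProfilerTicksBeforeOptimization) {
    // Hot and stable: enough ticks and enough type feedback.
    if (type_info_percentage >= type_info_threshold) return true;
    // Very hot: optimize even without much type feedback.
    if (ticks >= kTicksWhenNotEnoughTypeInfo) return true;
    return false;  // Keep ticking until the type feedback fills in.
  }
  // Small and quiet: a tiny function is optimized eagerly as soon as a
  // tick passes with no IC changes at all.
  return !any_ic_changed && instruction_size < kMaxSizeEarlyOpt;
}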


} }  // namespace v8::internal