V8 3.11.10 (node 0.8.26)
V8 is Google's open source JavaScript engine
log.cc
1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include <stdarg.h>
29 
30 #include "v8.h"
31 
32 #include "bootstrapper.h"
33 #include "code-stubs.h"
34 #include "deoptimizer.h"
35 #include "global-handles.h"
36 #include "log.h"
37 #include "macro-assembler.h"
38 #include "platform.h"
39 #include "runtime-profiler.h"
40 #include "serialize.h"
41 #include "string-stream.h"
42 #include "vm-state-inl.h"
43 
44 namespace v8 {
45 namespace internal {
46 
47 //
48 // Sliding state window. Updates counters to keep track of the last
49 // window of kBufferSize states. This is useful to track where we
50 // spent our time.
51 //
52 class SlidingStateWindow {
53  public:
54  explicit SlidingStateWindow(Isolate* isolate);
55  ~SlidingStateWindow();
56  void AddState(StateTag state);
57 
58  private:
59  static const int kBufferSize = 256;
60  Counters* counters_;
61  int current_index_;
62  bool is_full_;
63  byte buffer_[kBufferSize];
64 
65 
66  void IncrementStateCounter(StateTag state) {
67  counters_->state_counters(state)->Increment();
68  }
69 
70 
71  void DecrementStateCounter(StateTag state) {
72  counters_->state_counters(state)->Decrement();
73  }
74 };
75 
76 
77 //
78 // The Profiler samples pc and sp values for the main thread.
79 // Each sample is appended to a circular buffer.
80 // An independent thread removes data and writes it to the log.
81 // This design minimizes the time spent in the sampler.
82 //
83 class Profiler: public Thread {
84  public:
85  explicit Profiler(Isolate* isolate);
86  void Engage();
87  void Disengage();
88 
89  // Inserts collected profiling data into buffer.
90  void Insert(TickSample* sample) {
91  if (paused_)
92  return;
93 
94  if (Succ(head_) == tail_) {
95  overflow_ = true;
96  } else {
97  buffer_[head_] = *sample;
98  head_ = Succ(head_);
99  buffer_semaphore_->Signal(); // Tell the consumer we have an element.
100  }
101  }
102 
103  // Waits for a signal and removes profiling data.
104  bool Remove(TickSample* sample) {
105  buffer_semaphore_->Wait(); // Wait for an element.
106  *sample = buffer_[tail_];
107  bool result = overflow_;
108  tail_ = Succ(tail_);
109  overflow_ = false;
110  return result;
111  }
112 
113  void Run();
114 
115  // Pause and Resume TickSample data collection.
116  bool paused() const { return paused_; }
117  void pause() { paused_ = true; }
118  void resume() { paused_ = false; }
119 
120  private:
121  // Returns the next index in the cyclic buffer.
122  int Succ(int index) { return (index + 1) % kBufferSize; }
123 
124  Isolate* isolate_;
125  // Cyclic buffer for communicating profiling samples
126  // between the signal handler and the worker thread.
127  static const int kBufferSize = 128;
128  TickSample buffer_[kBufferSize]; // Buffer storage.
129  int head_; // Index to the buffer head.
130  int tail_; // Index to the buffer tail.
131  bool overflow_; // Tell whether a buffer overflow has occurred.
132  Semaphore* buffer_semaphore_; // Semaphore used for buffer synchronization.
133 
134  // Tells whether profiler is engaged, that is, processing thread is started.
135  bool engaged_;
136 
137  // Tells whether worker thread should continue running.
138  bool running_;
139 
140  // Tells whether we are currently recording tick samples.
141  bool paused_;
142 };
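// Illustrative sketch (not part of the original log.cc): Insert() and Remove()
// above form a classic single-producer/single-consumer ring buffer. A minimal,
// self-contained model of the same protocol looks roughly like this (the
// Semaphore type stands in for v8::internal::Semaphore and is assumed to be a
// plain counting semaphore created with an initial count of 0):
//
//   struct SampleRing {
//     static const int kSize = 128;
//     TickSample buf[kSize];
//     int head;               // written only by the producer (sampler)
//     int tail;               // written only by the consumer (worker thread)
//     Semaphore* elements;    // counts filled slots
//
//     bool Insert(const TickSample* s) {               // producer side
//       if ((head + 1) % kSize == tail) return false;  // full: drop the sample
//       buf[head] = *s;
//       head = (head + 1) % kSize;
//       elements->Signal();                            // wake the consumer
//       return true;
//     }
//     void Remove(TickSample* s) {                     // consumer side
//       elements->Wait();                              // block until data arrives
//       *s = buf[tail];
//       tail = (tail + 1) % kSize;
//     }
//   };
//
// Dropping samples when the buffer is full (the overflow_ flag above) keeps the
// producer path non-blocking, which is the point of the design comment that
// precedes the class.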
143 
144 
145 //
146 // StackTracer implementation
147 //
148 void StackTracer::Trace(Isolate* isolate, TickSample* sample) {
149  ASSERT(isolate->IsInitialized());
150 
151  // Avoid collecting traces while doing GC.
152  if (sample->state == GC) return;
153 
154  const Address js_entry_sp =
155  Isolate::js_entry_sp(isolate->thread_local_top());
156  if (js_entry_sp == 0) {
157  // Not executing JS now.
158  return;
159  }
160 
161  const Address callback = isolate->external_callback();
162  if (callback != NULL) {
163  sample->external_callback = callback;
164  sample->has_external_callback = true;
165  } else {
166  // Sample potential return address value for frameless invocation of
167  // stubs (we'll figure out later if this value makes sense).
168  sample->tos = Memory::Address_at(sample->sp);
169  sample->has_external_callback = false;
170  }
171 
172  SafeStackTraceFrameIterator it(isolate,
173  sample->fp, sample->sp,
174  sample->sp, js_entry_sp);
175  int i = 0;
176  while (!it.done() && i < TickSample::kMaxFramesCount) {
177  sample->stack[i++] = it.frame()->pc();
178  it.Advance();
179  }
180  sample->frames_count = i;
181 }
182 
183 
184 //
185 // Ticker used to provide ticks to the profiler and the sliding state
186 // window.
187 //
188 class Ticker: public Sampler {
189  public:
190  Ticker(Isolate* isolate, int interval):
191  Sampler(isolate, interval),
192  window_(NULL),
193  profiler_(NULL) {}
194 
195  ~Ticker() { if (IsActive()) Stop(); }
196 
197  virtual void Tick(TickSample* sample) {
198  if (profiler_) profiler_->Insert(sample);
199  if (window_) window_->AddState(sample->state);
200  }
201 
202  void SetWindow(SlidingStateWindow* window) {
203  window_ = window;
204  if (!IsActive()) Start();
205  }
206 
207  void ClearWindow() {
208  window_ = NULL;
209  if (!profiler_ && IsActive() && !RuntimeProfiler::IsEnabled()) Stop();
210  }
211 
212  void SetProfiler(Profiler* profiler) {
213  ASSERT(profiler_ == NULL);
214  profiler_ = profiler;
215  IncreaseProfilingDepth();
216  if (!FLAG_prof_lazy && !IsActive()) Start();
217  }
218 
219  void ClearProfiler() {
220  DecreaseProfilingDepth();
221  profiler_ = NULL;
222  if (!window_ && IsActive() && !RuntimeProfiler::IsEnabled()) Stop();
223  }
224 
225  protected:
226  virtual void DoSampleStack(TickSample* sample) {
227  StackTracer::Trace(isolate(), sample);
228  }
229 
230  private:
231  SlidingStateWindow* window_;
232  Profiler* profiler_;
233 };
234 
235 
236 //
237 // SlidingStateWindow implementation.
238 //
239 SlidingStateWindow::SlidingStateWindow(Isolate* isolate)
240  : counters_(isolate->counters()), current_index_(0), is_full_(false) {
241  for (int i = 0; i < kBufferSize; i++) {
242  buffer_[i] = static_cast<byte>(OTHER);
243  }
244  isolate->logger()->ticker_->SetWindow(this);
245 }
246 
247 
248 SlidingStateWindow::~SlidingStateWindow() {
249  LOGGER->ticker_->ClearWindow();
250 }
251 
252 
253 void SlidingStateWindow::AddState(StateTag state) {
254  if (is_full_) {
255  DecrementStateCounter(static_cast<StateTag>(buffer_[current_index_]));
256  } else if (current_index_ == kBufferSize - 1) {
257  is_full_ = true;
258  }
259  buffer_[current_index_] = static_cast<byte>(state);
260  IncrementStateCounter(state);
261  ASSERT(IsPowerOf2(kBufferSize));
262  current_index_ = (current_index_ + 1) & (kBufferSize - 1);
263 }
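// Illustrative note (not part of the original log.cc): because kBufferSize is
// asserted to be a power of two, the index update above,
//   current_index_ = (current_index_ + 1) & (kBufferSize - 1);
// is equivalent to (current_index_ + 1) % kBufferSize but avoids a division.
// With kBufferSize == 256 the mask is 0xFF, so index 255 wraps back to 0.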
264 
265 
266 //
267 // Profiler implementation.
268 //
269 Profiler::Profiler(Isolate* isolate)
270  : Thread("v8:Profiler"),
271  isolate_(isolate),
272  head_(0),
273  tail_(0),
274  overflow_(false),
275  buffer_semaphore_(OS::CreateSemaphore(0)),
276  engaged_(false),
277  running_(false),
278  paused_(false) {
279 }
280 
281 
282 void Profiler::Engage() {
283  if (engaged_) return;
284  engaged_ = true;
285 
286  // TODO(mnaganov): This is actually "Chromium" mode. Flags need to be revised.
287  // http://code.google.com/p/v8/issues/detail?id=487
288  if (!FLAG_prof_lazy) {
289  OS::LogSharedLibraryAddresses();
290  }
291 
292  // Start thread processing the profiler buffer.
293  running_ = true;
294  Start();
295 
296  // Register to get ticks.
297  LOGGER->ticker_->SetProfiler(this);
298 
299  LOGGER->ProfilerBeginEvent();
300 }
301 
302 
303 void Profiler::Disengage() {
304  if (!engaged_) return;
305 
306  // Stop receiving ticks.
307  LOGGER->ticker_->ClearProfiler();
308 
309  // Terminate the worker thread by setting running_ to false,
310  // inserting a fake element in the queue and then wait for
311  // the thread to terminate.
312  running_ = false;
313  TickSample sample;
314  // Reset 'paused_' flag, otherwise semaphore may not be signalled.
315  resume();
316  Insert(&sample);
317  Join();
318 
319  LOG(ISOLATE, UncheckedStringEvent("profiler", "end"));
320 }
321 
322 
323 void Profiler::Run() {
324  TickSample sample;
325  bool overflow = Remove(&sample);
326  while (running_) {
327  LOG(isolate_, TickEvent(&sample, overflow));
328  overflow = Remove(&sample);
329  }
330 }
331 
332 
333 // Low-level profiling event structures.
334 
335 struct LowLevelCodeCreateStruct {
336  static const char kTag = 'C';
337 
338  int32_t name_size;
339  Address code_address;
340  int32_t code_size;
341 };
342 
343 
344 struct LowLevelCodeMoveStruct {
345  static const char kTag = 'M';
346 
347  Address from_address;
348  Address to_address;
349 };
350 
351 
352 struct LowLevelCodeDeleteStruct {
353  static const char kTag = 'D';
354 
355  Address address;
356 };
357 
358 
359 struct LowLevelSnapshotPositionStruct {
360  static const char kTag = 'P';
361 
362  Address address;
363  int32_t position;
364 };
365 
366 
367 static const char kCodeMovingGCTag = 'G';
368 
369 
370 //
371 // Logger class implementation.
372 //
373 
374 class Logger::NameMap {
375  public:
376  NameMap() : impl_(&PointerEquals) {}
377 
378  ~NameMap() {
379  for (HashMap::Entry* p = impl_.Start(); p != NULL; p = impl_.Next(p)) {
380  DeleteArray(static_cast<const char*>(p->value));
381  }
382  }
383 
384  void Insert(Address code_address, const char* name, int name_size) {
385  HashMap::Entry* entry = FindOrCreateEntry(code_address);
386  if (entry->value == NULL) {
387  entry->value = CopyName(name, name_size);
388  }
389  }
390 
391  const char* Lookup(Address code_address) {
392  HashMap::Entry* entry = FindEntry(code_address);
393  return (entry != NULL) ? static_cast<const char*>(entry->value) : NULL;
394  }
395 
396  void Remove(Address code_address) {
397  HashMap::Entry* entry = FindEntry(code_address);
398  if (entry != NULL) {
399  DeleteArray(static_cast<char*>(entry->value));
400  RemoveEntry(entry);
401  }
402  }
403 
404  void Move(Address from, Address to) {
405  if (from == to) return;
406  HashMap::Entry* from_entry = FindEntry(from);
407  ASSERT(from_entry != NULL);
408  void* value = from_entry->value;
409  RemoveEntry(from_entry);
410  HashMap::Entry* to_entry = FindOrCreateEntry(to);
411  ASSERT(to_entry->value == NULL);
412  to_entry->value = value;
413  }
414 
415  private:
416  static bool PointerEquals(void* lhs, void* rhs) {
417  return lhs == rhs;
418  }
419 
420  static char* CopyName(const char* name, int name_size) {
421  char* result = NewArray<char>(name_size + 1);
422  for (int i = 0; i < name_size; ++i) {
423  char c = name[i];
424  if (c == '\0') c = ' ';
425  result[i] = c;
426  }
427  result[name_size] = '\0';
428  return result;
429  }
430 
431  HashMap::Entry* FindOrCreateEntry(Address code_address) {
432  return impl_.Lookup(code_address, ComputePointerHash(code_address), true);
433  }
434 
435  HashMap::Entry* FindEntry(Address code_address) {
436  return impl_.Lookup(code_address, ComputePointerHash(code_address), false);
437  }
438 
439  void RemoveEntry(HashMap::Entry* entry) {
440  impl_.Remove(entry->key, entry->hash);
441  }
442 
443  HashMap impl_;
444 
445  DISALLOW_COPY_AND_ASSIGN(NameMap);
446 };
447 
448 
449 class Logger::NameBuffer {
450  public:
451  NameBuffer() { Reset(); }
452 
453  void Reset() {
454  utf8_pos_ = 0;
455  }
456 
457  void AppendString(String* str) {
458  if (str == NULL) return;
459  if (str->HasOnlyAsciiChars()) {
460  int utf8_length = Min(str->length(), kUtf8BufferSize - utf8_pos_);
461  String::WriteToFlat(str, utf8_buffer_ + utf8_pos_, 0, utf8_length);
462  utf8_pos_ += utf8_length;
463  return;
464  }
465  int uc16_length = Min(str->length(), kUtf16BufferSize);
466  String::WriteToFlat(str, utf16_buffer, 0, uc16_length);
467  int previous = unibrow::Utf16::kNoPreviousCharacter;
468  for (int i = 0; i < uc16_length && utf8_pos_ < kUtf8BufferSize; ++i) {
469  uc16 c = utf16_buffer[i];
470  if (c <= String::kMaxAsciiCharCodeU) {
471  utf8_buffer_[utf8_pos_++] = static_cast<char>(c);
472  } else {
473  int char_length = unibrow::Utf8::Length(c, previous);
474  if (utf8_pos_ + char_length > kUtf8BufferSize) break;
475  unibrow::Utf8::Encode(utf8_buffer_ + utf8_pos_, c, previous);
476  utf8_pos_ += char_length;
477  }
478  previous = c;
479  }
480  }
481 
482  void AppendBytes(const char* bytes, int size) {
483  size = Min(size, kUtf8BufferSize - utf8_pos_);
484  memcpy(utf8_buffer_ + utf8_pos_, bytes, size);
485  utf8_pos_ += size;
486  }
487 
488  void AppendBytes(const char* bytes) {
489  AppendBytes(bytes, StrLength(bytes));
490  }
491 
492  void AppendByte(char c) {
493  if (utf8_pos_ >= kUtf8BufferSize) return;
494  utf8_buffer_[utf8_pos_++] = c;
495  }
496 
497  void AppendInt(int n) {
498  Vector<char> buffer(utf8_buffer_ + utf8_pos_, kUtf8BufferSize - utf8_pos_);
499  int size = OS::SNPrintF(buffer, "%d", n);
500  if (size > 0 && utf8_pos_ + size <= kUtf8BufferSize) {
501  utf8_pos_ += size;
502  }
503  }
504 
505  const char* get() { return utf8_buffer_; }
506  int size() const { return utf8_pos_; }
507 
508  private:
509  static const int kUtf8BufferSize = 512;
510  static const int kUtf16BufferSize = 128;
511 
512  int utf8_pos_;
513  char utf8_buffer_[kUtf8BufferSize];
514  uc16 utf16_buffer[kUtf16BufferSize];
515 };
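// Illustrative usage sketch (not part of the original log.cc): NameBuffer is a
// fixed-size append buffer used to assemble low-level event names without heap
// allocation. The call pattern used throughout this file is roughly:
//
//   name_buffer_->Reset();
//   name_buffer_->AppendBytes("some-tag");   // event/tag prefix
//   name_buffer_->AppendByte(':');
//   name_buffer_->AppendInt(42);             // e.g. an args count or line number
//   // name_buffer_->get() / name_buffer_->size() are then handed to the
//   // low-level log writer or the snapshot name map.
//
// Non-ASCII strings are first flattened to UTF-16 and re-encoded as UTF-8;
// anything beyond kUtf8BufferSize (512 bytes) is silently truncated.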
516 
517 
518 Logger::Logger()
519  : ticker_(NULL),
520  profiler_(NULL),
521  sliding_state_window_(NULL),
522  log_events_(NULL),
523  logging_nesting_(0),
524  cpu_profiler_nesting_(0),
525  log_(new Log(this)),
526  name_buffer_(new NameBuffer),
527  address_to_name_map_(NULL),
528  is_initialized_(false),
529  last_address_(NULL),
530  prev_sp_(NULL),
531  prev_function_(NULL),
532  prev_to_(NULL),
533  prev_code_(NULL) {
534 }
535 
536 
537 Logger::~Logger() {
538  delete address_to_name_map_;
539  delete name_buffer_;
540  delete log_;
541 }
542 
543 
544 #define DECLARE_EVENT(ignore1, name) name,
545 static const char* const kLogEventsNames[Logger::NUMBER_OF_LOG_EVENTS] = {
546  LOG_EVENTS_AND_TAGS_LIST(DECLARE_EVENT)
547 };
548 #undef DECLARE_EVENT
549 
550 
551 void Logger::ProfilerBeginEvent() {
552  if (!log_->IsEnabled()) return;
553  LogMessageBuilder msg(this);
554  msg.Append("profiler,\"begin\",%d\n", kSamplingIntervalMs);
555  msg.WriteToLogFile();
556 }
557 
558 
559 void Logger::StringEvent(const char* name, const char* value) {
560  if (FLAG_log) UncheckedStringEvent(name, value);
561 }
562 
563 
564 void Logger::UncheckedStringEvent(const char* name, const char* value) {
565  if (!log_->IsEnabled()) return;
566  LogMessageBuilder msg(this);
567  msg.Append("%s,\"%s\"\n", name, value);
568  msg.WriteToLogFile();
569 }
570 
571 
572 void Logger::IntEvent(const char* name, int value) {
573  if (FLAG_log) UncheckedIntEvent(name, value);
574 }
575 
576 
577 void Logger::IntPtrTEvent(const char* name, intptr_t value) {
578  if (FLAG_log) UncheckedIntPtrTEvent(name, value);
579 }
580 
581 
582 void Logger::UncheckedIntEvent(const char* name, int value) {
583  if (!log_->IsEnabled()) return;
584  LogMessageBuilder msg(this);
585  msg.Append("%s,%d\n", name, value);
586  msg.WriteToLogFile();
587 }
588 
589 
590 void Logger::UncheckedIntPtrTEvent(const char* name, intptr_t value) {
591  if (!log_->IsEnabled()) return;
592  LogMessageBuilder msg(this);
593  msg.Append("%s,%" V8_PTR_PREFIX "d\n", name, value);
594  msg.WriteToLogFile();
595 }
596 
597 
598 void Logger::HandleEvent(const char* name, Object** location) {
599  if (!log_->IsEnabled() || !FLAG_log_handles) return;
600  LogMessageBuilder msg(this);
601  msg.Append("%s,0x%" V8PRIxPTR "\n", name, location);
602  msg.WriteToLogFile();
603 }
604 
605 
606 // ApiEvent is private so all the calls come from the Logger class. It is the
607 // caller's responsibility to ensure that log is enabled and that
608 // FLAG_log_api is true.
609 void Logger::ApiEvent(const char* format, ...) {
610  ASSERT(log_->IsEnabled() && FLAG_log_api);
611  LogMessageBuilder msg(this);
612  va_list ap;
613  va_start(ap, format);
614  msg.AppendVA(format, ap);
615  va_end(ap);
616  msg.WriteToLogFile();
617 }
618 
619 
620 void Logger::ApiNamedSecurityCheck(Object* key) {
621  if (!log_->IsEnabled() || !FLAG_log_api) return;
622  if (key->IsString()) {
623  SmartArrayPointer<char> str =
624  String::cast(key)->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
625  ApiEvent("api,check-security,\"%s\"\n", *str);
626  } else if (key->IsUndefined()) {
627  ApiEvent("api,check-security,undefined\n");
628  } else {
629  ApiEvent("api,check-security,['no-name']\n");
630  }
631 }
632 
633 
634 void Logger::SharedLibraryEvent(const char* library_path,
635  uintptr_t start,
636  uintptr_t end) {
637  if (!log_->IsEnabled() || !FLAG_prof) return;
638  LogMessageBuilder msg(this);
639  msg.Append("shared-library,\"%s\",0x%08" V8PRIxPTR ",0x%08" V8PRIxPTR "\n",
640  library_path,
641  start,
642  end);
643  msg.WriteToLogFile();
644 }
645 
646 
647 void Logger::SharedLibraryEvent(const wchar_t* library_path,
648  uintptr_t start,
649  uintptr_t end) {
650  if (!log_->IsEnabled() || !FLAG_prof) return;
651  LogMessageBuilder msg(this);
652  msg.Append("shared-library,\"%ls\",0x%08" V8PRIxPTR ",0x%08" V8PRIxPTR "\n",
653  library_path,
654  start,
655  end);
656  msg.WriteToLogFile();
657 }
658 
659 
660 void Logger::LogRegExpSource(Handle<JSRegExp> regexp) {
661  // Prints "/" + re.source + "/" +
662  // (re.global?"g":"") + (re.ignorecase?"i":"") + (re.multiline?"m":"")
663  LogMessageBuilder msg(this);
664 
665  Handle<Object> source = GetProperty(regexp, "source");
666  if (!source->IsString()) {
667  msg.Append("no source");
668  return;
669  }
670 
671  switch (regexp->TypeTag()) {
672  case JSRegExp::ATOM:
673  msg.Append('a');
674  break;
675  default:
676  break;
677  }
678  msg.Append('/');
679  msg.AppendDetailed(*Handle<String>::cast(source), false);
680  msg.Append('/');
681 
682  // global flag
683  Handle<Object> global = GetProperty(regexp, "global");
684  if (global->IsTrue()) {
685  msg.Append('g');
686  }
687  // ignorecase flag
688  Handle<Object> ignorecase = GetProperty(regexp, "ignoreCase");
689  if (ignorecase->IsTrue()) {
690  msg.Append('i');
691  }
692  // multiline flag
693  Handle<Object> multiline = GetProperty(regexp, "multiline");
694  if (multiline->IsTrue()) {
695  msg.Append('m');
696  }
697 
698  msg.WriteToLogFile();
699 }
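// Illustrative example (not part of the original log.cc): for a regexp such as
// /ab+c/gi the code above appends roughly `/ab+c/gi` to the current log
// message; ATOM regexps get an extra 'a' prefix, and only the flags that are
// actually set (g, i, m) are emitted.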
700 
701 
702 void Logger::RegExpCompileEvent(Handle<JSRegExp> regexp, bool in_cache) {
703  if (!log_->IsEnabled() || !FLAG_log_regexp) return;
704  LogMessageBuilder msg(this);
705  msg.Append("regexp-compile,");
706  LogRegExpSource(regexp);
707  msg.Append(in_cache ? ",hit\n" : ",miss\n");
708  msg.WriteToLogFile();
709 }
710 
711 
712 void Logger::LogRuntime(Vector<const char> format, JSArray* args) {
713  if (!log_->IsEnabled() || !FLAG_log_runtime) return;
714  HandleScope scope;
715  LogMessageBuilder msg(this);
716  for (int i = 0; i < format.length(); i++) {
717  char c = format[i];
718  if (c == '%' && i <= format.length() - 2) {
719  i++;
720  ASSERT('0' <= format[i] && format[i] <= '9');
721  MaybeObject* maybe = args->GetElement(format[i] - '0');
722  Object* obj;
723  if (!maybe->ToObject(&obj)) {
724  msg.Append("<exception>");
725  continue;
726  }
727  i++;
728  switch (format[i]) {
729  case 's':
730  msg.AppendDetailed(String::cast(obj), false);
731  break;
732  case 'S':
733  msg.AppendDetailed(String::cast(obj), true);
734  break;
735  case 'r':
736  Logger::LogRegExpSource(Handle<JSRegExp>(JSRegExp::cast(obj)));
737  break;
738  case 'x':
739  msg.Append("0x%x", Smi::cast(obj)->value());
740  break;
741  case 'i':
742  msg.Append("%i", Smi::cast(obj)->value());
743  break;
744  default:
745  UNREACHABLE();
746  }
747  } else {
748  msg.Append(c);
749  }
750  }
751  msg.Append('\n');
752  msg.WriteToLogFile();
753 }
754 
755 
756 void Logger::ApiIndexedSecurityCheck(uint32_t index) {
757  if (!log_->IsEnabled() || !FLAG_log_api) return;
758  ApiEvent("api,check-security,%u\n", index);
759 }
760 
761 
762 void Logger::ApiNamedPropertyAccess(const char* tag,
763  JSObject* holder,
764  Object* name) {
765  ASSERT(name->IsString());
766  if (!log_->IsEnabled() || !FLAG_log_api) return;
767  String* class_name_obj = holder->class_name();
768  SmartArrayPointer<char> class_name =
769  class_name_obj->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
770  SmartArrayPointer<char> property_name =
771  String::cast(name)->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
772  ApiEvent("api,%s,\"%s\",\"%s\"\n", tag, *class_name, *property_name);
773 }
774 
775 void Logger::ApiIndexedPropertyAccess(const char* tag,
776  JSObject* holder,
777  uint32_t index) {
778  if (!log_->IsEnabled() || !FLAG_log_api) return;
779  String* class_name_obj = holder->class_name();
780  SmartArrayPointer<char> class_name =
781  class_name_obj->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
782  ApiEvent("api,%s,\"%s\",%u\n", tag, *class_name, index);
783 }
784 
785 void Logger::ApiObjectAccess(const char* tag, JSObject* object) {
786  if (!log_->IsEnabled() || !FLAG_log_api) return;
787  String* class_name_obj = object->class_name();
788  SmartArrayPointer<char> class_name =
789  class_name_obj->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
790  ApiEvent("api,%s,\"%s\"\n", tag, *class_name);
791 }
792 
793 
794 void Logger::ApiEntryCall(const char* name) {
795  if (!log_->IsEnabled() || !FLAG_log_api) return;
796  ApiEvent("api,%s\n", name);
797 }
798 
799 
800 void Logger::NewEvent(const char* name, void* object, size_t size) {
801  if (!log_->IsEnabled() || !FLAG_log) return;
802  LogMessageBuilder msg(this);
803  msg.Append("new,%s,0x%" V8PRIxPTR ",%u\n", name, object,
804  static_cast<unsigned int>(size));
805  msg.WriteToLogFile();
806 }
807 
808 
809 void Logger::DeleteEvent(const char* name, void* object) {
810  if (!log_->IsEnabled() || !FLAG_log) return;
811  LogMessageBuilder msg(this);
812  msg.Append("delete,%s,0x%" V8PRIxPTR "\n", name, object);
813  msg.WriteToLogFile();
814 }
815 
816 
817 void Logger::NewEventStatic(const char* name, void* object, size_t size) {
818  LOGGER->NewEvent(name, object, size);
819 }
820 
821 
822 void Logger::DeleteEventStatic(const char* name, void* object) {
823  LOGGER->DeleteEvent(name, object);
824 }
825 
826 void Logger::CallbackEventInternal(const char* prefix, const char* name,
827  Address entry_point) {
828  if (!log_->IsEnabled() || !FLAG_log_code) return;
829  LogMessageBuilder msg(this);
830  msg.Append("%s,%s,",
831  kLogEventsNames[CODE_CREATION_EVENT],
832  kLogEventsNames[CALLBACK_TAG]);
833  msg.AppendAddress(entry_point);
834  msg.Append(",1,\"%s%s\"", prefix, name);
835  msg.Append('\n');
836  msg.WriteToLogFile();
837 }
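// Illustrative example (not part of the original log.cc): assuming the event
// and tag names defined in log.h ("code-creation" for CODE_CREATION_EVENT and
// "Callback" for CALLBACK_TAG), a getter callback for a property "foo" living
// at a hypothetical address 0x12345678 would produce a line of roughly this
// shape:
//
//   code-creation,Callback,0x12345678,1,"get foo"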
838 
839 
840 void Logger::CallbackEvent(String* name, Address entry_point) {
841  if (!log_->IsEnabled() || !FLAG_log_code) return;
842  SmartArrayPointer<char> str =
843  name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
844  CallbackEventInternal("", *str, entry_point);
845 }
846 
847 
848 void Logger::GetterCallbackEvent(String* name, Address entry_point) {
849  if (!log_->IsEnabled() || !FLAG_log_code) return;
850  SmartArrayPointer<char> str =
851  name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
852  CallbackEventInternal("get ", *str, entry_point);
853 }
854 
855 
856 void Logger::SetterCallbackEvent(String* name, Address entry_point) {
857  if (!log_->IsEnabled() || !FLAG_log_code) return;
858  SmartArrayPointer<char> str =
859  name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
860  CallbackEventInternal("set ", *str, entry_point);
861 }
862 
863 
864 void Logger::CodeCreateEvent(LogEventsAndTags tag,
865  Code* code,
866  const char* comment) {
867  if (!log_->IsEnabled()) return;
868  if (FLAG_ll_prof || Serializer::enabled()) {
869  name_buffer_->Reset();
870  name_buffer_->AppendBytes(kLogEventsNames[tag]);
871  name_buffer_->AppendByte(':');
872  name_buffer_->AppendBytes(comment);
873  }
874  if (FLAG_ll_prof) {
875  LowLevelCodeCreateEvent(code, name_buffer_->get(), name_buffer_->size());
876  }
877  if (Serializer::enabled()) {
878  RegisterSnapshotCodeName(code, name_buffer_->get(), name_buffer_->size());
879  }
880  if (!FLAG_log_code) return;
881  LogMessageBuilder msg(this);
882  msg.Append("%s,%s,",
883  kLogEventsNames[CODE_CREATION_EVENT],
884  kLogEventsNames[tag]);
885  msg.AppendAddress(code->address());
886  msg.Append(",%d,\"", code->ExecutableSize());
887  for (const char* p = comment; *p != '\0'; p++) {
888  if (*p == '"') {
889  msg.Append('\\');
890  }
891  msg.Append(*p);
892  }
893  msg.Append('"');
894  msg.Append('\n');
895  msg.WriteToLogFile();
896 }
897 
898 
899 void Logger::CodeCreateEvent(LogEventsAndTags tag,
900  Code* code,
901  String* name) {
902  if (!log_->IsEnabled()) return;
903  if (FLAG_ll_prof || Serializer::enabled()) {
904  name_buffer_->Reset();
905  name_buffer_->AppendBytes(kLogEventsNames[tag]);
906  name_buffer_->AppendByte(':');
907  name_buffer_->AppendString(name);
908  }
909  if (FLAG_ll_prof) {
910  LowLevelCodeCreateEvent(code, name_buffer_->get(), name_buffer_->size());
911  }
912  if (Serializer::enabled()) {
913  RegisterSnapshotCodeName(code, name_buffer_->get(), name_buffer_->size());
914  }
915  if (!FLAG_log_code) return;
916  LogMessageBuilder msg(this);
917  msg.Append("%s,%s,",
918  kLogEventsNames[CODE_CREATION_EVENT],
919  kLogEventsNames[tag]);
920  msg.AppendAddress(code->address());
921  msg.Append(",%d,\"", code->ExecutableSize());
922  msg.AppendDetailed(name, false);
923  msg.Append('"');
924  msg.Append('\n');
925  msg.WriteToLogFile();
926 }
927 
928 
929 // ComputeMarker must only be used when SharedFunctionInfo is known.
930 static const char* ComputeMarker(Code* code) {
931  switch (code->kind()) {
932  case Code::FUNCTION: return code->optimizable() ? "~" : "";
933  case Code::OPTIMIZED_FUNCTION: return "*";
934  default: return "";
935  }
936 }
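// Illustrative note (not part of the original log.cc): the marker computed
// above ends up in the code-creation log lines, where the tick-processor
// scripts display "*" next to optimized (Crankshaft) code and "~" next to
// unoptimized-but-optimizable functions; built-ins and non-optimizable code
// carry no marker.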
937 
938 
939 void Logger::CodeCreateEvent(LogEventsAndTags tag,
940  Code* code,
941  SharedFunctionInfo* shared,
942  String* name) {
943  if (!log_->IsEnabled()) return;
944  if (FLAG_ll_prof || Serializer::enabled()) {
945  name_buffer_->Reset();
946  name_buffer_->AppendBytes(kLogEventsNames[tag]);
947  name_buffer_->AppendByte(':');
948  name_buffer_->AppendBytes(ComputeMarker(code));
949  name_buffer_->AppendString(name);
950  }
951  if (FLAG_ll_prof) {
952  LowLevelCodeCreateEvent(code, name_buffer_->get(), name_buffer_->size());
953  }
954  if (Serializer::enabled()) {
955  RegisterSnapshotCodeName(code, name_buffer_->get(), name_buffer_->size());
956  }
957  if (!FLAG_log_code) return;
958  if (code == Isolate::Current()->builtins()->builtin(
959  Builtins::kLazyCompile))
960  return;
961 
962  LogMessageBuilder msg(this);
963  SmartArrayPointer<char> str =
964  name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
965  msg.Append("%s,%s,",
966  kLogEventsNames[CODE_CREATION_EVENT],
967  kLogEventsNames[tag]);
968  msg.AppendAddress(code->address());
969  msg.Append(",%d,\"%s\",", code->ExecutableSize(), *str);
970  msg.AppendAddress(shared->address());
971  msg.Append(",%s", ComputeMarker(code));
972  msg.Append('\n');
973  msg.WriteToLogFile();
974 }
975 
976 
977 // Although it is possible to extract source and line from
978 // the SharedFunctionInfo object, we leave that to the caller
979 // to keep logging functions free from heap allocations.
980 void Logger::CodeCreateEvent(LogEventsAndTags tag,
981  Code* code,
982  SharedFunctionInfo* shared,
983  String* source, int line) {
984  if (!log_->IsEnabled()) return;
985  if (FLAG_ll_prof || Serializer::enabled()) {
986  name_buffer_->Reset();
987  name_buffer_->AppendBytes(kLogEventsNames[tag]);
988  name_buffer_->AppendByte(':');
989  name_buffer_->AppendBytes(ComputeMarker(code));
990  name_buffer_->AppendString(shared->DebugName());
991  name_buffer_->AppendByte(' ');
992  name_buffer_->AppendString(source);
993  name_buffer_->AppendByte(':');
994  name_buffer_->AppendInt(line);
995  }
996  if (FLAG_ll_prof) {
997  LowLevelCodeCreateEvent(code, name_buffer_->get(), name_buffer_->size());
998  }
999  if (Serializer::enabled()) {
1000  RegisterSnapshotCodeName(code, name_buffer_->get(), name_buffer_->size());
1001  }
1002  if (!FLAG_log_code) return;
1003  LogMessageBuilder msg(this);
1004  SmartArrayPointer<char> name =
1005  shared->DebugName()->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
1006  SmartArrayPointer<char> sourcestr =
1007  source->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
1008  msg.Append("%s,%s,",
1009  kLogEventsNames[CODE_CREATION_EVENT],
1010  kLogEventsNames[tag]);
1011  msg.AppendAddress(code->address());
1012  msg.Append(",%d,\"%s %s:%d\",",
1013  code->ExecutableSize(),
1014  *name,
1015  *sourcestr,
1016  line);
1017  msg.AppendAddress(shared->address());
1018  msg.Append(",%s", ComputeMarker(code));
1019  msg.Append('\n');
1020  msg.WriteToLogFile();
1021 }
1022 
1023 
1024 void Logger::CodeCreateEvent(LogEventsAndTags tag, Code* code, int args_count) {
1025  if (!log_->IsEnabled()) return;
1026  if (FLAG_ll_prof || Serializer::enabled()) {
1027  name_buffer_->Reset();
1028  name_buffer_->AppendBytes(kLogEventsNames[tag]);
1029  name_buffer_->AppendByte(':');
1030  name_buffer_->AppendInt(args_count);
1031  }
1032  if (FLAG_ll_prof) {
1033  LowLevelCodeCreateEvent(code, name_buffer_->get(), name_buffer_->size());
1034  }
1035  if (Serializer::enabled()) {
1036  RegisterSnapshotCodeName(code, name_buffer_->get(), name_buffer_->size());
1037  }
1038  if (!FLAG_log_code) return;
1039  LogMessageBuilder msg(this);
1040  msg.Append("%s,%s,",
1041  kLogEventsNames[CODE_CREATION_EVENT],
1042  kLogEventsNames[tag]);
1043  msg.AppendAddress(code->address());
1044  msg.Append(",%d,\"args_count: %d\"", code->ExecutableSize(), args_count);
1045  msg.Append('\n');
1046  msg.WriteToLogFile();
1047 }
1048 
1049 
1050 void Logger::CodeMovingGCEvent() {
1051  if (!log_->IsEnabled() || !FLAG_ll_prof) return;
1052  LowLevelLogWriteBytes(&kCodeMovingGCTag, sizeof(kCodeMovingGCTag));
1053  OS::SignalCodeMovingGC();
1054 }
1055 
1056 
1057 void Logger::RegExpCodeCreateEvent(Code* code, String* source) {
1058  if (!log_->IsEnabled()) return;
1059  if (FLAG_ll_prof || Serializer::enabled()) {
1060  name_buffer_->Reset();
1061  name_buffer_->AppendBytes(kLogEventsNames[REG_EXP_TAG]);
1062  name_buffer_->AppendByte(':');
1063  name_buffer_->AppendString(source);
1064  }
1065  if (FLAG_ll_prof) {
1066  LowLevelCodeCreateEvent(code, name_buffer_->get(), name_buffer_->size());
1067  }
1068  if (Serializer::enabled()) {
1069  RegisterSnapshotCodeName(code, name_buffer_->get(), name_buffer_->size());
1070  }
1071  if (!FLAG_log_code) return;
1072  LogMessageBuilder msg(this);
1073  msg.Append("%s,%s,",
1074  kLogEventsNames[CODE_CREATION_EVENT],
1075  kLogEventsNames[REG_EXP_TAG]);
1076  msg.AppendAddress(code->address());
1077  msg.Append(",%d,\"", code->ExecutableSize());
1078  msg.AppendDetailed(source, false);
1079  msg.Append('\"');
1080  msg.Append('\n');
1081  msg.WriteToLogFile();
1082 }
1083 
1084 
1085 void Logger::CodeMoveEvent(Address from, Address to) {
1086  if (!log_->IsEnabled()) return;
1087  if (FLAG_ll_prof) LowLevelCodeMoveEvent(from, to);
1088  if (Serializer::enabled() && address_to_name_map_ != NULL) {
1089  address_to_name_map_->Move(from, to);
1090  }
1091  MoveEventInternal(CODE_MOVE_EVENT, from, to);
1092 }
1093 
1094 
1095 void Logger::CodeDeleteEvent(Address from) {
1096  if (!log_->IsEnabled()) return;
1097  if (FLAG_ll_prof) LowLevelCodeDeleteEvent(from);
1098  if (Serializer::enabled() && address_to_name_map_ != NULL) {
1099  address_to_name_map_->Remove(from);
1100  }
1101  DeleteEventInternal(CODE_DELETE_EVENT, from);
1102 }
1103 
1104 
1105 void Logger::SnapshotPositionEvent(Address addr, int pos) {
1106  if (!log_->IsEnabled()) return;
1107  if (FLAG_ll_prof) LowLevelSnapshotPositionEvent(addr, pos);
1108  if (Serializer::enabled() && address_to_name_map_ != NULL) {
1109  const char* code_name = address_to_name_map_->Lookup(addr);
1110  if (code_name == NULL) return; // Not a code object.
1111  LogMessageBuilder msg(this);
1112  msg.Append("%s,%d,\"", kLogEventsNames[SNAPSHOT_CODE_NAME_EVENT], pos);
1113  for (const char* p = code_name; *p != '\0'; ++p) {
1114  if (*p == '"') msg.Append('\\');
1115  msg.Append(*p);
1116  }
1117  msg.Append("\"\n");
1118  msg.WriteToLogFile();
1119  }
1120  if (!FLAG_log_snapshot_positions) return;
1121  LogMessageBuilder msg(this);
1122  msg.Append("%s,", kLogEventsNames[SNAPSHOT_POSITION_EVENT]);
1123  msg.AppendAddress(addr);
1124  msg.Append(",%d", pos);
1125  msg.Append('\n');
1126  msg.WriteToLogFile();
1127 }
1128 
1129 
1130 void Logger::SharedFunctionInfoMoveEvent(Address from, Address to) {
1131  MoveEventInternal(SHARED_FUNC_MOVE_EVENT, from, to);
1132 }
1133 
1134 
1135 void Logger::MoveEventInternal(LogEventsAndTags event,
1136  Address from,
1137  Address to) {
1138  if (!log_->IsEnabled() || !FLAG_log_code) return;
1139  LogMessageBuilder msg(this);
1140  msg.Append("%s,", kLogEventsNames[event]);
1141  msg.AppendAddress(from);
1142  msg.Append(',');
1143  msg.AppendAddress(to);
1144  msg.Append('\n');
1145  msg.WriteToLogFile();
1146 }
1147 
1148 
1149 void Logger::DeleteEventInternal(LogEventsAndTags event, Address from) {
1150  if (!log_->IsEnabled() || !FLAG_log_code) return;
1151  LogMessageBuilder msg(this);
1152  msg.Append("%s,", kLogEventsNames[event]);
1153  msg.AppendAddress(from);
1154  msg.Append('\n');
1155  msg.WriteToLogFile();
1156 }
1157 
1158 
1159 void Logger::ResourceEvent(const char* name, const char* tag) {
1160  if (!log_->IsEnabled() || !FLAG_log) return;
1161  LogMessageBuilder msg(this);
1162  msg.Append("%s,%s,", name, tag);
1163 
1164  uint32_t sec, usec;
1165  if (OS::GetUserTime(&sec, &usec) != -1) {
1166  msg.Append("%d,%d,", sec, usec);
1167  }
1168  msg.Append("%.0f", OS::TimeCurrentMillis());
1169 
1170  msg.Append('\n');
1171  msg.WriteToLogFile();
1172 }
1173 
1174 
1175 void Logger::SuspectReadEvent(String* name, Object* obj) {
1176  if (!log_->IsEnabled() || !FLAG_log_suspect) return;
1177  LogMessageBuilder msg(this);
1178  String* class_name = obj->IsJSObject()
1179  ? JSObject::cast(obj)->class_name()
1180  : HEAP->empty_string();
1181  msg.Append("suspect-read,");
1182  msg.Append(class_name);
1183  msg.Append(',');
1184  msg.Append('"');
1185  msg.Append(name);
1186  msg.Append('"');
1187  msg.Append('\n');
1188  msg.WriteToLogFile();
1189 }
1190 
1191 
1192 void Logger::HeapSampleBeginEvent(const char* space, const char* kind) {
1193  if (!log_->IsEnabled() || !FLAG_log_gc) return;
1194  LogMessageBuilder msg(this);
1195  // Using non-relative system time in order to be able to synchronize with
1196  // external memory profiling events (e.g. DOM memory size).
1197  msg.Append("heap-sample-begin,\"%s\",\"%s\",%.0f\n",
1198  space, kind, OS::TimeCurrentMillis());
1199  msg.WriteToLogFile();
1200 }
1201 
1202 
1203 void Logger::HeapSampleEndEvent(const char* space, const char* kind) {
1204  if (!log_->IsEnabled() || !FLAG_log_gc) return;
1205  LogMessageBuilder msg(this);
1206  msg.Append("heap-sample-end,\"%s\",\"%s\"\n", space, kind);
1207  msg.WriteToLogFile();
1208 }
1209 
1210 
1211 void Logger::HeapSampleItemEvent(const char* type, int number, int bytes) {
1212  if (!log_->IsEnabled() || !FLAG_log_gc) return;
1213  LogMessageBuilder msg(this);
1214  msg.Append("heap-sample-item,%s,%d,%d\n", type, number, bytes);
1215  msg.WriteToLogFile();
1216 }
1217 
1218 
1219 void Logger::DebugTag(const char* call_site_tag) {
1220  if (!log_->IsEnabled() || !FLAG_log) return;
1221  LogMessageBuilder msg(this);
1222  msg.Append("debug-tag,%s\n", call_site_tag);
1223  msg.WriteToLogFile();
1224 }
1225 
1226 
1227 void Logger::DebugEvent(const char* event_type, Vector<uint16_t> parameter) {
1228  if (!log_->IsEnabled() || !FLAG_log) return;
1229  StringBuilder s(parameter.length() + 1);
1230  for (int i = 0; i < parameter.length(); ++i) {
1231  s.AddCharacter(static_cast<char>(parameter[i]));
1232  }
1233  char* parameter_string = s.Finalize();
1234  LogMessageBuilder msg(this);
1235  msg.Append("debug-queue-event,%s,%15.3f,%s\n",
1236  event_type,
1237  OS::TimeCurrentMillis(),
1238  parameter_string);
1239  DeleteArray(parameter_string);
1240  msg.WriteToLogFile();
1241 }
1242 
1243 
1244 void Logger::TickEvent(TickSample* sample, bool overflow) {
1245  if (!log_->IsEnabled() || !FLAG_prof) return;
1246  LogMessageBuilder msg(this);
1247  msg.Append("%s,", kLogEventsNames[TICK_EVENT]);
1248  msg.AppendAddress(sample->pc);
1249  msg.Append(',');
1250  msg.AppendAddress(sample->sp);
1251  if (sample->has_external_callback) {
1252  msg.Append(",1,");
1253  msg.AppendAddress(sample->external_callback);
1254  } else {
1255  msg.Append(",0,");
1256  msg.AppendAddress(sample->tos);
1257  }
1258  msg.Append(",%d", static_cast<int>(sample->state));
1259  if (overflow) {
1260  msg.Append(",overflow");
1261  }
1262  for (int i = 0; i < sample->frames_count; ++i) {
1263  msg.Append(',');
1264  msg.AppendAddress(sample->stack[i]);
1265  }
1266  msg.Append('\n');
1267  msg.WriteToLogFile();
1268 }
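// Illustrative example (not part of the original log.cc): assuming TICK_EVENT
// maps to the name "tick" in log.h, a sample with two JS frames and no
// external callback serializes to a line of roughly this shape:
//
//   tick,<pc>,<sp>,0,<tos>,<vm state>[,overflow],<frame0 pc>,<frame1 pc>
//
// The field after <sp> is 1 when an external (API) callback address was
// recorded instead of the top-of-stack value.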
1269 
1270 
1271 bool Logger::IsProfilerPaused() {
1272  return profiler_ == NULL || profiler_->paused();
1273 }
1274 
1275 
1276 void Logger::PauseProfiler() {
1277  if (!log_->IsEnabled()) return;
1278  if (profiler_ != NULL) {
1279  // It is OK to have negative nesting.
1280  if (--cpu_profiler_nesting_ == 0) {
1281  profiler_->pause();
1282  if (FLAG_prof_lazy) {
1283  if (!FLAG_sliding_state_window && !RuntimeProfiler::IsEnabled()) {
1284  ticker_->Stop();
1285  }
1286  FLAG_log_code = false;
1287  LOG(ISOLATE, UncheckedStringEvent("profiler", "pause"));
1288  }
1289  --logging_nesting_;
1290  }
1291  }
1292 }
1293 
1294 
1295 void Logger::ResumeProfiler() {
1296  if (!log_->IsEnabled()) return;
1297  if (profiler_ != NULL) {
1298  if (cpu_profiler_nesting_++ == 0) {
1299  ++logging_nesting_;
1300  if (FLAG_prof_lazy) {
1301  profiler_->Engage();
1302  LOG(ISOLATE, UncheckedStringEvent("profiler", "resume"));
1303  FLAG_log_code = true;
1304  LogCompiledFunctions();
1305  LogAccessorCallbacks();
1306  if (!FLAG_sliding_state_window && !ticker_->IsActive()) {
1307  ticker_->Start();
1308  }
1309  }
1310  profiler_->resume();
1311  }
1312  }
1313 }
1314 
1315 
1316 // This function can be called when Log's mutex is acquired,
1317 // either from main or Profiler's thread.
1318 void Logger::LogFailure() {
1319  PauseProfiler();
1320 }
1321 
1322 
1323 bool Logger::IsProfilerSamplerActive() {
1324  return ticker_->IsActive();
1325 }
1326 
1327 
1328 class EnumerateOptimizedFunctionsVisitor: public OptimizedFunctionVisitor {
1329  public:
1330  EnumerateOptimizedFunctionsVisitor(Handle<SharedFunctionInfo>* sfis,
1331  Handle<Code>* code_objects,
1332  int* count)
1333  : sfis_(sfis), code_objects_(code_objects), count_(count) { }
1334 
1335  virtual void EnterContext(Context* context) {}
1336  virtual void LeaveContext(Context* context) {}
1337 
1338  virtual void VisitFunction(JSFunction* function) {
1339  SharedFunctionInfo* sfi = SharedFunctionInfo::cast(function->shared());
1340  Object* maybe_script = sfi->script();
1341  if (maybe_script->IsScript()
1342  && !Script::cast(maybe_script)->HasValidSource()) return;
1343  if (sfis_ != NULL) {
1344  sfis_[*count_] = Handle<SharedFunctionInfo>(sfi);
1345  }
1346  if (code_objects_ != NULL) {
1347  ASSERT(function->code()->kind() == Code::OPTIMIZED_FUNCTION);
1348  code_objects_[*count_] = Handle<Code>(function->code());
1349  }
1350  *count_ = *count_ + 1;
1351  }
1352 
1353  private:
1354  Handle<SharedFunctionInfo>* sfis_;
1355  Handle<Code>* code_objects_;
1356  int* count_;
1357 };
1358 
1359 
1360 static int EnumerateCompiledFunctions(Handle<SharedFunctionInfo>* sfis,
1361  Handle<Code>* code_objects) {
1362  HeapIterator iterator;
1363  AssertNoAllocation no_alloc;
1364  int compiled_funcs_count = 0;
1365 
1366  // Iterate the heap to find shared function info objects and record
1367  // the unoptimized code for them.
1368  for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
1369  if (!obj->IsSharedFunctionInfo()) continue;
1370  SharedFunctionInfo* sfi = SharedFunctionInfo::cast(obj);
1371  if (sfi->is_compiled()
1372  && (!sfi->script()->IsScript()
1373  || Script::cast(sfi->script())->HasValidSource())) {
1374  if (sfis != NULL) {
1375  sfis[compiled_funcs_count] = Handle<SharedFunctionInfo>(sfi);
1376  }
1377  if (code_objects != NULL) {
1378  code_objects[compiled_funcs_count] = Handle<Code>(sfi->code());
1379  }
1380  ++compiled_funcs_count;
1381  }
1382  }
1383 
1384  // Iterate all optimized functions in all contexts.
1385  EnumerateOptimizedFunctionsVisitor visitor(sfis,
1386  code_objects,
1387  &compiled_funcs_count);
1388  Deoptimizer::VisitAllOptimizedFunctions(&visitor);
1389 
1390  return compiled_funcs_count;
1391 }
1392 
1393 
1394 void Logger::LogCodeObject(Object* object) {
1395  if (FLAG_log_code || FLAG_ll_prof) {
1396  Code* code_object = Code::cast(object);
1397  LogEventsAndTags tag = Logger::STUB_TAG;
1398  const char* description = "Unknown code from the snapshot";
1399  switch (code_object->kind()) {
1400  case Code::FUNCTION:
1401  case Code::OPTIMIZED_FUNCTION:
1402  return; // We log this later using LogCompiledFunctions.
1403  case Code::UNARY_OP_IC: // fall through
1404  case Code::BINARY_OP_IC: // fall through
1405  case Code::COMPARE_IC: // fall through
1406  case Code::TO_BOOLEAN_IC: // fall through
1407  case Code::STUB:
1408  description =
1409  CodeStub::MajorName(CodeStub::GetMajorKey(code_object), true);
1410  if (description == NULL)
1411  description = "A stub from the snapshot";
1412  tag = Logger::STUB_TAG;
1413  break;
1414  case Code::BUILTIN:
1415  description = "A builtin from the snapshot";
1416  tag = Logger::BUILTIN_TAG;
1417  break;
1418  case Code::KEYED_LOAD_IC:
1419  description = "A keyed load IC from the snapshot";
1420  tag = Logger::KEYED_LOAD_IC_TAG;
1421  break;
1422  case Code::LOAD_IC:
1423  description = "A load IC from the snapshot";
1424  tag = Logger::LOAD_IC_TAG;
1425  break;
1426  case Code::STORE_IC:
1427  description = "A store IC from the snapshot";
1428  tag = Logger::STORE_IC_TAG;
1429  break;
1430  case Code::KEYED_STORE_IC:
1431  description = "A keyed store IC from the snapshot";
1432  tag = Logger::KEYED_STORE_IC_TAG;
1433  break;
1434  case Code::CALL_IC:
1435  description = "A call IC from the snapshot";
1436  tag = Logger::CALL_IC_TAG;
1437  break;
1438  case Code::KEYED_CALL_IC:
1439  description = "A keyed call IC from the snapshot";
1440  tag = Logger::KEYED_CALL_IC_TAG;
1441  break;
1442  }
1443  PROFILE(ISOLATE, CodeCreateEvent(tag, code_object, description));
1444  }
1445 }
1446 
1447 
1448 void Logger::LogCodeInfo() {
1449  if (!log_->IsEnabled() || !FLAG_ll_prof) return;
1450 #if V8_TARGET_ARCH_IA32
1451  const char arch[] = "ia32";
1452 #elif V8_TARGET_ARCH_X64
1453  const char arch[] = "x64";
1454 #elif V8_TARGET_ARCH_ARM
1455  const char arch[] = "arm";
1456 #elif V8_TARGET_ARCH_MIPS
1457  const char arch[] = "mips";
1458 #else
1459  const char arch[] = "unknown";
1460 #endif
1461  LowLevelLogWriteBytes(arch, sizeof(arch));
1462 }
1463 
1464 
1465 void Logger::RegisterSnapshotCodeName(Code* code,
1466  const char* name,
1467  int name_size) {
1468  ASSERT(Serializer::enabled());
1469  if (address_to_name_map_ == NULL) {
1470  address_to_name_map_ = new NameMap;
1471  }
1472  address_to_name_map_->Insert(code->address(), name, name_size);
1473 }
1474 
1475 
1476 void Logger::LowLevelCodeCreateEvent(Code* code,
1477  const char* name,
1478  int name_size) {
1479  if (log_->ll_output_handle_ == NULL) return;
1480  LowLevelCodeCreateStruct event;
1481  event.name_size = name_size;
1482  event.code_address = code->instruction_start();
1483  ASSERT(event.code_address == code->address() + Code::kHeaderSize);
1484  event.code_size = code->instruction_size();
1485  LowLevelLogWriteStruct(event);
1486  LowLevelLogWriteBytes(name, name_size);
1487  LowLevelLogWriteBytes(
1488  reinterpret_cast<const char*>(code->instruction_start()),
1489  code->instruction_size());
1490 }
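// Illustrative note (not part of the original log.cc): each --ll-prof record
// written by the helpers above is a packed struct followed by its payload, so
// a code-creation record has roughly this on-disk layout (LowLevelLogWriteStruct
// is assumed to prepend the one-byte kTag of the struct it writes):
//
//   'C' | LowLevelCodeCreateStruct | name bytes (name_size) | code bytes (code_size)
//
// The ll_prof post-processing tool in V8's tools/ directory reads the records
// back in the same order.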
1491 
1492 
1493 void Logger::LowLevelCodeMoveEvent(Address from, Address to) {
1494  if (log_->ll_output_handle_ == NULL) return;
1495  LowLevelCodeMoveStruct event;
1496  event.from_address = from + Code::kHeaderSize;
1497  event.to_address = to + Code::kHeaderSize;
1498  LowLevelLogWriteStruct(event);
1499 }
1500 
1501 
1502 void Logger::LowLevelCodeDeleteEvent(Address from) {
1503  if (log_->ll_output_handle_ == NULL) return;
1504  LowLevelCodeDeleteStruct event;
1505  event.address = from + Code::kHeaderSize;
1506  LowLevelLogWriteStruct(event);
1507 }
1508 
1509 
1510 void Logger::LowLevelSnapshotPositionEvent(Address addr, int pos) {
1511  if (log_->ll_output_handle_ == NULL) return;
1512  LowLevelSnapshotPositionStruct event;
1513  event.address = addr + Code::kHeaderSize;
1514  event.position = pos;
1515  LowLevelLogWriteStruct(event);
1516 }
1517 
1518 
1519 void Logger::LowLevelLogWriteBytes(const char* bytes, int size) {
1520  size_t rv = fwrite(bytes, 1, size, log_->ll_output_handle_);
1521  ASSERT(static_cast<size_t>(size) == rv);
1522  USE(rv);
1523 }
1524 
1525 
1526 void Logger::LogCodeObjects() {
1527  HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask,
1528  "Logger::LogCodeObjects");
1529  HeapIterator iterator;
1530  AssertNoAllocation no_alloc;
1531  for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
1532  if (obj->IsCode()) LogCodeObject(obj);
1533  }
1534 }
1535 
1536 
1537 void Logger::LogExistingFunction(Handle<SharedFunctionInfo> shared,
1538  Handle<Code> code) {
1539  Handle<String> func_name(shared->DebugName());
1540  if (shared->script()->IsScript()) {
1541  Handle<Script> script(Script::cast(shared->script()));
1542  if (script->name()->IsString()) {
1543  Handle<String> script_name(String::cast(script->name()));
1544  int line_num = GetScriptLineNumber(script, shared->start_position());
1545  if (line_num > 0) {
1546  PROFILE(ISOLATE,
1547  CodeCreateEvent(
1548  Logger::ToNativeByScript(Logger::LAZY_COMPILE_TAG, *script),
1549  *code, *shared,
1550  *script_name, line_num + 1));
1551  } else {
1552  // Can't distinguish eval and script here, so always use Script.
1553  PROFILE(ISOLATE,
1554  CodeCreateEvent(
1555  Logger::ToNativeByScript(Logger::SCRIPT_TAG, *script),
1556  *code, *shared, *script_name));
1557  }
1558  } else {
1559  PROFILE(ISOLATE,
1560  CodeCreateEvent(
1561  Logger::ToNativeByScript(Logger::LAZY_COMPILE_TAG, *script),
1562  *code, *shared, *func_name));
1563  }
1564  } else if (shared->IsApiFunction()) {
1565  // API function.
1566  FunctionTemplateInfo* fun_data = shared->get_api_func_data();
1567  Object* raw_call_data = fun_data->call_code();
1568  if (!raw_call_data->IsUndefined()) {
1569  CallHandlerInfo* call_data = CallHandlerInfo::cast(raw_call_data);
1570  Object* callback_obj = call_data->callback();
1571  Address entry_point = v8::ToCData<Address>(callback_obj);
1572  PROFILE(ISOLATE, CallbackEvent(*func_name, entry_point));
1573  }
1574  } else {
1575  PROFILE(ISOLATE,
1576  CodeCreateEvent(
1577  Logger::LAZY_COMPILE_TAG, *code, *shared, *func_name));
1578  }
1579 }
1580 
1581 
1582 void Logger::LogCompiledFunctions() {
1583  HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask,
1584  "Logger::LogCompiledFunctions");
1585  HandleScope scope;
1586  const int compiled_funcs_count = EnumerateCompiledFunctions(NULL, NULL);
1587  ScopedVector< Handle<SharedFunctionInfo> > sfis(compiled_funcs_count);
1588  ScopedVector< Handle<Code> > code_objects(compiled_funcs_count);
1589  EnumerateCompiledFunctions(sfis.start(), code_objects.start());
1590 
1591  // During iteration, there can be heap allocation due to
1592  // GetScriptLineNumber call.
1593  for (int i = 0; i < compiled_funcs_count; ++i) {
1594  if (*code_objects[i] == Isolate::Current()->builtins()->builtin(
1595  Builtins::kLazyCompile))
1596  continue;
1597  LogExistingFunction(sfis[i], code_objects[i]);
1598  }
1599 }
1600 
1601 
1602 void Logger::LogAccessorCallbacks() {
1603  HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask,
1604  "Logger::LogAccessorCallbacks");
1605  HeapIterator iterator;
1606  AssertNoAllocation no_alloc;
1607  for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
1608  if (!obj->IsAccessorInfo()) continue;
1609  AccessorInfo* ai = AccessorInfo::cast(obj);
1610  if (!ai->name()->IsString()) continue;
1611  String* name = String::cast(ai->name());
1612  Address getter_entry = v8::ToCData<Address>(ai->getter());
1613  if (getter_entry != 0) {
1614  PROFILE(ISOLATE, GetterCallbackEvent(name, getter_entry));
1615  }
1616  Address setter_entry = v8::ToCData<Address>(ai->setter());
1617  if (setter_entry != 0) {
1618  PROFILE(ISOLATE, SetterCallbackEvent(name, setter_entry));
1619  }
1620  }
1621 }
1622 
1623 
1624 bool Logger::SetUp() {
1625  // Tests and EnsureInitialize() can call this twice in a row. It's harmless.
1626  if (is_initialized_) return true;
1627  is_initialized_ = true;
1628 
1629  // --ll-prof implies --log-code and --log-snapshot-positions.
1630  if (FLAG_ll_prof) {
1631  FLAG_log_snapshot_positions = true;
1632  }
1633 
1634  // --prof_lazy controls --log-code, implies --noprof_auto.
1635  if (FLAG_prof_lazy) {
1636  FLAG_log_code = false;
1637  FLAG_prof_auto = false;
1638  }
1639 
1640  // TODO(isolates): this assert introduces cyclic dependency (logger
1641  // -> thread local top -> heap -> logger).
1642  // ASSERT(VMState::is_outermost_external());
1643 
1644  log_->Initialize();
1645 
1646  if (FLAG_ll_prof) LogCodeInfo();
1647 
1648  Isolate* isolate = Isolate::Current();
1649  ticker_ = new Ticker(isolate, kSamplingIntervalMs);
1650 
1651  if (FLAG_sliding_state_window && sliding_state_window_ == NULL) {
1652  sliding_state_window_ = new SlidingStateWindow(isolate);
1653  }
1654 
1655  bool start_logging = FLAG_log || FLAG_log_runtime || FLAG_log_api
1656  || FLAG_log_code || FLAG_log_gc || FLAG_log_handles || FLAG_log_suspect
1657  || FLAG_log_regexp || FLAG_log_state_changes || FLAG_ll_prof;
1658 
1659  if (start_logging) {
1660  logging_nesting_ = 1;
1661  }
1662 
1663  if (FLAG_prof) {
1664  profiler_ = new Profiler(isolate);
1665  if (!FLAG_prof_auto) {
1666  profiler_->pause();
1667  } else {
1668  logging_nesting_ = 1;
1669  }
1670  if (!FLAG_prof_lazy) {
1671  profiler_->Engage();
1672  }
1673  }
1674 
1675  return true;
1676 }
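// Illustrative usage note (not part of the original log.cc): profiling is
// typically enabled from the embedder's command line, e.g. `node --prof app.js`
// or `d8 --prof`, which sets FLAG_prof so that SetUp() creates the Profiler
// and, with the default --prof_auto, starts sampling immediately. Output goes
// to the file named by --logfile (v8.log by default) and is post-processed by
// the tick-processor scripts shipped with V8.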
1677 
1678 
1679 Sampler* Logger::sampler() {
1680  return ticker_;
1681 }
1682 
1683 
1684 void Logger::EnsureTickerStarted() {
1685  ASSERT(ticker_ != NULL);
1686  if (!ticker_->IsActive()) ticker_->Start();
1687 }
1688 
1689 
1690 void Logger::EnsureTickerStopped() {
1691  if (ticker_ != NULL && ticker_->IsActive()) ticker_->Stop();
1692 }
1693 
1694 
1695 FILE* Logger::TearDown() {
1696  if (!is_initialized_) return NULL;
1697  is_initialized_ = false;
1698 
1699  // Stop the profiler before closing the file.
1700  if (profiler_ != NULL) {
1701  profiler_->Disengage();
1702  delete profiler_;
1703  profiler_ = NULL;
1704  }
1705 
1706  delete sliding_state_window_;
1707  sliding_state_window_ = NULL;
1708 
1709  delete ticker_;
1710  ticker_ = NULL;
1711 
1712  return log_->Close();
1713 }
1714 
1715 
1716 void Logger::EnableSlidingStateWindow() {
1717  // If the ticker is NULL, Logger::SetUp has not been called yet. In
1718  // that case, we set the sliding_state_window flag so that the
1719  // sliding window computation will be started when Logger::SetUp is
1720  // called.
1721  if (ticker_ == NULL) {
1722  FLAG_sliding_state_window = true;
1723  return;
1724  }
1725  // Otherwise, if the sliding state window computation has not been
1726  // started we do it now.
1727  if (sliding_state_window_ == NULL) {
1728  sliding_state_window_ = new SlidingStateWindow(Isolate::Current());
1729  }
1730 }
1731 
1732 // Protects the state below.
1733 static Mutex* active_samplers_mutex = NULL;
1734 
1735 List<Sampler*>* SamplerRegistry::active_samplers_ = NULL;
1736 
1737 
1738 void SamplerRegistry::SetUp() {
1739  if (!active_samplers_mutex) {
1740  active_samplers_mutex = OS::CreateMutex();
1741  }
1742 }
1743 
1744 
1745 bool SamplerRegistry::IterateActiveSamplers(VisitSampler func, void* param) {
1746  ScopedLock lock(active_samplers_mutex);
1747  for (int i = 0;
1748  ActiveSamplersExist() && i < active_samplers_->length();
1749  ++i) {
1750  func(active_samplers_->at(i), param);
1751  }
1752  return ActiveSamplersExist();
1753 }
1754 
1755 
1756 static void ComputeCpuProfiling(Sampler* sampler, void* flag_ptr) {
1757  bool* flag = reinterpret_cast<bool*>(flag_ptr);
1758  *flag |= sampler->IsProfiling();
1759 }
1760 
1761 
1762 SamplerRegistry::State SamplerRegistry::GetState() {
1763  bool flag = false;
1764  if (!IterateActiveSamplers(&ComputeCpuProfiling, &flag)) {
1765  return HAS_NO_SAMPLERS;
1766  }
1767  return flag ? HAS_CPU_PROFILING_SAMPLERS : HAS_SAMPLERS;
1768 }
1769 
1770 
1771 void SamplerRegistry::AddActiveSampler(Sampler* sampler) {
1772  ASSERT(sampler->IsActive());
1773  ScopedLock lock(active_samplers_mutex);
1774  if (active_samplers_ == NULL) {
1775  active_samplers_ = new List<Sampler*>;
1776  } else {
1777  ASSERT(!active_samplers_->Contains(sampler));
1778  }
1779  active_samplers_->Add(sampler);
1780 }
1781 
1782 
1784  ASSERT(sampler->IsActive());
1785  ScopedLock lock(active_samplers_mutex);
1786  ASSERT(active_samplers_ != NULL);
1787  bool removed = active_samplers_->RemoveElement(sampler);
1788  ASSERT(removed);
1789  USE(removed);
1790 }
1791 
1792 } } // namespace v8::internal
DEFINE_implication(experimental_profiler, weighted_back_edges) DEFINE_bool(trace_opt_verbose, false,"extra verbose compilation tracing") DEFINE_implication(trace_opt_verbose, trace_opt) DEFINE_bool(debug_code, false,"generate extra code (assertions) for debugging") DEFINE_bool(code_comments, false,"emit comments in code disassembly") DEFINE_bool(enable_sse2, true,"enable use of SSE2 instructions if available") DEFINE_bool(enable_sse3, true,"enable use of SSE3 instructions if available") DEFINE_bool(enable_sse4_1, true,"enable use of SSE4.1 instructions if available") DEFINE_bool(enable_cmov, true,"enable use of CMOV instruction if available") DEFINE_bool(enable_rdtsc, true,"enable use of RDTSC instruction if available") DEFINE_bool(enable_sahf, true,"enable use of SAHF instruction if available (X64 only)") DEFINE_bool(enable_vfp3, true,"enable use of VFP3 instructions if available - this implies ""enabling ARMv7 instructions (ARM only)") DEFINE_bool(enable_armv7, true,"enable use of ARMv7 instructions if available (ARM only)") DEFINE_bool(enable_fpu, true,"enable use of MIPS FPU instructions if available (MIPS only)") DEFINE_string(expose_natives_as, NULL,"expose natives in global object") DEFINE_string(expose_debug_as, NULL,"expose debug in global object") DEFINE_bool(expose_gc, false,"expose gc extension") DEFINE_bool(expose_externalize_string, false,"expose externalize string extension") DEFINE_int(stack_trace_limit, 10,"number of stack frames to capture") DEFINE_bool(builtins_in_stack_traces, false,"show built-in functions in stack traces") DEFINE_bool(disable_native_files, false,"disable builtin natives files") DEFINE_bool(inline_new, true,"use fast inline allocation") DEFINE_bool(stack_trace_on_abort, true,"print a stack trace if an assertion failure occurs") DEFINE_bool(trace, false,"trace function calls") DEFINE_bool(mask_constants_with_cookie, true,"use random jit cookie to mask large constants") DEFINE_bool(lazy, true,"use lazy compilation") DEFINE_bool(trace_opt, false,"trace lazy optimization") DEFINE_bool(trace_opt_stats, false,"trace lazy optimization statistics") DEFINE_bool(opt, true,"use adaptive optimizations") DEFINE_bool(always_opt, false,"always try to optimize functions") DEFINE_bool(prepare_always_opt, false,"prepare for turning on always opt") DEFINE_bool(trace_deopt, false,"trace deoptimization") DEFINE_int(min_preparse_length, 1024,"minimum length for automatic enable preparsing") DEFINE_bool(always_full_compiler, false,"try to use the dedicated run-once backend for all code") DEFINE_bool(trace_bailout, false,"print reasons for falling back to using the classic V8 backend") DEFINE_bool(compilation_cache, true,"enable compilation cache") DEFINE_bool(cache_prototype_transitions, true,"cache prototype transitions") DEFINE_bool(trace_debug_json, false,"trace debugging JSON request/response") DEFINE_bool(debugger_auto_break, true,"automatically set the debug break flag when debugger commands are ""in the queue") DEFINE_bool(enable_liveedit, true,"enable liveedit experimental feature") DEFINE_bool(break_on_abort, true,"always cause a debug break before aborting") DEFINE_int(stack_size, kPointerSize *123,"default size of stack region v8 is allowed to use (in kBytes)") DEFINE_int(max_stack_trace_source_length, 300,"maximum length of function source code printed in a stack trace.") DEFINE_bool(always_inline_smi_code, false,"always inline smi code in non-opt code") DEFINE_int(max_new_space_size, 0,"max size of the new generation (in kBytes)") DEFINE_int(max_old_space_size, 
0,"max size of the old generation (in Mbytes)") DEFINE_int(max_executable_size, 0,"max size of executable memory (in Mbytes)") DEFINE_bool(gc_global, false,"always perform global GCs") DEFINE_int(gc_interval,-1,"garbage collect after <n> allocations") DEFINE_bool(trace_gc, false,"print one trace line following each garbage collection") DEFINE_bool(trace_gc_nvp, false,"print one detailed trace line in name=value format ""after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false,"print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false,"print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false,"report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true,"garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true,"flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true,"use incremental marking") DEFINE_bool(incremental_marking_steps, true,"do incremental marking steps") DEFINE_bool(trace_incremental_marking, false,"trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true,"Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false,"Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true,"use inline caching") DEFINE_bool(native_code_counters, false,"generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false,"Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true,"Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false,"Never perform compaction on full GC - testing only") DEFINE_bool(compact_code_space, true,"Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true,"Flush inline caches prior to mark compact collection and ""flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0,"Default seed for initializing random generator ""(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true,"allows verbose printing") DEFINE_bool(allow_natives_syntax, false,"allow natives syntax") DEFINE_bool(trace_sim, false,"Trace simulator execution") DEFINE_bool(check_icache, false,"Check icache flushes in ARM and MIPS simulator") DEFINE_int(stop_sim_at, 0,"Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8,"Stack alingment in bytes in simulator (4 or 8, 8 is default)") DEFINE_bool(trace_exception, false,"print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false,"preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true,"randomize hashes to avoid predictable hash collisions ""(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0,"Fixed seed to use to hash property keys (0 means random)""(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false,"activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true,"generate optimized regexp code") DEFINE_bool(testing_bool_flag, true,"testing_bool_flag") DEFINE_int(testing_int_flag, 13,"testing_int_flag") DEFINE_float(testing_float_flag, 2.5,"float-flag") DEFINE_string(testing_string_flag,"Hello, world!","string-flag") DEFINE_int(testing_prng_seed, 42,"Seed used for threading test randomness") DEFINE_string(testing_serialization_file,"/tmp/serdes","file 
in which to serialize heap") DEFINE_bool(help, false,"Print usage message, including flags, on console") DEFINE_bool(dump_counters, false,"Dump counters on exit") DEFINE_string(map_counters,"","Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT,"Pass all remaining arguments to the script. Alias for \"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") 
DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#47"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2 namespace{struct Flag{enum FlagType{TYPE_BOOL, TYPE_INT, TYPE_FLOAT, TYPE_STRING, TYPE_ARGS} name
Definition: flags.cc:1349
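The flag defaults summarized above are only compile-time defaults; an embedder normally overrides them before creating a VM. A minimal sketch, assuming the public flag API exposed by this release's v8.h (SetFlagsFromString and SetFlagsFromCommandLine); the flag values chosen here are illustrative, not anything prescribed by log.cc:

#include "v8.h"

int main(int argc, char* argv[]) {
  // Flags use the same --name[=value] syntax as the command line.
  const char flags[] = "--trace_gc --prof --logfile=v8-embedder.log";
  v8::V8::SetFlagsFromString(flags, static_cast<int>(sizeof(flags) - 1));

  // Alternatively, let V8 consume any recognized --flags directly from argv.
  v8::V8::SetFlagsFromCommandLine(&argc, argv, true /* remove_flags */);
  return 0;
}

Shells that embed V8, such as d8 or node 0.8, pass unrecognized --options through to V8 in the same way, which is how the logging and profiling flags above are usually enabled.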
static Handle< String > cast(Handle< S > that)
Definition: handles.h:81
void DecreaseProfilingDepth()
Definition: platform.h:731
void EnsureTickerStopped()
Definition: log.cc:1690
Sampler * sampler()
Definition: log.cc:1679
virtual void Tick(TickSample *sample)
Definition: log.cc:197
static int GetUserTime(uint32_t *secs, uint32_t *usecs)
int int32_t
Definition: unicode.cc:47
void LogCompiledFunctions()
Definition: log.cc:1582
void CodeDeleteEvent(Address from)
Definition: log.cc:1095
static bool enabled()
Definition: serialize.h:480
TickSample * sample
void SetProfiler(Profiler *profiler)
Definition: log.cc:212
void IntPtrTEvent(const char *name, intptr_t value)
Definition: log.cc:577
#define ASSERT(condition)
Definition: checks.h:270
void EnsureTickerStarted()
Definition: log.cc:1684
void ResumeProfiler()
Definition: log.cc:1295
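ResumeProfiler above, together with IsProfilerPaused (log.cc:1271) further down this list, backs the profiler start/stop controls exposed to embedders. A minimal sketch, assuming the V8::PauseProfiler()/V8::ResumeProfiler() pair still present in this release's public v8.h and a process started with --prof --prof_lazy so that sampling begins paused:

#include "v8.h"

// Hypothetical embedder helper: confine tick sampling to one region of
// interest instead of profiling the whole process lifetime.
void ProfileRegionOfInterest() {
  v8::V8::ResumeProfiler();  // start appending tick samples to the log file
  // ... run the JavaScript workload to be profiled ...
  v8::V8::PauseProfiler();   // stop sampling; the log itself stays open
}

The samples end up in the file named by --logfile (v8.log by default) and can be post-processed with the tick-processor scripts in V8's tools directory.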
#define PROFILE(isolate, Call)
Definition: cpu-profiler.h:190
int interval() const
Definition: platform.h:712
static Script * cast(Object *obj)
static JSRegExp * cast(Object *obj)
const char * comment() const
Definition: flags.cc:1362
static SharedFunctionInfo * cast(Object *obj)
void ApiNamedSecurityCheck(Object *key)
Definition: log.cc:620
Address stack[kMaxFramesCount]
Definition: platform.h:701
void ClearProfiler()
Definition: log.cc:219
Handle< Object > GetProperty(Handle< JSReceiver > obj, const char *name)
Definition: handles.cc:282
void HandleEvent(const char *name, Object **location)
Definition: log.cc:598
static Code * cast(Object *obj)
static void RemoveActiveSampler(Sampler *sampler)
Definition: log.cc:1783
static Smi * cast(Object *object)
void ApiIndexedSecurityCheck(uint32_t index)
Definition: log.cc:756
void LogFailure()
Definition: log.cc:1318
void SnapshotPositionEvent(Address addr, int pos)
Definition: log.cc:1105
uint8_t byte
Definition: globals.h:171
SmartArrayPointer< char > ToCString(AllowNullsFlag allow_nulls, RobustnessFlag robustness_flag, int offset, int length, int *length_output=0)
Definition: objects.cc:6161
void AppendBytes(const char *bytes)
Definition: log.cc:488
uint32_t ComputePointerHash(void *ptr)
Definition: utils.h:310
void HeapSampleBeginEvent(const char *space, const char *kind)
Definition: log.cc:1192
static uchar Length(uchar chr, int previous)
Definition: unicode-inl.h:123
#define UNREACHABLE()
Definition: checks.h:50
T * start() const
Definition: utils.h:389
void AppendBytes(const char *bytes, int size)
Definition: log.cc:482
void LogExistingFunction(Handle< SharedFunctionInfo > shared, Handle< Code > code)
Definition: log.cc:1537
void Insert(TickSample *sample)
Definition: log.cc:90
void IncreaseProfilingDepth()
Definition: platform.h:730
bool IsProfiling() const
Definition: platform.h:729
virtual void EnterContext(Context *context)
Definition: log.cc:1335
Profiler(Isolate *isolate)
Definition: log.cc:269
static Address & Address_at(Address addr)
Definition: v8memory.h:71
void DebugEvent(const char *event_type, Vector< uint16_t > parameter)
Definition: log.cc:1227
FILE * TearDown()
Definition: log.cc:1695
void Insert(Address code_address, const char *name, int name_size)
Definition: log.cc:384
Entry * Lookup(void *key, uint32_t hash, bool insert, AllocationPolicy allocator=AllocationPolicy())
Definition: hashmap.h:130
void RegExpCompileEvent(Handle< JSRegExp > regexp, bool in_cache)
Definition: log.cc:702
bool paused() const
Definition: log.cc:116
int length() const
Definition: utils.h:383
bool IsEnabled()
Definition: log-utils.h:53
static Mutex * CreateMutex()
static double TimeCurrentMillis()
static const int kMakeHeapIterableMask
Definition: heap.h:1056
Address external_callback()
Definition: isolate.h:984
#define V8_PTR_PREFIX
Definition: globals.h:196
static void Trace(Isolate *isolate, TickSample *sample)
Definition: log.cc:148
Ticker(Isolate *isolate, int interval)
Definition: log.cc:190
virtual void LeaveContext(Context *context)
Definition: log.cc:1336
bool IsPowerOf2(T x)
Definition: utils.h:50
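IsPowerOf2 above is one of the small utils.h helpers this file leans on. The following stand-in only illustrates the usual bit trick and is not the verbatim utils.h:50 implementation:

#include <cassert>

// Illustrative stand-in: a value is a power of two iff exactly one bit is set.
template <typename T>
static bool IsPowerOf2(T x) {
  return x != 0 && (x & (x - 1)) == 0;
}

int main() {
  assert(IsPowerOf2(1u) && IsPowerOf2(256u));
  assert(!IsPowerOf2(0u) && !IsPowerOf2(192u));
  return 0;
}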
void EnableSlidingStateWindow()
Definition: log.cc:1716
void ApiNamedPropertyAccess(const char *tag, JSObject *holder, Object *name)
Definition: log.cc:762
static unsigned Encode(char *out, uchar c, int previous)
Definition: unicode-inl.h:81
int StrLength(const char *string)
Definition: utils.h:234
void LogCodeObjects()
Definition: log.cc:1526
void ApiIndexedPropertyAccess(const char *tag, JSObject *holder, uint32_t index)
Definition: log.cc:775
void SharedLibraryEvent(const char *library_path, uintptr_t start, uintptr_t end)
Definition: log.cc:634
void ApiEntryCall(const char *name)
Definition: log.cc:794
static int SNPrintF(Vector< char > str, const char *format,...)
void HeapSampleEndEvent(const char *space, const char *kind)
Definition: log.cc:1203
int GetScriptLineNumber(Handle< Script > script, int code_pos)
Definition: handles.cc:479
#define ISOLATE
Definition: isolate.h:1410
#define DECLARE_EVENT(ignore1, name)
Definition: log.cc:544
static void WriteToFlat(String *source, sinkchar *sink, int from, int to)
Definition: objects.cc:6819
EnumerateOptimizedFunctionsVisitor(Handle< SharedFunctionInfo > *sfis, Handle< Code > *code_objects, int *count)
Definition: log.cc:1330
void CallbackEvent(String *name, Address entry_point)
Definition: log.cc:840
void AddState(StateTag state)
Definition: log.cc:253
static const int kMaxFramesCount
Definition: platform.h:700
void HeapSampleItemEvent(const char *type, int number, int bytes)
Definition: log.cc:1211
void DeleteEvent(const char *name, void *object)
Definition: log.cc:809
uint16_t uc16
Definition: globals.h:273
int ExecutableSize()
Definition: objects.h:4457
static State GetState()
Definition: log.cc:1762
void ApiObjectAccess(const char *tag, JSObject *obj)
Definition: log.cc:785
virtual void Signal()=0
static const int kHeaderSize
Definition: objects.h:4513
friend class Profiler
Definition: log.h:395
static void LogSharedLibraryAddresses()
flag
Definition: objects-inl.h:3682
void * Remove(void *key, uint32_t hash)
Definition: hashmap.h:160
TemplateHashMapImpl< FreeStoreAllocationPolicy > HashMap
Definition: hashmap.h:112
#define HEAP
Definition: isolate.h:1408
static const int kSamplingIntervalMs
Definition: log.h:302
void CodeMovingGCEvent()
Definition: log.cc:1050
void USE(T)
Definition: globals.h:303
static const unsigned kMaxAsciiCharCodeU
Definition: objects.h:7108
static AccessorInfo * cast(Object *obj)
friend class SlidingStateWindow
Definition: log.h:396
static Handle< Object > GetElement(Handle< Object > object, uint32_t index)
Definition: objects.cc:244
virtual void Wait()=0
static void VisitAllOptimizedFunctions(OptimizedFunctionVisitor *visitor)
Definition: deoptimizer.cc:305
Logger * logger()
Definition: isolate.h:813
void SharedFunctionInfoMoveEvent(Address from, Address to)
Definition: log.cc:1130
void IntEvent(const char *name, int value)
Definition: log.cc:572
static void AddActiveSampler(Sampler *sampler)
Definition: log.cc:1771
void LogAccessorCallbacks()
Definition: log.cc:1602
bool IsProfilerPaused()
Definition: log.cc:1271
void AppendString(String *str)
Definition: log.cc:457
friend class LogMessageBuilder
Definition: log.h:393
static bool IterateActiveSamplers(VisitSampler func, void *param)
Definition: log.cc:1745
void Remove(Address code_address)
Definition: log.cc:396
void DeleteArray(T *array)
Definition: allocation.h:91
T Min(T a, T b)
Definition: utils.h:229
const char * Lookup(Address code_address)
Definition: log.cc:391
#define LOGGER
Definition: isolate.h:1411
ThreadLocalTop * thread_local_top()
Definition: isolate.h:824
virtual void DoSampleStack(TickSample *sample)
Definition: log.cc:226
void CodeCreateEvent(LogEventsAndTags tag, Code *code, const char *source)
Definition: log.cc:864
void DebugTag(const char *call_site_tag)
Definition: log.cc:1219
void ClearWindow()
Definition: log.cc:207
void ResourceEvent(const char *name, const char *tag)
Definition: log.cc:1159
static JSObject * cast(Object *obj)
FlagType type() const
Definition: flags.cc:1358
void NewEvent(const char *name, void *object, size_t size)
Definition: log.cc:800
StatsCounter * state_counters(StateTag state)
Definition: v8-counters.h:281
Entry * Next(Entry *p) const
Definition: hashmap.h:241
static const int kNoPreviousCharacter
Definition: unicode.h:132
void SetWindow(SlidingStateWindow *window)
Definition: log.cc:202
void SuspectReadEvent(String *name, Object *obj)
Definition: log.cc:1175
#define LOG_EVENTS_AND_TAGS_LIST(V)
Definition: log.h:89
void StringEvent(const char *name, const char *value)
Definition: log.cc:559