v8  3.11.10(node0.8.26)
V8 is Google's open source JavaScript engine
 All Data Structures Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
profile-generator.h
Go to the documentation of this file.
1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #ifndef V8_PROFILE_GENERATOR_H_
29 #define V8_PROFILE_GENERATOR_H_
30 
31 #include "allocation.h"
32 #include "hashmap.h"
33 #include "../include/v8-profiler.h"
34 
35 namespace v8 {
36 namespace internal {
37 
39  public:
42  int GetTokenId(Object* token);
43 
44  static const int kNoSecurityToken = -1;
45  static const int kInheritsSecurityToken = -2;
46 
47  private:
48  static void TokenRemovedCallback(v8::Persistent<v8::Value> handle,
49  void* parameter);
50  void TokenRemoved(Object** token_location);
51 
52  List<Object**> token_locations_;
53  List<bool> token_removed_;
54 
55  friend class TokenEnumeratorTester;
56 
58 };
59 
60 
61 // Provides a storage of strings allocated in C++ heap, to hold them
62 // forever, even if they disappear from JS heap or external storage.
64  public:
67 
68  const char* GetCopy(const char* src);
69  const char* GetFormatted(const char* format, ...);
70  const char* GetVFormatted(const char* format, va_list args);
71  const char* GetName(String* name);
72  const char* GetName(int index);
73  inline const char* GetFunctionName(String* name);
74  inline const char* GetFunctionName(const char* name);
75  size_t GetUsedMemorySize() const;
76 
77  private:
78  static const int kMaxNameSize = 1024;
79 
80  INLINE(static bool StringsMatch(void* key1, void* key2)) {
81  return strcmp(reinterpret_cast<char*>(key1),
82  reinterpret_cast<char*>(key2)) == 0;
83  }
84  const char* AddOrDisposeString(char* str, uint32_t hash);
85 
86  // Mapping of strings by String::Hash to const char* strings.
87  HashMap names_;
88 
89  DISALLOW_COPY_AND_ASSIGN(StringsStorage);
90 };
91 
92 
93 class CodeEntry {
94  public:
95  // CodeEntry doesn't own name strings, just references them.
97  const char* name_prefix,
98  const char* name,
99  const char* resource_name,
100  int line_number,
101  int security_token_id));
102 
103  INLINE(bool is_js_function() const) { return is_js_function_tag(tag_); }
104  INLINE(const char* name_prefix() const) { return name_prefix_; }
105  INLINE(bool has_name_prefix() const) { return name_prefix_[0] != '\0'; }
106  INLINE(const char* name() const) { return name_; }
107  INLINE(const char* resource_name() const) { return resource_name_; }
108  INLINE(int line_number() const) { return line_number_; }
109  INLINE(int shared_id() const) { return shared_id_; }
110  INLINE(void set_shared_id(int shared_id)) { shared_id_ = shared_id; }
111  INLINE(int security_token_id() const) { return security_token_id_; }
112 
113  INLINE(static bool is_js_function_tag(Logger::LogEventsAndTags tag));
114 
115  void CopyData(const CodeEntry& source);
116  uint32_t GetCallUid() const;
117  bool IsSameAs(CodeEntry* entry) const;
118 
119  static const char* const kEmptyNamePrefix;
120 
121  private:
123  const char* name_prefix_;
124  const char* name_;
125  const char* resource_name_;
126  int line_number_;
127  int shared_id_;
128  int security_token_id_;
129 
130  DISALLOW_COPY_AND_ASSIGN(CodeEntry);
131 };
132 
133 
134 class ProfileTree;
135 
136 class ProfileNode {
137  public:
138  INLINE(ProfileNode(ProfileTree* tree, CodeEntry* entry));
139 
142  INLINE(void IncrementSelfTicks()) { ++self_ticks_; }
143  INLINE(void IncreaseSelfTicks(unsigned amount)) { self_ticks_ += amount; }
144  INLINE(void IncreaseTotalTicks(unsigned amount)) { total_ticks_ += amount; }
145 
146  INLINE(CodeEntry* entry() const) { return entry_; }
147  INLINE(unsigned self_ticks() const) { return self_ticks_; }
148  INLINE(unsigned total_ticks() const) { return total_ticks_; }
149  INLINE(const List<ProfileNode*>* children() const) { return &children_list_; }
150  double GetSelfMillis() const;
151  double GetTotalMillis() const;
152 
153  void Print(int indent);
154 
155  private:
156  INLINE(static bool CodeEntriesMatch(void* entry1, void* entry2)) {
157  return reinterpret_cast<CodeEntry*>(entry1)->IsSameAs(
158  reinterpret_cast<CodeEntry*>(entry2));
159  }
160 
161  INLINE(static uint32_t CodeEntryHash(CodeEntry* entry)) {
162  return entry->GetCallUid();
163  }
164 
165  ProfileTree* tree_;
166  CodeEntry* entry_;
167  unsigned total_ticks_;
168  unsigned self_ticks_;
169  // Mapping from CodeEntry* to ProfileNode*
170  HashMap children_;
171  List<ProfileNode*> children_list_;
172 
173  DISALLOW_COPY_AND_ASSIGN(ProfileNode);
174 };
175 
176 
177 class ProfileTree {
178  public:
179  ProfileTree();
180  ~ProfileTree();
181 
182  void AddPathFromEnd(const Vector<CodeEntry*>& path);
183  void AddPathFromStart(const Vector<CodeEntry*>& path);
184  void CalculateTotalTicks();
185  void FilteredClone(ProfileTree* src, int security_token_id);
186 
187  double TicksToMillis(unsigned ticks) const {
188  return ticks * ms_to_ticks_scale_;
189  }
190  ProfileNode* root() const { return root_; }
191  void SetTickRatePerMs(double ticks_per_ms);
192 
193  void ShortPrint();
194  void Print() {
195  root_->Print(0);
196  }
197 
198  private:
199  template <typename Callback>
200  void TraverseDepthFirst(Callback* callback);
201 
202  CodeEntry root_entry_;
203  ProfileNode* root_;
204  double ms_to_ticks_scale_;
205 
206  DISALLOW_COPY_AND_ASSIGN(ProfileTree);
207 };
208 
209 
210 class CpuProfile {
211  public:
212  CpuProfile(const char* title, unsigned uid)
213  : title_(title), uid_(uid) { }
214 
215  // Add pc -> ... -> main() call path to the profile.
216  void AddPath(const Vector<CodeEntry*>& path);
217  void CalculateTotalTicks();
218  void SetActualSamplingRate(double actual_sampling_rate);
219  CpuProfile* FilteredClone(int security_token_id);
220 
221  INLINE(const char* title() const) { return title_; }
222  INLINE(unsigned uid() const) { return uid_; }
223  INLINE(const ProfileTree* top_down() const) { return &top_down_; }
224  INLINE(const ProfileTree* bottom_up() const) { return &bottom_up_; }
225 
226  void UpdateTicksScale();
227 
228  void ShortPrint();
229  void Print();
230 
231  private:
232  const char* title_;
233  unsigned uid_;
234  ProfileTree top_down_;
235  ProfileTree bottom_up_;
236 
237  DISALLOW_COPY_AND_ASSIGN(CpuProfile);
238 };
239 
240 
241 class CodeMap {
242  public:
243  CodeMap() : next_shared_id_(1) { }
244  void AddCode(Address addr, CodeEntry* entry, unsigned size);
245  void MoveCode(Address from, Address to);
246  CodeEntry* FindEntry(Address addr);
247  int GetSharedId(Address addr);
248 
249  void Print();
250 
251  private:
252  struct CodeEntryInfo {
253  CodeEntryInfo(CodeEntry* an_entry, unsigned a_size)
254  : entry(an_entry), size(a_size) { }
255  CodeEntry* entry;
256  unsigned size;
257  };
258 
259  struct CodeTreeConfig {
260  typedef Address Key;
261  typedef CodeEntryInfo Value;
262  static const Key kNoKey;
263  static const Value NoValue() { return CodeEntryInfo(NULL, 0); }
264  static int Compare(const Key& a, const Key& b) {
265  return a < b ? -1 : (a > b ? 1 : 0);
266  }
267  };
268  typedef SplayTree<CodeTreeConfig> CodeTree;
269 
270  class CodeTreePrinter {
271  public:
272  void Call(const Address& key, const CodeEntryInfo& value);
273  };
274 
275  void DeleteAllCoveredCode(Address start, Address end);
276 
277  // Fake CodeEntry pointer to distinguish shared function entries.
278  static CodeEntry* const kSharedFunctionCodeEntry;
279 
280  CodeTree tree_;
281  int next_shared_id_;
282 
283  DISALLOW_COPY_AND_ASSIGN(CodeMap);
284 };
285 
286 
288  public:
291 
292  bool StartProfiling(const char* title, unsigned uid);
293  bool StartProfiling(String* title, unsigned uid);
294  CpuProfile* StopProfiling(int security_token_id,
295  const char* title,
296  double actual_sampling_rate);
297  List<CpuProfile*>* Profiles(int security_token_id);
298  const char* GetName(String* name) {
299  return function_and_resource_names_.GetName(name);
300  }
301  const char* GetName(int args_count) {
302  return function_and_resource_names_.GetName(args_count);
303  }
304  CpuProfile* GetProfile(int security_token_id, unsigned uid);
305  bool IsLastProfile(const char* title);
306  void RemoveProfile(CpuProfile* profile);
307  bool HasDetachedProfiles() { return detached_profiles_.length() > 0; }
308 
310  String* name, String* resource_name, int line_number);
311  CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag, const char* name);
313  const char* name_prefix, String* name);
314  CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag, int args_count);
315  CodeEntry* NewCodeEntry(int security_token_id);
316 
317  // Called from profile generator thread.
319 
320  // Limits the number of profiles that can be simultaneously collected.
321  static const int kMaxSimultaneousProfiles = 100;
322 
323  private:
324  const char* GetFunctionName(String* name) {
325  return function_and_resource_names_.GetFunctionName(name);
326  }
327  const char* GetFunctionName(const char* name) {
328  return function_and_resource_names_.GetFunctionName(name);
329  }
330  int GetProfileIndex(unsigned uid);
331  List<CpuProfile*>* GetProfilesList(int security_token_id);
332  int TokenToIndex(int security_token_id);
333 
334  INLINE(static bool UidsMatch(void* key1, void* key2)) {
335  return key1 == key2;
336  }
337 
338  StringsStorage function_and_resource_names_;
339  List<CodeEntry*> code_entries_;
340  List<List<CpuProfile*>* > profiles_by_token_;
341  // Mapping from profiles' uids to indexes in the second nested list
342  // of profiles_by_token_.
343  HashMap profiles_uids_;
344  List<CpuProfile*> detached_profiles_;
345 
346  // Accessed by VM thread and profile generator thread.
347  List<CpuProfile*> current_profiles_;
348  Semaphore* current_profiles_semaphore_;
349 
350  DISALLOW_COPY_AND_ASSIGN(CpuProfilesCollection);
351 };
352 
353 
355  public:
357  : result_(Logger::kSamplingIntervalMs * kResultScale),
358  ticks_per_ms_(Logger::kSamplingIntervalMs),
359  measurements_count_(0),
360  wall_time_query_countdown_(1) {
361  }
362 
363  double ticks_per_ms() {
364  return result_ / static_cast<double>(kResultScale);
365  }
366  void Tick();
367  void UpdateMeasurements(double current_time);
368 
369  // Instead of querying current wall time each tick,
370  // we use this constant to control query intervals.
371  static const unsigned kWallTimeQueryIntervalMs = 100;
372 
373  private:
374  // As the result needs to be accessed from a different thread, we
375  // use type that guarantees atomic writes to memory. There should
376  // be <= 1000 ticks per second, thus storing a value of a 10 ** 5
377  // order should provide enough precision while keeping away from a
378  // potential overflow.
379  static const int kResultScale = 100000;
380 
381  AtomicWord result_;
382  // All other fields are accessed only from the sampler thread.
383  double ticks_per_ms_;
384  unsigned measurements_count_;
385  unsigned wall_time_query_countdown_;
386  double last_wall_time_;
387 
388  DISALLOW_COPY_AND_ASSIGN(SampleRateCalculator);
389 };
390 
391 
393  public:
394  explicit ProfileGenerator(CpuProfilesCollection* profiles);
395 
397  String* name,
398  String* resource_name,
399  int line_number)) {
400  return profiles_->NewCodeEntry(tag, name, resource_name, line_number);
401  }
402 
404  const char* name)) {
405  return profiles_->NewCodeEntry(tag, name);
406  }
407 
409  const char* name_prefix,
410  String* name)) {
411  return profiles_->NewCodeEntry(tag, name_prefix, name);
412  }
413 
415  int args_count)) {
416  return profiles_->NewCodeEntry(tag, args_count);
417  }
418 
419  INLINE(CodeEntry* NewCodeEntry(int security_token_id)) {
420  return profiles_->NewCodeEntry(security_token_id);
421  }
422 
423  void RecordTickSample(const TickSample& sample);
424 
425  INLINE(CodeMap* code_map()) { return &code_map_; }
426 
427  INLINE(void Tick()) { sample_rate_calc_.Tick(); }
428  INLINE(double actual_sampling_rate()) {
429  return sample_rate_calc_.ticks_per_ms();
430  }
431 
432  static const char* const kAnonymousFunctionName;
433  static const char* const kProgramEntryName;
434  static const char* const kGarbageCollectorEntryName;
435 
436  private:
437  INLINE(CodeEntry* EntryForVMState(StateTag tag));
438 
439  CpuProfilesCollection* profiles_;
440  CodeMap code_map_;
441  CodeEntry* program_entry_;
442  CodeEntry* gc_entry_;
443  SampleRateCalculator sample_rate_calc_;
444 
445  DISALLOW_COPY_AND_ASSIGN(ProfileGenerator);
446 };
447 
448 
449 class HeapEntry;
450 class HeapSnapshot;
451 
453  public:
454  enum Type {
462  };
463 
465  HeapGraphEdge(Type type, const char* name, int from, int to);
466  HeapGraphEdge(Type type, int index, int from, int to);
467  void ReplaceToIndexWithEntry(HeapSnapshot* snapshot);
468 
469  Type type() const { return static_cast<Type>(type_); }
470  int index() const {
471  ASSERT(type_ == kElement || type_ == kHidden || type_ == kWeak);
472  return index_;
473  }
474  const char* name() const {
475  ASSERT(type_ == kContextVariable
476  || type_ == kProperty
477  || type_ == kInternal
478  || type_ == kShortcut);
479  return name_;
480  }
481  INLINE(HeapEntry* from() const);
482  HeapEntry* to() const { return to_entry_; }
483 
484  private:
485  INLINE(HeapSnapshot* snapshot() const);
486 
487  unsigned type_ : 3;
488  int from_index_ : 29;
489  union {
490  // During entries population |to_index_| is used for storing the index,
491  // afterwards it is replaced with a pointer to the entry.
493  HeapEntry* to_entry_;
494  };
495  union {
496  int index_;
497  const char* name_;
498  };
499 };
500 
501 
502 // HeapEntry instances represent an entity from the heap (or a special
503 // virtual node, e.g. root).
504 class HeapEntry BASE_EMBEDDED {
505  public:
506  enum Type {
507  kHidden = v8::HeapGraphNode::kHidden,
517  };
518  static const int kNoEntry;
519 
520  HeapEntry() { }
521  HeapEntry(HeapSnapshot* snapshot,
522  Type type,
523  const char* name,
524  SnapshotObjectId id,
525  int self_size);
526 
527  HeapSnapshot* snapshot() { return snapshot_; }
528  Type type() { return static_cast<Type>(type_); }
529  const char* name() { return name_; }
530  void set_name(const char* name) { name_ = name; }
531  inline SnapshotObjectId id() { return id_; }
532  int self_size() { return self_size_; }
533  INLINE(int index() const);
534  int children_count() const { return children_count_; }
535  INLINE(int set_children_index(int index));
536  void add_child(HeapGraphEdge* edge) {
537  children_arr()[children_count_++] = edge;
538  }
540  return Vector<HeapGraphEdge*>(children_arr(), children_count_); }
541 
542  void SetIndexedReference(
543  HeapGraphEdge::Type type, int index, HeapEntry* entry);
544  void SetNamedReference(
545  HeapGraphEdge::Type type, const char* name, HeapEntry* entry);
546 
547  void Print(
548  const char* prefix, const char* edge_name, int max_depth, int indent);
549 
550  Handle<HeapObject> GetHeapObject();
551 
552  private:
553  INLINE(HeapGraphEdge** children_arr());
554  const char* TypeAsString();
555 
556  unsigned type_: 4;
557  int children_count_: 28;
558  int children_index_;
559  int self_size_;
560  SnapshotObjectId id_;
561  HeapSnapshot* snapshot_;
562  const char* name_;
563 };
564 
565 
566 class HeapSnapshotsCollection;
567 
568 // HeapSnapshot represents a single heap snapshot. It is stored in
569 // HeapSnapshotsCollection, which is also a factory for
570 // HeapSnapshots. All HeapSnapshots share strings copied from JS heap
571 // to be able to return them even if they were collected.
572 // HeapSnapshotGenerator fills in a HeapSnapshot.
574  public:
575  enum Type {
577  };
578 
580  Type type,
581  const char* title,
582  unsigned uid);
583  void Delete();
584 
585  HeapSnapshotsCollection* collection() { return collection_; }
586  Type type() { return type_; }
587  const char* title() { return title_; }
588  unsigned uid() { return uid_; }
589  size_t RawSnapshotSize() const;
590  HeapEntry* root() { return &entries_[root_index_]; }
591  HeapEntry* gc_roots() { return &entries_[gc_roots_index_]; }
592  HeapEntry* natives_root() { return &entries_[natives_root_index_]; }
593  HeapEntry* gc_subroot(int index) {
594  return &entries_[gc_subroot_indexes_[index]];
595  }
596  List<HeapEntry>& entries() { return entries_; }
597  List<HeapGraphEdge>& edges() { return edges_; }
598  List<HeapGraphEdge*>& children() { return children_; }
599  void RememberLastJSObjectId();
601  return max_snapshot_js_object_id_;
602  }
603 
604  HeapEntry* AddEntry(HeapEntry::Type type,
605  const char* name,
606  SnapshotObjectId id,
607  int size);
608  HeapEntry* AddRootEntry();
609  HeapEntry* AddGcRootsEntry();
610  HeapEntry* AddGcSubrootEntry(int tag);
611  HeapEntry* AddNativesRootEntry();
612  HeapEntry* GetEntryById(SnapshotObjectId id);
614  void FillChildren();
615 
616  void Print(int max_depth);
617  void PrintEntriesSize();
618 
619  private:
620  HeapSnapshotsCollection* collection_;
621  Type type_;
622  const char* title_;
623  unsigned uid_;
624  int root_index_;
625  int gc_roots_index_;
626  int natives_root_index_;
627  int gc_subroot_indexes_[VisitorSynchronization::kNumberOfSyncTags];
628  List<HeapEntry> entries_;
629  List<HeapGraphEdge> edges_;
630  List<HeapGraphEdge*> children_;
631  List<HeapEntry*> sorted_entries_;
632  SnapshotObjectId max_snapshot_js_object_id_;
633 
634  friend class HeapSnapshotTester;
635 
637 };
638 
639 
641  public:
642  HeapObjectsMap();
643 
646  SnapshotObjectId FindOrAddEntry(Address addr, unsigned int size);
647  void MoveObject(Address from, Address to);
649  return next_id_ - kObjectIdStep;
650  }
651 
654  size_t GetUsedMemorySize() const;
655 
657  static inline SnapshotObjectId GetNthGcSubrootId(int delta);
658 
659  static const int kObjectIdStep = 2;
665 
666  private:
667  struct EntryInfo {
668  EntryInfo(SnapshotObjectId id, Address addr, unsigned int size)
669  : id(id), addr(addr), size(size), accessed(true) { }
670  EntryInfo(SnapshotObjectId id, Address addr, unsigned int size, bool accessed)
671  : id(id), addr(addr), size(size), accessed(accessed) { }
672  SnapshotObjectId id;
673  Address addr;
674  unsigned int size;
675  bool accessed;
676  };
677  struct TimeInterval {
678  explicit TimeInterval(SnapshotObjectId id) : id(id), size(0), count(0) { }
679  SnapshotObjectId id;
680  uint32_t size;
681  uint32_t count;
682  };
683 
684  void UpdateHeapObjectsMap();
685  void RemoveDeadEntries();
686 
687  static bool AddressesMatch(void* key1, void* key2) {
688  return key1 == key2;
689  }
690 
691  static uint32_t AddressHash(Address addr) {
692  return ComputeIntegerHash(
693  static_cast<uint32_t>(reinterpret_cast<uintptr_t>(addr)),
694  v8::internal::kZeroHashSeed);
695  }
696 
697  SnapshotObjectId next_id_;
698  HashMap entries_map_;
699  List<EntryInfo> entries_;
700  List<TimeInterval> time_intervals_;
701 
702  DISALLOW_COPY_AND_ASSIGN(HeapObjectsMap);
703 };
704 
705 
707  public:
710 
711  bool is_tracking_objects() { return is_tracking_objects_; }
713  return ids_.PushHeapObjectsStats(stream);
714  }
715  void StartHeapObjectsTracking() { is_tracking_objects_ = true; }
717 
719  HeapSnapshot::Type type, const char* name, unsigned uid);
721  List<HeapSnapshot*>* snapshots() { return &snapshots_; }
722  HeapSnapshot* GetSnapshot(unsigned uid);
723  void RemoveSnapshot(HeapSnapshot* snapshot);
724 
725  StringsStorage* names() { return &names_; }
726  TokenEnumerator* token_enumerator() { return token_enumerator_; }
727 
729  return ids_.FindEntry(object_addr);
730  }
731  SnapshotObjectId GetObjectId(Address object_addr, int object_size) {
732  return ids_.FindOrAddEntry(object_addr, object_size);
733  }
735  void ObjectMoveEvent(Address from, Address to) { ids_.MoveObject(from, to); }
737  return ids_.last_assigned_id();
738  }
739  size_t GetUsedMemorySize() const;
740 
741  private:
742  INLINE(static bool HeapSnapshotsMatch(void* key1, void* key2)) {
743  return key1 == key2;
744  }
745 
746  bool is_tracking_objects_; // Whether tracking object moves is needed.
747  List<HeapSnapshot*> snapshots_;
748  // Mapping from snapshots' uids to HeapSnapshot* pointers.
749  HashMap snapshots_uids_;
750  StringsStorage names_;
751  TokenEnumerator* token_enumerator_;
752  // Mapping from HeapObject addresses to objects' uids.
753  HeapObjectsMap ids_;
754 
755  DISALLOW_COPY_AND_ASSIGN(HeapSnapshotsCollection);
756 };
757 
758 
759 // A typedef for referencing anything that can be snapshotted living
760 // in any kind of heap memory.
761 typedef void* HeapThing;
762 
763 
764 // An interface that creates HeapEntries by HeapThings.
766  public:
767  virtual ~HeapEntriesAllocator() { }
768  virtual HeapEntry* AllocateEntry(HeapThing ptr) = 0;
769 };
770 
771 
772 // The HeapEntriesMap instance is used to track a mapping between
773 // real heap objects and their representations in heap snapshots.
775  public:
776  HeapEntriesMap();
777 
778  int Map(HeapThing thing);
779  void Pair(HeapThing thing, int entry);
780 
781  private:
782  static uint32_t Hash(HeapThing thing) {
783  return ComputeIntegerHash(
784  static_cast<uint32_t>(reinterpret_cast<uintptr_t>(thing)),
785  v8::internal::kZeroHashSeed);
786  }
787  static bool HeapThingsMatch(HeapThing key1, HeapThing key2) {
788  return key1 == key2;
789  }
790 
791  HashMap entries_;
792 
793  friend class HeapObjectsSet;
794 
796 };
797 
798 
800  public:
801  HeapObjectsSet();
802  void Clear();
803  bool Contains(Object* object);
804  void Insert(Object* obj);
805  const char* GetTag(Object* obj);
806  void SetTag(Object* obj, const char* tag);
807  bool is_empty() const { return entries_.occupancy() == 0; }
808 
809  private:
810  HashMap entries_;
811 
812  DISALLOW_COPY_AND_ASSIGN(HeapObjectsSet);
813 };
814 
815 
816 // An interface used to populate a snapshot with nodes and edges.
818  public:
820  virtual HeapEntry* AddEntry(HeapThing ptr,
821  HeapEntriesAllocator* allocator) = 0;
822  virtual HeapEntry* FindEntry(HeapThing ptr) = 0;
823  virtual HeapEntry* FindOrAddEntry(HeapThing ptr,
824  HeapEntriesAllocator* allocator) = 0;
826  int parent_entry,
827  int index,
828  HeapEntry* child_entry) = 0;
830  int parent_entry,
831  HeapEntry* child_entry) = 0;
833  int parent_entry,
834  const char* reference_name,
835  HeapEntry* child_entry) = 0;
837  int parent_entry,
838  HeapEntry* child_entry) = 0;
839 };
840 
841 
// An interface for reporting progress of snapshot generation.
// NOTE(review): the class header and virtual destructor were dropped by
// the doc extraction; the name is restored from the upstream V8 3.11
// header -- verify.
class SnapshottingProgressReportingInterface {
 public:
  virtual ~SnapshottingProgressReportingInterface() { }
  virtual void ProgressStep() = 0;
  virtual bool ProgressReport(bool force) = 0;
};
848 
849 
850 // An implementation of V8 heap graph extractor.
852  public:
853  V8HeapExplorer(HeapSnapshot* snapshot,
855  virtual ~V8HeapExplorer();
856  virtual HeapEntry* AllocateEntry(HeapThing ptr);
858  int EstimateObjectsCount(HeapIterator* iterator);
860  void TagGlobalObjects();
861 
862  static String* GetConstructorName(JSObject* object);
863 
865 
866  private:
867  HeapEntry* AddEntry(HeapObject* object);
868  HeapEntry* AddEntry(HeapObject* object,
869  HeapEntry::Type type,
870  const char* name);
871  const char* GetSystemEntryName(HeapObject* object);
872 
873  void ExtractReferences(HeapObject* obj);
874  void ExtractJSGlobalProxyReferences(JSGlobalProxy* proxy);
875  void ExtractJSObjectReferences(int entry, JSObject* js_obj);
876  void ExtractStringReferences(int entry, String* obj);
877  void ExtractContextReferences(int entry, Context* context);
878  void ExtractMapReferences(int entry, Map* map);
879  void ExtractSharedFunctionInfoReferences(int entry,
880  SharedFunctionInfo* shared);
881  void ExtractScriptReferences(int entry, Script* script);
882  void ExtractCodeCacheReferences(int entry, CodeCache* code_cache);
883  void ExtractCodeReferences(int entry, Code* code);
884  void ExtractJSGlobalPropertyCellReferences(int entry,
885  JSGlobalPropertyCell* cell);
886  void ExtractClosureReferences(JSObject* js_obj, int entry);
887  void ExtractPropertyReferences(JSObject* js_obj, int entry);
888  void ExtractElementReferences(JSObject* js_obj, int entry);
889  void ExtractInternalReferences(JSObject* js_obj, int entry);
890  bool IsEssentialObject(Object* object);
891  void SetClosureReference(HeapObject* parent_obj,
892  int parent,
893  String* reference_name,
894  Object* child);
895  void SetNativeBindReference(HeapObject* parent_obj,
896  int parent,
897  const char* reference_name,
898  Object* child);
899  void SetElementReference(HeapObject* parent_obj,
900  int parent,
901  int index,
902  Object* child);
903  void SetInternalReference(HeapObject* parent_obj,
904  int parent,
905  const char* reference_name,
906  Object* child,
907  int field_offset = -1);
908  void SetInternalReference(HeapObject* parent_obj,
909  int parent,
910  int index,
911  Object* child,
912  int field_offset = -1);
913  void SetHiddenReference(HeapObject* parent_obj,
914  int parent,
915  int index,
916  Object* child);
917  void SetWeakReference(HeapObject* parent_obj,
918  int parent,
919  int index,
920  Object* child_obj,
921  int field_offset);
922  void SetPropertyReference(HeapObject* parent_obj,
923  int parent,
924  String* reference_name,
925  Object* child,
926  const char* name_format_string = NULL,
927  int field_offset = -1);
928  void SetPropertyShortcutReference(HeapObject* parent_obj,
929  int parent,
930  String* reference_name,
931  Object* child);
932  void SetUserGlobalReference(Object* user_global);
933  void SetRootGcRootsReference();
934  void SetGcRootsReference(VisitorSynchronization::SyncTag tag);
935  void SetGcSubrootReference(
936  VisitorSynchronization::SyncTag tag, bool is_weak, Object* child);
937  const char* GetStrongGcSubrootName(Object* object);
938  void TagObject(Object* obj, const char* tag);
939 
940  HeapEntry* GetEntry(Object* obj);
941 
942  static inline HeapObject* GetNthGcSubrootObject(int delta);
943  static inline int GetGcSubrootOrder(HeapObject* subroot);
944 
945  Heap* heap_;
946  HeapSnapshot* snapshot_;
947  HeapSnapshotsCollection* collection_;
949  SnapshotFillerInterface* filler_;
950  HeapObjectsSet objects_tags_;
951  HeapObjectsSet strong_gc_subroot_names_;
952 
953  static HeapObject* const kGcRootsObject;
954  static HeapObject* const kFirstGcSubrootObject;
955  static HeapObject* const kLastGcSubrootObject;
956 
958  friend class GcSubrootsEnumerator;
960 
962 };
963 
964 
966 
967 
968 // An implementation of retained native objects extractor.
970  public:
973  virtual ~NativeObjectsExplorer();
975  int EstimateObjectsCount();
977 
978  private:
979  void FillRetainedObjects();
980  void FillImplicitReferences();
981  List<HeapObject*>* GetListMaybeDisposeInfo(v8::RetainedObjectInfo* info);
982  void SetNativeRootReference(v8::RetainedObjectInfo* info);
983  void SetRootNativeRootsReference();
984  void SetWrapperNativeReferences(HeapObject* wrapper,
985  v8::RetainedObjectInfo* info);
986  void VisitSubtreeWrapper(Object** p, uint16_t class_id);
987 
988  static uint32_t InfoHash(v8::RetainedObjectInfo* info) {
989  return ComputeIntegerHash(static_cast<uint32_t>(info->GetHash()),
990  v8::internal::kZeroHashSeed);
991  }
992  static bool RetainedInfosMatch(void* key1, void* key2) {
993  return key1 == key2 ||
994  (reinterpret_cast<v8::RetainedObjectInfo*>(key1))->IsEquivalent(
995  reinterpret_cast<v8::RetainedObjectInfo*>(key2));
996  }
997  INLINE(static bool StringsMatch(void* key1, void* key2)) {
998  return strcmp(reinterpret_cast<char*>(key1),
999  reinterpret_cast<char*>(key2)) == 0;
1000  }
1001 
1002  NativeGroupRetainedObjectInfo* FindOrAddGroupInfo(const char* label);
1003 
1004  HeapSnapshot* snapshot_;
1005  HeapSnapshotsCollection* collection_;
1007  bool embedder_queried_;
1008  HeapObjectsSet in_groups_;
1009  // RetainedObjectInfo* -> List<HeapObject*>*
1010  HashMap objects_by_info_;
1011  HashMap native_groups_;
1012  HeapEntriesAllocator* synthetic_entries_allocator_;
1013  HeapEntriesAllocator* native_entries_allocator_;
1014  // Used during references extraction.
1015  SnapshotFillerInterface* filler_;
1016 
1017  static HeapThing const kNativesRootObject;
1018 
1020 
1022 };
1023 
1024 
1026  public:
1028  v8::ActivityControl* control);
1029  bool GenerateSnapshot();
1030 
1031  private:
1032  bool FillReferences();
1033  void ProgressStep();
1034  bool ProgressReport(bool force = false);
1035  void SetProgressTotal(int iterations_count);
1036 
1037  HeapSnapshot* snapshot_;
1038  v8::ActivityControl* control_;
1039  V8HeapExplorer v8_heap_explorer_;
1040  NativeObjectsExplorer dom_explorer_;
1041  // Mapping from HeapThing pointers to HeapEntry* pointers.
1042  HeapEntriesMap entries_;
1043  // Used during snapshot generation.
1044  int progress_counter_;
1045  int progress_total_;
1046 
1047  DISALLOW_COPY_AND_ASSIGN(HeapSnapshotGenerator);
1048 };
1049 
1050 class OutputStreamWriter;
1051 
1053  public:
1055  : snapshot_(snapshot),
1056  strings_(ObjectsMatch),
1057  next_node_id_(1),
1058  next_string_id_(1),
1059  writer_(NULL) {
1060  }
1061  void Serialize(v8::OutputStream* stream);
1062 
1063  private:
1064  INLINE(static bool ObjectsMatch(void* key1, void* key2)) {
1065  return key1 == key2;
1066  }
1067 
1068  INLINE(static uint32_t ObjectHash(const void* key)) {
1069  return ComputeIntegerHash(
1070  static_cast<uint32_t>(reinterpret_cast<uintptr_t>(key)),
1071  v8::internal::kZeroHashSeed);
1072  }
1073 
1074  HeapSnapshot* CreateFakeSnapshot();
1075  int GetStringId(const char* s);
1076  int entry_index(HeapEntry* e) { return e->index() * kNodeFieldsCount; }
1077  void SerializeEdge(HeapGraphEdge* edge, bool first_edge);
1078  void SerializeEdges();
1079  void SerializeImpl();
1080  void SerializeNode(HeapEntry* entry);
1081  void SerializeNodes();
1082  void SerializeSnapshot();
1083  void SerializeString(const unsigned char* s);
1084  void SerializeStrings();
1085  void SortHashMap(HashMap* map, List<HashMap::Entry*>* sorted_entries);
1086 
1087  static const int kEdgeFieldsCount;
1088  static const int kNodeFieldsCount;
1089 
1090  HeapSnapshot* snapshot_;
1091  HashMap strings_;
1092  int next_node_id_;
1093  int next_string_id_;
1094  OutputStreamWriter* writer_;
1095 
1098 
1100 };
1101 
1102 } } // namespace v8::internal
1103 
1104 #endif // V8_PROFILE_GENERATOR_H_
byte * Address
Definition: globals.h:172
virtual HeapEntry * AllocateEntry(HeapThing ptr)=0
static const SnapshotObjectId kGcRootsFirstSubrootId
SnapshotObjectId FindOrAddEntry(Address addr, unsigned int size)
void RemoveSnapshot(HeapSnapshot *snapshot)
static const int kInheritsSecurityToken
Handle< HeapObject > FindHeapObjectById(SnapshotObjectId id)
virtual HeapEntry * AllocateEntry(HeapThing ptr)
void SetTickRatePerMs(double ticks_per_ms)
virtual intptr_t GetHash()=0
uint32_t GetCallUid() const
INLINE(CodeEntry *NewCodeEntry(Logger::LogEventsAndTags tag, const char *name))
ProfileNode * root() const
INLINE(CodeEntry *NewCodeEntry(Logger::LogEventsAndTags tag, const char *name_prefix, String *name))
ProfileGenerator(CpuProfilesCollection *profiles)
INLINE(double actual_sampling_rate())
value format" "after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false, "print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false, "print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false, "report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true, "garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true, "flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true, "use incremental marking") DEFINE_bool(incremental_marking_steps, true, "do incremental marking steps") DEFINE_bool(trace_incremental_marking, false, "trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true, "Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false, "Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true, "use inline caching") DEFINE_bool(native_code_counters, false, "generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false, "Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true, "Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false, "Never perform compaction on full GC-testing only") DEFINE_bool(compact_code_space, true, "Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true, "Flush inline caches prior to mark compact collection and" "flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0, "Default seed for initializing random generator" "(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true, "allows verbose printing") DEFINE_bool(allow_natives_syntax, false, "allow natives syntax") DEFINE_bool(trace_sim, false, "Trace simulator execution") DEFINE_bool(check_icache, false, "Check icache flushes in ARM and MIPS simulator") 
DEFINE_int(stop_sim_at, 0, "Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8, "Stack alingment in bytes in simulator(4 or 8, 8 is default)") DEFINE_bool(trace_exception, false, "print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false, "preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true, "randomize hashes to avoid predictable hash collisions" "(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0, "Fixed seed to use to hash property keys(0 means random)" "(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false, "activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true, "generate optimized regexp code") DEFINE_bool(testing_bool_flag, true, "testing_bool_flag") DEFINE_int(testing_int_flag, 13, "testing_int_flag") DEFINE_float(testing_float_flag, 2.5, "float-flag") DEFINE_string(testing_string_flag, "Hello, world!", "string-flag") DEFINE_int(testing_prng_seed, 42, "Seed used for threading test randomness") DEFINE_string(testing_serialization_file, "/tmp/serdes", "file in which to serialize heap") DEFINE_bool(help, false, "Print usage message, including flags, on console") DEFINE_bool(dump_counters, false, "Dump counters on exit") DEFINE_string(map_counters, "", "Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT, "Pass all remaining arguments to the script.Alias for\"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this 
substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") 
DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") 
DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#43"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2#define FLAG_MODE_DEFINE_DEFAULTS#1"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flag-definitions.h"1#define FLAG_FULL(ftype, ctype, nam, def, cmt)#define FLAG_READONLY(ftype, ctype, nam, def, cmt)#define DEFINE_implication(whenflag, thenflag)#define DEFINE_bool(nam, def, cmt)#define DEFINE_int(nam, def, cmt)#define DEFINE_float(nam, def, cmt)#define DEFINE_string(nam, def, cmt)#define DEFINE_args(nam, def, cmt)#define FLAG DEFINE_bool(use_strict, false,"enforce strict mode") DEFINE_bool(es5_readonly, false,"activate correct semantics for inheriting readonliness") DEFINE_bool(es52_globals, false,"activate new semantics for global var declarations") DEFINE_bool(harmony_typeof, false,"enable harmony semantics for typeof") DEFINE_bool(harmony_scoping, false,"enable harmony block scoping") DEFINE_bool(harmony_modules, false,"enable harmony modules (implies block scoping)") DEFINE_bool(harmony_proxies, false,"enable harmony proxies") DEFINE_bool(harmony_collections, false,"enable harmony collections (sets, maps, and weak maps)") DEFINE_bool(harmony, false,"enable all harmony features (except typeof)") DEFINE_implication(harmony, harmony_scoping) DEFINE_implication(harmony, harmony_modules) DEFINE_implication(harmony, harmony_proxies) DEFINE_implication(harmony, harmony_collections) DEFINE_implication(harmony_modules, harmony_scoping) DEFINE_bool(packed_arrays, false,"optimizes arrays that have no holes") DEFINE_bool(smi_only_arrays, true,"tracks arrays 
with only smi values") DEFINE_bool(clever_optimizations, true,"Optimize object size, Array shift, DOM strings and string +") DEFINE_bool(unbox_double_arrays, true,"automatically unbox arrays of doubles") DEFINE_bool(string_slices, true,"use string slices") DEFINE_bool(crankshaft, true,"use crankshaft") DEFINE_string(hydrogen_filter,"","optimization filter") DEFINE_bool(use_range, true,"use hydrogen range analysis") DEFINE_bool(eliminate_dead_phis, true,"eliminate dead phis") DEFINE_bool(use_gvn, true,"use hydrogen global value numbering") DEFINE_bool(use_canonicalizing, true,"use hydrogen instruction canonicalizing") DEFINE_bool(use_inlining, true,"use function inlining") DEFINE_int(max_inlined_source_size, 600,"maximum source size in bytes considered for a single inlining") DEFINE_int(max_inlined_nodes, 196,"maximum number of AST nodes considered for a single inlining") DEFINE_int(max_inlined_nodes_cumulative, 196,"maximum cumulative number of AST nodes considered for inlining") DEFINE_bool(loop_invariant_code_motion, true,"loop invariant code motion") DEFINE_bool(collect_megamorphic_maps_from_stub_cache, true,"crankshaft harvests type feedback from stub cache") DEFINE_bool(hydrogen_stats, false,"print statistics for hydrogen") DEFINE_bool(trace_hydrogen, false,"trace generated hydrogen to file") DEFINE_string(trace_phase,"Z","trace generated IR for specified phases") DEFINE_bool(trace_inlining, false,"trace inlining decisions") DEFINE_bool(trace_alloc, false,"trace register allocator") DEFINE_bool(trace_all_uses, false,"trace all use positions") DEFINE_bool(trace_range, false,"trace range analysis") DEFINE_bool(trace_gvn, false,"trace global value numbering") DEFINE_bool(trace_representation, false,"trace representation types") DEFINE_bool(stress_pointer_maps, false,"pointer map for every instruction") DEFINE_bool(stress_environments, false,"environment for every instruction") DEFINE_int(deopt_every_n_times, 0,"deoptimize every n times a deopt point is passed") 
DEFINE_bool(trap_on_deopt, false,"put a break point before deoptimizing") DEFINE_bool(deoptimize_uncommon_cases, true,"deoptimize uncommon cases") DEFINE_bool(polymorphic_inlining, true,"polymorphic inlining") DEFINE_bool(use_osr, true,"use on-stack replacement") DEFINE_bool(array_bounds_checks_elimination, false,"perform array bounds checks elimination") DEFINE_bool(array_index_dehoisting, false,"perform array index dehoisting") DEFINE_bool(trace_osr, false,"trace on-stack replacement") DEFINE_int(stress_runs, 0,"number of stress runs") DEFINE_bool(optimize_closures, true,"optimize closures") DEFINE_bool(inline_construct, true,"inline constructor calls") DEFINE_bool(inline_arguments, true,"inline functions with arguments object") DEFINE_int(loop_weight, 1,"loop weight for representation inference") DEFINE_bool(optimize_for_in, true,"optimize functions containing for-in loops") DEFINE_bool(experimental_profiler, true,"enable all profiler experiments") DEFINE_bool(watch_ic_patching, false,"profiler considers IC stability") DEFINE_int(frame_count, 1,"number of stack frames inspected by the profiler") DEFINE_bool(self_optimization, false,"primitive functions trigger their own optimization") DEFINE_bool(direct_self_opt, false,"call recompile stub directly when self-optimizing") DEFINE_bool(retry_self_opt, false,"re-try self-optimization if it failed") DEFINE_bool(count_based_interrupts, false,"trigger profiler ticks based on counting instead of timing") DEFINE_bool(interrupt_at_exit, false,"insert an interrupt check at function exit") DEFINE_bool(weighted_back_edges, false,"weight back edges by jump distance for interrupt triggering") DEFINE_int(interrupt_budget, 5900,"execution budget before interrupt is triggered") DEFINE_int(type_info_threshold, 15,"percentage of ICs that must have type info to allow optimization") DEFINE_int(self_opt_count, 130,"call count before self-optimization") DEFINE_implication(experimental_profiler, watch_ic_patching) 
DEFINE_implication(experimental_profiler, self_optimization) DEFINE_implication(experimental_profiler, retry_self_opt) DEFINE_implication(experimental_profiler, count_based_interrupts) DEFINE_implication(experimental_profiler, interrupt_at_exit) DEFINE_implication(experimental_profiler, weighted_back_edges) DEFINE_bool(trace_opt_verbose, false,"extra verbose compilation tracing") DEFINE_implication(trace_opt_verbose, trace_opt) DEFINE_bool(debug_code, false,"generate extra code (assertions) for debugging") DEFINE_bool(code_comments, false,"emit comments in code disassembly") DEFINE_bool(enable_sse2, true,"enable use of SSE2 instructions if available") DEFINE_bool(enable_sse3, true,"enable use of SSE3 instructions if available") DEFINE_bool(enable_sse4_1, true,"enable use of SSE4.1 instructions if available") DEFINE_bool(enable_cmov, true,"enable use of CMOV instruction if available") DEFINE_bool(enable_rdtsc, true,"enable use of RDTSC instruction if available") DEFINE_bool(enable_sahf, true,"enable use of SAHF instruction if available (X64 only)") DEFINE_bool(enable_vfp3, true,"enable use of VFP3 instructions if available - this implies ""enabling ARMv7 instructions (ARM only)") DEFINE_bool(enable_armv7, true,"enable use of ARMv7 instructions if available (ARM only)") DEFINE_bool(enable_fpu, true,"enable use of MIPS FPU instructions if available (MIPS only)") DEFINE_string(expose_natives_as, NULL,"expose natives in global object") DEFINE_string(expose_debug_as, NULL,"expose debug in global object") DEFINE_bool(expose_gc, false,"expose gc extension") DEFINE_bool(expose_externalize_string, false,"expose externalize string extension") DEFINE_int(stack_trace_limit, 10,"number of stack frames to capture") DEFINE_bool(builtins_in_stack_traces, false,"show built-in functions in stack traces") DEFINE_bool(disable_native_files, false,"disable builtin natives files") DEFINE_bool(inline_new, true,"use fast inline allocation") DEFINE_bool(stack_trace_on_abort, true,"print a 
stack trace if an assertion failure occurs") DEFINE_bool(trace, false,"trace function calls") DEFINE_bool(mask_constants_with_cookie, true,"use random jit cookie to mask large constants") DEFINE_bool(lazy, true,"use lazy compilation") DEFINE_bool(trace_opt, false,"trace lazy optimization") DEFINE_bool(trace_opt_stats, false,"trace lazy optimization statistics") DEFINE_bool(opt, true,"use adaptive optimizations") DEFINE_bool(always_opt, false,"always try to optimize functions") DEFINE_bool(prepare_always_opt, false,"prepare for turning on always opt") DEFINE_bool(trace_deopt, false,"trace deoptimization") DEFINE_int(min_preparse_length, 1024,"minimum length for automatic enable preparsing") DEFINE_bool(always_full_compiler, false,"try to use the dedicated run-once backend for all code") DEFINE_bool(trace_bailout, false,"print reasons for falling back to using the classic V8 backend") DEFINE_bool(compilation_cache, true,"enable compilation cache") DEFINE_bool(cache_prototype_transitions, true,"cache prototype transitions") DEFINE_bool(trace_debug_json, false,"trace debugging JSON request/response") DEFINE_bool(debugger_auto_break, true,"automatically set the debug break flag when debugger commands are ""in the queue") DEFINE_bool(enable_liveedit, true,"enable liveedit experimental feature") DEFINE_bool(break_on_abort, true,"always cause a debug break before aborting") DEFINE_int(stack_size, kPointerSize *123,"default size of stack region v8 is allowed to use (in kBytes)") DEFINE_int(max_stack_trace_source_length, 300,"maximum length of function source code printed in a stack trace.") DEFINE_bool(always_inline_smi_code, false,"always inline smi code in non-opt code") DEFINE_int(max_new_space_size, 0,"max size of the new generation (in kBytes)") DEFINE_int(max_old_space_size, 0,"max size of the old generation (in Mbytes)") DEFINE_int(max_executable_size, 0,"max size of executable memory (in Mbytes)") DEFINE_bool(gc_global, false,"always perform global GCs") 
DEFINE_int(gc_interval,-1,"garbage collect after <n> allocations") DEFINE_bool(trace_gc, false,"print one trace line following each garbage collection") DEFINE_bool(trace_gc_nvp, false,"print one detailed trace line in name=value format ""after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false,"print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false,"print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false,"report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true,"garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true,"flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true,"use incremental marking") DEFINE_bool(incremental_marking_steps, true,"do incremental marking steps") DEFINE_bool(trace_incremental_marking, false,"trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true,"Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false,"Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true,"use inline caching") DEFINE_bool(native_code_counters, false,"generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false,"Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true,"Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false,"Never perform compaction on full GC - testing only") DEFINE_bool(compact_code_space, true,"Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true,"Flush inline caches prior to mark compact collection and ""flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0,"Default seed for initializing random generator ""(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true,"allows verbose printing") 
DEFINE_bool(allow_natives_syntax, false,"allow natives syntax") DEFINE_bool(trace_sim, false,"Trace simulator execution") DEFINE_bool(check_icache, false,"Check icache flushes in ARM and MIPS simulator") DEFINE_int(stop_sim_at, 0,"Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8,"Stack alingment in bytes in simulator (4 or 8, 8 is default)") DEFINE_bool(trace_exception, false,"print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false,"preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true,"randomize hashes to avoid predictable hash collisions ""(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0,"Fixed seed to use to hash property keys (0 means random)""(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false,"activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true,"generate optimized regexp code") DEFINE_bool(testing_bool_flag, true,"testing_bool_flag") DEFINE_int(testing_int_flag, 13,"testing_int_flag") DEFINE_float(testing_float_flag, 2.5,"float-flag") DEFINE_string(testing_string_flag,"Hello, world!","string-flag") DEFINE_int(testing_prng_seed, 42,"Seed used for threading test randomness") DEFINE_string(testing_serialization_file,"/tmp/serdes","file in which to serialize heap") DEFINE_bool(help, false,"Print usage message, including flags, on console") DEFINE_bool(dump_counters, false,"Dump counters on exit") DEFINE_string(map_counters,"","Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT,"Pass all remaining arguments to the script. 
Alias for \"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") 
DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular 
expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#47"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2 namespace{struct Flag{enum FlagType{TYPE_BOOL, TYPE_INT, TYPE_FLOAT, TYPE_STRING, TYPE_ARGS} name
Definition: flags.cc:1349
static SnapshotObjectId GetNthGcSubrootId(int delta)
CpuProfile * GetProfile(int security_token_id, unsigned uid)
static const SnapshotObjectId kNativesRootObjectId
INLINE(const char *title() const)
INLINE(void IncreaseTotalTicks(unsigned amount))
bool IterateAndExtractReferences(SnapshotFillerInterface *filler)
CodeEntry * NewCodeEntry(Logger::LogEventsAndTags tag, String *name, String *resource_name, int line_number)
INLINE(unsigned total_ticks() const)
INLINE(bool is_js_function() const)
void FilteredClone(ProfileTree *src, int security_token_id)
TickSample * sample
List< HeapSnapshot * > * snapshots()
const char * name() const
FlagType type_
Definition: flags.cc:1351
virtual void SetNamedAutoIndexReference(HeapGraphEdge::Type type, int parent_entry, HeapEntry *child_entry)=0
#define ASSERT(condition)
Definition: checks.h:270
v8::Handle< v8::Value > Print(const v8::Arguments &args)
const char * GetFormatted(const char *format,...)
SnapshotObjectId PushHeapObjectsStats(OutputStream *stream)
unsigned short uint16_t
Definition: unicode.cc:46
virtual HeapEntry * AddEntry(HeapThing ptr, HeapEntriesAllocator *allocator)=0
INLINE(CodeEntry(Logger::LogEventsAndTags tag, const char *name_prefix, const char *name, const char *resource_name, int line_number, int security_token_id))
void set_name(const char *name)
void SetTag(Object *obj, const char *tag)
INLINE(const char *name() const)
void SetActualSamplingRate(double actual_sampling_rate)
bool IterateAndExtractReferences(SnapshotFillerInterface *filler)
const char * GetName(int args_count)
INLINE(void set_shared_id(int shared_id))
HeapEntry * AddNativesRootEntry()
virtual void SetIndexedAutoIndexReference(HeapGraphEdge::Type type, int parent_entry, HeapEntry *child_entry)=0
INLINE(unsigned self_ticks() const)
static const SnapshotObjectId kGcRootsObjectId
static SnapshotObjectId GenerateId(v8::RetainedObjectInfo *info)
INLINE(CodeEntry *entry() const)
INLINE(CodeEntry *NewCodeEntry(Logger::LogEventsAndTags tag, String *name, String *resource_name, int line_number))
const char * GetName(String *name)
SnapshotObjectId last_assigned_id() const
INLINE(CodeEntry *NewCodeEntry(int security_token_id))
virtual void SetNamedReference(HeapGraphEdge::Type type, int parent_entry, const char *reference_name, HeapEntry *child_entry)=0
CodeEntry * FindEntry(Address addr)
NativeObjectsExplorer(HeapSnapshot *snapshot, SnapshottingProgressReportingInterface *progress)
List< HeapEntry > & entries()
void RecordTickSample(const TickSample &sample)
const char * GetTag(Object *obj)
int GetSharedId(Address addr)
SnapshotObjectId FindObjectId(Address object_addr)
static String * GetConstructorName(JSObject *object)
HeapEntry * gc_subroot(int index)
ProfileNode * FindChild(CodeEntry *entry)
HeapSnapshotsCollection * collection()
uint32_t occupancy() const
Definition: hashmap.h:82
int Compare(const T &a, const T &b)
Definition: utils.h:156
static HeapObject *const kInternalRootObject
INLINE(CodeEntry *NewCodeEntry(Logger::LogEventsAndTags tag, int args_count))
INLINE(int shared_id() const)
INLINE(const ProfileTree *bottom_up() const)
void MoveCode(Address from, Address to)
static const char *const kProgramEntryName
#define DISALLOW_COPY_AND_ASSIGN(TypeName)
Definition: globals.h:321
intptr_t AtomicWord
Definition: atomicops.h:72
double TicksToMillis(unsigned ticks) const
static const SnapshotObjectId kFirstAvailableObjectId
List< HeapGraphEdge > & edges()
HeapSnapshotGenerator(HeapSnapshot *snapshot, v8::ActivityControl *control)
INLINE(unsigned uid() const)
SnapshotObjectId FindEntry(Address addr)
bool IsSameAs(CodeEntry *entry) const
void AddPathFromEnd(const Vector< CodeEntry * > &path)
virtual void SetIndexedReference(HeapGraphEdge::Type type, int parent_entry, int index, HeapEntry *child_entry)=0
V8HeapExplorer(HeapSnapshot *snapshot, SnapshottingProgressReportingInterface *progress)
INLINE(const char *name_prefix() const)
#define BASE_EMBEDDED
Definition: allocation.h:68
void RemoveProfile(CpuProfile *profile)
CpuProfile * FilteredClone(int security_token_id)
INLINE(int line_number() const)
SnapshotObjectId PushHeapObjectsStats(OutputStream *stream)
static const char *const kGarbageCollectorEntryName
void AddPathToCurrentProfiles(const Vector< CodeEntry * > &path)
HeapEntry * GetEntryById(SnapshotObjectId id)
virtual HeapEntry * FindOrAddEntry(HeapThing ptr, HeapEntriesAllocator *allocator)=0
void AddRootEntries(SnapshotFillerInterface *filler)
void CopyData(const CodeEntry &source)
void UpdateMeasurements(double current_time)
static const unsigned kWallTimeQueryIntervalMs
void AddPath(const Vector< CodeEntry * > &path)
void AddCode(Address addr, CodeEntry *entry, unsigned size)
void Serialize(v8::OutputStream *stream)
void AddPathFromStart(const Vector< CodeEntry * > &path)
INLINE(int security_token_id() const)
HeapEntry * AddEntry(HeapEntry::Type type, const char *name, SnapshotObjectId id, int size)
static const char *const kAnonymousFunctionName
uint32_t ComputeIntegerHash(uint32_t key, uint32_t seed)
Definition: utils.h:285
static const SnapshotObjectId kInternalRootObjectId
INLINE(bool has_name_prefix() const)
INLINE(static HeapObject *EnsureDoubleAligned(Heap *heap, HeapObject *object, int size))
List< HeapEntry * > * GetSortedEntriesList()
uint32_t SnapshotObjectId
Definition: v8-profiler.h:67
virtual HeapEntry * FindEntry(HeapThing ptr)=0
void SnapshotGenerationFinished(HeapSnapshot *snapshot)
const char * GetCopy(const char *src)
void Pair(HeapThing thing, int entry)
HeapEntry * AddGcSubrootEntry(int tag)
TemplateHashMapImpl< FreeStoreAllocationPolicy > HashMap
Definition: hashmap.h:112
SnapshotObjectId last_assigned_id() const
INLINE(const char *resource_name() const)
static const char *const kEmptyNamePrefix
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination trace on stack replacement optimize closures functions with arguments object optimize functions containing for in loops profiler considers IC stability primitive functions trigger their own optimization re try self optimization if it failed insert an interrupt check at function exit execution budget before interrupt is triggered call count before self optimization self_optimization count_based_interrupts weighted_back_edges trace_opt emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 enable use of ARMv7 instructions if enable use of MIPS FPU instructions if NULL
Definition: flags.cc:274
void AddRootEntries(SnapshotFillerInterface *filler)
const char * GetFunctionName(String *name)
HeapSnapshot * NewSnapshot(HeapSnapshot::Type type, const char *name, unsigned uid)
bool StartProfiling(const char *title, unsigned uid)
HeapSnapshot * GetSnapshot(unsigned uid)
List< HeapGraphEdge * > & children()
SnapshotObjectId GetObjectId(Address object_addr, int object_size)
INLINE(const List< ProfileNode * > *children() const)
const char * name_
Definition: flags.cc:1352
INLINE(ProfileNode(ProfileTree *tree, CodeEntry *entry))
int EstimateObjectsCount(HeapIterator *iterator)
const char * GetVFormatted(const char *format, va_list args)
CpuProfile * StopProfiling(int security_token_id, const char *title, double actual_sampling_rate)
INLINE(void IncrementSelfTicks())
SnapshotObjectId max_snapshot_js_object_id() const
const char * GetName(String *name)
void MoveObject(Address from, Address to)
ProfileNode * FindOrAddChild(CodeEntry *entry)
CpuProfile(const char *title, unsigned uid)
void add_child(HeapGraphEdge *edge)
HeapSnapshot(HeapSnapshotsCollection *collection, Type type, const char *title, unsigned uid)
INLINE(void IncreaseSelfTicks(unsigned amount))
INLINE(const ProfileTree *top_down() const)
FlagType type() const
Definition: flags.cc:1358
Vector< HeapGraphEdge * > children()
HeapSnapshotJSONSerializer(HeapSnapshot *snapshot)
void ObjectMoveEvent(Address from, Address to)
List< CpuProfile * > * Profiles(int security_token_id)