v8 3.25.30 (node 0.11.13)
V8 is Google's open source JavaScript engine.
heap-snapshot-generator.cc
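This file implements the heap snapshot machinery behind V8's heap profiler (HeapSnapshot, HeapEntry, HeapGraphEdge, HeapObjectsMap, V8HeapExplorer). For orientation only, here is a minimal sketch of how an embedder typically reaches this code; the exact calls shown are an assumption about the 3.25-era public API, not taken from this file:

  // Sketch only; assumes the v8 3.25-era public heap profiler API.
  v8::Isolate* isolate = v8::Isolate::GetCurrent();
  v8::HeapProfiler* profiler = isolate->GetHeapProfiler();
  const v8::HeapSnapshot* snapshot =
      profiler->TakeHeapSnapshot(v8::String::NewFromUtf8(isolate, "dump"));
  // TakeHeapSnapshot drives HeapSnapshotGenerator, which fills the
  // HeapSnapshot / HeapEntry / HeapGraphEdge structures defined below.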
1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #include "heap-snapshot-generator-inl.h"
31 
32 #include "allocation-tracker.h"
33 #include "code-stubs.h"
34 #include "heap-profiler.h"
35 #include "debug.h"
36 #include "types.h"
37 #include "v8conversions.h"
38 
39 namespace v8 {
40 namespace internal {
41 
42 
43 HeapGraphEdge::HeapGraphEdge(Type type, const char* name, int from, int to)
44  : type_(type),
45  from_index_(from),
46  to_index_(to),
47  name_(name) {
48  ASSERT(type == kContextVariable
49  || type == kProperty
50  || type == kInternal
51  || type == kShortcut
52  || type == kWeak);
53 }
54 
55 
56 HeapGraphEdge::HeapGraphEdge(Type type, int index, int from, int to)
57  : type_(type),
58  from_index_(from),
59  to_index_(to),
60  index_(index) {
61  ASSERT(type == kElement || type == kHidden);
62 }
63 
64 
65 void HeapGraphEdge::ReplaceToIndexWithEntry(HeapSnapshot* snapshot) {
66  to_entry_ = &snapshot->entries()[to_index_];
67 }
68 
69 
70 const int HeapEntry::kNoEntry = -1;
71 
72 HeapEntry::HeapEntry(HeapSnapshot* snapshot,
73  Type type,
74  const char* name,
75  SnapshotObjectId id,
76  size_t self_size,
77  unsigned trace_node_id)
78  : type_(type),
79  children_count_(0),
80  children_index_(-1),
81  self_size_(self_size),
82  snapshot_(snapshot),
83  name_(name),
84  id_(id),
85  trace_node_id_(trace_node_id) { }
86 
87 
88 void HeapEntry::SetNamedReference(HeapGraphEdge::Type type,
89  const char* name,
90  HeapEntry* entry) {
91  HeapGraphEdge edge(type, name, this->index(), entry->index());
92  snapshot_->edges().Add(edge);
93  ++children_count_;
94 }
95 
96 
97 void HeapEntry::SetIndexedReference(HeapGraphEdge::Type type,
98  int index,
99  HeapEntry* entry) {
100  HeapGraphEdge edge(type, index, this->index(), entry->index());
101  snapshot_->edges().Add(edge);
102  ++children_count_;
103 }
104 
105 
106 void HeapEntry::Print(
107  const char* prefix, const char* edge_name, int max_depth, int indent) {
108  STATIC_CHECK(sizeof(unsigned) == sizeof(id()));
109  OS::Print("%6" V8PRIuPTR " @%6u %*c %s%s: ",
110  self_size(), id(), indent, ' ', prefix, edge_name);
111  if (type() != kString) {
112  OS::Print("%s %.40s\n", TypeAsString(), name_);
113  } else {
114  OS::Print("\"");
115  const char* c = name_;
116  while (*c && (c - name_) <= 40) {
117  if (*c != '\n')
118  OS::Print("%c", *c);
119  else
120  OS::Print("\\n");
121  ++c;
122  }
123  OS::Print("\"\n");
124  }
125  if (--max_depth == 0) return;
126  Vector<HeapGraphEdge*> ch = children();
127  for (int i = 0; i < ch.length(); ++i) {
128  HeapGraphEdge& edge = *ch[i];
129  const char* edge_prefix = "";
130  EmbeddedVector<char, 64> index;
131  const char* edge_name = index.start();
132  switch (edge.type()) {
133  case HeapGraphEdge::kContextVariable:
134  edge_prefix = "#";
135  edge_name = edge.name();
136  break;
137  case HeapGraphEdge::kElement:
138  OS::SNPrintF(index, "%d", edge.index());
139  break;
140  case HeapGraphEdge::kInternal:
141  edge_prefix = "$";
142  edge_name = edge.name();
143  break;
144  case HeapGraphEdge::kProperty:
145  edge_name = edge.name();
146  break;
147  case HeapGraphEdge::kHidden:
148  edge_prefix = "$";
149  OS::SNPrintF(index, "%d", edge.index());
150  break;
151  case HeapGraphEdge::kShortcut:
152  edge_prefix = "^";
153  edge_name = edge.name();
154  break;
155  case HeapGraphEdge::kWeak:
156  edge_prefix = "w";
157  edge_name = edge.name();
158  break;
159  default:
160  OS::SNPrintF(index, "!!! unknown edge type: %d ", edge.type());
161  }
162  edge.to()->Print(edge_prefix, edge_name, max_depth, indent + 2);
163  }
164 }
165 
166 
167 const char* HeapEntry::TypeAsString() {
168  switch (type()) {
169  case kHidden: return "/hidden/";
170  case kObject: return "/object/";
171  case kClosure: return "/closure/";
172  case kString: return "/string/";
173  case kCode: return "/code/";
174  case kArray: return "/array/";
175  case kRegExp: return "/regexp/";
176  case kHeapNumber: return "/number/";
177  case kNative: return "/native/";
178  case kSynthetic: return "/synthetic/";
179  case kConsString: return "/concatenated string/";
180  case kSlicedString: return "/sliced string/";
181  default: return "???";
182  }
183 }
184 
185 
186 // It is very important to keep objects that form a heap snapshot
187 // as small as possible.
188 namespace { // Avoid littering the global namespace.
189 
190 template <size_t ptr_size> struct SnapshotSizeConstants;
191 
192 template <> struct SnapshotSizeConstants<4> {
193  static const int kExpectedHeapGraphEdgeSize = 12;
194  static const int kExpectedHeapEntrySize = 28;
195 };
196 
197 template <> struct SnapshotSizeConstants<8> {
198  static const int kExpectedHeapGraphEdgeSize = 24;
199  static const int kExpectedHeapEntrySize = 40;
200 };
201 
202 } // namespace
203 
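// Note: the kExpectedHeapGraphEdgeSize / kExpectedHeapEntrySize constants above
// pin the per-edge and per-node memory cost for 32-bit and 64-bit builds; the
// STATIC_CHECKs in the HeapSnapshot constructor below break the build if either
// struct accidentally grows.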
204 
205 HeapSnapshot::HeapSnapshot(HeapProfiler* profiler,
206  const char* title,
207  unsigned uid)
208  : profiler_(profiler),
209  title_(title),
210  uid_(uid),
211  root_index_(HeapEntry::kNoEntry),
212  gc_roots_index_(HeapEntry::kNoEntry),
213  natives_root_index_(HeapEntry::kNoEntry),
214  max_snapshot_js_object_id_(0) {
215  STATIC_CHECK(
216  sizeof(HeapGraphEdge) ==
217  SnapshotSizeConstants<kPointerSize>::kExpectedHeapGraphEdgeSize);
218  STATIC_CHECK(
219  sizeof(HeapEntry) ==
220  SnapshotSizeConstants<kPointerSize>::kExpectedHeapEntrySize);
221  USE(SnapshotSizeConstants<4>::kExpectedHeapGraphEdgeSize);
222  USE(SnapshotSizeConstants<4>::kExpectedHeapEntrySize);
223  USE(SnapshotSizeConstants<8>::kExpectedHeapGraphEdgeSize);
224  USE(SnapshotSizeConstants<8>::kExpectedHeapEntrySize);
225  for (int i = 0; i < VisitorSynchronization::kNumberOfSyncTags; ++i) {
226  gc_subroot_indexes_[i] = HeapEntry::kNoEntry;
227  }
228 }
229 
230 
231 void HeapSnapshot::Delete() {
232  profiler_->RemoveSnapshot(this);
233  delete this;
234 }
235 
236 
237 void HeapSnapshot::RememberLastJSObjectId() {
238  max_snapshot_js_object_id_ = profiler_->heap_object_map()->last_assigned_id();
239 }
240 
241 
242 HeapEntry* HeapSnapshot::AddRootEntry() {
243  ASSERT(root_index_ == HeapEntry::kNoEntry);
244  ASSERT(entries_.is_empty()); // Root entry must be the first one.
245  HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
246  "",
247  HeapObjectsMap::kInternalRootObjectId,
248  0,
249  0);
250  root_index_ = entry->index();
251  ASSERT(root_index_ == 0);
252  return entry;
253 }
254 
255 
256 HeapEntry* HeapSnapshot::AddGcRootsEntry() {
257  ASSERT(gc_roots_index_ == HeapEntry::kNoEntry);
258  HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
259  "(GC roots)",
260  HeapObjectsMap::kGcRootsObjectId,
261  0,
262  0);
263  gc_roots_index_ = entry->index();
264  return entry;
265 }
266 
267 
268 HeapEntry* HeapSnapshot::AddGcSubrootEntry(int tag) {
269  ASSERT(gc_subroot_indexes_[tag] == HeapEntry::kNoEntry);
270  ASSERT(0 <= tag && tag < VisitorSynchronization::kNumberOfSyncTags);
271  HeapEntry* entry = AddEntry(
272  HeapEntry::kSynthetic,
273  VisitorSynchronization::kTagNames[tag],
274  HeapObjectsMap::GetNthGcSubrootId(tag),
275  0,
276  0);
277  gc_subroot_indexes_[tag] = entry->index();
278  return entry;
279 }
280 
281 
282 HeapEntry* HeapSnapshot::AddEntry(HeapEntry::Type type,
283  const char* name,
284  SnapshotObjectId id,
285  size_t size,
286  unsigned trace_node_id) {
287  HeapEntry entry(this, type, name, id, size, trace_node_id);
288  entries_.Add(entry);
289  return &entries_.last();
290 }
291 
292 
293 void HeapSnapshot::FillChildren() {
294  ASSERT(children().is_empty());
295  children().Allocate(edges().length());
296  int children_index = 0;
297  for (int i = 0; i < entries().length(); ++i) {
298  HeapEntry* entry = &entries()[i];
299  children_index = entry->set_children_index(children_index);
300  }
301  ASSERT(edges().length() == children_index);
302  for (int i = 0; i < edges().length(); ++i) {
303  HeapGraphEdge* edge = &edges()[i];
304  edge->ReplaceToIndexWithEntry(this);
305  edge->from()->add_child(edge);
306  }
307 }
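// Note: FillChildren wires up the edge lists in two passes: every entry first
// reserves a contiguous slice of the children() vector via set_children_index(),
// then each edge resolves its to_index_ into a HeapEntry pointer and is appended
// to its parent's slice through add_child().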
308 
309 
310 class FindEntryById {
311  public:
312  explicit FindEntryById(SnapshotObjectId id) : id_(id) { }
313  int operator()(HeapEntry* const* entry) {
314  if ((*entry)->id() == id_) return 0;
315  return (*entry)->id() < id_ ? -1 : 1;
316  }
317  private:
318  SnapshotObjectId id_;
319 };
320 
321 
322 HeapEntry* HeapSnapshot::GetEntryById(SnapshotObjectId id) {
323  List<HeapEntry*>* entries_by_id = GetSortedEntriesList();
324  // Perform a binary search by id.
325  int index = SortedListBSearch(*entries_by_id, FindEntryById(id));
326  if (index == -1)
327  return NULL;
328  return entries_by_id->at(index);
329 }
330 
331 
332 template<class T>
333 static int SortByIds(const T* entry1_ptr,
334  const T* entry2_ptr) {
335  if ((*entry1_ptr)->id() == (*entry2_ptr)->id()) return 0;
336  return (*entry1_ptr)->id() < (*entry2_ptr)->id() ? -1 : 1;
337 }
338 
339 
340 List<HeapEntry*>* HeapSnapshot::GetSortedEntriesList() {
341  if (sorted_entries_.is_empty()) {
342  sorted_entries_.Allocate(entries_.length());
343  for (int i = 0; i < entries_.length(); ++i) {
344  sorted_entries_[i] = &entries_[i];
345  }
346  sorted_entries_.Sort(SortByIds);
347  }
348  return &sorted_entries_;
349 }
350 
351 
352 void HeapSnapshot::Print(int max_depth) {
353  root()->Print("", "", max_depth, 0);
354 }
355 
356 
357 size_t HeapSnapshot::RawSnapshotSize() const {
358  return
359  sizeof(*this) +
360  GetMemoryUsedByList(entries_) +
361  GetMemoryUsedByList(edges_) +
362  GetMemoryUsedByList(children_) +
363  GetMemoryUsedByList(sorted_entries_);
364 }
365 
366 
367 // We split IDs on evens for embedder objects (see
368 // HeapObjectsMap::GenerateId) and odds for native objects.
369 const SnapshotObjectId HeapObjectsMap::kInternalRootObjectId = 1;
370 const SnapshotObjectId HeapObjectsMap::kGcRootsObjectId =
371  HeapObjectsMap::kInternalRootObjectId + HeapObjectsMap::kObjectIdStep;
372 const SnapshotObjectId HeapObjectsMap::kGcRootsFirstSubrootId =
373  HeapObjectsMap::kGcRootsObjectId + HeapObjectsMap::kObjectIdStep;
374 const SnapshotObjectId HeapObjectsMap::kFirstAvailableObjectId =
375  HeapObjectsMap::kGcRootsFirstSubrootId +
376  VisitorSynchronization::kNumberOfSyncTags * HeapObjectsMap::kObjectIdStep;
377 
378 
379 static bool AddressesMatch(void* key1, void* key2) {
380  return key1 == key2;
381 }
382 
383 
384 HeapObjectsMap::HeapObjectsMap(Heap* heap)
385  : next_id_(kFirstAvailableObjectId),
386  entries_map_(AddressesMatch),
387  heap_(heap) {
388  // This dummy element solves a problem with entries_map_.
389  // When we do a lookup in the HashMap we see no difference between two
390  // cases: the map has an entry with NULL as the value, or it has just
391  // created a new entry on the fly with NULL as the default value.
392  // With such a dummy element we have a guarantee that all entries_map_
393  // entries will have a value field greater than 0.
394  // This fact is used in the MoveObject method.
395  entries_.Add(EntryInfo(0, NULL, 0));
396 }
397 
398 
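// Note: MoveObject is called when the GC relocates a tracked object; it re-keys
// the address -> EntryInfo mapping so the object keeps its snapshot id, updates
// the recorded size, and clears any stale entry already recorded at the target
// address.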
399 bool HeapObjectsMap::MoveObject(Address from, Address to, int object_size) {
400  ASSERT(to != NULL);
401  ASSERT(from != NULL);
402  if (from == to) return false;
403  void* from_value = entries_map_.Remove(from, ComputePointerHash(from));
404  if (from_value == NULL) {
405  // It may occur that some untracked object moves to an address X and there
406  // is a tracked object at that address. In this case we should remove the
407  // entry as we know that the object has died.
408  void* to_value = entries_map_.Remove(to, ComputePointerHash(to));
409  if (to_value != NULL) {
410  int to_entry_info_index =
411  static_cast<int>(reinterpret_cast<intptr_t>(to_value));
412  entries_.at(to_entry_info_index).addr = NULL;
413  }
414  } else {
415  HashMap::Entry* to_entry = entries_map_.Lookup(to, ComputePointerHash(to),
416  true);
417  if (to_entry->value != NULL) {
418  // We found the existing entry with the 'to' address for an old object.
419  // Without this operation we would have two EntryInfo records with the
420  // same value in the addr field. That is bad, because later, in
421  // RemoveDeadEntries, one of these entries would be removed together with
422  // the corresponding entries_map_ entry.
423  int to_entry_info_index =
424  static_cast<int>(reinterpret_cast<intptr_t>(to_entry->value));
425  entries_.at(to_entry_info_index).addr = NULL;
426  }
427  int from_entry_info_index =
428  static_cast<int>(reinterpret_cast<intptr_t>(from_value));
429  entries_.at(from_entry_info_index).addr = to;
430  // Size of an object can change during its life, so to keep information
431  // about the object in entries_ consistent, we have to adjust size when the
432  // object is migrated.
433  if (FLAG_heap_profiler_trace_objects) {
434  PrintF("Move object from %p to %p old size %6d new size %6d\n",
435  from,
436  to,
437  entries_.at(from_entry_info_index).size,
438  object_size);
439  }
440  entries_.at(from_entry_info_index).size = object_size;
441  to_entry->value = from_value;
442  }
443  return from_value != NULL;
444 }
445 
446 
447 void HeapObjectsMap::UpdateObjectSize(Address addr, int size) {
448  FindOrAddEntry(addr, size, false);
449 }
450 
451 
452 SnapshotObjectId HeapObjectsMap::FindEntry(Address addr) {
453  HashMap::Entry* entry = entries_map_.Lookup(addr, ComputePointerHash(addr),
454  false);
455  if (entry == NULL) return 0;
456  int entry_index = static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
457  EntryInfo& entry_info = entries_.at(entry_index);
458  ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
459  return entry_info.id;
460 }
461 
462 
463 SnapshotObjectId HeapObjectsMap::FindOrAddEntry(Address addr,
464  unsigned int size,
465  bool accessed) {
466  ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
467  HashMap::Entry* entry = entries_map_.Lookup(addr, ComputePointerHash(addr),
468  true);
469  if (entry->value != NULL) {
470  int entry_index =
471  static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
472  EntryInfo& entry_info = entries_.at(entry_index);
473  entry_info.accessed = accessed;
474  if (FLAG_heap_profiler_trace_objects) {
475  PrintF("Update object size : %p with old size %d and new size %d\n",
476  addr,
477  entry_info.size,
478  size);
479  }
480  entry_info.size = size;
481  return entry_info.id;
482  }
483  entry->value = reinterpret_cast<void*>(entries_.length());
484  SnapshotObjectId id = next_id_;
485  next_id_ += kObjectIdStep;
486  entries_.Add(EntryInfo(id, addr, size, accessed));
487  ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
488  return id;
489 }
490 
491 
492 void HeapObjectsMap::StopHeapObjectsTracking() {
493  time_intervals_.Clear();
494 }
495 
496 
497 void HeapObjectsMap::UpdateHeapObjectsMap() {
498  if (FLAG_heap_profiler_trace_objects) {
499  PrintF("Begin HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
500  entries_map_.occupancy());
501  }
502  heap_->CollectAllGarbage(Heap::kMakeHeapIterableMask,
503  "HeapObjectsMap::UpdateHeapObjectsMap");
504  HeapIterator iterator(heap_);
505  for (HeapObject* obj = iterator.next();
506  obj != NULL;
507  obj = iterator.next()) {
508  FindOrAddEntry(obj->address(), obj->Size());
509  if (FLAG_heap_profiler_trace_objects) {
510  PrintF("Update object : %p %6d. Next address is %p\n",
511  obj->address(),
512  obj->Size(),
513  obj->address() + obj->Size());
514  }
515  }
516  RemoveDeadEntries();
517  if (FLAG_heap_profiler_trace_objects) {
518  PrintF("End HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
519  entries_map_.occupancy());
520  }
521 }
522 
523 
524 namespace {
525 
526 
527 struct HeapObjectInfo {
528  HeapObjectInfo(HeapObject* obj, int expected_size)
529  : obj(obj),
530  expected_size(expected_size) {
531  }
532 
533  HeapObject* obj;
534  int expected_size;
535 
536  bool IsValid() const { return expected_size == obj->Size(); }
537 
538  void Print() const {
539  if (expected_size == 0) {
540  PrintF("Untracked object : %p %6d. Next address is %p\n",
541  obj->address(),
542  obj->Size(),
543  obj->address() + obj->Size());
544  } else if (obj->Size() != expected_size) {
545  PrintF("Wrong size %6d: %p %6d. Next address is %p\n",
546  expected_size,
547  obj->address(),
548  obj->Size(),
549  obj->address() + obj->Size());
550  } else {
551  PrintF("Good object : %p %6d. Next address is %p\n",
552  obj->address(),
553  obj->Size(),
554  obj->address() + obj->Size());
555  }
556  }
557 };
558 
559 
560 static int comparator(const HeapObjectInfo* a, const HeapObjectInfo* b) {
561  if (a->obj < b->obj) return -1;
562  if (a->obj > b->obj) return 1;
563  return 0;
564 }
565 
566 
567 } // namespace
568 
569 
570 int HeapObjectsMap::FindUntrackedObjects() {
571  List<HeapObjectInfo> heap_objects(1000);
572 
573  HeapIterator iterator(heap_);
574  int untracked = 0;
575  for (HeapObject* obj = iterator.next();
576  obj != NULL;
577  obj = iterator.next()) {
578  HashMap::Entry* entry = entries_map_.Lookup(
579  obj->address(), ComputePointerHash(obj->address()), false);
580  if (entry == NULL) {
581  ++untracked;
582  if (FLAG_heap_profiler_trace_objects) {
583  heap_objects.Add(HeapObjectInfo(obj, 0));
584  }
585  } else {
586  int entry_index = static_cast<int>(
587  reinterpret_cast<intptr_t>(entry->value));
588  EntryInfo& entry_info = entries_.at(entry_index);
589  if (FLAG_heap_profiler_trace_objects) {
590  heap_objects.Add(HeapObjectInfo(obj,
591  static_cast<int>(entry_info.size)));
592  if (obj->Size() != static_cast<int>(entry_info.size))
593  ++untracked;
594  } else {
595  CHECK_EQ(obj->Size(), static_cast<int>(entry_info.size));
596  }
597  }
598  }
599  if (FLAG_heap_profiler_trace_objects) {
600  PrintF("\nBegin HeapObjectsMap::FindUntrackedObjects. %d entries in map.\n",
601  entries_map_.occupancy());
602  heap_objects.Sort(comparator);
603  int last_printed_object = -1;
604  bool print_next_object = false;
605  for (int i = 0; i < heap_objects.length(); ++i) {
606  const HeapObjectInfo& object_info = heap_objects[i];
607  if (!object_info.IsValid()) {
608  ++untracked;
609  if (last_printed_object != i - 1) {
610  if (i > 0) {
611  PrintF("%d objects were skipped\n", i - 1 - last_printed_object);
612  heap_objects[i - 1].Print();
613  }
614  }
615  object_info.Print();
616  last_printed_object = i;
617  print_next_object = true;
618  } else if (print_next_object) {
619  object_info.Print();
620  print_next_object = false;
621  last_printed_object = i;
622  }
623  }
624  if (last_printed_object < heap_objects.length() - 1) {
625  PrintF("Last %d objects were skipped\n",
626  heap_objects.length() - 1 - last_printed_object);
627  }
628  PrintF("End HeapObjectsMap::FindUntrackedObjects. %d entries in map.\n\n",
629  entries_map_.occupancy());
630  }
631  return untracked;
632 }
633 
634 
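// Note: PushHeapObjectsStats rescans the heap and then streams one
// v8::HeapStatsUpdate(index, count, size) record per time interval whose object
// count or total size changed, respecting the OutputStream's preferred chunk
// size and abort result.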
635 SnapshotObjectId HeapObjectsMap::PushHeapObjectsStats(OutputStream* stream) {
636  UpdateHeapObjectsMap();
637  time_intervals_.Add(TimeInterval(next_id_));
638  int prefered_chunk_size = stream->GetChunkSize();
639  List<v8::HeapStatsUpdate> stats_buffer;
640  ASSERT(!entries_.is_empty());
641  EntryInfo* entry_info = &entries_.first();
642  EntryInfo* end_entry_info = &entries_.last() + 1;
643  for (int time_interval_index = 0;
644  time_interval_index < time_intervals_.length();
645  ++time_interval_index) {
646  TimeInterval& time_interval = time_intervals_[time_interval_index];
647  SnapshotObjectId time_interval_id = time_interval.id;
648  uint32_t entries_size = 0;
649  EntryInfo* start_entry_info = entry_info;
650  while (entry_info < end_entry_info && entry_info->id < time_interval_id) {
651  entries_size += entry_info->size;
652  ++entry_info;
653  }
654  uint32_t entries_count =
655  static_cast<uint32_t>(entry_info - start_entry_info);
656  if (time_interval.count != entries_count ||
657  time_interval.size != entries_size) {
658  stats_buffer.Add(v8::HeapStatsUpdate(
659  time_interval_index,
660  time_interval.count = entries_count,
661  time_interval.size = entries_size));
662  if (stats_buffer.length() >= prefered_chunk_size) {
663  OutputStream::WriteResult result = stream->WriteHeapStatsUpdate(
664  &stats_buffer.first(), stats_buffer.length());
665  if (result == OutputStream::kAbort) return last_assigned_id();
666  stats_buffer.Clear();
667  }
668  }
669  }
670  ASSERT(entry_info == end_entry_info);
671  if (!stats_buffer.is_empty()) {
672  OutputStream::WriteResult result = stream->WriteHeapStatsUpdate(
673  &stats_buffer.first(), stats_buffer.length());
674  if (result == OutputStream::kAbort) return last_assigned_id();
675  }
676  stream->EndOfStream();
677  return last_assigned_id();
678 }
679 
680 
681 void HeapObjectsMap::RemoveDeadEntries() {
682  ASSERT(entries_.length() > 0 &&
683  entries_.at(0).id == 0 &&
684  entries_.at(0).addr == NULL);
685  int first_free_entry = 1;
686  for (int i = 1; i < entries_.length(); ++i) {
687  EntryInfo& entry_info = entries_.at(i);
688  if (entry_info.accessed) {
689  if (first_free_entry != i) {
690  entries_.at(first_free_entry) = entry_info;
691  }
692  entries_.at(first_free_entry).accessed = false;
693  HashMap::Entry* entry = entries_map_.Lookup(
694  entry_info.addr, ComputePointerHash(entry_info.addr), false);
695  ASSERT(entry);
696  entry->value = reinterpret_cast<void*>(first_free_entry);
697  ++first_free_entry;
698  } else {
699  if (entry_info.addr) {
700  entries_map_.Remove(entry_info.addr,
701  ComputePointerHash(entry_info.addr));
702  }
703  }
704  }
705  entries_.Rewind(first_free_entry);
706  ASSERT(static_cast<uint32_t>(entries_.length()) - 1 ==
707  entries_map_.occupancy());
708 }
709 
710 
711 SnapshotObjectId HeapObjectsMap::GenerateId(v8::RetainedObjectInfo* info) {
712  SnapshotObjectId id = static_cast<SnapshotObjectId>(info->GetHash());
713  const char* label = info->GetLabel();
714  id ^= StringHasher::HashSequentialString(label,
715  static_cast<int>(strlen(label)),
716  heap_->HashSeed());
717  intptr_t element_count = info->GetElementCount();
718  if (element_count != -1)
719  id ^= ComputeIntegerHash(static_cast<uint32_t>(element_count),
720  v8::internal::kZeroHashSeed);
721  return id << 1;
722 }
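// Note: ids handed out by FindOrAddEntry above advance from
// kFirstAvailableObjectId in steps of kObjectIdStep, while GenerateId() returns
// 'id << 1' for embedder-supplied RetainedObjectInfo objects, matching the
// even/odd split described in the comment preceding the id constants.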
723 
724 
725 size_t HeapObjectsMap::GetUsedMemorySize() const {
726  return
727  sizeof(*this) +
728  sizeof(HashMap::Entry) * entries_map_.capacity() +
729  GetMemoryUsedByList(entries_) +
730  GetMemoryUsedByList(time_intervals_);
731 }
732 
733 
734 HeapEntriesMap::HeapEntriesMap()
735  : entries_(HeapThingsMatch) {
736 }
737 
738 
739 int HeapEntriesMap::Map(HeapThing thing) {
740  HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), false);
741  if (cache_entry == NULL) return HeapEntry::kNoEntry;
742  return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
743 }
744 
745 
746 void HeapEntriesMap::Pair(HeapThing thing, int entry) {
747  HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), true);
748  ASSERT(cache_entry->value == NULL);
749  cache_entry->value = reinterpret_cast<void*>(static_cast<intptr_t>(entry));
750 }
751 
752 
753 HeapObjectsSet::HeapObjectsSet()
754  : entries_(HeapEntriesMap::HeapThingsMatch) {
755 }
756 
757 
758 void HeapObjectsSet::Clear() {
759  entries_.Clear();
760 }
761 
762 
763 bool HeapObjectsSet::Contains(Object* obj) {
764  if (!obj->IsHeapObject()) return false;
765  HeapObject* object = HeapObject::cast(obj);
766  return entries_.Lookup(object, HeapEntriesMap::Hash(object), false) != NULL;
767 }
768 
769 
770 void HeapObjectsSet::Insert(Object* obj) {
771  if (!obj->IsHeapObject()) return;
772  HeapObject* object = HeapObject::cast(obj);
773  entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
774 }
775 
776 
777 const char* HeapObjectsSet::GetTag(Object* obj) {
778  HeapObject* object = HeapObject::cast(obj);
779  HashMap::Entry* cache_entry =
780  entries_.Lookup(object, HeapEntriesMap::Hash(object), false);
781  return cache_entry != NULL
782  ? reinterpret_cast<const char*>(cache_entry->value)
783  : NULL;
784 }
785 
786 
787 void HeapObjectsSet::SetTag(Object* obj, const char* tag) {
788  if (!obj->IsHeapObject()) return;
789  HeapObject* object = HeapObject::cast(obj);
790  HashMap::Entry* cache_entry =
791  entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
792  cache_entry->value = const_cast<char*>(tag);
793 }
794 
795 
796 HeapObject* const V8HeapExplorer::kInternalRootObject =
797  reinterpret_cast<HeapObject*>(
798  static_cast<intptr_t>(HeapObjectsMap::kInternalRootObjectId));
799 HeapObject* const V8HeapExplorer::kGcRootsObject =
800  reinterpret_cast<HeapObject*>(
801  static_cast<intptr_t>(HeapObjectsMap::kGcRootsObjectId));
802 HeapObject* const V8HeapExplorer::kFirstGcSubrootObject =
803  reinterpret_cast<HeapObject*>(
804  static_cast<intptr_t>(HeapObjectsMap::kGcRootsFirstSubrootId));
805 HeapObject* const V8HeapExplorer::kLastGcSubrootObject =
806  reinterpret_cast<HeapObject*>(
807  static_cast<intptr_t>(HeapObjectsMap::kFirstAvailableObjectId));
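// Note: the four constants above are not addresses of live heap objects; they
// are sentinel pointers forged from reserved snapshot ids so that the root,
// "(GC roots)" and per-tag GC subroot pseudo-entries can flow through the same
// HeapThing-based plumbing (AddRootEntries, SnapshotFiller) as real objects.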
808 
809 
810 V8HeapExplorer::V8HeapExplorer(
811  HeapSnapshot* snapshot,
812  SnapshottingProgressReportingInterface* progress,
813  v8::HeapProfiler::ObjectNameResolver* resolver)
814  : heap_(snapshot->profiler()->heap_object_map()->heap()),
815  snapshot_(snapshot),
816  names_(snapshot_->profiler()->names()),
817  heap_object_map_(snapshot_->profiler()->heap_object_map()),
818  progress_(progress),
819  filler_(NULL),
820  global_object_name_resolver_(resolver) {
821 }
822 
823 
824 V8HeapExplorer::~V8HeapExplorer() {
825 }
826 
827 
828 HeapEntry* V8HeapExplorer::AllocateEntry(HeapThing ptr) {
829  return AddEntry(reinterpret_cast<HeapObject*>(ptr));
830 }
831 
832 
833 HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object) {
834  if (object == kInternalRootObject) {
835  snapshot_->AddRootEntry();
836  return snapshot_->root();
837  } else if (object == kGcRootsObject) {
838  HeapEntry* entry = snapshot_->AddGcRootsEntry();
839  return entry;
840  } else if (object >= kFirstGcSubrootObject && object < kLastGcSubrootObject) {
841  HeapEntry* entry = snapshot_->AddGcSubrootEntry(GetGcSubrootOrder(object));
842  return entry;
843  } else if (object->IsJSFunction()) {
844  JSFunction* func = JSFunction::cast(object);
845  SharedFunctionInfo* shared = func->shared();
846  const char* name = shared->bound() ? "native_bind" :
847  names_->GetName(String::cast(shared->name()));
848  return AddEntry(object, HeapEntry::kClosure, name);
849  } else if (object->IsJSRegExp()) {
850  JSRegExp* re = JSRegExp::cast(object);
851  return AddEntry(object,
852  HeapEntry::kRegExp,
853  names_->GetName(re->Pattern()));
854  } else if (object->IsJSObject()) {
855  const char* name = names_->GetName(
856  GetConstructorName(JSObject::cast(object)));
857  if (object->IsJSGlobalObject()) {
858  const char* tag = objects_tags_.GetTag(object);
859  if (tag != NULL) {
860  name = names_->GetFormatted("%s / %s", name, tag);
861  }
862  }
863  return AddEntry(object, HeapEntry::kObject, name);
864  } else if (object->IsString()) {
865  String* string = String::cast(object);
866  if (string->IsConsString())
867  return AddEntry(object,
868  HeapEntry::kConsString,
869  "(concatenated string)");
870  if (string->IsSlicedString())
871  return AddEntry(object,
872  HeapEntry::kSlicedString,
873  "(sliced string)");
874  return AddEntry(object,
875  HeapEntry::kString,
876  names_->GetName(String::cast(object)));
877  } else if (object->IsCode()) {
878  return AddEntry(object, HeapEntry::kCode, "");
879  } else if (object->IsSharedFunctionInfo()) {
880  String* name = String::cast(SharedFunctionInfo::cast(object)->name());
881  return AddEntry(object,
882  HeapEntry::kCode,
883  names_->GetName(name));
884  } else if (object->IsScript()) {
885  Object* name = Script::cast(object)->name();
886  return AddEntry(object,
887  HeapEntry::kCode,
888  name->IsString()
889  ? names_->GetName(String::cast(name))
890  : "");
891  } else if (object->IsNativeContext()) {
892  return AddEntry(object, HeapEntry::kHidden, "system / NativeContext");
893  } else if (object->IsContext()) {
894  return AddEntry(object, HeapEntry::kObject, "system / Context");
895  } else if (object->IsFixedArray() ||
896  object->IsFixedDoubleArray() ||
897  object->IsByteArray() ||
898  object->IsExternalArray()) {
899  return AddEntry(object, HeapEntry::kArray, "");
900  } else if (object->IsHeapNumber()) {
901  return AddEntry(object, HeapEntry::kHeapNumber, "number");
902  }
903  return AddEntry(object, HeapEntry::kHidden, GetSystemEntryName(object));
904 }
905 
906 
907 HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object,
908  HeapEntry::Type type,
909  const char* name) {
910  return AddEntry(object->address(), type, name, object->Size());
911 }
912 
913 
914 HeapEntry* V8HeapExplorer::AddEntry(Address address,
915  HeapEntry::Type type,
916  const char* name,
917  size_t size) {
918  SnapshotObjectId object_id = heap_object_map_->FindOrAddEntry(
919  address, static_cast<unsigned int>(size));
920  unsigned trace_node_id = 0;
921  if (AllocationTracker* allocation_tracker =
922  snapshot_->profiler()->allocation_tracker()) {
923  trace_node_id =
924  allocation_tracker->address_to_trace()->GetTraceNodeId(address);
925  }
926  return snapshot_->AddEntry(type, name, object_id, size, trace_node_id);
927 }
928 
929 
930 class SnapshotFiller {
931  public:
932  explicit SnapshotFiller(HeapSnapshot* snapshot, HeapEntriesMap* entries)
933  : snapshot_(snapshot),
934  names_(snapshot->profiler()->names()),
935  entries_(entries) { }
936  HeapEntry* AddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
937  HeapEntry* entry = allocator->AllocateEntry(ptr);
938  entries_->Pair(ptr, entry->index());
939  return entry;
940  }
941  HeapEntry* FindEntry(HeapThing ptr) {
942  int index = entries_->Map(ptr);
943  return index != HeapEntry::kNoEntry ? &snapshot_->entries()[index] : NULL;
944  }
945  HeapEntry* FindOrAddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
946  HeapEntry* entry = FindEntry(ptr);
947  return entry != NULL ? entry : AddEntry(ptr, allocator);
948  }
949  void SetIndexedReference(HeapGraphEdge::Type type,
950  int parent,
951  int index,
952  HeapEntry* child_entry) {
953  HeapEntry* parent_entry = &snapshot_->entries()[parent];
954  parent_entry->SetIndexedReference(type, index, child_entry);
955  }
956  void SetIndexedAutoIndexReference(HeapGraphEdge::Type type,
957  int parent,
958  HeapEntry* child_entry) {
959  HeapEntry* parent_entry = &snapshot_->entries()[parent];
960  int index = parent_entry->children_count() + 1;
961  parent_entry->SetIndexedReference(type, index, child_entry);
962  }
963  void SetNamedReference(HeapGraphEdge::Type type,
964  int parent,
965  const char* reference_name,
966  HeapEntry* child_entry) {
967  HeapEntry* parent_entry = &snapshot_->entries()[parent];
968  parent_entry->SetNamedReference(type, reference_name, child_entry);
969  }
970  void SetNamedAutoIndexReference(HeapGraphEdge::Type type,
971  int parent,
972  HeapEntry* child_entry) {
973  HeapEntry* parent_entry = &snapshot_->entries()[parent];
974  int index = parent_entry->children_count() + 1;
975  parent_entry->SetNamedReference(
976  type,
977  names_->GetName(index),
978  child_entry);
979  }
980 
981  private:
982  HeapSnapshot* snapshot_;
983  StringsStorage* names_;
984  HeapEntriesMap* entries_;
985 };
986 
987 
988 class GcSubrootsEnumerator : public ObjectVisitor {
989  public:
990  GcSubrootsEnumerator(
991  SnapshotFiller* filler, V8HeapExplorer* explorer)
992  : filler_(filler),
993  explorer_(explorer),
994  previous_object_count_(0),
995  object_count_(0) {
996  }
997  void VisitPointers(Object** start, Object** end) {
998  object_count_ += end - start;
999  }
1000  void Synchronize(VisitorSynchronization::SyncTag tag) {
1001  // Skip empty subroots.
1002  if (previous_object_count_ != object_count_) {
1003  previous_object_count_ = object_count_;
1004  filler_->AddEntry(V8HeapExplorer::GetNthGcSubrootObject(tag), explorer_);
1005  }
1006  }
1007  private:
1008  SnapshotFiller* filler_;
1009  V8HeapExplorer* explorer_;
1010  intptr_t previous_object_count_;
1011  intptr_t object_count_;
1012 };
1013 
1014 
1015 void V8HeapExplorer::AddRootEntries(SnapshotFiller* filler) {
1016  filler->AddEntry(kInternalRootObject, this);
1017  filler->AddEntry(kGcRootsObject, this);
1018  GcSubrootsEnumerator enumerator(filler, this);
1019  heap_->IterateRoots(&enumerator, VISIT_ALL);
1020 }
1021 
1022 
1023 const char* V8HeapExplorer::GetSystemEntryName(HeapObject* object) {
1024  switch (object->map()->instance_type()) {
1025  case MAP_TYPE:
1026  switch (Map::cast(object)->instance_type()) {
1027 #define MAKE_STRING_MAP_CASE(instance_type, size, name, Name) \
1028  case instance_type: return "system / Map (" #Name ")";
1029  STRING_TYPE_LIST(MAKE_STRING_MAP_CASE)
1030 #undef MAKE_STRING_MAP_CASE
1031  default: return "system / Map";
1032  }
1033  case CELL_TYPE: return "system / Cell";
1034  case PROPERTY_CELL_TYPE: return "system / PropertyCell";
1035  case FOREIGN_TYPE: return "system / Foreign";
1036  case ODDBALL_TYPE: return "system / Oddball";
1037 #define MAKE_STRUCT_CASE(NAME, Name, name) \
1038  case NAME##_TYPE: return "system / "#Name;
1039  STRUCT_LIST(MAKE_STRUCT_CASE)
1040 #undef MAKE_STRUCT_CASE
1041  default: return "system";
1042  }
1043 }
1044 
1045 
1046 int V8HeapExplorer::EstimateObjectsCount(HeapIterator* iterator) {
1047  int objects_count = 0;
1048  for (HeapObject* obj = iterator->next();
1049  obj != NULL;
1050  obj = iterator->next()) {
1051  objects_count++;
1052  }
1053  return objects_count;
1054 }
1055 
1056 
1057 class IndexedReferencesExtractor : public ObjectVisitor {
1058  public:
1059  IndexedReferencesExtractor(V8HeapExplorer* generator,
1060  HeapObject* parent_obj,
1061  int parent)
1062  : generator_(generator),
1063  parent_obj_(parent_obj),
1064  parent_(parent),
1065  next_index_(0) {
1066  }
1067  void VisitCodeEntry(Address entry_address) {
1068  Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
1069  generator_->SetInternalReference(parent_obj_, parent_, "code", code);
1070  generator_->TagCodeObject(code);
1071  }
1072  void VisitPointers(Object** start, Object** end) {
1073  for (Object** p = start; p < end; p++) {
1074  ++next_index_;
1075  if (CheckVisitedAndUnmark(p)) continue;
1076  generator_->SetHiddenReference(parent_obj_, parent_, next_index_, *p);
1077  }
1078  }
1079  static void MarkVisitedField(HeapObject* obj, int offset) {
1080  if (offset < 0) return;
1081  Address field = obj->address() + offset;
1082  ASSERT(!Memory::Object_at(field)->IsFailure());
1083  ASSERT(Memory::Object_at(field)->IsHeapObject());
1084  *field |= kFailureTag;
1085  }
1086 
1087  private:
1088  bool CheckVisitedAndUnmark(Object** field) {
1089  if ((*field)->IsFailure()) {
1090  intptr_t untagged = reinterpret_cast<intptr_t>(*field) & ~kFailureTagMask;
1091  *field = reinterpret_cast<Object*>(untagged | kHeapObjectTag);
1092  ASSERT((*field)->IsHeapObject());
1093  return true;
1094  }
1095  return false;
1096  }
1097  V8HeapExplorer* generator_;
1098  HeapObject* parent_obj_;
1099  int parent_;
1100  int next_index_;
1101 };
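// Note: MarkVisitedField / CheckVisitedAndUnmark implement an in-place tagging
// trick: fields whose references were already extracted explicitly get the
// failure tag bit set, and VisitPointers later skips (and untags) them so they
// are not reported a second time as hidden references.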
1102 
1103 
1104 void V8HeapExplorer::ExtractReferences(HeapObject* obj) {
1105  HeapEntry* heap_entry = GetEntry(obj);
1106  if (heap_entry == NULL) return; // No interest in this object.
1107  int entry = heap_entry->index();
1108 
1109  if (obj->IsJSGlobalProxy()) {
1110  ExtractJSGlobalProxyReferences(entry, JSGlobalProxy::cast(obj));
1111  } else if (obj->IsJSArrayBuffer()) {
1112  ExtractJSArrayBufferReferences(entry, JSArrayBuffer::cast(obj));
1113  } else if (obj->IsJSObject()) {
1114  ExtractJSObjectReferences(entry, JSObject::cast(obj));
1115  } else if (obj->IsString()) {
1116  ExtractStringReferences(entry, String::cast(obj));
1117  } else if (obj->IsContext()) {
1118  ExtractContextReferences(entry, Context::cast(obj));
1119  } else if (obj->IsMap()) {
1120  ExtractMapReferences(entry, Map::cast(obj));
1121  } else if (obj->IsSharedFunctionInfo()) {
1122  ExtractSharedFunctionInfoReferences(entry, SharedFunctionInfo::cast(obj));
1123  } else if (obj->IsScript()) {
1124  ExtractScriptReferences(entry, Script::cast(obj));
1125  } else if (obj->IsAccessorPair()) {
1126  ExtractAccessorPairReferences(entry, AccessorPair::cast(obj));
1127  } else if (obj->IsCodeCache()) {
1128  ExtractCodeCacheReferences(entry, CodeCache::cast(obj));
1129  } else if (obj->IsCode()) {
1130  ExtractCodeReferences(entry, Code::cast(obj));
1131  } else if (obj->IsBox()) {
1132  ExtractBoxReferences(entry, Box::cast(obj));
1133  } else if (obj->IsCell()) {
1134  ExtractCellReferences(entry, Cell::cast(obj));
1135  } else if (obj->IsPropertyCell()) {
1136  ExtractPropertyCellReferences(entry, PropertyCell::cast(obj));
1137  } else if (obj->IsAllocationSite()) {
1138  ExtractAllocationSiteReferences(entry, AllocationSite::cast(obj));
1139  }
1140  SetInternalReference(obj, entry, "map", obj->map(), HeapObject::kMapOffset);
1141 
1142  // Extract unvisited fields as hidden references and restore tags
1143  // of visited fields.
1144  IndexedReferencesExtractor refs_extractor(this, obj, entry);
1145  obj->Iterate(&refs_extractor);
1146 }
1147 
1148 
1149 void V8HeapExplorer::ExtractJSGlobalProxyReferences(
1150  int entry, JSGlobalProxy* proxy) {
1151  SetInternalReference(proxy, entry,
1152  "native_context", proxy->native_context(),
1154 }
1155 
1156 
1157 void V8HeapExplorer::ExtractJSObjectReferences(
1158  int entry, JSObject* js_obj) {
1159  HeapObject* obj = js_obj;
1160  ExtractClosureReferences(js_obj, entry);
1161  ExtractPropertyReferences(js_obj, entry);
1162  ExtractElementReferences(js_obj, entry);
1163  ExtractInternalReferences(js_obj, entry);
1164  SetPropertyReference(
1165  obj, entry, heap_->proto_string(), js_obj->GetPrototype());
1166  if (obj->IsJSFunction()) {
1167  JSFunction* js_fun = JSFunction::cast(js_obj);
1168  Object* proto_or_map = js_fun->prototype_or_initial_map();
1169  if (!proto_or_map->IsTheHole()) {
1170  if (!proto_or_map->IsMap()) {
1171  SetPropertyReference(
1172  obj, entry,
1173  heap_->prototype_string(), proto_or_map,
1174  NULL,
1176  } else {
1177  SetPropertyReference(
1178  obj, entry,
1179  heap_->prototype_string(), js_fun->prototype());
1180  SetInternalReference(
1181  obj, entry, "initial_map", proto_or_map,
1183  }
1184  }
1185  SharedFunctionInfo* shared_info = js_fun->shared();
1186  // JSFunction has either bindings or literals and never both.
1187  bool bound = shared_info->bound();
1188  TagObject(js_fun->literals_or_bindings(),
1189  bound ? "(function bindings)" : "(function literals)");
1190  SetInternalReference(js_fun, entry,
1191  bound ? "bindings" : "literals",
1192  js_fun->literals_or_bindings(),
1194  TagObject(shared_info, "(shared function info)");
1195  SetInternalReference(js_fun, entry,
1196  "shared", shared_info,
1198  TagObject(js_fun->context(), "(context)");
1199  SetInternalReference(js_fun, entry,
1200  "context", js_fun->context(),
1202  SetWeakReference(js_fun, entry,
1203  "next_function_link", js_fun->next_function_link(),
1208  == JSFunction::kSize);
1209  } else if (obj->IsGlobalObject()) {
1210  GlobalObject* global_obj = GlobalObject::cast(obj);
1211  SetInternalReference(global_obj, entry,
1212  "builtins", global_obj->builtins(),
1214  SetInternalReference(global_obj, entry,
1215  "native_context", global_obj->native_context(),
1217  SetInternalReference(global_obj, entry,
1218  "global_context", global_obj->global_context(),
1220  SetInternalReference(global_obj, entry,
1221  "global_receiver", global_obj->global_receiver(),
1224  4 * kPointerSize);
1225  } else if (obj->IsJSArrayBufferView()) {
1226  JSArrayBufferView* view = JSArrayBufferView::cast(obj);
1227  SetInternalReference(view, entry, "buffer", view->buffer(),
1229  SetWeakReference(view, entry, "weak_next", view->weak_next(),
1231  }
1232  TagObject(js_obj->properties(), "(object properties)");
1233  SetInternalReference(obj, entry,
1234  "properties", js_obj->properties(),
1236  TagObject(js_obj->elements(), "(object elements)");
1237  SetInternalReference(obj, entry,
1238  "elements", js_obj->elements(),
1240 }
1241 
1242 
1243 void V8HeapExplorer::ExtractStringReferences(int entry, String* string) {
1244  if (string->IsConsString()) {
1245  ConsString* cs = ConsString::cast(string);
1246  SetInternalReference(cs, entry, "first", cs->first(),
1248  SetInternalReference(cs, entry, "second", cs->second(),
1250  } else if (string->IsSlicedString()) {
1251  SlicedString* ss = SlicedString::cast(string);
1252  SetInternalReference(ss, entry, "parent", ss->parent(),
1254  }
1255 }
1256 
1257 
1258 void V8HeapExplorer::ExtractContextReferences(int entry, Context* context) {
1259  if (context == context->declaration_context()) {
1260  ScopeInfo* scope_info = context->closure()->shared()->scope_info();
1261  // Add context allocated locals.
1262  int context_locals = scope_info->ContextLocalCount();
1263  for (int i = 0; i < context_locals; ++i) {
1264  String* local_name = scope_info->ContextLocalName(i);
1265  int idx = Context::MIN_CONTEXT_SLOTS + i;
1266  SetContextReference(context, entry, local_name, context->get(idx),
1268  }
1269  if (scope_info->HasFunctionName()) {
1270  String* name = scope_info->FunctionName();
1271  VariableMode mode;
1272  int idx = scope_info->FunctionContextSlotIndex(name, &mode);
1273  if (idx >= 0) {
1274  SetContextReference(context, entry, name, context->get(idx),
1276  }
1277  }
1278  }
1279 
1280 #define EXTRACT_CONTEXT_FIELD(index, type, name) \
1281  if (Context::index < Context::FIRST_WEAK_SLOT || \
1282  Context::index == Context::MAP_CACHE_INDEX) { \
1283  SetInternalReference(context, entry, #name, context->get(Context::index), \
1284  FixedArray::OffsetOfElementAt(Context::index)); \
1285  } else { \
1286  SetWeakReference(context, entry, #name, context->get(Context::index), \
1287  FixedArray::OffsetOfElementAt(Context::index)); \
1288  }
1289  EXTRACT_CONTEXT_FIELD(CLOSURE_INDEX, JSFunction, closure);
1290  EXTRACT_CONTEXT_FIELD(PREVIOUS_INDEX, Context, previous);
1291  EXTRACT_CONTEXT_FIELD(EXTENSION_INDEX, Object, extension);
1292  EXTRACT_CONTEXT_FIELD(GLOBAL_OBJECT_INDEX, GlobalObject, global);
1293  if (context->IsNativeContext()) {
1294  TagObject(context->jsfunction_result_caches(),
1295  "(context func. result caches)");
1296  TagObject(context->normalized_map_cache(), "(context norm. map cache)");
1297  TagObject(context->runtime_context(), "(runtime context)");
1298  TagObject(context->embedder_data(), "(context data)");
1300  EXTRACT_CONTEXT_FIELD(OPTIMIZED_FUNCTIONS_LIST, unused,
1301  optimized_functions_list);
1302  EXTRACT_CONTEXT_FIELD(OPTIMIZED_CODE_LIST, unused, optimized_code_list);
1303  EXTRACT_CONTEXT_FIELD(DEOPTIMIZED_CODE_LIST, unused, deoptimized_code_list);
1304  EXTRACT_CONTEXT_FIELD(NEXT_CONTEXT_LINK, unused, next_context_link);
1305 #undef EXTRACT_CONTEXT_FIELD
1310  }
1311 }
1312 
1313 
1314 void V8HeapExplorer::ExtractMapReferences(int entry, Map* map) {
1315  if (map->HasTransitionArray()) {
1316  TransitionArray* transitions = map->transitions();
1317  int transitions_entry = GetEntry(transitions)->index();
1318  Object* back_pointer = transitions->back_pointer_storage();
1319  TagObject(back_pointer, "(back pointer)");
1320  SetInternalReference(transitions, transitions_entry,
1321  "back_pointer", back_pointer);
1322  TagObject(transitions, "(transition array)");
1323  SetInternalReference(map, entry,
1324  "transitions", transitions,
1326  } else {
1327  Object* back_pointer = map->GetBackPointer();
1328  TagObject(back_pointer, "(back pointer)");
1329  SetInternalReference(map, entry,
1330  "back_pointer", back_pointer,
1332  }
1333  DescriptorArray* descriptors = map->instance_descriptors();
1334  TagObject(descriptors, "(map descriptors)");
1335  SetInternalReference(map, entry,
1336  "descriptors", descriptors,
1338 
1339  SetInternalReference(map, entry,
1340  "code_cache", map->code_cache(),
1342  SetInternalReference(map, entry,
1343  "prototype", map->prototype(), Map::kPrototypeOffset);
1344  SetInternalReference(map, entry,
1345  "constructor", map->constructor(),
1347  TagObject(map->dependent_code(), "(dependent code)");
1348  SetInternalReference(map, entry,
1349  "dependent_code", map->dependent_code(),
1351 }
1352 
1353 
1354 void V8HeapExplorer::ExtractSharedFunctionInfoReferences(
1355  int entry, SharedFunctionInfo* shared) {
1356  HeapObject* obj = shared;
1357  String* shared_name = shared->DebugName();
1358  const char* name = NULL;
1359  if (shared_name != *heap_->isolate()->factory()->empty_string()) {
1360  name = names_->GetName(shared_name);
1361  TagObject(shared->code(), names_->GetFormatted("(code for %s)", name));
1362  } else {
1363  TagObject(shared->code(), names_->GetFormatted("(%s code)",
1364  Code::Kind2String(shared->code()->kind())));
1365  }
1366 
1367  SetInternalReference(obj, entry,
1368  "name", shared->name(),
1370  SetInternalReference(obj, entry,
1371  "code", shared->code(),
1373  TagObject(shared->scope_info(), "(function scope info)");
1374  SetInternalReference(obj, entry,
1375  "scope_info", shared->scope_info(),
1377  SetInternalReference(obj, entry,
1378  "instance_class_name", shared->instance_class_name(),
1380  SetInternalReference(obj, entry,
1381  "script", shared->script(),
1383  const char* construct_stub_name = name ?
1384  names_->GetFormatted("(construct stub code for %s)", name) :
1385  "(construct stub code)";
1386  TagObject(shared->construct_stub(), construct_stub_name);
1387  SetInternalReference(obj, entry,
1388  "construct_stub", shared->construct_stub(),
1390  SetInternalReference(obj, entry,
1391  "function_data", shared->function_data(),
1393  SetInternalReference(obj, entry,
1394  "debug_info", shared->debug_info(),
1396  SetInternalReference(obj, entry,
1397  "inferred_name", shared->inferred_name(),
1399  SetInternalReference(obj, entry,
1400  "optimized_code_map", shared->optimized_code_map(),
1402  SetWeakReference(obj, entry,
1403  "initial_map", shared->initial_map(),
1405 }
1406 
1407 
1408 void V8HeapExplorer::ExtractScriptReferences(int entry, Script* script) {
1409  HeapObject* obj = script;
1410  SetInternalReference(obj, entry,
1411  "source", script->source(),
1413  SetInternalReference(obj, entry,
1414  "name", script->name(),
1416  SetInternalReference(obj, entry,
1417  "context_data", script->context_data(),
1419  TagObject(script->line_ends(), "(script line ends)");
1420  SetInternalReference(obj, entry,
1421  "line_ends", script->line_ends(),
1423 }
1424 
1425 
1426 void V8HeapExplorer::ExtractAccessorPairReferences(
1427  int entry, AccessorPair* accessors) {
1428  SetInternalReference(accessors, entry, "getter", accessors->getter(),
1430  SetInternalReference(accessors, entry, "setter", accessors->setter(),
1432 }
1433 
1434 
1435 void V8HeapExplorer::ExtractCodeCacheReferences(
1436  int entry, CodeCache* code_cache) {
1437  TagObject(code_cache->default_cache(), "(default code cache)");
1438  SetInternalReference(code_cache, entry,
1439  "default_cache", code_cache->default_cache(),
1441  TagObject(code_cache->normal_type_cache(), "(code type cache)");
1442  SetInternalReference(code_cache, entry,
1443  "type_cache", code_cache->normal_type_cache(),
1445 }
1446 
1447 
1448 void V8HeapExplorer::TagBuiltinCodeObject(Code* code, const char* name) {
1449  TagObject(code, names_->GetFormatted("(%s builtin)", name));
1450 }
1451 
1452 
1453 void V8HeapExplorer::TagCodeObject(Code* code) {
1454  if (code->kind() == Code::STUB) {
1455  TagObject(code, names_->GetFormatted(
1456  "(%s code)", CodeStub::MajorName(
1457  static_cast<CodeStub::Major>(code->major_key()), true)));
1458  }
1459 }
1460 
1461 
1462 void V8HeapExplorer::ExtractCodeReferences(int entry, Code* code) {
1463  TagCodeObject(code);
1464  TagObject(code->relocation_info(), "(code relocation info)");
1465  SetInternalReference(code, entry,
1466  "relocation_info", code->relocation_info(),
1468  SetInternalReference(code, entry,
1469  "handler_table", code->handler_table(),
1471  TagObject(code->deoptimization_data(), "(code deopt data)");
1472  SetInternalReference(code, entry,
1473  "deoptimization_data", code->deoptimization_data(),
1475  if (code->kind() == Code::FUNCTION) {
1476  SetInternalReference(code, entry,
1477  "type_feedback_info", code->type_feedback_info(),
1479  }
1480  SetInternalReference(code, entry,
1481  "gc_metadata", code->gc_metadata(),
1483  SetInternalReference(code, entry,
1484  "constant_pool", code->constant_pool(),
1486  if (code->kind() == Code::OPTIMIZED_FUNCTION) {
1487  SetWeakReference(code, entry,
1488  "next_code_link", code->next_code_link(),
1490  }
1491 }
1492 
1493 
1494 void V8HeapExplorer::ExtractBoxReferences(int entry, Box* box) {
1495  SetInternalReference(box, entry, "value", box->value(), Box::kValueOffset);
1496 }
1497 
1498 
1499 void V8HeapExplorer::ExtractCellReferences(int entry, Cell* cell) {
1500  SetInternalReference(cell, entry, "value", cell->value(), Cell::kValueOffset);
1501 }
1502 
1503 
1504 void V8HeapExplorer::ExtractPropertyCellReferences(int entry,
1505  PropertyCell* cell) {
1506  ExtractCellReferences(entry, cell);
1507  SetInternalReference(cell, entry, "type", cell->type(),
1509  SetInternalReference(cell, entry, "dependent_code", cell->dependent_code(),
1511 }
1512 
1513 
1514 void V8HeapExplorer::ExtractAllocationSiteReferences(int entry,
1515  AllocationSite* site) {
1516  SetInternalReference(site, entry, "transition_info", site->transition_info(),
1518  SetInternalReference(site, entry, "nested_site", site->nested_site(),
1520  SetInternalReference(site, entry, "dependent_code", site->dependent_code(),
1522  // Do not visit weak_next as it is not visited by the StaticVisitor,
1523  // and we're not very interested in weak_next field here.
1526 }
1527 
1528 
1529 class JSArrayBufferDataEntryAllocator : public HeapEntriesAllocator {
1530  public:
1531  JSArrayBufferDataEntryAllocator(size_t size, V8HeapExplorer* explorer)
1532  : size_(size)
1533  , explorer_(explorer) {
1534  }
1535  virtual HeapEntry* AllocateEntry(HeapThing ptr) {
1536  return explorer_->AddEntry(
1537  static_cast<Address>(ptr),
1538  HeapEntry::kNative, "system / JSArrayBufferData", size_);
1539  }
1540  private:
1541  size_t size_;
1542  V8HeapExplorer* explorer_;
1543 };
1544 
1545 
1546 void V8HeapExplorer::ExtractJSArrayBufferReferences(
1547  int entry, JSArrayBuffer* buffer) {
1548  SetWeakReference(buffer, entry, "weak_next", buffer->weak_next(),
1550  SetWeakReference(buffer, entry,
1551  "weak_first_view", buffer->weak_first_view(),
1553  // Setup a reference to a native memory backing_store object.
1554  if (!buffer->backing_store())
1555  return;
1556  size_t data_size = NumberToSize(heap_->isolate(), buffer->byte_length());
1557  JSArrayBufferDataEntryAllocator allocator(data_size, this);
1558  HeapEntry* data_entry =
1559  filler_->FindOrAddEntry(buffer->backing_store(), &allocator);
1560  filler_->SetNamedReference(HeapGraphEdge::kInternal,
1561  entry, "backing_store", data_entry);
1562 }
1563 
1564 
1565 void V8HeapExplorer::ExtractClosureReferences(JSObject* js_obj, int entry) {
1566  if (!js_obj->IsJSFunction()) return;
1567 
1568  JSFunction* func = JSFunction::cast(js_obj);
1569  if (func->shared()->bound()) {
1570  FixedArray* bindings = func->function_bindings();
1571  SetNativeBindReference(js_obj, entry, "bound_this",
1572  bindings->get(JSFunction::kBoundThisIndex));
1573  SetNativeBindReference(js_obj, entry, "bound_function",
1574  bindings->get(JSFunction::kBoundFunctionIndex));
1575  for (int i = JSFunction::kBoundArgumentsStartIndex;
1576  i < bindings->length(); i++) {
1577  const char* reference_name = names_->GetFormatted(
1578  "bound_argument_%d",
1579  i - JSFunction::kBoundArgumentsStartIndex);
1580  SetNativeBindReference(js_obj, entry, reference_name,
1581  bindings->get(i));
1582  }
1583  }
1584 }
1585 
1586 
1587 void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj, int entry) {
1588  if (js_obj->HasFastProperties()) {
1589  DescriptorArray* descs = js_obj->map()->instance_descriptors();
1590  int real_size = js_obj->map()->NumberOfOwnDescriptors();
1591  for (int i = 0; i < real_size; i++) {
1592  switch (descs->GetType(i)) {
1593  case FIELD: {
1594  int index = descs->GetFieldIndex(i);
1595 
1596  Name* k = descs->GetKey(i);
1597  if (index < js_obj->map()->inobject_properties()) {
1598  Object* value = js_obj->InObjectPropertyAt(index);
1599  if (k != heap_->hidden_string()) {
1600  SetPropertyReference(
1601  js_obj, entry,
1602  k, value,
1603  NULL,
1604  js_obj->GetInObjectPropertyOffset(index));
1605  } else {
1606  TagObject(value, "(hidden properties)");
1607  SetInternalReference(
1608  js_obj, entry,
1609  "hidden_properties", value,
1610  js_obj->GetInObjectPropertyOffset(index));
1611  }
1612  } else {
1613  Object* value = js_obj->RawFastPropertyAt(index);
1614  if (k != heap_->hidden_string()) {
1615  SetPropertyReference(js_obj, entry, k, value);
1616  } else {
1617  TagObject(value, "(hidden properties)");
1618  SetInternalReference(js_obj, entry, "hidden_properties", value);
1619  }
1620  }
1621  break;
1622  }
1623  case CONSTANT:
1624  SetPropertyReference(
1625  js_obj, entry,
1626  descs->GetKey(i), descs->GetConstant(i));
1627  break;
1628  case CALLBACKS:
1629  ExtractAccessorPairProperty(
1630  js_obj, entry,
1631  descs->GetKey(i), descs->GetValue(i));
1632  break;
1633  case NORMAL: // only in slow mode
1634  case HANDLER: // only in lookup results, not in descriptors
1635  case INTERCEPTOR: // only in lookup results, not in descriptors
1636  break;
1637  case TRANSITION:
1638  case NONEXISTENT:
1639  UNREACHABLE();
1640  break;
1641  }
1642  }
1643  } else {
1644  NameDictionary* dictionary = js_obj->property_dictionary();
1645  int length = dictionary->Capacity();
1646  for (int i = 0; i < length; ++i) {
1647  Object* k = dictionary->KeyAt(i);
1648  if (dictionary->IsKey(k)) {
1649  Object* target = dictionary->ValueAt(i);
1650  // We assume that global objects can only have slow properties.
1651  Object* value = target->IsPropertyCell()
1652  ? PropertyCell::cast(target)->value()
1653  : target;
1654  if (k == heap_->hidden_string()) {
1655  TagObject(value, "(hidden properties)");
1656  SetInternalReference(js_obj, entry, "hidden_properties", value);
1657  continue;
1658  }
1659  if (ExtractAccessorPairProperty(js_obj, entry, k, value)) continue;
1660  SetPropertyReference(js_obj, entry, String::cast(k), value);
1661  }
1662  }
1663  }
1664 }
1665 
1666 
1667 bool V8HeapExplorer::ExtractAccessorPairProperty(
1668  JSObject* js_obj, int entry, Object* key, Object* callback_obj) {
1669  if (!callback_obj->IsAccessorPair()) return false;
1670  AccessorPair* accessors = AccessorPair::cast(callback_obj);
1671  Object* getter = accessors->getter();
1672  if (!getter->IsOddball()) {
1673  SetPropertyReference(js_obj, entry, String::cast(key), getter, "get %s");
1674  }
1675  Object* setter = accessors->setter();
1676  if (!setter->IsOddball()) {
1677  SetPropertyReference(js_obj, entry, String::cast(key), setter, "set %s");
1678  }
1679  return true;
1680 }
1681 
1682 
1683 void V8HeapExplorer::ExtractElementReferences(JSObject* js_obj, int entry) {
1684  if (js_obj->HasFastObjectElements()) {
1685  FixedArray* elements = FixedArray::cast(js_obj->elements());
1686  int length = js_obj->IsJSArray() ?
1687  Smi::cast(JSArray::cast(js_obj)->length())->value() :
1688  elements->length();
1689  for (int i = 0; i < length; ++i) {
1690  if (!elements->get(i)->IsTheHole()) {
1691  SetElementReference(js_obj, entry, i, elements->get(i));
1692  }
1693  }
1694  } else if (js_obj->HasDictionaryElements()) {
1695  SeededNumberDictionary* dictionary = js_obj->element_dictionary();
1696  int length = dictionary->Capacity();
1697  for (int i = 0; i < length; ++i) {
1698  Object* k = dictionary->KeyAt(i);
1699  if (dictionary->IsKey(k)) {
1700  ASSERT(k->IsNumber());
1701  uint32_t index = static_cast<uint32_t>(k->Number());
1702  SetElementReference(js_obj, entry, index, dictionary->ValueAt(i));
1703  }
1704  }
1705  }
1706 }
1707 
1708 
1709 void V8HeapExplorer::ExtractInternalReferences(JSObject* js_obj, int entry) {
1710  int length = js_obj->GetInternalFieldCount();
1711  for (int i = 0; i < length; ++i) {
1712  Object* o = js_obj->GetInternalField(i);
1713  SetInternalReference(
1714  js_obj, entry, i, o, js_obj->GetInternalFieldOffset(i));
1715  }
1716 }
1717 
1718 
1719 String* V8HeapExplorer::GetConstructorName(JSObject* object) {
1720  Heap* heap = object->GetHeap();
1721  if (object->IsJSFunction()) return heap->closure_string();
1722  String* constructor_name = object->constructor_name();
1723  if (constructor_name == heap->Object_string()) {
1724  // Look up an immediate "constructor" property; if it is a function,
1725  // return its name. This is used for instances of binding objects, which
1726  // have prototype constructor type "Object".
1727  Object* constructor_prop = NULL;
1728  LookupResult result(heap->isolate());
1729  object->LocalLookupRealNamedProperty(heap->constructor_string(), &result);
1730  if (!result.IsFound()) return object->constructor_name();
1731 
1732  constructor_prop = result.GetLazyValue();
1733  if (constructor_prop->IsJSFunction()) {
1734  Object* maybe_name =
1735  JSFunction::cast(constructor_prop)->shared()->name();
1736  if (maybe_name->IsString()) {
1737  String* name = String::cast(maybe_name);
1738  if (name->length() > 0) return name;
1739  }
1740  }
1741  }
1742  return object->constructor_name();
1743 }
1744 
1745 
1746 HeapEntry* V8HeapExplorer::GetEntry(Object* obj) {
1747  if (!obj->IsHeapObject()) return NULL;
1748  return filler_->FindOrAddEntry(obj, this);
1749 }
1750 
1751 
1752 class RootsReferencesExtractor : public ObjectVisitor {
1753  private:
1754  struct IndexTag {
1755  IndexTag(int index, VisitorSynchronization::SyncTag tag)
1756  : index(index), tag(tag) { }
1757  int index;
1758  VisitorSynchronization::SyncTag tag;
1759  };
1760 
1761  public:
1762  explicit RootsReferencesExtractor(Heap* heap)
1763  : collecting_all_references_(false),
1764  previous_reference_count_(0),
1765  heap_(heap) {
1766  }
1767 
1768  void VisitPointers(Object** start, Object** end) {
1769  if (collecting_all_references_) {
1770  for (Object** p = start; p < end; p++) all_references_.Add(*p);
1771  } else {
1772  for (Object** p = start; p < end; p++) strong_references_.Add(*p);
1773  }
1774  }
1775 
1776  void SetCollectingAllReferences() { collecting_all_references_ = true; }
1777 
1778  void FillReferences(V8HeapExplorer* explorer) {
1779  ASSERT(strong_references_.length() <= all_references_.length());
1780  Builtins* builtins = heap_->isolate()->builtins();
1781  for (int i = 0; i < reference_tags_.length(); ++i) {
1782  explorer->SetGcRootsReference(reference_tags_[i].tag);
1783  }
1784  int strong_index = 0, all_index = 0, tags_index = 0, builtin_index = 0;
1785  while (all_index < all_references_.length()) {
1786  bool is_strong = strong_index < strong_references_.length()
1787  && strong_references_[strong_index] == all_references_[all_index];
1788  explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
1789  !is_strong,
1790  all_references_[all_index]);
1791  if (reference_tags_[tags_index].tag ==
1792  VisitorSynchronization::kBuiltins) {
1793  ASSERT(all_references_[all_index]->IsCode());
1794  explorer->TagBuiltinCodeObject(
1795  Code::cast(all_references_[all_index]),
1796  builtins->name(builtin_index++));
1797  }
1798  ++all_index;
1799  if (is_strong) ++strong_index;
1800  if (reference_tags_[tags_index].index == all_index) ++tags_index;
1801  }
1802  }
1803 
1804  void Synchronize(VisitorSynchronization::SyncTag tag) {
1805  if (collecting_all_references_ &&
1806  previous_reference_count_ != all_references_.length()) {
1807  previous_reference_count_ = all_references_.length();
1808  reference_tags_.Add(IndexTag(previous_reference_count_, tag));
1809  }
1810  }
1811 
1812  private:
1813  bool collecting_all_references_;
1814  List<Object*> strong_references_;
1815  List<Object*> all_references_;
1816  int previous_reference_count_;
1817  List<IndexTag> reference_tags_;
1818  Heap* heap_;
1819 };
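FillReferences above decides whether each GC root reference is weak by walking the strong-only and all-references lists in lockstep: VISIT_ONLY_STRONG visits a subsequence of VISIT_ALL in the same order, so a reference is weak exactly when it does not match the current strong entry. A minimal standalone sketch of that merge, using plain standard containers purely for illustration (not part of this file):

    #include <string>
    #include <vector>

    // Marks entries of |all| as weak when they are absent from |strong|,
    // assuming |strong| is a subsequence of |all| in the same order.
    static std::vector<bool> ClassifyWeak(const std::vector<std::string>& all,
                                          const std::vector<std::string>& strong) {
      std::vector<bool> is_weak(all.size(), false);
      size_t strong_index = 0;
      for (size_t i = 0; i < all.size(); ++i) {
        bool is_strong =
            strong_index < strong.size() && strong[strong_index] == all[i];
        is_weak[i] = !is_strong;
        if (is_strong) ++strong_index;
      }
      return is_weak;
    }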
1820 
1821 
1822 bool V8HeapExplorer::IterateAndExtractReferences(
1823  SnapshotFiller* filler) {
1824  filler_ = filler;
1825 
1826  // Make sure builtin code objects get their builtin tags
1827  // first. Otherwise a particular JSFunction object could set
1828  // its custom name to a generic builtin.
1829  SetRootGcRootsReference();
1830  RootsReferencesExtractor extractor(heap_);
1831  heap_->IterateRoots(&extractor, VISIT_ONLY_STRONG);
1832  extractor.SetCollectingAllReferences();
1833  heap_->IterateRoots(&extractor, VISIT_ALL);
1834  extractor.FillReferences(this);
1835 
1836  // Now iterate the whole heap.
1837  bool interrupted = false;
1838  HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);
1839  // Heap iteration with filtering must be finished in any case.
1840  for (HeapObject* obj = iterator.next();
1841  obj != NULL;
1842  obj = iterator.next(), progress_->ProgressStep()) {
1843  if (!interrupted) {
1844  ExtractReferences(obj);
1845  if (!progress_->ProgressReport(false)) interrupted = true;
1846  }
1847  }
1848  if (interrupted) {
1849  filler_ = NULL;
1850  return false;
1851  }
1852 
1853  filler_ = NULL;
1854  return progress_->ProgressReport(true);
1855 }
1856 
1857 
1858 bool V8HeapExplorer::IsEssentialObject(Object* object) {
1859  return object->IsHeapObject()
1860  && !object->IsOddball()
1861  && object != heap_->empty_byte_array()
1862  && object != heap_->empty_fixed_array()
1863  && object != heap_->empty_descriptor_array()
1864  && object != heap_->fixed_array_map()
1865  && object != heap_->cell_map()
1866  && object != heap_->global_property_cell_map()
1867  && object != heap_->shared_function_info_map()
1868  && object != heap_->free_space_map()
1869  && object != heap_->one_pointer_filler_map()
1870  && object != heap_->two_pointer_filler_map();
1871 }
1872 
1873 
1874 void V8HeapExplorer::SetContextReference(HeapObject* parent_obj,
1875  int parent_entry,
1876  String* reference_name,
1877  Object* child_obj,
1878  int field_offset) {
1879  ASSERT(parent_entry == GetEntry(parent_obj)->index());
1880  HeapEntry* child_entry = GetEntry(child_obj);
1881  if (child_entry != NULL) {
1882  filler_->SetNamedReference(HeapGraphEdge::kContextVariable,
1883  parent_entry,
1884  names_->GetName(reference_name),
1885  child_entry);
1886  IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
1887  }
1888 }
1889 
1890 
1891 void V8HeapExplorer::SetNativeBindReference(HeapObject* parent_obj,
1892  int parent_entry,
1893  const char* reference_name,
1894  Object* child_obj) {
1895  ASSERT(parent_entry == GetEntry(parent_obj)->index());
1896  HeapEntry* child_entry = GetEntry(child_obj);
1897  if (child_entry != NULL) {
1898  filler_->SetNamedReference(HeapGraphEdge::kShortcut,
1899  parent_entry,
1900  reference_name,
1901  child_entry);
1902  }
1903 }
1904 
1905 
1906 void V8HeapExplorer::SetElementReference(HeapObject* parent_obj,
1907  int parent_entry,
1908  int index,
1909  Object* child_obj) {
1910  ASSERT(parent_entry == GetEntry(parent_obj)->index());
1911  HeapEntry* child_entry = GetEntry(child_obj);
1912  if (child_entry != NULL) {
1913  filler_->SetIndexedReference(HeapGraphEdge::kElement,
1914  parent_entry,
1915  index,
1916  child_entry);
1917  }
1918 }
1919 
1920 
1921 void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
1922  int parent_entry,
1923  const char* reference_name,
1924  Object* child_obj,
1925  int field_offset) {
1926  ASSERT(parent_entry == GetEntry(parent_obj)->index());
1927  HeapEntry* child_entry = GetEntry(child_obj);
1928  if (child_entry == NULL) return;
1929  if (IsEssentialObject(child_obj)) {
1930  filler_->SetNamedReference(HeapGraphEdge::kInternal,
1931  parent_entry,
1932  reference_name,
1933  child_entry);
1934  }
1935  IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
1936 }
1937 
1938 
1939 void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
1940  int parent_entry,
1941  int index,
1942  Object* child_obj,
1943  int field_offset) {
1944  ASSERT(parent_entry == GetEntry(parent_obj)->index());
1945  HeapEntry* child_entry = GetEntry(child_obj);
1946  if (child_entry == NULL) return;
1947  if (IsEssentialObject(child_obj)) {
1948  filler_->SetNamedReference(HeapGraphEdge::kInternal,
1949  parent_entry,
1950  names_->GetName(index),
1951  child_entry);
1952  }
1953  IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
1954 }
1955 
1956 
1957 void V8HeapExplorer::SetHiddenReference(HeapObject* parent_obj,
1958  int parent_entry,
1959  int index,
1960  Object* child_obj) {
1961  ASSERT(parent_entry == GetEntry(parent_obj)->index());
1962  HeapEntry* child_entry = GetEntry(child_obj);
1963  if (child_entry != NULL && IsEssentialObject(child_obj)) {
1964  filler_->SetIndexedReference(HeapGraphEdge::kHidden,
1965  parent_entry,
1966  index,
1967  child_entry);
1968  }
1969 }
1970 
1971 
1972 void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj,
1973  int parent_entry,
1974  const char* reference_name,
1975  Object* child_obj,
1976  int field_offset) {
1977  ASSERT(parent_entry == GetEntry(parent_obj)->index());
1978  HeapEntry* child_entry = GetEntry(child_obj);
1979  if (child_entry == NULL) return;
1980  if (IsEssentialObject(child_obj)) {
1981  filler_->SetNamedReference(HeapGraphEdge::kWeak,
1982  parent_entry,
1983  reference_name,
1984  child_entry);
1985  }
1986  IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
1987 }
1988 
1989 
1990 void V8HeapExplorer::SetPropertyReference(HeapObject* parent_obj,
1991  int parent_entry,
1992  Name* reference_name,
1993  Object* child_obj,
1994  const char* name_format_string,
1995  int field_offset) {
1996  ASSERT(parent_entry == GetEntry(parent_obj)->index());
1997  HeapEntry* child_entry = GetEntry(child_obj);
1998  if (child_entry != NULL) {
1999  HeapGraphEdge::Type type =
2000  reference_name->IsSymbol() || String::cast(reference_name)->length() > 0
2001  ? HeapGraphEdge::kProperty : HeapGraphEdge::kInternal;
2002  const char* name = name_format_string != NULL && reference_name->IsString()
2003  ? names_->GetFormatted(
2004  name_format_string,
2005  String::cast(reference_name)->ToCString(
2006  DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL).get()) :
2007  names_->GetName(reference_name);
2008 
2009  filler_->SetNamedReference(type,
2010  parent_entry,
2011  name,
2012  child_entry);
2013  IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
2014  }
2015 }
2016 
2017 
2018 void V8HeapExplorer::SetRootGcRootsReference() {
2019  filler_->SetIndexedAutoIndexReference(
2020  HeapGraphEdge::kElement,
2021  snapshot_->root()->index(),
2022  snapshot_->gc_roots());
2023 }
2024 
2025 
2026 void V8HeapExplorer::SetUserGlobalReference(Object* child_obj) {
2027  HeapEntry* child_entry = GetEntry(child_obj);
2028  ASSERT(child_entry != NULL);
2029  filler_->SetNamedAutoIndexReference(
2030  HeapGraphEdge::kShortcut,
2031  snapshot_->root()->index(),
2032  child_entry);
2033 }
2034 
2035 
2036 void V8HeapExplorer::SetGcRootsReference(VisitorSynchronization::SyncTag tag) {
2037  filler_->SetIndexedAutoIndexReference(
2038  HeapGraphEdge::kElement,
2039  snapshot_->gc_roots()->index(),
2040  snapshot_->gc_subroot(tag));
2041 }
2042 
2043 
2044 void V8HeapExplorer::SetGcSubrootReference(
2045  VisitorSynchronization::SyncTag tag, bool is_weak, Object* child_obj) {
2046  HeapEntry* child_entry = GetEntry(child_obj);
2047  if (child_entry != NULL) {
2048  const char* name = GetStrongGcSubrootName(child_obj);
2049  if (name != NULL) {
2050  filler_->SetNamedReference(
2051  HeapGraphEdge::kInternal,
2052  snapshot_->gc_subroot(tag)->index(),
2053  name,
2054  child_entry);
2055  } else {
2056  if (is_weak) {
2057  filler_->SetNamedAutoIndexReference(
2058  HeapGraphEdge::kWeak,
2059  snapshot_->gc_subroot(tag)->index(),
2060  child_entry);
2061  } else {
2062  filler_->SetIndexedAutoIndexReference(
2063  HeapGraphEdge::kElement,
2064  snapshot_->gc_subroot(tag)->index(),
2065  child_entry);
2066  }
2067  }
2068 
2069  // Add a shortcut to JS global object reference at snapshot root.
2070  if (child_obj->IsNativeContext()) {
2071  Context* context = Context::cast(child_obj);
2072  GlobalObject* global = context->global_object();
2073  if (global->IsJSGlobalObject()) {
2074  bool is_debug_object = false;
2075 #ifdef ENABLE_DEBUGGER_SUPPORT
2076  is_debug_object = heap_->isolate()->debug()->IsDebugGlobal(global);
2077 #endif
2078  if (!is_debug_object && !user_roots_.Contains(global)) {
2079  user_roots_.Insert(global);
2080  SetUserGlobalReference(global);
2081  }
2082  }
2083  }
2084  }
2085 }
2086 
2087 
2088 const char* V8HeapExplorer::GetStrongGcSubrootName(Object* object) {
2089  if (strong_gc_subroot_names_.is_empty()) {
2090 #define NAME_ENTRY(name) strong_gc_subroot_names_.SetTag(heap_->name(), #name);
2091 #define ROOT_NAME(type, name, camel_name) NAME_ENTRY(name)
2092  STRONG_ROOT_LIST(ROOT_NAME)
2093 #undef ROOT_NAME
2094 #define STRUCT_MAP_NAME(NAME, Name, name) NAME_ENTRY(name##_map)
2095  STRUCT_LIST(STRUCT_MAP_NAME)
2096 #undef STRUCT_MAP_NAME
2097 #define STRING_NAME(name, str) NAME_ENTRY(name)
2098  INTERNALIZED_STRING_LIST(STRING_NAME)
2099 #undef STRING_NAME
2100 #undef NAME_ENTRY
2101  CHECK(!strong_gc_subroot_names_.is_empty());
2102  }
2103  return strong_gc_subroot_names_.GetTag(object);
2104 }
2105 
2106 
2107 void V8HeapExplorer::TagObject(Object* obj, const char* tag) {
2108  if (IsEssentialObject(obj)) {
2109  HeapEntry* entry = GetEntry(obj);
2110  if (entry->name()[0] == '\0') {
2111  entry->set_name(tag);
2112  }
2113  }
2114 }
2115 
2116 
2117 class GlobalObjectsEnumerator : public ObjectVisitor {
2118  public:
2119  virtual void VisitPointers(Object** start, Object** end) {
2120  for (Object** p = start; p < end; p++) {
2121  if ((*p)->IsNativeContext()) {
2122  Context* context = Context::cast(*p);
2123  JSObject* proxy = context->global_proxy();
2124  if (proxy->IsJSGlobalProxy()) {
2125  Object* global = proxy->map()->prototype();
2126  if (global->IsJSGlobalObject()) {
2127  objects_.Add(Handle<JSGlobalObject>(JSGlobalObject::cast(global)));
2128  }
2129  }
2130  }
2131  }
2132  }
2133  int count() { return objects_.length(); }
2134  Handle<JSGlobalObject>& at(int i) { return objects_[i]; }
2135 
2136  private:
2137  List<Handle<JSGlobalObject> > objects_;
2138 };
2139 
2140 
2141 // Modifies heap. Must not be run during heap traversal.
2142 void V8HeapExplorer::TagGlobalObjects() {
2143  Isolate* isolate = heap_->isolate();
2144  HandleScope scope(isolate);
2145  GlobalObjectsEnumerator enumerator;
2146  isolate->global_handles()->IterateAllRoots(&enumerator);
2147  const char** urls = NewArray<const char*>(enumerator.count());
2148  for (int i = 0, l = enumerator.count(); i < l; ++i) {
2149  if (global_object_name_resolver_) {
2150  HandleScope scope(isolate);
2151  Handle<JSGlobalObject> global_obj = enumerator.at(i);
2152  urls[i] = global_object_name_resolver_->GetName(
2153  Utils::ToLocal(Handle<JSObject>::cast(global_obj)));
2154  } else {
2155  urls[i] = NULL;
2156  }
2157  }
2158 
2159  DisallowHeapAllocation no_allocation;
2160  for (int i = 0, l = enumerator.count(); i < l; ++i) {
2161  objects_tags_.SetTag(*enumerator.at(i), urls[i]);
2162  }
2163 
2164  DeleteArray(urls);
2165 }
2166 
2167 
2168 class GlobalHandlesExtractor : public ObjectVisitor {
2169  public:
2170  explicit GlobalHandlesExtractor(NativeObjectsExplorer* explorer)
2171  : explorer_(explorer) {}
2172  virtual ~GlobalHandlesExtractor() {}
2173  virtual void VisitPointers(Object** start, Object** end) {
2174  UNREACHABLE();
2175  }
2176  virtual void VisitEmbedderReference(Object** p, uint16_t class_id) {
2177  explorer_->VisitSubtreeWrapper(p, class_id);
2178  }
2179  private:
2180  NativeObjectsExplorer* explorer_;
2181 };
2182 
2183 
2184 class BasicHeapEntriesAllocator : public HeapEntriesAllocator {
2185  public:
2186  BasicHeapEntriesAllocator(
2187  HeapSnapshot* snapshot,
2188  HeapEntry::Type entries_type)
2189  : snapshot_(snapshot),
2190  names_(snapshot_->profiler()->names()),
2191  heap_object_map_(snapshot_->profiler()->heap_object_map()),
2192  entries_type_(entries_type) {
2193  }
2194  virtual HeapEntry* AllocateEntry(HeapThing ptr);
2195  private:
2196  HeapSnapshot* snapshot_;
2197  StringsStorage* names_;
2198  HeapObjectsMap* heap_object_map_;
2199  HeapEntry::Type entries_type_;
2200 };
2201 
2202 
2203 HeapEntry* BasicHeapEntriesAllocator::AllocateEntry(HeapThing ptr) {
2204  v8::RetainedObjectInfo* info = reinterpret_cast<v8::RetainedObjectInfo*>(ptr);
2205  intptr_t elements = info->GetElementCount();
2206  intptr_t size = info->GetSizeInBytes();
2207  const char* name = elements != -1
2208  ? names_->GetFormatted(
2209  "%s / %" V8_PTR_PREFIX "d entries", info->GetLabel(), elements)
2210  : names_->GetCopy(info->GetLabel());
2211  return snapshot_->AddEntry(
2212  entries_type_,
2213  name,
2214  heap_object_map_->GenerateId(info),
2215  size != -1 ? static_cast<int>(size) : 0,
2216  0);
2217 }
2218 
2219 
2220 NativeObjectsExplorer::NativeObjectsExplorer(
2221  HeapSnapshot* snapshot,
2222  SnapshottingProgressReportingInterface* progress)
2223  : isolate_(snapshot->profiler()->heap_object_map()->heap()->isolate()),
2224  snapshot_(snapshot),
2225  names_(snapshot_->profiler()->names()),
2226  progress_(progress),
2227  embedder_queried_(false),
2228  objects_by_info_(RetainedInfosMatch),
2229  native_groups_(StringsMatch),
2230  filler_(NULL) {
2231  synthetic_entries_allocator_ =
2232  new BasicHeapEntriesAllocator(snapshot, HeapEntry::kSynthetic);
2233  native_entries_allocator_ =
2234  new BasicHeapEntriesAllocator(snapshot, HeapEntry::kNative);
2235 }
2236 
2237 
2238 NativeObjectsExplorer::~NativeObjectsExplorer() {
2239  for (HashMap::Entry* p = objects_by_info_.Start();
2240  p != NULL;
2241  p = objects_by_info_.Next(p)) {
2242  v8::RetainedObjectInfo* info =
2243  reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
2244  info->Dispose();
2245  List<HeapObject*>* objects =
2246  reinterpret_cast<List<HeapObject*>* >(p->value);
2247  delete objects;
2248  }
2249  for (HashMap::Entry* p = native_groups_.Start();
2250  p != NULL;
2251  p = native_groups_.Next(p)) {
2252  v8::RetainedObjectInfo* info =
2253  reinterpret_cast<v8::RetainedObjectInfo*>(p->value);
2254  info->Dispose();
2255  }
2256  delete synthetic_entries_allocator_;
2257  delete native_entries_allocator_;
2258 }
2259 
2260 
2261 int NativeObjectsExplorer::EstimateObjectsCount() {
2262  FillRetainedObjects();
2263  return objects_by_info_.occupancy();
2264 }
2265 
2266 
2267 void NativeObjectsExplorer::FillRetainedObjects() {
2268  if (embedder_queried_) return;
2269  Isolate* isolate = isolate_;
2270  const GCType major_gc_type = kGCTypeMarkSweepCompact;
2271  // Record objects that are joined into ObjectGroups.
2272  isolate->heap()->CallGCPrologueCallbacks(
2273  major_gc_type, kGCCallbackFlagConstructRetainedObjectInfos);
2274  List<ObjectGroup*>* groups = isolate->global_handles()->object_groups();
2275  for (int i = 0; i < groups->length(); ++i) {
2276  ObjectGroup* group = groups->at(i);
2277  if (group->info == NULL) continue;
2278  List<HeapObject*>* list = GetListMaybeDisposeInfo(group->info);
2279  for (size_t j = 0; j < group->length; ++j) {
2280  HeapObject* obj = HeapObject::cast(*group->objects[j]);
2281  list->Add(obj);
2282  in_groups_.Insert(obj);
2283  }
2284  group->info = NULL; // Acquire info object ownership.
2285  }
2286  isolate->global_handles()->RemoveObjectGroups();
2287  isolate->heap()->CallGCEpilogueCallbacks(major_gc_type, kNoGCCallbackFlags);
2288  // Record objects that are not in ObjectGroups, but have class ID.
2289  GlobalHandlesExtractor extractor(this);
2290  isolate->global_handles()->IterateAllRootsWithClassIds(&extractor);
2291  embedder_queried_ = true;
2292 }
2293 
2294 
2295 void NativeObjectsExplorer::FillImplicitReferences() {
2296  Isolate* isolate = isolate_;
2297  List<ImplicitRefGroup*>* groups =
2298  isolate->global_handles()->implicit_ref_groups();
2299  for (int i = 0; i < groups->length(); ++i) {
2300  ImplicitRefGroup* group = groups->at(i);
2301  HeapObject* parent = *group->parent;
2302  int parent_entry =
2303  filler_->FindOrAddEntry(parent, native_entries_allocator_)->index();
2304  ASSERT(parent_entry != HeapEntry::kNoEntry);
2305  Object*** children = group->children;
2306  for (size_t j = 0; j < group->length; ++j) {
2307  Object* child = *children[j];
2308  HeapEntry* child_entry =
2309  filler_->FindOrAddEntry(child, native_entries_allocator_);
2310  filler_->SetNamedReference(
2311  HeapGraphEdge::kInternal,
2312  parent_entry,
2313  "native",
2314  child_entry);
2315  }
2316  }
2317  isolate->global_handles()->RemoveImplicitRefGroups();
2318 }
2319 
2320 List<HeapObject*>* NativeObjectsExplorer::GetListMaybeDisposeInfo(
2321  v8::RetainedObjectInfo* info) {
2322  HashMap::Entry* entry =
2323  objects_by_info_.Lookup(info, InfoHash(info), true);
2324  if (entry->value != NULL) {
2325  info->Dispose();
2326  } else {
2327  entry->value = new List<HeapObject*>(4);
2328  }
2329  return reinterpret_cast<List<HeapObject*>* >(entry->value);
2330 }
2331 
2332 
2333 bool NativeObjectsExplorer::IterateAndExtractReferences(
2334  SnapshotFiller* filler) {
2335  filler_ = filler;
2336  FillRetainedObjects();
2337  FillImplicitReferences();
2338  if (EstimateObjectsCount() > 0) {
2339  for (HashMap::Entry* p = objects_by_info_.Start();
2340  p != NULL;
2341  p = objects_by_info_.Next(p)) {
2342  v8::RetainedObjectInfo* info =
2343  reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
2344  SetNativeRootReference(info);
2345  List<HeapObject*>* objects =
2346  reinterpret_cast<List<HeapObject*>* >(p->value);
2347  for (int i = 0; i < objects->length(); ++i) {
2348  SetWrapperNativeReferences(objects->at(i), info);
2349  }
2350  }
2351  SetRootNativeRootsReference();
2352  }
2353  filler_ = NULL;
2354  return true;
2355 }
2356 
2357 
2358 class NativeGroupRetainedObjectInfo : public v8::RetainedObjectInfo {
2359  public:
2360  explicit NativeGroupRetainedObjectInfo(const char* label)
2361  : disposed_(false),
2362  hash_(reinterpret_cast<intptr_t>(label)),
2363  label_(label) {
2364  }
2365 
2366  virtual ~NativeGroupRetainedObjectInfo() {}
2367  virtual void Dispose() {
2368  CHECK(!disposed_);
2369  disposed_ = true;
2370  delete this;
2371  }
2372  virtual bool IsEquivalent(RetainedObjectInfo* other) {
2373  return hash_ == other->GetHash() && !strcmp(label_, other->GetLabel());
2374  }
2375  virtual intptr_t GetHash() { return hash_; }
2376  virtual const char* GetLabel() { return label_; }
2377 
2378  private:
2379  bool disposed_;
2380  intptr_t hash_;
2381  const char* label_;
2382 };
2383 
2384 
2385 NativeGroupRetainedObjectInfo* NativeObjectsExplorer::FindOrAddGroupInfo(
2386  const char* label) {
2387  const char* label_copy = names_->GetCopy(label);
2388  uint32_t hash = StringHasher::HashSequentialString(
2389  label_copy,
2390  static_cast<int>(strlen(label_copy)),
2391  isolate_->heap()->HashSeed());
2392  HashMap::Entry* entry = native_groups_.Lookup(const_cast<char*>(label_copy),
2393  hash, true);
2394  if (entry->value == NULL) {
2395  entry->value = new NativeGroupRetainedObjectInfo(label);
2396  }
2397  return static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
2398 }
2399 
2400 
2401 void NativeObjectsExplorer::SetNativeRootReference(
2402  v8::RetainedObjectInfo* info) {
2403  HeapEntry* child_entry =
2404  filler_->FindOrAddEntry(info, native_entries_allocator_);
2405  ASSERT(child_entry != NULL);
2406  NativeGroupRetainedObjectInfo* group_info =
2407  FindOrAddGroupInfo(info->GetGroupLabel());
2408  HeapEntry* group_entry =
2409  filler_->FindOrAddEntry(group_info, synthetic_entries_allocator_);
2410  filler_->SetNamedAutoIndexReference(
2411  HeapGraphEdge::kInternal,
2412  group_entry->index(),
2413  child_entry);
2414 }
2415 
2416 
2417 void NativeObjectsExplorer::SetWrapperNativeReferences(
2418  HeapObject* wrapper, v8::RetainedObjectInfo* info) {
2419  HeapEntry* wrapper_entry = filler_->FindEntry(wrapper);
2420  ASSERT(wrapper_entry != NULL);
2421  HeapEntry* info_entry =
2422  filler_->FindOrAddEntry(info, native_entries_allocator_);
2423  ASSERT(info_entry != NULL);
2424  filler_->SetNamedReference(HeapGraphEdge::kInternal,
2425  wrapper_entry->index(),
2426  "native",
2427  info_entry);
2428  filler_->SetIndexedAutoIndexReference(HeapGraphEdge::kElement,
2429  info_entry->index(),
2430  wrapper_entry);
2431 }
2432 
2433 
2434 void NativeObjectsExplorer::SetRootNativeRootsReference() {
2435  for (HashMap::Entry* entry = native_groups_.Start();
2436  entry;
2437  entry = native_groups_.Next(entry)) {
2438  NativeGroupRetainedObjectInfo* group_info =
2439  static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
2440  HeapEntry* group_entry =
2441  filler_->FindOrAddEntry(group_info, native_entries_allocator_);
2442  ASSERT(group_entry != NULL);
2443  filler_->SetIndexedAutoIndexReference(
2444  HeapGraphEdge::kElement,
2445  snapshot_->root()->index(),
2446  group_entry);
2447  }
2448 }
2449 
2450 
2451 void NativeObjectsExplorer::VisitSubtreeWrapper(Object** p, uint16_t class_id) {
2452  if (in_groups_.Contains(*p)) return;
2453  Isolate* isolate = isolate_;
2454  v8::RetainedObjectInfo* info =
2455  isolate->heap_profiler()->ExecuteWrapperClassCallback(class_id, p);
2456  if (info == NULL) return;
2457  GetListMaybeDisposeInfo(info)->Add(HeapObject::cast(*p));
2458 }
2459 
2460 
2461 HeapSnapshotGenerator::HeapSnapshotGenerator(
2462  HeapSnapshot* snapshot,
2463  v8::ActivityControl* control,
2464  v8::HeapProfiler::ObjectNameResolver* resolver,
2465  Heap* heap)
2466  : snapshot_(snapshot),
2467  control_(control),
2468  v8_heap_explorer_(snapshot_, this, resolver),
2469  dom_explorer_(snapshot_, this),
2470  heap_(heap) {
2471 }
2472 
2473 
2474 bool HeapSnapshotGenerator::GenerateSnapshot() {
2475  v8_heap_explorer_.TagGlobalObjects();
2476 
2477  // TODO(1562) Profiler assumes that any object that is in the heap after
2478  // full GC is reachable from the root when computing dominators.
2479  // This is not true for weakly reachable objects.
2480  // As a temporary solution we call GC twice.
2481  heap_->CollectAllGarbage(
2482  Heap::kMakeHeapIterableMask,
2483  "HeapSnapshotGenerator::GenerateSnapshot");
2484  heap_->CollectAllGarbage(
2485  Heap::kMakeHeapIterableMask,
2486  "HeapSnapshotGenerator::GenerateSnapshot");
2487 
2488 #ifdef VERIFY_HEAP
2489  Heap* debug_heap = heap_;
2490  CHECK(!debug_heap->old_data_space()->was_swept_conservatively());
2491  CHECK(!debug_heap->old_pointer_space()->was_swept_conservatively());
2492  CHECK(!debug_heap->code_space()->was_swept_conservatively());
2493  CHECK(!debug_heap->cell_space()->was_swept_conservatively());
2494  CHECK(!debug_heap->property_cell_space()->
2495  was_swept_conservatively());
2496  CHECK(!debug_heap->map_space()->was_swept_conservatively());
2497 #endif
2498 
2499  // The following code uses heap iterators, so we want the heap to be
2500  // stable. It should follow TagGlobalObjects as that can allocate.
2501  DisallowHeapAllocation no_alloc;
2502 
2503 #ifdef VERIFY_HEAP
2504  debug_heap->Verify();
2505 #endif
2506 
2507  SetProgressTotal(1); // 1 pass.
2508 
2509 #ifdef VERIFY_HEAP
2510  debug_heap->Verify();
2511 #endif
2512 
2513  if (!FillReferences()) return false;
2514 
2515  snapshot_->FillChildren();
2516  snapshot_->RememberLastJSObjectId();
2517 
2518  progress_counter_ = progress_total_;
2519  if (!ProgressReport(true)) return false;
2520  return true;
2521 }
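GenerateSnapshot() is normally reached through the public heap-profiler API rather than called directly. A hedged embedder-side sketch, assuming an already initialized Isolate and an OutputStream implementation such as the StringOutputStream sketched after the OutputStreamWriter class below (the function and title here are illustrative; exact public signatures may differ slightly between 3.25.x point releases):

    #include "v8.h"
    #include "v8-profiler.h"

    void TakeAndSerializeHeapSnapshot(v8::Isolate* isolate,
                                      v8::OutputStream* out) {
      v8::HandleScope handle_scope(isolate);
      v8::HeapProfiler* profiler = isolate->GetHeapProfiler();
      // Internally drives HeapSnapshotGenerator::GenerateSnapshot() above.
      const v8::HeapSnapshot* snapshot = profiler->TakeHeapSnapshot(
          v8::String::NewFromUtf8(isolate, "example"));
      // Streams the JSON produced by HeapSnapshotJSONSerializer below.
      snapshot->Serialize(out, v8::HeapSnapshot::kJSON);
      const_cast<v8::HeapSnapshot*>(snapshot)->Delete();
    }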
2522 
2523 
2524 void HeapSnapshotGenerator::ProgressStep() {
2525  ++progress_counter_;
2526 }
2527 
2528 
2529 bool HeapSnapshotGenerator::ProgressReport(bool force) {
2530  const int kProgressReportGranularity = 10000;
2531  if (control_ != NULL
2532  && (force || progress_counter_ % kProgressReportGranularity == 0)) {
2533  return
2534  control_->ReportProgressValue(progress_counter_, progress_total_) ==
2535  v8::ActivityControl::kContinue;
2536  }
2537  return true;
2538 }
2539 
2540 
2541 void HeapSnapshotGenerator::SetProgressTotal(int iterations_count) {
2542  if (control_ == NULL) return;
2543  HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);
2544  progress_total_ = iterations_count * (
2545  v8_heap_explorer_.EstimateObjectsCount(&iterator) +
2546  dom_explorer_.EstimateObjectsCount());
2547  progress_counter_ = 0;
2548 }
2549 
2550 
2551 bool HeapSnapshotGenerator::FillReferences() {
2552  SnapshotFiller filler(snapshot_, &entries_);
2553  v8_heap_explorer_.AddRootEntries(&filler);
2554  return v8_heap_explorer_.IterateAndExtractReferences(&filler)
2555  && dom_explorer_.IterateAndExtractReferences(&filler);
2556 }
2557 
2558 
2559 template<int bytes> struct MaxDecimalDigitsIn;
2560 template<> struct MaxDecimalDigitsIn<4> {
2561  static const int kSigned = 11;
2562  static const int kUnsigned = 10;
2563 };
2564 template<> struct MaxDecimalDigitsIn<8> {
2565  static const int kSigned = 20;
2566  static const int kUnsigned = 20;
2567 };
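The digit bounds are the lengths of the extreme decimal representations: a 4-byte unsigned value needs at most 10 characters (4294967295) and a signed one 11 (-2147483648), while 8-byte values need at most 20 either way. These constants size the on-stack formatting buffers used by the serializer below.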
2568 
2569 
2570 class OutputStreamWriter {
2571  public:
2572  explicit OutputStreamWriter(v8::OutputStream* stream)
2573  : stream_(stream),
2574  chunk_size_(stream->GetChunkSize()),
2575  chunk_(chunk_size_),
2576  chunk_pos_(0),
2577  aborted_(false) {
2578  ASSERT(chunk_size_ > 0);
2579  }
2580  bool aborted() { return aborted_; }
2581  void AddCharacter(char c) {
2582  ASSERT(c != '\0');
2583  ASSERT(chunk_pos_ < chunk_size_);
2584  chunk_[chunk_pos_++] = c;
2585  MaybeWriteChunk();
2586  }
2587  void AddString(const char* s) {
2588  AddSubstring(s, StrLength(s));
2589  }
2590  void AddSubstring(const char* s, int n) {
2591  if (n <= 0) return;
2592  ASSERT(static_cast<size_t>(n) <= strlen(s));
2593  const char* s_end = s + n;
2594  while (s < s_end) {
2595  int s_chunk_size = Min(
2596  chunk_size_ - chunk_pos_, static_cast<int>(s_end - s));
2597  ASSERT(s_chunk_size > 0);
2598  OS::MemCopy(chunk_.start() + chunk_pos_, s, s_chunk_size);
2599  s += s_chunk_size;
2600  chunk_pos_ += s_chunk_size;
2601  MaybeWriteChunk();
2602  }
2603  }
2604  void AddNumber(unsigned n) { AddNumberImpl<unsigned>(n, "%u"); }
2605  void Finalize() {
2606  if (aborted_) return;
2607  ASSERT(chunk_pos_ < chunk_size_);
2608  if (chunk_pos_ != 0) {
2609  WriteChunk();
2610  }
2611  stream_->EndOfStream();
2612  }
2613 
2614  private:
2615  template<typename T>
2616  void AddNumberImpl(T n, const char* format) {
2617  // Buffer for the longest value plus trailing \0
2618  static const int kMaxNumberSize =
2619  MaxDecimalDigitsIn<sizeof(T)>::kUnsigned + 1;
2620  if (chunk_size_ - chunk_pos_ >= kMaxNumberSize) {
2621  int result = OS::SNPrintF(
2622  chunk_.SubVector(chunk_pos_, chunk_size_), format, n);
2623  ASSERT(result != -1);
2624  chunk_pos_ += result;
2625  MaybeWriteChunk();
2626  } else {
2627  EmbeddedVector<char, kMaxNumberSize> buffer;
2628  int result = OS::SNPrintF(buffer, format, n);
2629  USE(result);
2630  ASSERT(result != -1);
2631  AddString(buffer.start());
2632  }
2633  }
2634  void MaybeWriteChunk() {
2635  ASSERT(chunk_pos_ <= chunk_size_);
2636  if (chunk_pos_ == chunk_size_) {
2637  WriteChunk();
2638  }
2639  }
2640  void WriteChunk() {
2641  if (aborted_) return;
2642  if (stream_->WriteAsciiChunk(chunk_.start(), chunk_pos_) ==
2643  v8::OutputStream::kAbort) aborted_ = true;
2644  chunk_pos_ = 0;
2645  }
2646 
2647  v8::OutputStream* stream_;
2648  int chunk_size_;
2649  ScopedVector<char> chunk_;
2650  int chunk_pos_;
2651  bool aborted_;
2652 };
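OutputStreamWriter buffers characters and flushes them to the embedder-supplied v8::OutputStream in chunks of GetChunkSize() bytes, stopping once WriteAsciiChunk() returns kAbort. A minimal sketch of such a stream that simply accumulates the serialized JSON into a std::string (illustration only, not part of this file):

    #include <string>
    #include "v8-profiler.h"

    class StringOutputStream : public v8::OutputStream {
     public:
      virtual int GetChunkSize() { return 4096; }  // Chunk size used by the writer.
      virtual WriteResult WriteAsciiChunk(char* data, int size) {
        data_.append(data, size);
        return kContinue;  // kAbort would make OutputStreamWriter stop writing.
      }
      virtual void EndOfStream() {}  // Called from OutputStreamWriter::Finalize().
      const std::string& data() const { return data_; }

     private:
      std::string data_;
    };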
2653 
2654 
2655 // type, name|index, to_node.
2656 const int HeapSnapshotJSONSerializer::kEdgeFieldsCount = 3;
2657 // type, name, id, self_size, edge_count, trace_node_id.
2658 const int HeapSnapshotJSONSerializer::kNodeFieldsCount = 6;
2659 
2660 void HeapSnapshotJSONSerializer::Serialize(v8::OutputStream* stream) {
2661  if (AllocationTracker* allocation_tracker =
2662  snapshot_->profiler()->allocation_tracker()) {
2663  allocation_tracker->PrepareForSerialization();
2664  }
2665  ASSERT(writer_ == NULL);
2666  writer_ = new OutputStreamWriter(stream);
2667  SerializeImpl();
2668  delete writer_;
2669  writer_ = NULL;
2670 }
2671 
2672 
2673 void HeapSnapshotJSONSerializer::SerializeImpl() {
2674  ASSERT(0 == snapshot_->root()->index());
2675  writer_->AddCharacter('{');
2676  writer_->AddString("\"snapshot\":{");
2677  SerializeSnapshot();
2678  if (writer_->aborted()) return;
2679  writer_->AddString("},\n");
2680  writer_->AddString("\"nodes\":[");
2681  SerializeNodes();
2682  if (writer_->aborted()) return;
2683  writer_->AddString("],\n");
2684  writer_->AddString("\"edges\":[");
2685  SerializeEdges();
2686  if (writer_->aborted()) return;
2687  writer_->AddString("],\n");
2688 
2689  writer_->AddString("\"trace_function_infos\":[");
2690  SerializeTraceNodeInfos();
2691  if (writer_->aborted()) return;
2692  writer_->AddString("],\n");
2693  writer_->AddString("\"trace_tree\":[");
2694  SerializeTraceTree();
2695  if (writer_->aborted()) return;
2696  writer_->AddString("],\n");
2697 
2698  writer_->AddString("\"strings\":[");
2699  SerializeStrings();
2700  if (writer_->aborted()) return;
2701  writer_->AddCharacter(']');
2702  writer_->AddCharacter('}');
2703  writer_->Finalize();
2704 }
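SerializeImpl therefore emits a single JSON object of the form {"snapshot":{...},"nodes":[...],"edges":[...],"trace_function_infos":[...],"trace_tree":[...],"strings":[...]}, written in that order and cut short as soon as the writer reports an aborted stream.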
2705 
2706 
2707 int HeapSnapshotJSONSerializer::GetStringId(const char* s) {
2708  HashMap::Entry* cache_entry = strings_.Lookup(
2709  const_cast<char*>(s), StringHash(s), true);
2710  if (cache_entry->value == NULL) {
2711  cache_entry->value = reinterpret_cast<void*>(next_string_id_++);
2712  }
2713  return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
2714 }
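String ids are handed out in the order strings are first interned here. SerializeStrings() below writes a "<dummy>" placeholder at index 0 and then the real strings from index 1 upward, so the first id returned by GetStringId() is 1.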
2715 
2716 
2717 namespace {
2718 
2719 template<size_t size> struct ToUnsigned;
2720 
2721 template<> struct ToUnsigned<4> {
2722  typedef uint32_t Type;
2723 };
2724 
2725 template<> struct ToUnsigned<8> {
2726  typedef uint64_t Type;
2727 };
2728 
2729 } // namespace
2730 
2731 
2732 template<typename T>
2733 static int utoa_impl(T value, const Vector<char>& buffer, int buffer_pos) {
2734  STATIC_CHECK(static_cast<T>(-1) > 0); // Check that T is unsigned
2735  int number_of_digits = 0;
2736  T t = value;
2737  do {
2738  ++number_of_digits;
2739  } while (t /= 10);
2740 
2741  buffer_pos += number_of_digits;
2742  int result = buffer_pos;
2743  do {
2744  int last_digit = static_cast<int>(value % 10);
2745  buffer[--buffer_pos] = '0' + last_digit;
2746  value /= 10;
2747  } while (value);
2748  return result;
2749 }
2750 
2751 
2752 template<typename T>
2753 static int utoa(T value, const Vector<char>& buffer, int buffer_pos) {
2754  typename ToUnsigned<sizeof(value)>::Type unsigned_value = value;
2755  STATIC_CHECK(sizeof(value) == sizeof(unsigned_value));
2756  return utoa_impl(unsigned_value, buffer, buffer_pos);
2757 }
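utoa_impl counts the digits first, then writes them back to front and returns the position just past the last digit; no terminating NUL is appended, so callers add their own separators or '\0'. A worked illustration (hypothetical snippet, shown as a comment because it relies on the internal Vector wrapper):

    // char buf[16];
    // int pos = utoa(305u, Vector<char>(buf, sizeof(buf)), 0);
    // // buf now starts with "305" (not NUL-terminated) and pos == 3.
    // buf[pos] = '\0';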
2758 
2759 
2760 void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge,
2761  bool first_edge) {
2762  // The buffer needs space for 3 unsigned ints, 3 commas, \n and \0
2763  static const int kBufferSize =
2764  MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned * 3 + 3 + 2; // NOLINT
2765  EmbeddedVector<char, kBufferSize> buffer;
2766  int edge_name_or_index = edge->type() == HeapGraphEdge::kElement
2767  || edge->type() == HeapGraphEdge::kHidden
2768  ? edge->index() : GetStringId(edge->name());
2769  int buffer_pos = 0;
2770  if (!first_edge) {
2771  buffer[buffer_pos++] = ',';
2772  }
2773  buffer_pos = utoa(edge->type(), buffer, buffer_pos);
2774  buffer[buffer_pos++] = ',';
2775  buffer_pos = utoa(edge_name_or_index, buffer, buffer_pos);
2776  buffer[buffer_pos++] = ',';
2777  buffer_pos = utoa(entry_index(edge->to()), buffer, buffer_pos);
2778  buffer[buffer_pos++] = '\n';
2779  buffer[buffer_pos++] = '\0';
2780  writer_->AddString(buffer.start());
2781 }
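Each edge therefore serializes as a bare comma-separated triple, for example "2,137,126\n": the numeric edge type (an index into the edge_types meta array written by SerializeSnapshot), either a string id or an element index, and the destination node given as an offset into the flat nodes array rather than a node ordinal.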
2782 
2783 
2784 void HeapSnapshotJSONSerializer::SerializeEdges() {
2785  List<HeapGraphEdge*>& edges = snapshot_->children();
2786  for (int i = 0; i < edges.length(); ++i) {
2787  ASSERT(i == 0 ||
2788  edges[i - 1]->from()->index() <= edges[i]->from()->index());
2789  SerializeEdge(edges[i], i == 0);
2790  if (writer_->aborted()) return;
2791  }
2792 }
2793 
2794 
2795 void HeapSnapshotJSONSerializer::SerializeNode(HeapEntry* entry) {
2796  // The buffer needs space for 4 unsigned ints, 1 size_t, 5 commas, \n and \0
2797  static const int kBufferSize =
2798  5 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned // NOLINT
2799  + MaxDecimalDigitsIn<sizeof(size_t)>::kUnsigned // NOLINT
2800  + 6 + 1 + 1;
2801  EmbeddedVector<char, kBufferSize> buffer;
2802  int buffer_pos = 0;
2803  if (entry_index(entry) != 0) {
2804  buffer[buffer_pos++] = ',';
2805  }
2806  buffer_pos = utoa(entry->type(), buffer, buffer_pos);
2807  buffer[buffer_pos++] = ',';
2808  buffer_pos = utoa(GetStringId(entry->name()), buffer, buffer_pos);
2809  buffer[buffer_pos++] = ',';
2810  buffer_pos = utoa(entry->id(), buffer, buffer_pos);
2811  buffer[buffer_pos++] = ',';
2812  buffer_pos = utoa(entry->self_size(), buffer, buffer_pos);
2813  buffer[buffer_pos++] = ',';
2814  buffer_pos = utoa(entry->children_count(), buffer, buffer_pos);
2815  buffer[buffer_pos++] = ',';
2816  buffer_pos = utoa(entry->trace_node_id(), buffer, buffer_pos);
2817  buffer[buffer_pos++] = '\n';
2818  buffer[buffer_pos++] = '\0';
2819  writer_->AddString(buffer.start());
2820 }
2821 
2822 
2823 void HeapSnapshotJSONSerializer::SerializeNodes() {
2824  List<HeapEntry>& entries = snapshot_->entries();
2825  for (int i = 0; i < entries.length(); ++i) {
2826  SerializeNode(&entries[i]);
2827  if (writer_->aborted()) return;
2828  }
2829 }
2830 
2831 
2832 void HeapSnapshotJSONSerializer::SerializeSnapshot() {
2833  writer_->AddString("\"title\":\"");
2834  writer_->AddString(snapshot_->title());
2835  writer_->AddString("\"");
2836  writer_->AddString(",\"uid\":");
2837  writer_->AddNumber(snapshot_->uid());
2838  writer_->AddString(",\"meta\":");
2839  // The object describing node serialization layout.
2840  // We use a set of macros to improve readability.
2841 #define JSON_A(s) "[" s "]"
2842 #define JSON_O(s) "{" s "}"
2843 #define JSON_S(s) "\"" s "\""
2844  writer_->AddString(JSON_O(
2845  JSON_S("node_fields") ":" JSON_A(
2846  JSON_S("type") ","
2847  JSON_S("name") ","
2848  JSON_S("id") ","
2849  JSON_S("self_size") ","
2850  JSON_S("edge_count") ","
2851  JSON_S("trace_node_id")) ","
2852  JSON_S("node_types") ":" JSON_A(
2853  JSON_A(
2854  JSON_S("hidden") ","
2855  JSON_S("array") ","
2856  JSON_S("string") ","
2857  JSON_S("object") ","
2858  JSON_S("code") ","
2859  JSON_S("closure") ","
2860  JSON_S("regexp") ","
2861  JSON_S("number") ","
2862  JSON_S("native") ","
2863  JSON_S("synthetic") ","
2864  JSON_S("concatenated string") ","
2865  JSON_S("sliced string")) ","
2866  JSON_S("string") ","
2867  JSON_S("number") ","
2868  JSON_S("number") ","
2869  JSON_S("number") ","
2870  JSON_S("number") ","
2871  JSON_S("number")) ","
2872  JSON_S("edge_fields") ":" JSON_A(
2873  JSON_S("type") ","
2874  JSON_S("name_or_index") ","
2875  JSON_S("to_node")) ","
2876  JSON_S("edge_types") ":" JSON_A(
2877  JSON_A(
2878  JSON_S("context") ","
2879  JSON_S("element") ","
2880  JSON_S("property") ","
2881  JSON_S("internal") ","
2882  JSON_S("hidden") ","
2883  JSON_S("shortcut") ","
2884  JSON_S("weak")) ","
2885  JSON_S("string_or_number") ","
2886  JSON_S("node")) ","
2887  JSON_S("trace_function_info_fields") ":" JSON_A(
2888  JSON_S("function_id") ","
2889  JSON_S("name") ","
2890  JSON_S("script_name") ","
2891  JSON_S("script_id") ","
2892  JSON_S("line") ","
2893  JSON_S("column")) ","
2894  JSON_S("trace_node_fields") ":" JSON_A(
2895  JSON_S("id") ","
2896  JSON_S("function_info_index") ","
2897  JSON_S("count") ","
2898  JSON_S("size") ","
2899  JSON_S("children"))));
2900 #undef JSON_S
2901 #undef JSON_O
2902 #undef JSON_A
2903  writer_->AddString(",\"node_count\":");
2904  writer_->AddNumber(snapshot_->entries().length());
2905  writer_->AddString(",\"edge_count\":");
2906  writer_->AddNumber(snapshot_->edges().length());
2907  writer_->AddString(",\"trace_function_count\":");
2908  uint32_t count = 0;
2909  AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
2910  if (tracker) {
2911  count = tracker->function_info_list().length();
2912  }
2913  writer_->AddNumber(count);
2914 }
2915 
2916 
2917 static void WriteUChar(OutputStreamWriter* w, unibrow::uchar u) {
2918  static const char hex_chars[] = "0123456789ABCDEF";
2919  w->AddString("\\u");
2920  w->AddCharacter(hex_chars[(u >> 12) & 0xf]);
2921  w->AddCharacter(hex_chars[(u >> 8) & 0xf]);
2922  w->AddCharacter(hex_chars[(u >> 4) & 0xf]);
2923  w->AddCharacter(hex_chars[u & 0xf]);
2924 }
2925 
2926 
2927 void HeapSnapshotJSONSerializer::SerializeTraceTree() {
2928  AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
2929  if (!tracker) return;
2930  AllocationTraceTree* traces = tracker->trace_tree();
2931  SerializeTraceNode(traces->root());
2932 }
2933 
2934 
2935 void HeapSnapshotJSONSerializer::SerializeTraceNode(AllocationTraceNode* node) {
2936  // The buffer needs space for 4 unsigned ints, 4 commas, [ and \0
2937  const int kBufferSize =
2938  4 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned // NOLINT
2939  + 4 + 1 + 1;
2940  EmbeddedVector<char, kBufferSize> buffer;
2941  int buffer_pos = 0;
2942  buffer_pos = utoa(node->id(), buffer, buffer_pos);
2943  buffer[buffer_pos++] = ',';
2944  buffer_pos = utoa(node->function_info_index(), buffer, buffer_pos);
2945  buffer[buffer_pos++] = ',';
2946  buffer_pos = utoa(node->allocation_count(), buffer, buffer_pos);
2947  buffer[buffer_pos++] = ',';
2948  buffer_pos = utoa(node->allocation_size(), buffer, buffer_pos);
2949  buffer[buffer_pos++] = ',';
2950  buffer[buffer_pos++] = '[';
2951  buffer[buffer_pos++] = '\0';
2952  writer_->AddString(buffer.start());
2953 
2954  Vector<AllocationTraceNode*> children = node->children();
2955  for (int i = 0; i < children.length(); i++) {
2956  if (i > 0) {
2957  writer_->AddCharacter(',');
2958  }
2959  SerializeTraceNode(children[i]);
2960  }
2961  writer_->AddCharacter(']');
2962 }
2963 
2964 
2965 // 0-based position is converted to 1-based during the serialization.
2966 static int SerializePosition(int position, const Vector<char>& buffer,
2967  int buffer_pos) {
2968  if (position == -1) {
2969  buffer[buffer_pos++] = '0';
2970  } else {
2971  ASSERT(position >= 0);
2972  buffer_pos = utoa(static_cast<unsigned>(position + 1), buffer, buffer_pos);
2973  }
2974  return buffer_pos;
2975 }
2976 
2977 
2978 void HeapSnapshotJSONSerializer::SerializeTraceNodeInfos() {
2979  AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
2980  if (!tracker) return;
2981  // The buffer needs space for 6 unsigned ints, 6 commas, \n and \0
2982  const int kBufferSize =
2983  6 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned // NOLINT
2984  + 6 + 1 + 1;
2985  EmbeddedVector<char, kBufferSize> buffer;
2986  const List<AllocationTracker::FunctionInfo*>& list =
2987  tracker->function_info_list();
2988  bool first_entry = true;
2989  for (int i = 0; i < list.length(); i++) {
2990  AllocationTracker::FunctionInfo* info = list[i];
2991  int buffer_pos = 0;
2992  if (first_entry) {
2993  first_entry = false;
2994  } else {
2995  buffer[buffer_pos++] = ',';
2996  }
2997  buffer_pos = utoa(info->function_id, buffer, buffer_pos);
2998  buffer[buffer_pos++] = ',';
2999  buffer_pos = utoa(GetStringId(info->name), buffer, buffer_pos);
3000  buffer[buffer_pos++] = ',';
3001  buffer_pos = utoa(GetStringId(info->script_name), buffer, buffer_pos);
3002  buffer[buffer_pos++] = ',';
3003  // The cast is safe because script id is a non-negative Smi.
3004  buffer_pos = utoa(static_cast<unsigned>(info->script_id), buffer,
3005  buffer_pos);
3006  buffer[buffer_pos++] = ',';
3007  buffer_pos = SerializePosition(info->line, buffer, buffer_pos);
3008  buffer[buffer_pos++] = ',';
3009  buffer_pos = SerializePosition(info->column, buffer, buffer_pos);
3010  buffer[buffer_pos++] = '\n';
3011  buffer[buffer_pos++] = '\0';
3012  writer_->AddString(buffer.start());
3013  }
3014 }
3015 
3016 
3017 void HeapSnapshotJSONSerializer::SerializeString(const unsigned char* s) {
3018  writer_->AddCharacter('\n');
3019  writer_->AddCharacter('\"');
3020  for ( ; *s != '\0'; ++s) {
3021  switch (*s) {
3022  case '\b':
3023  writer_->AddString("\\b");
3024  continue;
3025  case '\f':
3026  writer_->AddString("\\f");
3027  continue;
3028  case '\n':
3029  writer_->AddString("\\n");
3030  continue;
3031  case '\r':
3032  writer_->AddString("\\r");
3033  continue;
3034  case '\t':
3035  writer_->AddString("\\t");
3036  continue;
3037  case '\"':
3038  case '\\':
3039  writer_->AddCharacter('\\');
3040  writer_->AddCharacter(*s);
3041  continue;
3042  default:
3043  if (*s > 31 && *s < 128) {
3044  writer_->AddCharacter(*s);
3045  } else if (*s <= 31) {
3046  // Special character with no dedicated literal.
3047  WriteUChar(writer_, *s);
3048  } else {
3049  // Convert UTF-8 into \u UTF-16 literal.
3050  unsigned length = 1, cursor = 0;
3051  for ( ; length <= 4 && *(s + length) != '\0'; ++length) { }
3052  unibrow::uchar c = unibrow::Utf8::CalculateValue(s, length, &cursor);
3053  if (c != unibrow::Utf8::kBadChar) {
3054  WriteUChar(writer_, c);
3055  ASSERT(cursor != 0);
3056  s += cursor - 1;
3057  } else {
3058  writer_->AddCharacter('?');
3059  }
3060  }
3061  }
3062  }
3063  writer_->AddCharacter('\"');
3064 }
3065 
3066 
3067 void HeapSnapshotJSONSerializer::SerializeStrings() {
3068  ScopedVector<const unsigned char*> sorted_strings(
3069  strings_.occupancy() + 1);
3070  for (HashMap::Entry* entry = strings_.Start();
3071  entry != NULL;
3072  entry = strings_.Next(entry)) {
3073  int index = static_cast<int>(reinterpret_cast<uintptr_t>(entry->value));
3074  sorted_strings[index] = reinterpret_cast<const unsigned char*>(entry->key);
3075  }
3076  writer_->AddString("\"<dummy>\"");
3077  for (int i = 1; i < sorted_strings.length(); ++i) {
3078  writer_->AddCharacter(',');
3079  SerializeString(sorted_strings[i]);
3080  if (writer_->aborted()) return;
3081  }
3082 }
3083 
3084 
3085 } } // namespace v8::internal
void VisitPointers(Object **start, Object **end)
byte * Address
Definition: globals.h:186
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter NULL
Definition: flags.cc:269
v8::RetainedObjectInfo * info
Object * type_feedback_info()
Definition: objects-inl.h:5819
void SetIndexedReference(HeapGraphEdge::Type type, int parent, int index, HeapEntry *child_entry)
static const int kWeakNextOffset
Definition: objects.h:9880
STATIC_CHECK((kStringRepresentationMask|kStringEncodingMask)==Internals::kFullStringRepresentationMask)
virtual HeapEntry * AllocateEntry(HeapThing ptr)=0
static const int kDefaultCacheOffset
Definition: objects.h:8074
OutputStreamWriter(v8::OutputStream *stream)
static const int kTypeOffset
Definition: objects.h:9597
void TagBuiltinCodeObject(Code *code, const char *name)
SnapshotFiller(HeapSnapshot *snapshot, HeapEntriesMap *entries)
static const int kCodeOffset
Definition: objects.h:7103
#define CHECK_EQ(expected, value)
Definition: checks.h:252
static Object *& Object_at(Address addr)
Definition: v8memory.h:83
static const SnapshotObjectId kGcRootsFirstSubrootId
#define NATIVE_CONTEXT_FIELDS(V)
Definition: contexts.h:99
static const int kGetterOffset
Definition: objects.h:10348
static const int kPrototypeOrInitialMapOffset
Definition: objects.h:7519
const char * ToCString(const v8::String::Utf8Value &value)
static const int kValueOffset
Definition: objects.h:9547
static void MarkVisitedField(HeapObject *obj, int offset)
HeapObjectsMap * heap_object_map() const
Definition: heap-profiler.h:61
static const int kBuiltinsOffset
Definition: objects.h:7610
virtual HeapEntry * AllocateEntry(HeapThing ptr)
#define JSON_A(s)
void PrintF(const char *format,...)
Definition: v8utils.cc:40
void CollectAllGarbage(int flags, const char *gc_reason=NULL, const GCCallbackFlags gc_callback_flags=kNoGCCallbackFlags)
Definition: heap.cc:731
virtual intptr_t GetHash()=0
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on stack replacement do not emit check maps for constant values that have a leaf map
Definition: flags.cc:350
#define JSON_S(s)
bool was_swept_conservatively()
Definition: spaces.h:1883
static const int kTransitionsOrBackPointerOffset
Definition: objects.h:6433
static String * cast(Object *obj)
HeapEntry * AddEntry(Address address, HeapEntry::Type type, const char *name, size_t size)
uint32_t HashSeed()
Definition: heap.h:1831
Isolate * isolate()
Definition: heap-inl.h:624
FindEntryById(SnapshotObjectId id)
GlobalHandlesExtractor(NativeObjectsExplorer *explorer)
static Object * GetObjectFromEntryAddress(Address location_of_address)
Definition: objects-inl.h:4673
static const int kDependentCodeOffset
Definition: objects.h:6438
void VisitPointers(Object **start, Object **end)
static const int kOptimizedCodeMapOffset
Definition: objects.h:7104
static SnapshotObjectId GetNthGcSubrootId(int delta)
static HeapObject * cast(Object *obj)
static const int kGlobalReceiverOffset
Definition: objects.h:7613
static const int kDeoptimizationDataOffset
Definition: objects.h:5584
static AccessorPair * cast(Object *obj)
void CallGCEpilogueCallbacks(GCType gc_type, GCCallbackFlags flags)
Definition: heap.cc:1238
IndexedReferencesExtractor(V8HeapExplorer *generator, HeapObject *parent_obj, int parent)
static Map * cast(Object *obj)
SnapshotObjectId FindOrAddEntry(Address addr, unsigned int size, bool accessed=true)
BasicHeapEntriesAllocator(HeapSnapshot *snapshot, HeapEntry::Type entries_type)
void RemoveSnapshot(HeapSnapshot *snapshot)
kSerializedDataOffset Object
Definition: objects-inl.h:5016
void VisitPointers(Object **start, Object **end)
JSArrayBufferDataEntryAllocator(size_t size, V8HeapExplorer *explorer)
T & at(int i) const
Definition: list.h:90
Vector< T > SubVector(int from, int to)
Definition: utils.h:412
TypeImpl< ZoneTypeConfig > Type
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on stack replacement do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes number of stack frames inspected by the profiler percentage of ICs that must have type info to allow optimization extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long expose natives in global object expose freeBuffer extension expose gc extension under the specified name expose externalize string extension number of stack frames to capture disable builtin natives files print name of functions for which code is generated use random jit cookie to mask large constants trace lazy optimization use adaptive optimizations always try to OSR functions trace optimize function deoptimization minimum length for automatic enable preparsing maximum number of optimization attempts before giving up cache prototype transitions trace debugging JSON request response trace out of bounds accesses to external arrays trace_js_array_abuse automatically set the debug break flag when debugger commands are in the queue abort by crashing maximum length of function source code printed in a 
stack trace max size of the new max size of the old max size of executable always perform global GCs print one trace line following each garbage collection do not print trace line after scavenger collection print statistics of the maximum memory committed for the heap in only print modified registers Don t break for ASM_UNIMPLEMENTED_BREAK macros print stack trace when an illegal exception is thrown randomize hashes to avoid predictable hash Fixed seed to use to hash property Print the time it takes to deserialize the snapshot testing_bool_flag testing_int_flag string flag tmp file in which to serialize heap Print the time it takes to lazily compile hydrogen code stubs concurrent_recompilation concurrent_sweeping Print usage including on console Map counters to a file Enable debugger compile events enable GDBJIT enable GDBJIT interface for all code objects dump only objects containing this substring stress the GC compactor to flush out pretty print source code print source AST function name where to insert a breakpoint print scopes for builtins trace contexts operations print stuff during garbage collection report code statistics after GC report handles after GC trace cache state transitions print interface inference details prints when objects are turned into dictionaries report heap spill statistics along with trace isolate state changes trace regexp bytecode execution Minimal Log all events to the log file Log API events to the log file Log heap samples on garbage collection for the hp2ps tool log positions Log suspect operations Used with turns on browser compatible mode for profiling v8 Specify the name of the log file Enable low level linux profiler Enable perf linux profiler(experimental annotate support).") DEFINE_string(gc_fake_mmap
Builtins * builtins()
Definition: isolate.h:948
static AllocationSite * cast(Object *obj)
static const int kSetterOffset
Definition: objects.h:10349
size_t NumberToSize(Isolate *isolate, Object *number)
void UpdateObjectSize(Address addr, int size)
const char * GetName(Name *name)
virtual const char * GetName(Handle< Object > object)=0
bool IterateAndExtractReferences(SnapshotFiller *filler)
virtual bool IsEquivalent(RetainedObjectInfo *other)
void SetNamedAutoIndexReference(HeapGraphEdge::Type type, int parent, HeapEntry *child_entry)
static const int kHandlerTableOffset
Definition: objects.h:5583
#define ASSERT(condition)
Definition: checks.h:329
const char * GetFormatted(const char *format,...)
SnapshotObjectId PushHeapObjectsStats(OutputStream *stream)
static Script * cast(Object *obj)
V8HeapExplorer(HeapSnapshot *snapshot, SnapshottingProgressReportingInterface *progress, v8::HeapProfiler::ObjectNameResolver *resolver)
unsigned short uint16_t
Definition: unicode.cc:46
static const int kDebugInfoOffset
Definition: objects.h:7112
static JSRegExp * cast(Object *obj)
static const int kNativeContextOffset
Definition: objects.h:7567
#define STRONG_ROOT_LIST(V)
Definition: heap.h:50
static Context * cast(Object *context)
Definition: contexts.h:244
static const int kInitialMapOffset
Definition: objects.h:7114
static SharedFunctionInfo * cast(Object *obj)
void SetTag(Object *obj, const char *tag)
#define CHECK(condition)
Definition: checks.h:75
static uchar CalculateValue(const byte *str, unsigned length, unsigned *cursor)
Definition: unicode.cc:214
#define MAKE_STRING_MAP_CASE(instance_type, size, name, Name)
static const int kInstanceClassNameOffset
Definition: objects.h:7107
#define INTERNALIZED_STRING_LIST(V)
Definition: heap.h:276
#define STRING_TYPE_LIST(V)
Definition: objects.h:459
static const int kDescriptorsOffset
Definition: objects.h:6435
virtual HeapEntry * AllocateEntry(HeapThing ptr)
Factory * factory()
Definition: isolate.h:995
static const int kGlobalContextOffset
Definition: objects.h:7612
virtual ControlOption ReportProgressValue(int done, int total)=0
static const int kContextOffset
Definition: objects.h:7523
static Code * cast(Object *obj)
HeapEntry * FindOrAddEntry(HeapThing ptr, HeapEntriesAllocator *allocator)
static const int kHeaderSize
Definition: objects.h:7614
HeapEntry * AddEntry(HeapThing ptr, HeapEntriesAllocator *allocator)
void SetNamedReference(HeapGraphEdge::Type type, int parent, const char *reference_name, HeapEntry *child_entry)
static const int kDependentCodeOffset
Definition: objects.h:8416
ConstantPoolArray * constant_pool()
Definition: objects-inl.h:4589
static Smi * cast(Object *object)
int operator()(HeapEntry *const *entry)
static const SnapshotObjectId kGcRootsObjectId
SnapshotObjectId GenerateId(v8::RetainedObjectInfo *info)
void SetIndexedAutoIndexReference(HeapGraphEdge::Type type, int parent, HeapEntry *child_entry)
static const int kFirstOffset
Definition: objects.h:9165
static const int kWeakFirstViewOffset
Definition: objects.h:9881
uint32_t ComputePointerHash(void *ptr)
Definition: utils.h:347
static const int kParentOffset
Definition: objects.h:9209
static const int kLiteralsOffset
Definition: objects.h:7524
static const int kNestedSiteOffset
Definition: objects.h:8412
static const int kSourceOffset
Definition: objects.h:6626
#define UNREACHABLE()
Definition: checks.h:52
SnapshotObjectId last_assigned_id() const
T * start() const
Definition: utils.h:426
bool MoveObject(Address from, Address to, int size)
#define STRING_NAME(name, str)
T & last() const
Definition: list.h:91
static JSGlobalProxy * cast(Object *obj)
NativeObjectsExplorer(HeapSnapshot *snapshot, SnapshottingProgressReportingInterface *progress)
String * hidden_string()
Definition: heap.h:1349
static const int kGCMetadataOffset
Definition: objects.h:5589
const intptr_t kFailureTagMask
Definition: v8globals.h:64
static Cell * cast(Object *obj)
const char * GetTag(Object *obj)
#define ROOT_NAME(type, name, camel_name)
static SlicedString * cast(Object *obj)
static void MemCopy(void *dest, const void *src, size_t size)
Definition: platform.h:399
static const int kScopeInfoOffset
Definition: objects.h:7105
static Box * cast(Object *obj)
HeapSnapshotGenerator(HeapSnapshot *snapshot, v8::ActivityControl *control, v8::HeapProfiler::ObjectNameResolver *resolver, Heap *heap)
virtual int GetChunkSize()
Definition: v8-profiler.h:293
static String * GetConstructorName(JSObject *object)
HeapEntry * gc_subroot(int index)
virtual const char * GetLabel()=0
static const int kBufferOffset
Definition: objects.h:9915
JSObject * global_proxy()
Definition: contexts.cc:87
PropertyCellSpace * property_cell_space()
Definition: heap.h:643
const int kPointerSize
Definition: globals.h:268
static const int kTransitionInfoOffset
Definition: objects.h:8411
uint32_t occupancy() const
Definition: hashmap.h:83
static HeapObject *const kInternalRootObject
const int kHeapObjectTag
Definition: v8.h:5473
GlobalHandles * global_handles()
Definition: isolate.h:918
bool IterateAndExtractReferences(SnapshotFiller *filler)
virtual WriteResult WriteHeapStatsChunk(HeapStatsUpdate *data, int count)
Definition: v8-profiler.h:305
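GetChunkSize(), WriteAsciiChunk() and WriteHeapStatsChunk() in this index belong to the public v8::OutputStream interface that the snapshot serializer writes into. Below is a minimal sketch of an embedder-side implementation, assuming the v8-profiler.h API of this V8 version; the class name JsonCollector and the usage shown in the trailing comment are illustrative only.

#include <string>
#include "v8.h"
#include "v8-profiler.h"

// Collects the JSON chunks produced by HeapSnapshot::Serialize() into a string.
class JsonCollector : public v8::OutputStream {
 public:
  virtual int GetChunkSize() { return 64 * 1024; }  // preferred chunk size in bytes
  virtual void EndOfStream() {}                     // called once after the last chunk
  virtual WriteResult WriteAsciiChunk(char* data, int size) {
    json_.append(data, size);
    return kContinue;                               // return kAbort to cancel serialization
  }
  const std::string& json() const { return json_; }
 private:
  std::string json_;
};

// Possible usage (assumption), with an entered isolate and open handle scope:
//   const v8::HeapSnapshot* snapshot = isolate->GetHeapProfiler()->TakeHeapSnapshot(
//       v8::String::NewFromUtf8(isolate, "dump"));
//   JsonCollector out;
//   snapshot->Serialize(&out, v8::HeapSnapshot::kJSON);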
Entry * Lookup(void *key, uint32_t hash, bool insert, AllocationPolicy allocator=AllocationPolicy())
Definition: hashmap.h:131
static const char * Kind2String(Kind kind)
Definition: objects.cc:10803
static const int kNameOffset
Definition: objects.h:7102
OldSpace * old_pointer_space()
Definition: heap.h:638
static const int kPropertiesOffset
Definition: objects.h:2755
static const SnapshotObjectId kFirstAvailableObjectId
List< HeapGraphEdge > & edges()
GCType
Definition: v8.h:4067
void IterateAllRoots(ObjectVisitor *v)
SnapshotObjectId FindEntry(Address addr)
OldSpace * code_space()
Definition: heap.h:640
static const int kMakeHeapIterableMask
Definition: heap.h:1264
const List< FunctionInfo * > & function_info_list() const
#define V8_PTR_PREFIX
Definition: globals.h:220
static const int kNextFunctionLinkOffset
Definition: objects.h:7526
static const int kLineEndsOffset
Definition: objects.h:6633
static const int kElementsOffset
Definition: objects.h:2756
static PropertyCell * cast(Object *obj)
HeapSnapshot(HeapProfiler *profiler, const char *title, unsigned uid)
HeapEntry * FindEntry(HeapThing ptr)
static const int kTypeFeedbackInfoOffset
Definition: objects.h:5586
virtual void VisitPointers(Object **start, Object **end)
void IterateAllRootsWithClassIds(ObjectVisitor *v)
static const int kRelocationInfoOffset
Definition: objects.h:5582
static const int kNonWeakFieldsEndOffset
Definition: objects.h:7525
CellSpace * cell_space()
Definition: heap.h:642
int StrLength(const char *string)
Definition: utils.h:253
static Local< Context > ToLocal(v8::internal::Handle< v8::internal::Context > obj)
static int OffsetOfElementAt(int index)
Definition: objects.h:3070
static const int kNextCodeLinkOffset
Definition: objects.h:5588
static JSArray * cast(Object *obj)
static void Print(const char *format,...)
#define T(name, string, precedence)
Definition: token.cc:48
AllocationTraceTree * trace_tree()
HeapEntry * GetEntryById(SnapshotObjectId id)
V8_INLINE bool IsString() const
Definition: v8.h:6265
#define V8PRIuPTR
Definition: globals.h:230
List< ObjectGroup * > * object_groups()
static int SNPrintF(Vector< char > str, const char *format,...)
void AddRootEntries(SnapshotFiller *filler)
HeapEntry * AddEntry(HeapEntry::Type type, const char *name, SnapshotObjectId id, size_t size, unsigned trace_node_id)
static const int kMapOffset
Definition: objects.h:1890
static const int kFunctionDataOffset
Definition: objects.h:7109
static const int kNormalTypeCacheOffset
Definition: objects.h:8075
static uint32_t HashSequentialString(const schar *chars, int length, uint32_t seed)
Definition: objects-inl.h:6258
virtual WriteResult WriteAsciiChunk(char *data, int size)=0
void IterateRoots(ObjectVisitor *v, VisitMode mode)
Definition: heap.cc:6266
void Sort(int(*cmp)(const T *x, const T *y))
Definition: list-inl.h:216
static const int kSecondOffset
Definition: objects.h:9166
virtual void Dispose()=0
uint32_t ComputeIntegerHash(uint32_t key, uint32_t seed)
Definition: utils.h:322
#define STRUCT_LIST(V)
Definition: objects.h:590
static const SnapshotObjectId kInternalRootObjectId
virtual void VisitEmbedderReference(Object **p, uint16_t class_id)
AllocationTracker * allocation_tracker() const
Definition: heap-profiler.h:58
List< HeapEntry * > * GetSortedEntriesList()
uint32_t SnapshotObjectId
Definition: v8-profiler.h:39
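SnapshotObjectId (v8-profiler.h:39) is the stable, public identifier type the generator assigns to heap objects; kGcRootsObjectId, kFirstAvailableObjectId and last_assigned_id() elsewhere in this list all use it. A small sketch of how an embedder can query it through the public profiler API; the helper name IdOf is made up, and the kUnknownObjectId fallback is an assumption about untracked objects.

#include "v8.h"
#include "v8-profiler.h"

// Returns the profiler-assigned id of a live JS value.
v8::SnapshotObjectId IdOf(v8::Isolate* isolate, v8::Handle<v8::Value> value) {
  v8::HeapProfiler* profiler = isolate->GetHeapProfiler();
  // Assumption: returns HeapProfiler::kUnknownObjectId (0) when the object
  // has not yet been seen by the profiler.
  return profiler->GetObjectId(value);
}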
static const int kInferredNameOffset
Definition: objects.h:7113
size_t GetMemoryUsedByList(const List< T, P > &list)
Definition: list.h:205
#define EXTRACT_CONTEXT_FIELD(index, type, name)
const char * GetCopy(const char *src)
void Pair(HeapThing thing, int entry)
T & first() const
Definition: list.h:92
HeapEntry * AddGcSubrootEntry(int tag)
void * Remove(void *key, uint32_t hash)
Definition: hashmap.h:162
uint32_t capacity() const
Definition: hashmap.h:88
static const int kNameOffset
Definition: objects.h:6627
virtual intptr_t GetElementCount()
Definition: v8-profiler.h:591
InstanceType instance_type()
Definition: objects-inl.h:4012
static const uchar kBadChar
Definition: unicode.h:162
void USE(T)
Definition: globals.h:341
static const int kConstructorOffset
Definition: objects.h:6428
int SortedListBSearch(const List< T > &list, P cmp)
Definition: list-inl.h:241
Handle< JSGlobalObject > & at(int i)
static FixedArray * cast(Object *obj)
static const int kWeakNextOffset
Definition: objects.h:9918
static const int kHeaderSize
Definition: objects.h:2757
void Print(const v8::FunctionCallbackInfo< v8::Value > &args)
void FillReferences(V8HeapExplorer *explorer)
MapSpace * map_space()
Definition: heap.h:641
static const int kBoundFunctionIndex
Definition: objects.h:7534
const int kFailureTag
Definition: v8globals.h:62
void Add(const T &element, AllocationPolicy allocator=AllocationPolicy())
Definition: list-inl.h:39
static const int kScriptOffset
Definition: objects.h:7111
static const int kPrototypeOffset
Definition: objects.h:6427
void Synchronize(VisitorSynchronization::SyncTag tag)
static const int kWeakNextOffset
Definition: objects.h:8418
static const int kSize
Definition: objects.h:7527
GcSubrootsEnumerator(SnapshotFiller *filler, V8HeapExplorer *explorer)
List< HeapGraphEdge * > & children()
HeapObject * obj
virtual void VisitPointers(Object **start, Object **end)
static JSArrayBuffer * cast(Object *obj)
void Synchronize(VisitorSynchronization::SyncTag tag)
static const int kValueOffset
Definition: objects.h:6531
static const int kContextOffset
Definition: objects.h:6630
static const int kNativeContextOffset
Definition: objects.h:7611
int EstimateObjectsCount(HeapIterator *iterator)
virtual void EndOfStream()=0
static GlobalObject * cast(Object *obj)
static const int kBoundThisIndex
Definition: objects.h:7535
static const int kConstructStubOffset
Definition: objects.h:7106
#define STRUCT_MAP_NAME(NAME, Name, name)
#define JSON_O(s)
void DeleteArray(T *array)
Definition: allocation.h:91
T Min(T a, T b)
Definition: utils.h:234
static const int kSharedFunctionInfoOffset
Definition: objects.h:7521
static ConsString * cast(Object *obj)
static CodeCache * cast(Object *obj)
virtual intptr_t GetSizeInBytes()
Definition: v8-profiler.h:594
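GetLabel(), GetGroupLabel(), GetElementCount(), GetSizeInBytes() and Dispose() in this index come from v8::RetainedObjectInfo, which NativeObjectsExplorer consumes to attach embedder-held native objects to the snapshot. A minimal sketch of an embedder-side implementation follows, assuming the v8-profiler.h interface of this release; MyNativeInfo, the label strings and the class id 1 are invented for illustration.

#include <stdint.h>
#include <string.h>
#include "v8.h"
#include "v8-profiler.h"

class MyNativeInfo : public v8::RetainedObjectInfo {
 public:
  explicit MyNativeInfo(intptr_t hash) : hash_(hash) {}
  virtual void Dispose() { delete this; }
  virtual bool IsEquivalent(v8::RetainedObjectInfo* other) {
    return GetHash() == other->GetHash() &&
           strcmp(GetLabel(), other->GetLabel()) == 0;
  }
  virtual intptr_t GetHash() { return hash_; }
  virtual const char* GetLabel() { return "MyNativeObject"; }
  virtual const char* GetGroupLabel() { return "MyNativeGroup"; }
  virtual intptr_t GetElementCount() { return 1; }
  virtual intptr_t GetSizeInBytes() { return 64; }
 private:
  intptr_t hash_;
};

static v8::RetainedObjectInfo* WrapperInfo(uint16_t class_id,
                                           v8::Handle<v8::Value> wrapper) {
  return new MyNativeInfo(static_cast<intptr_t>(class_id));
}

// Possible registration (assumption): once per isolate, for wrappers created
// with SetWrapperClassId(1):
//   isolate->GetHeapProfiler()->SetWrapperClassInfoProvider(1, WrapperInfo);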
virtual HeapEntry * AllocateEntry(HeapThing ptr)
static const int kCodeCacheOffset
Definition: objects.h:6437
static const int kConstantPoolOffset
Definition: objects.h:5598
static const int kBoundArgumentsStartIndex
Definition: objects.h:7536
int expected_size
static const int kDependentCodeOffset
Definition: objects.h:9598
#define MAKE_STRUCT_CASE(NAME, Name, name)
static JSArrayBufferView * cast(Object *obj)
virtual const char * GetGroupLabel()
Definition: v8-profiler.h:585
static JSObject * cast(Object *obj)
OldSpace * old_data_space()
Definition: heap.h:639
unsigned int uchar
Definition: unicode.h:40
Entry * Next(Entry *p) const
Definition: hashmap.h:243
static const char *const kTagNames[kNumberOfSyncTags]
Definition: objects.h:10724
void CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags)
Definition: heap.cc:1221
String * constructor_name()
Definition: objects.cc:1952
static JSGlobalObject * cast(Object *obj)
static JSFunction * cast(Object *obj)