HeapGraphEdge::HeapGraphEdge(Type type, const char* name, int from, int to)
    : type_(type),
      from_index_(from),
      to_index_(to),
      name_(name) {
  ASSERT(type == kContextVariable
      || type == kProperty
      || type == kInternal
      || type == kShortcut
      || type == kWeak);
}


HeapGraphEdge::HeapGraphEdge(Type type, int index, int from, int to)
    : type_(type),
      from_index_(from),
      to_index_(to),
      index_(index) {
  ASSERT(type == kElement || type == kHidden);
}


void HeapGraphEdge::ReplaceToIndexWithEntry(HeapSnapshot* snapshot) {
  to_entry_ = &snapshot->entries()[to_index_];
}
const int HeapEntry::kNoEntry = -1;

HeapEntry::HeapEntry(HeapSnapshot* snapshot,
                     Type type,
                     const char* name,
                     SnapshotObjectId id,
                     size_t self_size,
                     unsigned trace_node_id)
    : type_(type),
      children_count_(0),
      children_index_(-1),
      self_size_(self_size),
      snapshot_(snapshot),
      name_(name),
      id_(id),
      trace_node_id_(trace_node_id) { }


void HeapEntry::SetNamedReference(HeapGraphEdge::Type type,
                                  const char* name,
                                  HeapEntry* entry) {
  HeapGraphEdge edge(type, name, this->index(), entry->index());
  snapshot_->edges().Add(edge);
  ++children_count_;
}


void HeapEntry::SetIndexedReference(HeapGraphEdge::Type type,
                                    int index,
                                    HeapEntry* entry) {
  HeapGraphEdge edge(type, index, this->index(), entry->index());
  snapshot_->edges().Add(edge);
  ++children_count_;
}
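// Note: edges are accumulated in the snapshot-wide edges() list rather than
// per entry; HeapSnapshot::FillChildren() later assigns each entry its slice
// of the shared children array and resolves edge targets from indices to
// HeapEntry pointers.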
void HeapEntry::Print(
    const char* prefix, const char* edge_name, int max_depth, int indent) {
  OS::Print("%6" V8_PTR_PREFIX "d @%6u %*c %s%s: ",
            self_size(), id(), indent, ' ', prefix, edge_name);
  if (type() != kString) {
    OS::Print("%s %.40s\n", TypeAsString(), name_);
  } else {
    OS::Print("\"");
    const char* c = name_;
    while (*c && (c - name_) <= 40) {
      if (*c != '\n')
        OS::Print("%c", *c);
      else
        OS::Print("\\n");
      ++c;
    }
    OS::Print("\"\n");
  }
  if (--max_depth == 0) return;
  Vector<HeapGraphEdge*> ch = children();
  for (int i = 0; i < ch.length(); ++i) {
    HeapGraphEdge& edge = *ch[i];
    const char* edge_prefix = "";
    EmbeddedVector<char, 64> index;
    const char* edge_name = index.start();
    switch (edge.type()) {
      case HeapGraphEdge::kContextVariable:
        edge_prefix = "#";
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kElement:
        OS::SNPrintF(index, "%d", edge.index());
        break;
      case HeapGraphEdge::kInternal:
        edge_prefix = "$";
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kProperty:
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kHidden:
        edge_prefix = "$";
        OS::SNPrintF(index, "%d", edge.index());
        break;
      case HeapGraphEdge::kShortcut:
        edge_prefix = "^";
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kWeak:
        edge_prefix = "w";
        edge_name = edge.name();
        break;
      default:
        OS::SNPrintF(index, "!!! unknown edge type: %d ", edge.type());
    }
    edge.to()->Print(edge_prefix, edge_name, max_depth, indent + 2);
  }
}
const char* HeapEntry::TypeAsString() {
  switch (type()) {
    case kHidden: return "/hidden/";
    case kObject: return "/object/";
    case kClosure: return "/closure/";
    case kString: return "/string/";
    case kCode: return "/code/";
    case kArray: return "/array/";
    case kRegExp: return "/regexp/";
    case kHeapNumber: return "/number/";
    case kNative: return "/native/";
    case kSynthetic: return "/synthetic/";
    case kConsString: return "/concatenated string/";
    case kSlicedString: return "/sliced string/";
    default: return "???";
  }
}
template <size_t ptr_size> struct SnapshotSizeConstants;

template <> struct SnapshotSizeConstants<4> {
  static const int kExpectedHeapGraphEdgeSize = 12;
  static const int kExpectedHeapEntrySize = 28;
};

template <> struct SnapshotSizeConstants<8> {
  static const int kExpectedHeapGraphEdgeSize = 24;
  static const int kExpectedHeapEntrySize = 40;
};


HeapSnapshot::HeapSnapshot(HeapProfiler* profiler,
                           const char* title,
                           unsigned uid)
    : profiler_(profiler),
      title_(title),
      uid_(uid),
      root_index_(HeapEntry::kNoEntry),
      gc_roots_index_(HeapEntry::kNoEntry),
      natives_root_index_(HeapEntry::kNoEntry),
      max_snapshot_js_object_id_(0) {
  STATIC_CHECK(
      sizeof(HeapGraphEdge) ==
      SnapshotSizeConstants<kPointerSize>::kExpectedHeapGraphEdgeSize);
  STATIC_CHECK(
      sizeof(HeapEntry) ==
      SnapshotSizeConstants<kPointerSize>::kExpectedHeapEntrySize);
  USE(SnapshotSizeConstants<4>::kExpectedHeapGraphEdgeSize);
  USE(SnapshotSizeConstants<4>::kExpectedHeapEntrySize);
  USE(SnapshotSizeConstants<8>::kExpectedHeapGraphEdgeSize);
  USE(SnapshotSizeConstants<8>::kExpectedHeapEntrySize);
  for (int i = 0; i < VisitorSynchronization::kNumberOfSyncTags; ++i) {
    gc_subroot_indexes_[i] = HeapEntry::kNoEntry;
  }
}
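// The STATIC_CHECKs above pin the in-memory layout of HeapGraphEdge and
// HeapEntry to the expected size for the current pointer width (4- or 8-byte
// pointers), so an accidental growth of these hot structs is caught at
// compile time rather than showing up as snapshot memory bloat at runtime.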
HeapEntry* HeapSnapshot::AddRootEntry() {
  ASSERT(root_index_ == HeapEntry::kNoEntry);
  ASSERT(entries_.is_empty());  // Root entry must go first.
  HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
                              "",
                              HeapObjectsMap::kInternalRootObjectId,
                              0,
                              0);
  root_index_ = entry->index();
  return entry;
}


HeapEntry* HeapSnapshot::AddGcRootsEntry() {
  ASSERT(gc_roots_index_ == HeapEntry::kNoEntry);
  HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
                              "(GC roots)",
                              HeapObjectsMap::kGcRootsObjectId,
                              0,
                              0);
  gc_roots_index_ = entry->index();
  return entry;
}


HeapEntry* HeapSnapshot::AddGcSubrootEntry(int tag) {
  ASSERT(gc_subroot_indexes_[tag] == HeapEntry::kNoEntry);
  HeapEntry* entry = AddEntry(
      HeapEntry::kSynthetic,
      VisitorSynchronization::kTagNames[tag],
      HeapObjectsMap::GetNthGcSubrootId(tag),
      0,
      0);
  gc_subroot_indexes_[tag] = entry->index();
  return entry;
}


HeapEntry* HeapSnapshot::AddEntry(HeapEntry::Type type,
                                  const char* name,
                                  SnapshotObjectId id,
                                  size_t size,
                                  unsigned trace_node_id) {
  HeapEntry entry(this, type, name, id, size, trace_node_id);
  entries_.Add(entry);
  return &entries_.last();
}
void HeapSnapshot::FillChildren() {
  ASSERT(children().is_empty());
  children().Allocate(edges().length());
  int children_index = 0;
  for (int i = 0; i < entries().length(); ++i) {
    HeapEntry* entry = &entries()[i];
    children_index = entry->set_children_index(children_index);
  }
  ASSERT(edges().length() == children_index);
  for (int i = 0; i < edges().length(); ++i) {
    HeapGraphEdge* edge = &edges()[i];
    edge->ReplaceToIndexWithEntry(this);
    edge->from()->add_child(edge);
  }
}


class FindEntryById {
 public:
  explicit FindEntryById(SnapshotObjectId id) : id_(id) { }
  int operator()(HeapEntry* const* entry) {
    if ((*entry)->id() == id_) return 0;
    return (*entry)->id() < id_ ? -1 : 1;
  }
 private:
  SnapshotObjectId id_;
};


HeapEntry* HeapSnapshot::GetEntryById(SnapshotObjectId id) {
  List<HeapEntry*>* entries_by_id = GetSortedEntriesList();
  // Perform a binary search by id.
  int index = SortedListBSearch(*entries_by_id, FindEntryById(id));
  if (index == -1) return NULL;
  return entries_by_id->at(index);
}


template<class T>
static int SortByIds(const T* entry1_ptr, const T* entry2_ptr) {
  if ((*entry1_ptr)->id() == (*entry2_ptr)->id()) return 0;
  return (*entry1_ptr)->id() < (*entry2_ptr)->id() ? -1 : 1;
}


List<HeapEntry*>* HeapSnapshot::GetSortedEntriesList() {
  if (sorted_entries_.is_empty()) {
    sorted_entries_.Allocate(entries_.length());
    for (int i = 0; i < entries_.length(); ++i) {
      sorted_entries_[i] = &entries_[i];
    }
    sorted_entries_.Sort(SortByIds);
  }
  return &sorted_entries_;
}


void HeapSnapshot::Print(int max_depth) {
  root()->Print("", "", max_depth, 0);
}
static bool AddressesMatch(void* key1, void* key2) {
  return key1 == key2;
}


HeapObjectsMap::HeapObjectsMap(Heap* heap)
    : next_id_(kFirstAvailableObjectId),
      entries_map_(AddressesMatch),
      heap_(heap) {
  // The dummy element at index 0 guarantees that real entries always map to a
  // non-zero index, so a NULL value in entries_map_ means "not present".
  entries_.Add(EntryInfo(0, NULL, 0));
}
bool HeapObjectsMap::MoveObject(Address from, Address to, int object_size) {
  ASSERT(to != NULL);
  ASSERT(from != NULL);
  if (from == to) return false;
  void* from_value = entries_map_.Remove(from, ComputePointerHash(from));
  if (from_value == NULL) {
    // An untracked object moved to an address where a tracked object used to
    // live; the tracked object must have died, so drop its entry.
    void* to_value = entries_map_.Remove(to, ComputePointerHash(to));
    if (to_value != NULL) {
      int to_entry_info_index =
          static_cast<int>(reinterpret_cast<intptr_t>(to_value));
      entries_.at(to_entry_info_index).addr = NULL;
    }
  } else {
    HashMap::Entry* to_entry = entries_map_.Lookup(to, ComputePointerHash(to),
                                                   true);
    if (to_entry->value != NULL) {
      // An entry for the destination address already exists; clear its addr so
      // RemoveDeadEntries does not see two EntryInfos with the same address.
      int to_entry_info_index =
          static_cast<int>(reinterpret_cast<intptr_t>(to_entry->value));
      entries_.at(to_entry_info_index).addr = NULL;
    }
    int from_entry_info_index =
        static_cast<int>(reinterpret_cast<intptr_t>(from_value));
    entries_.at(from_entry_info_index).addr = to;
    if (FLAG_heap_profiler_trace_objects) {
      PrintF("Move object from %p to %p old size %6d new size %6d\n",
             from,
             to,
             entries_.at(from_entry_info_index).size,
             object_size);
    }
    entries_.at(from_entry_info_index).size = object_size;
    to_entry->value = from_value;
  }
  return from_value != NULL;
}
SnapshotObjectId HeapObjectsMap::FindEntry(Address addr) {
  HashMap::Entry* entry = entries_map_.Lookup(addr, ComputePointerHash(addr),
                                              false);
  if (entry == NULL) return 0;
  int entry_index = static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
  EntryInfo& entry_info = entries_.at(entry_index);
  ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
  return entry_info.id;
}


SnapshotObjectId HeapObjectsMap::FindOrAddEntry(Address addr,
                                                unsigned int size,
                                                bool accessed) {
  ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
  HashMap::Entry* entry = entries_map_.Lookup(addr, ComputePointerHash(addr),
                                              true);
  if (entry->value != NULL) {
    int entry_index =
        static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
    EntryInfo& entry_info = entries_.at(entry_index);
    entry_info.accessed = accessed;
    if (FLAG_heap_profiler_trace_objects) {
      PrintF("Update object size : %p with old size %d and new size %d\n",
             addr,
             entry_info.size,
             size);
    }
    entry_info.size = size;
    return entry_info.id;
  }
  entry->value = reinterpret_cast<void*>(entries_.length());
  SnapshotObjectId id = next_id_;
  next_id_ += kObjectIdStep;
  entries_.Add(EntryInfo(id, addr, size, accessed));
  ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
  return id;
}
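// Each tracked address gets a stable SnapshotObjectId; next_id_ advances in
// fixed steps so ids survive GC moves (see MoveObject above) and can be
// compared across successive snapshots.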
void HeapObjectsMap::StopHeapObjectsTracking() {
  time_intervals_.Clear();
}


void HeapObjectsMap::UpdateHeapObjectsMap() {
  if (FLAG_heap_profiler_trace_objects) {
    PrintF("Begin HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
           entries_map_.occupancy());
  }
  heap_->CollectAllGarbage(Heap::kMakeHeapIterableMask,
                           "HeapObjectsMap::UpdateHeapObjectsMap");
  HeapIterator iterator(heap_);
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next()) {
    FindOrAddEntry(obj->address(), obj->Size());
    if (FLAG_heap_profiler_trace_objects) {
      PrintF("Update object      : %p %6d. Next address is %p\n",
             obj->address(),
             obj->Size(),
             obj->address() + obj->Size());
    }
  }
  RemoveDeadEntries();
  if (FLAG_heap_profiler_trace_objects) {
    PrintF("End HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
           entries_map_.occupancy());
  }
}
struct HeapObjectInfo {
  HeapObjectInfo(HeapObject* obj, int expected_size)
    : obj(obj),
      expected_size(expected_size) {
  }

  HeapObject* obj;
  int expected_size;

  bool IsValid() const { return expected_size == obj->Size(); }

  void Print() const {
    if (expected_size == 0) {
      PrintF("Untracked object   : %p %6d. Next address is %p\n",
             obj->address(),
             obj->Size(),
             obj->address() + obj->Size());
    } else if (obj->Size() != expected_size) {
      PrintF("Wrong size %6d: %p %6d. Next address is %p\n",
             expected_size,
             obj->address(),
             obj->Size(),
             obj->address() + obj->Size());
    } else {
      PrintF("Good object      : %p %6d. Next address is %p\n",
             obj->address(),
             expected_size,
             obj->address() + obj->Size());
    }
  }
};


static int comparator(const HeapObjectInfo* a, const HeapObjectInfo* b) {
  if (a->obj < b->obj) return -1;
  if (a->obj > b->obj) return 1;
  return 0;
}
int HeapObjectsMap::FindUntrackedObjects() {
  List<HeapObjectInfo> heap_objects(1000);

  HeapIterator iterator(heap_);
  int untracked = 0;
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next()) {
    HashMap::Entry* entry = entries_map_.Lookup(
        obj->address(), ComputePointerHash(obj->address()), false);
    if (entry == NULL) {
      ++untracked;
      if (FLAG_heap_profiler_trace_objects) {
        heap_objects.Add(HeapObjectInfo(obj, 0));
      }
    } else {
      int entry_index = static_cast<int>(
          reinterpret_cast<intptr_t>(entry->value));
      EntryInfo& entry_info = entries_.at(entry_index);
      if (FLAG_heap_profiler_trace_objects) {
        heap_objects.Add(HeapObjectInfo(obj,
                         static_cast<int>(entry_info.size)));
        if (obj->Size() != static_cast<int>(entry_info.size)) ++untracked;
      } else {
        CHECK_EQ(obj->Size(), static_cast<int>(entry_info.size));
      }
    }
  }
  if (FLAG_heap_profiler_trace_objects) {
    PrintF("\nBegin HeapObjectsMap::FindUntrackedObjects. %d entries in map.\n",
           entries_map_.occupancy());
    heap_objects.Sort(comparator);
    int last_printed_object = -1;
    bool print_next_object = false;
    for (int i = 0; i < heap_objects.length(); ++i) {
      const HeapObjectInfo& object_info = heap_objects[i];
      if (!object_info.IsValid()) {
        if (last_printed_object != i - 1 && i > 0) {
          PrintF("%d objects were skipped\n", i - 1 - last_printed_object);
          heap_objects[i - 1].Print();
        }
        object_info.Print();
        last_printed_object = i;
        print_next_object = true;
      } else if (print_next_object) {
        object_info.Print();
        print_next_object = false;
        last_printed_object = i;
      }
    }
    if (last_printed_object < heap_objects.length() - 1) {
      PrintF("Last %d objects were skipped\n",
             heap_objects.length() - 1 - last_printed_object);
    }
    PrintF("End HeapObjectsMap::FindUntrackedObjects. %d entries in map.\n\n",
           entries_map_.occupancy());
  }
  return untracked;
}
SnapshotObjectId HeapObjectsMap::PushHeapObjectsStats(OutputStream* stream) {
  UpdateHeapObjectsMap();
  time_intervals_.Add(TimeInterval(next_id_));
  int prefered_chunk_size = stream->GetChunkSize();
  List<v8::HeapStatsUpdate> stats_buffer;
  ASSERT(!entries_.is_empty());
  EntryInfo* entry_info = &entries_.first();
  EntryInfo* end_entry_info = &entries_.last() + 1;
  for (int time_interval_index = 0;
       time_interval_index < time_intervals_.length();
       ++time_interval_index) {
    TimeInterval& time_interval = time_intervals_[time_interval_index];
    SnapshotObjectId time_interval_id = time_interval.id;
    uint32_t entries_size = 0;
    EntryInfo* start_entry_info = entry_info;
    while (entry_info < end_entry_info && entry_info->id < time_interval_id) {
      entries_size += entry_info->size;
      ++entry_info;
    }
    uint32_t entries_count =
        static_cast<uint32_t>(entry_info - start_entry_info);
    if (time_interval.count != entries_count ||
        time_interval.size != entries_size) {
      stats_buffer.Add(v8::HeapStatsUpdate(
          time_interval_index,
          time_interval.count = entries_count,
          time_interval.size = entries_size));
      if (stats_buffer.length() >= prefered_chunk_size) {
        OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
            &stats_buffer.first(), stats_buffer.length());
        if (result == OutputStream::kAbort) return last_assigned_id();
        stats_buffer.Clear();
      }
    }
  }
  ASSERT(entry_info == end_entry_info);
  if (!stats_buffer.is_empty()) {
    OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
        &stats_buffer.first(), stats_buffer.length());
    if (result == OutputStream::kAbort) return last_assigned_id();
  }
  stream->EndOfStream();
  return last_assigned_id();
}
void HeapObjectsMap::RemoveDeadEntries() {
  ASSERT(entries_.length() > 0 &&
         entries_.at(0).id == 0 &&
         entries_.at(0).addr == NULL);
  int first_free_entry = 1;
  for (int i = 1; i < entries_.length(); ++i) {
    EntryInfo& entry_info = entries_.at(i);
    if (entry_info.accessed) {
      if (first_free_entry != i) {
        entries_.at(first_free_entry) = entry_info;
      }
      entries_.at(first_free_entry).accessed = false;
      HashMap::Entry* entry = entries_map_.Lookup(
          entry_info.addr, ComputePointerHash(entry_info.addr), false);
      ASSERT(entry);
      entry->value = reinterpret_cast<void*>(first_free_entry);
      ++first_free_entry;
    } else {
      if (entry_info.addr) {
        entries_map_.Remove(entry_info.addr,
                            ComputePointerHash(entry_info.addr));
      }
    }
  }
  entries_.Rewind(first_free_entry);
  ASSERT(static_cast<uint32_t>(entries_.length()) - 1 ==
         entries_map_.occupancy());
}
SnapshotObjectId HeapObjectsMap::GenerateId(v8::RetainedObjectInfo* info) {
  SnapshotObjectId id = static_cast<SnapshotObjectId>(info->GetHash());
  const char* label = info->GetLabel();
  id ^= StringHasher::HashSequentialString(label,
                                           static_cast<int>(strlen(label)),
                                           heap_->HashSeed());
  intptr_t element_count = info->GetElementCount();
  if (element_count != -1)
    id ^= ComputeIntegerHash(static_cast<uint32_t>(element_count),
                             v8::internal::kZeroHashSeed);
  return id << 1;
}


size_t HeapObjectsMap::GetUsedMemorySize() const {
  return
      sizeof(*this) +
      sizeof(HashMap::Entry) * entries_map_.capacity() +
      GetMemoryUsedByList(entries_) +
      GetMemoryUsedByList(time_intervals_);
}
HeapEntriesMap::HeapEntriesMap()
    : entries_(HeapThingsMatch) {
}


int HeapEntriesMap::Map(HeapThing thing) {
  HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), false);
  if (cache_entry == NULL) return HeapEntry::kNoEntry;
  return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
}


void HeapEntriesMap::Pair(HeapThing thing, int entry) {
  HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), true);
  ASSERT(cache_entry->value == NULL);
  cache_entry->value = reinterpret_cast<void*>(static_cast<intptr_t>(entry));
}
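// HeapEntriesMap is a HeapThing -> entry-index cache: the entry index is
// squeezed into the HashMap's void* value slot, and kNoEntry (-1) marks
// "no entry allocated yet".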
bool HeapObjectsSet::Contains(Object* obj) {
  if (!obj->IsHeapObject()) return false;
  HeapObject* object = HeapObject::cast(obj);
  return entries_.Lookup(object, HeapEntriesMap::Hash(object), false) != NULL;
}


void HeapObjectsSet::Insert(Object* obj) {
  if (!obj->IsHeapObject()) return;
  HeapObject* object = HeapObject::cast(obj);
  entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
}


const char* HeapObjectsSet::GetTag(Object* obj) {
  HeapObject* object = HeapObject::cast(obj);
  HashMap::Entry* cache_entry =
      entries_.Lookup(object, HeapEntriesMap::Hash(object), false);
  return cache_entry != NULL
      ? reinterpret_cast<const char*>(cache_entry->value)
      : NULL;
}


void HeapObjectsSet::SetTag(Object* obj, const char* tag) {
  if (!obj->IsHeapObject()) return;
  HeapObject* object = HeapObject::cast(obj);
  HashMap::Entry* cache_entry =
      entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
  cache_entry->value = const_cast<char*>(tag);
}
HeapObject* const V8HeapExplorer::kGcRootsObject =
    reinterpret_cast<HeapObject*>(
        static_cast<intptr_t>(HeapObjectsMap::kGcRootsObjectId));
HeapObject* const V8HeapExplorer::kFirstGcSubrootObject =
    reinterpret_cast<HeapObject*>(
        static_cast<intptr_t>(HeapObjectsMap::kGcRootsFirstSubrootId));
HeapObject* const V8HeapExplorer::kLastGcSubrootObject =
    reinterpret_cast<HeapObject*>(
        static_cast<intptr_t>(HeapObjectsMap::kFirstAvailableObjectId));


V8HeapExplorer::V8HeapExplorer(
    HeapSnapshot* snapshot,
    SnapshottingProgressReportingInterface* progress,
    v8::HeapProfiler::ObjectNameResolver* resolver)
    : heap_(snapshot->profiler()->heap_object_map()->heap()),
      snapshot_(snapshot),
      names_(snapshot_->profiler()->names()),
      heap_object_map_(snapshot_->profiler()->heap_object_map()),
      progress_(progress),
      filler_(NULL),
      global_object_name_resolver_(resolver) {
}
HeapEntry* V8HeapExplorer::AllocateEntry(HeapThing ptr) {
  return AddEntry(reinterpret_cast<HeapObject*>(ptr));
}


HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object) {
  if (object == kInternalRootObject) {
    snapshot_->AddRootEntry();
    return snapshot_->root();
  } else if (object == kGcRootsObject) {
    return snapshot_->AddGcRootsEntry();
  } else if (object >= kFirstGcSubrootObject &&
             object < kLastGcSubrootObject) {
    return snapshot_->AddGcSubrootEntry(GetGcSubrootOrder(object));
  } else if (object->IsJSFunction()) {
    JSFunction* func = JSFunction::cast(object);
    SharedFunctionInfo* shared = func->shared();
    const char* name = shared->bound() ? "native_bind" :
        names_->GetName(String::cast(shared->name()));
    return AddEntry(object, HeapEntry::kClosure, name);
  } else if (object->IsJSRegExp()) {
    JSRegExp* re = JSRegExp::cast(object);
    return AddEntry(object,
                    HeapEntry::kRegExp,
                    names_->GetName(re->Pattern()));
  } else if (object->IsJSObject()) {
    const char* name = names_->GetName(
        GetConstructorName(JSObject::cast(object)));
    if (object->IsJSGlobalObject()) {
      const char* tag = objects_tags_.GetTag(object);
      if (tag != NULL) {
        name = names_->GetFormatted("%s / %s", name, tag);
      }
    }
    return AddEntry(object, HeapEntry::kObject, name);
  } else if (object->IsString()) {
    String* string = String::cast(object);
    if (string->IsConsString())
      return AddEntry(object,
                      HeapEntry::kConsString,
                      "(concatenated string)");
    if (string->IsSlicedString())
      return AddEntry(object,
                      HeapEntry::kSlicedString,
                      "(sliced string)");
    return AddEntry(object, HeapEntry::kString, names_->GetName(string));
  } else if (object->IsCode()) {
    return AddEntry(object, HeapEntry::kCode, "");
  } else if (object->IsSharedFunctionInfo()) {
    String* name = String::cast(SharedFunctionInfo::cast(object)->name());
    return AddEntry(object, HeapEntry::kCode, names_->GetName(name));
  } else if (object->IsScript()) {
    Object* name = Script::cast(object)->name();
    return AddEntry(object,
                    HeapEntry::kCode,
                    name->IsString()
                        ? names_->GetName(String::cast(name))
                        : "");
  } else if (object->IsNativeContext()) {
    return AddEntry(object, HeapEntry::kHidden, "system / NativeContext");
  } else if (object->IsContext()) {
    return AddEntry(object, HeapEntry::kObject, "system / Context");
  } else if (object->IsFixedArray() ||
             object->IsFixedDoubleArray() ||
             object->IsByteArray() ||
             object->IsExternalArray()) {
    return AddEntry(object, HeapEntry::kArray, "");
  } else if (object->IsHeapNumber()) {
    return AddEntry(object, HeapEntry::kHeapNumber, "number");
  }
  return AddEntry(object, HeapEntry::kHidden, GetSystemEntryName(object));
}


HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object,
                                    HeapEntry::Type type,
                                    const char* name) {
  return AddEntry(object->address(), type, name, object->Size());
}


HeapEntry* V8HeapExplorer::AddEntry(Address address,
                                    HeapEntry::Type type,
                                    const char* name,
                                    size_t size) {
  SnapshotObjectId object_id = heap_object_map_->FindOrAddEntry(
      address, static_cast<unsigned int>(size));
  unsigned trace_node_id = 0;
  if (AllocationTracker* allocation_tracker =
      snapshot_->profiler()->allocation_tracker()) {
    trace_node_id =
        allocation_tracker->address_to_trace()->GetTraceNodeId(address);
  }
  return snapshot_->AddEntry(type, name, object_id, size, trace_node_id);
}
class SnapshotFiller : public SnapshotFillerInterface {
 public:
  explicit SnapshotFiller(HeapSnapshot* snapshot, HeapEntriesMap* entries)
      : snapshot_(snapshot),
        names_(snapshot->profiler()->names()),
        entries_(entries) { }
  HeapEntry* AddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
    HeapEntry* entry = allocator->AllocateEntry(ptr);
    entries_->Pair(ptr, entry->index());
    return entry;
  }
  HeapEntry* FindEntry(HeapThing ptr) {
    int index = entries_->Map(ptr);
    return index != HeapEntry::kNoEntry ? &snapshot_->entries()[index] : NULL;
  }
  HeapEntry* FindOrAddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
    HeapEntry* entry = FindEntry(ptr);
    return entry != NULL ? entry : AddEntry(ptr, allocator);
  }
  void SetIndexedReference(HeapGraphEdge::Type type,
                           int parent,
                           int index,
                           HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    parent_entry->SetIndexedReference(type, index, child_entry);
  }
  void SetIndexedAutoIndexReference(HeapGraphEdge::Type type,
                                    int parent,
                                    HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    int index = parent_entry->children_count() + 1;
    parent_entry->SetIndexedReference(type, index, child_entry);
  }
  void SetNamedReference(HeapGraphEdge::Type type,
                         int parent,
                         const char* reference_name,
                         HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    parent_entry->SetNamedReference(type, reference_name, child_entry);
  }
  void SetNamedAutoIndexReference(HeapGraphEdge::Type type,
                                  int parent,
                                  HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    int index = parent_entry->children_count() + 1;
    parent_entry->SetNamedReference(
        type, names_->GetName(index), child_entry);
  }

 private:
  HeapSnapshot* snapshot_;
  StringsStorage* names_;
  HeapEntriesMap* entries_;
};
class GcSubrootsEnumerator : public ObjectVisitor {
 public:
  GcSubrootsEnumerator(
      SnapshotFillerInterface* filler, V8HeapExplorer* explorer)
      : filler_(filler),
        explorer_(explorer),
        previous_object_count_(0),
        object_count_(0) {
  }
  void VisitPointers(Object** start, Object** end) {
    object_count_ += end - start;
  }
  void Synchronize(VisitorSynchronization::SyncTag tag) {
    // Skip empty subroots.
    if (previous_object_count_ != object_count_) {
      previous_object_count_ = object_count_;
      filler_->AddEntry(V8HeapExplorer::GetNthGcSubrootObject(tag), explorer_);
    }
  }
 private:
  SnapshotFillerInterface* filler_;
  V8HeapExplorer* explorer_;
  intptr_t previous_object_count_;
  intptr_t object_count_;
};


void V8HeapExplorer::AddRootEntries(SnapshotFillerInterface* filler) {
  filler->AddEntry(kInternalRootObject, this);
  filler->AddEntry(kGcRootsObject, this);
  GcSubrootsEnumerator enumerator(filler, this);
  heap_->IterateRoots(&enumerator, VISIT_ALL);
}
const char* V8HeapExplorer::GetSystemEntryName(HeapObject* object) {
  switch (object->map()->instance_type()) {
    case MAP_TYPE:
      switch (Map::cast(object)->instance_type()) {
#define MAKE_STRING_MAP_CASE(instance_type, size, name, Name) \
        case instance_type: return "system / Map (" #Name ")";
      STRING_TYPE_LIST(MAKE_STRING_MAP_CASE)
#undef MAKE_STRING_MAP_CASE
        default: return "system / Map";
      }
#define MAKE_STRUCT_CASE(NAME, Name, name) \
    case NAME##_TYPE: return "system / "#Name;
  STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    default: return "system";
  }
}


int V8HeapExplorer::EstimateObjectsCount(HeapIterator* iterator) {
  int objects_count = 0;
  for (HeapObject* obj = iterator->next();
       obj != NULL;
       obj = iterator->next()) {
    objects_count++;
  }
  return objects_count;
}
class IndexedReferencesExtractor : public ObjectVisitor {
 public:
  IndexedReferencesExtractor(V8HeapExplorer* generator,
                             HeapObject* parent_obj,
                             int parent)
      : generator_(generator),
        parent_obj_(parent_obj),
        parent_(parent),
        next_index_(0) {
  }
  void VisitCodeEntry(Address entry_address) {
    Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
    generator_->SetInternalReference(parent_obj_, parent_, "code", code);
    generator_->TagCodeObject(code);
  }
  void VisitPointers(Object** start, Object** end) {
    for (Object** p = start; p < end; p++) {
      ++next_index_;
      if (CheckVisitedAndUnmark(p)) continue;
      generator_->SetHiddenReference(parent_obj_, parent_, next_index_, *p);
    }
  }
  static void MarkVisitedField(HeapObject* obj, int offset) {
    if (offset < 0) return;
    Address field = obj->address() + offset;
    ASSERT(Memory::Object_at(field)->IsHeapObject());
    intptr_t untagged = reinterpret_cast<intptr_t>(Memory::Object_at(field)) &
        ~kHeapObjectTagMask;
    Memory::Object_at(field) =
        reinterpret_cast<Object*>(untagged | kFailureTag);
  }

 private:
  bool CheckVisitedAndUnmark(Object** field) {
    if ((*field)->IsFailure()) {
      intptr_t untagged =
          reinterpret_cast<intptr_t>(*field) & ~kFailureTagMask;
      *field = reinterpret_cast<Object*>(untagged | kHeapObjectTag);
      ASSERT((*field)->IsHeapObject());
      return true;
    }
    return false;
  }
  V8HeapExplorer* generator_;
  HeapObject* parent_obj_;
  int parent_;
  int next_index_;
};
void V8HeapExplorer::ExtractReferences(HeapObject* obj) {
  HeapEntry* heap_entry = GetEntry(obj);
  if (heap_entry == NULL) return;  // No interest in this object.
  int entry = heap_entry->index();

  if (obj->IsJSGlobalProxy()) {
    ExtractJSGlobalProxyReferences(entry, JSGlobalProxy::cast(obj));
  } else if (obj->IsJSArrayBuffer()) {
    ExtractJSArrayBufferReferences(entry, JSArrayBuffer::cast(obj));
  } else if (obj->IsJSObject()) {
    ExtractJSObjectReferences(entry, JSObject::cast(obj));
  } else if (obj->IsString()) {
    ExtractStringReferences(entry, String::cast(obj));
  } else if (obj->IsContext()) {
    ExtractContextReferences(entry, Context::cast(obj));
  } else if (obj->IsMap()) {
    ExtractMapReferences(entry, Map::cast(obj));
  } else if (obj->IsSharedFunctionInfo()) {
    ExtractSharedFunctionInfoReferences(entry, SharedFunctionInfo::cast(obj));
  } else if (obj->IsScript()) {
    ExtractScriptReferences(entry, Script::cast(obj));
  } else if (obj->IsAccessorPair()) {
    ExtractAccessorPairReferences(entry, AccessorPair::cast(obj));
  } else if (obj->IsCodeCache()) {
    ExtractCodeCacheReferences(entry, CodeCache::cast(obj));
  } else if (obj->IsCode()) {
    ExtractCodeReferences(entry, Code::cast(obj));
  } else if (obj->IsBox()) {
    ExtractBoxReferences(entry, Box::cast(obj));
  } else if (obj->IsCell()) {
    ExtractCellReferences(entry, Cell::cast(obj));
  } else if (obj->IsPropertyCell()) {
    ExtractPropertyCellReferences(entry, PropertyCell::cast(obj));
  } else if (obj->IsAllocationSite()) {
    ExtractAllocationSiteReferences(entry, AllocationSite::cast(obj));
  }
  SetInternalReference(obj, entry, "map", obj->map(), HeapObject::kMapOffset);

  // Extract unvisited fields as hidden references and restore tags
  // of visited fields.
  IndexedReferencesExtractor refs_extractor(this, obj, entry);
  obj->Iterate(&refs_extractor);
}
void V8HeapExplorer::ExtractJSGlobalProxyReferences(
    int entry, JSGlobalProxy* proxy) {
  SetInternalReference(proxy, entry,
                       "native_context", proxy->native_context());
}


void V8HeapExplorer::ExtractJSObjectReferences(
    int entry, JSObject* js_obj) {
  HeapObject* obj = js_obj;
  ExtractClosureReferences(js_obj, entry);
  ExtractPropertyReferences(js_obj, entry);
  ExtractElementReferences(js_obj, entry);
  ExtractInternalReferences(js_obj, entry);
  SetPropertyReference(
      obj, entry, heap_->proto_string(), js_obj->GetPrototype());
  if (obj->IsJSFunction()) {
    JSFunction* js_fun = JSFunction::cast(js_obj);
    Object* proto_or_map = js_fun->prototype_or_initial_map();
    if (!proto_or_map->IsTheHole()) {
      if (!proto_or_map->IsMap()) {
        SetPropertyReference(
            obj, entry, heap_->prototype_string(), proto_or_map);
      } else {
        SetPropertyReference(
            obj, entry, heap_->prototype_string(), js_fun->prototype());
        SetInternalReference(
            obj, entry, "initial_map", proto_or_map);
      }
    }
    SharedFunctionInfo* shared_info = js_fun->shared();
    // A JSFunction holds either bindings or literals in the same field.
    bool bound = shared_info->bound();
    TagObject(js_fun->literals_or_bindings(),
              bound ? "(function bindings)" : "(function literals)");
    SetInternalReference(js_fun, entry,
                         bound ? "bindings" : "literals",
                         js_fun->literals_or_bindings());
    TagObject(shared_info, "(shared function info)");
    SetInternalReference(js_fun, entry,
                         "shared", shared_info);
    TagObject(js_fun->context(), "(context)");
    SetInternalReference(js_fun, entry,
                         "context", js_fun->context());
    SetWeakReference(js_fun, entry,
                     "next_function_link", js_fun->next_function_link());
  } else if (obj->IsGlobalObject()) {
    GlobalObject* global_obj = GlobalObject::cast(obj);
    SetInternalReference(global_obj, entry,
                         "builtins", global_obj->builtins());
    SetInternalReference(global_obj, entry,
                         "native_context", global_obj->native_context());
    SetInternalReference(global_obj, entry,
                         "global_context", global_obj->global_context());
    SetInternalReference(global_obj, entry,
                         "global_receiver", global_obj->global_receiver());
  } else if (obj->IsJSArrayBufferView()) {
    JSArrayBufferView* view = JSArrayBufferView::cast(obj);
    SetInternalReference(view, entry, "buffer", view->buffer());
    SetWeakReference(view, entry, "weak_next", view->weak_next());
  }
  TagObject(js_obj->properties(), "(object properties)");
  SetInternalReference(obj, entry,
                       "properties", js_obj->properties());
  TagObject(js_obj->elements(), "(object elements)");
  SetInternalReference(obj, entry,
                       "elements", js_obj->elements());
}
void V8HeapExplorer::ExtractStringReferences(int entry, String* string) {
  if (string->IsConsString()) {
    ConsString* cs = ConsString::cast(string);
    SetInternalReference(cs, entry, "first", cs->first());
    SetInternalReference(cs, entry, "second", cs->second());
  } else if (string->IsSlicedString()) {
    SlicedString* ss = SlicedString::cast(string);
    SetInternalReference(ss, entry, "parent", ss->parent());
  }
}
void V8HeapExplorer::ExtractContextReferences(int entry, Context* context) {
  if (context == context->declaration_context()) {
    ScopeInfo* scope_info = context->closure()->shared()->scope_info();
    // Add context allocated locals.
    int context_locals = scope_info->ContextLocalCount();
    for (int i = 0; i < context_locals; ++i) {
      String* local_name = scope_info->ContextLocalName(i);
      int idx = Context::MIN_CONTEXT_SLOTS + i;
      SetContextReference(context, entry, local_name, context->get(idx),
                          Context::OffsetOfElementAt(idx));
    }
    if (scope_info->HasFunctionName()) {
      String* name = scope_info->FunctionName();
      VariableMode mode;
      int idx = scope_info->FunctionContextSlotIndex(name, &mode);
      if (idx >= 0) {
        SetContextReference(context, entry, name, context->get(idx),
                            Context::OffsetOfElementAt(idx));
      }
    }
  }

#define EXTRACT_CONTEXT_FIELD(index, type, name) \
  if (Context::index < Context::FIRST_WEAK_SLOT || \
      Context::index == Context::MAP_CACHE_INDEX) { \
    SetInternalReference(context, entry, #name, context->get(Context::index), \
        FixedArray::OffsetOfElementAt(Context::index)); \
  } else { \
    SetWeakReference(context, entry, #name, context->get(Context::index), \
        FixedArray::OffsetOfElementAt(Context::index)); \
  }
  if (context->IsNativeContext()) {
    TagObject(context->jsfunction_result_caches(),
              "(context func. result caches)");
    TagObject(context->normalized_map_cache(), "(context norm. map cache)");
    TagObject(context->runtime_context(), "(runtime context)");
    TagObject(context->embedder_data(), "(context data)");
    EXTRACT_CONTEXT_FIELD(OPTIMIZED_FUNCTIONS_LIST, unused,
                          optimized_functions_list);
  }
#undef EXTRACT_CONTEXT_FIELD
}
void V8HeapExplorer::ExtractMapReferences(int entry, Map* map) {
  if (map->HasTransitionArray()) {
    TransitionArray* transitions = map->transitions();
    int transitions_entry = GetEntry(transitions)->index();
    Object* back_pointer = transitions->back_pointer_storage();
    TagObject(back_pointer, "(back pointer)");
    SetInternalReference(transitions, transitions_entry,
                         "back_pointer", back_pointer);
    TagObject(transitions, "(transition array)");
    SetInternalReference(map, entry,
                         "transitions", transitions);
  } else {
    Object* back_pointer = map->GetBackPointer();
    TagObject(back_pointer, "(back pointer)");
    SetInternalReference(map, entry,
                         "back_pointer", back_pointer);
  }
  DescriptorArray* descriptors = map->instance_descriptors();
  TagObject(descriptors, "(map descriptors)");
  SetInternalReference(map, entry,
                       "descriptors", descriptors);
  SetInternalReference(map, entry,
                       "code_cache", map->code_cache());
  SetInternalReference(map, entry,
                       "prototype", map->prototype());
  SetInternalReference(map, entry,
                       "constructor", map->constructor());
  TagObject(map->dependent_code(), "(dependent code)");
  SetInternalReference(map, entry,
                       "dependent_code", map->dependent_code());
}
void V8HeapExplorer::ExtractSharedFunctionInfoReferences(
    int entry, SharedFunctionInfo* shared) {
  HeapObject* obj = shared;
  String* shared_name = shared->DebugName();
  const char* name = NULL;
  if (shared_name != *heap_->isolate()->factory()->empty_string()) {
    name = names_->GetName(shared_name);
    TagObject(shared->code(), names_->GetFormatted("(code for %s)", name));
  } else {
    TagObject(shared->code(), names_->GetFormatted("(%s code)",
        Code::Kind2String(shared->code()->kind())));
  }

  SetInternalReference(obj, entry,
                       "name", shared->name());
  SetInternalReference(obj, entry,
                       "code", shared->code());
  TagObject(shared->scope_info(), "(function scope info)");
  SetInternalReference(obj, entry,
                       "scope_info", shared->scope_info());
  SetInternalReference(obj, entry,
                       "instance_class_name", shared->instance_class_name());
  SetInternalReference(obj, entry,
                       "script", shared->script());
  const char* construct_stub_name = name ?
      names_->GetFormatted("(construct stub code for %s)", name) :
      "(construct stub code)";
  TagObject(shared->construct_stub(), construct_stub_name);
  SetInternalReference(obj, entry,
                       "construct_stub", shared->construct_stub());
  SetInternalReference(obj, entry,
                       "function_data", shared->function_data());
  SetInternalReference(obj, entry,
                       "debug_info", shared->debug_info());
  SetInternalReference(obj, entry,
                       "inferred_name", shared->inferred_name());
  SetInternalReference(obj, entry,
                       "optimized_code_map", shared->optimized_code_map());
  SetWeakReference(obj, entry,
                   "initial_map", shared->initial_map());
}
void V8HeapExplorer::ExtractScriptReferences(int entry, Script* script) {
  HeapObject* obj = script;
  SetInternalReference(obj, entry,
                       "source", script->source());
  SetInternalReference(obj, entry,
                       "name", script->name());
  SetInternalReference(obj, entry,
                       "context_data", script->context_data());
  TagObject(script->line_ends(), "(script line ends)");
  SetInternalReference(obj, entry,
                       "line_ends", script->line_ends());
}


void V8HeapExplorer::ExtractAccessorPairReferences(
    int entry, AccessorPair* accessors) {
  SetInternalReference(accessors, entry, "getter", accessors->getter());
  SetInternalReference(accessors, entry, "setter", accessors->setter());
}
void V8HeapExplorer::ExtractCodeCacheReferences(
    int entry, CodeCache* code_cache) {
  TagObject(code_cache->default_cache(), "(default code cache)");
  SetInternalReference(code_cache, entry,
                       "default_cache", code_cache->default_cache());
  TagObject(code_cache->normal_type_cache(), "(code type cache)");
  SetInternalReference(code_cache, entry,
                       "type_cache", code_cache->normal_type_cache());
}


void V8HeapExplorer::TagBuiltinCodeObject(Code* code, const char* name) {
  TagObject(code, names_->GetFormatted("(%s builtin)", name));
}


void V8HeapExplorer::TagCodeObject(Code* code) {
  if (code->kind() == Code::STUB) {
    TagObject(code, names_->GetFormatted(
        "(%s code)", CodeStub::MajorName(
            static_cast<CodeStub::Major>(code->major_key()), true)));
  }
}
void V8HeapExplorer::ExtractCodeReferences(int entry, Code* code) {
  TagCodeObject(code);
  TagObject(code->relocation_info(), "(code relocation info)");
  SetInternalReference(code, entry,
                       "relocation_info", code->relocation_info());
  SetInternalReference(code, entry,
                       "handler_table", code->handler_table());
  TagObject(code->deoptimization_data(), "(code deopt data)");
  SetInternalReference(code, entry,
                       "deoptimization_data", code->deoptimization_data());
  if (code->kind() == Code::FUNCTION) {
    SetInternalReference(code, entry,
                         "type_feedback_info", code->type_feedback_info());
  }
  SetInternalReference(code, entry,
                       "gc_metadata", code->gc_metadata());
  SetInternalReference(code, entry,
                       "constant_pool", code->constant_pool());
  if (code->kind() == Code::OPTIMIZED_FUNCTION) {
    SetWeakReference(code, entry,
                     "next_code_link", code->next_code_link());
  }
}
void V8HeapExplorer::ExtractBoxReferences(int entry, Box* box) {
  SetInternalReference(box, entry, "value", box->value());
}


void V8HeapExplorer::ExtractCellReferences(int entry, Cell* cell) {
  SetInternalReference(cell, entry, "value", cell->value());
}


void V8HeapExplorer::ExtractPropertyCellReferences(int entry,
                                                   PropertyCell* cell) {
  ExtractCellReferences(entry, cell);
  SetInternalReference(cell, entry, "type", cell->type());
  SetInternalReference(cell, entry, "dependent_code", cell->dependent_code());
}


void V8HeapExplorer::ExtractAllocationSiteReferences(int entry,
                                                     AllocationSite* site) {
  SetInternalReference(site, entry,
                       "transition_info", site->transition_info());
  SetInternalReference(site, entry,
                       "nested_site", site->nested_site());
  SetInternalReference(site, entry,
                       "dependent_code", site->dependent_code());
}
class JSArrayBufferDataEntryAllocator : public HeapEntriesAllocator {
 public:
  JSArrayBufferDataEntryAllocator(size_t size, V8HeapExplorer* explorer)
      : size_(size)
      , explorer_(explorer) {
  }
  virtual HeapEntry* AllocateEntry(HeapThing ptr) {
    return explorer_->AddEntry(
        static_cast<Address>(ptr),
        HeapEntry::kNative, "system / JSArrayBufferData", size_);
  }
 private:
  size_t size_;
  V8HeapExplorer* explorer_;
};


void V8HeapExplorer::ExtractJSArrayBufferReferences(
    int entry, JSArrayBuffer* buffer) {
  SetWeakReference(buffer, entry, "weak_next", buffer->weak_next());
  SetWeakReference(buffer, entry,
                   "weak_first_view", buffer->weak_first_view());
  // Set up a reference to the native memory backing store.
  if (!buffer->backing_store())
    return;
  size_t data_size = NumberToSize(heap_->isolate(), buffer->byte_length());
  JSArrayBufferDataEntryAllocator allocator(data_size, this);
  HeapEntry* data_entry =
      filler_->AddEntry(buffer->backing_store(), &allocator);
  filler_->SetNamedReference(HeapGraphEdge::kInternal,
                             entry, "backing_store", data_entry);
}
void V8HeapExplorer::ExtractClosureReferences(JSObject* js_obj, int entry) {
  if (!js_obj->IsJSFunction()) return;

  JSFunction* func = JSFunction::cast(js_obj);
  if (func->shared()->bound()) {
    FixedArray* bindings = func->function_bindings();
    SetNativeBindReference(js_obj, entry, "bound_this",
                           bindings->get(JSFunction::kBoundThisIndex));
    SetNativeBindReference(js_obj, entry, "bound_function",
                           bindings->get(JSFunction::kBoundFunctionIndex));
    for (int i = JSFunction::kBoundArgumentsStartIndex;
         i < bindings->length(); i++) {
      const char* reference_name = names_->GetFormatted(
          "bound_argument_%d",
          i - JSFunction::kBoundArgumentsStartIndex);
      SetNativeBindReference(js_obj, entry, reference_name,
                             bindings->get(i));
    }
  }
}
void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj, int entry) {
  if (js_obj->HasFastProperties()) {
    DescriptorArray* descs = js_obj->map()->instance_descriptors();
    int real_size = js_obj->map()->NumberOfOwnDescriptors();
    for (int i = 0; i < real_size; i++) {
      switch (descs->GetType(i)) {
        case FIELD: {
          int index = descs->GetFieldIndex(i);
          Name* k = descs->GetKey(i);
          if (index < js_obj->map()->inobject_properties()) {
            Object* value = js_obj->InObjectPropertyAt(index);
            if (k != heap_->hidden_string()) {
              SetPropertyReference(
                  js_obj, entry, k, value, NULL,
                  js_obj->GetInObjectPropertyOffset(index));
            } else {
              TagObject(value, "(hidden properties)");
              SetInternalReference(
                  js_obj, entry,
                  "hidden_properties", value,
                  js_obj->GetInObjectPropertyOffset(index));
            }
          } else {
            Object* value = js_obj->RawFastPropertyAt(index);
            if (k != heap_->hidden_string()) {
              SetPropertyReference(js_obj, entry, k, value);
            } else {
              TagObject(value, "(hidden properties)");
              SetInternalReference(js_obj, entry, "hidden_properties", value);
            }
          }
          break;
        }
        case CONSTANT:
          SetPropertyReference(
              js_obj, entry,
              descs->GetKey(i), descs->GetConstant(i));
          break;
        case CALLBACKS:
          ExtractAccessorPairProperty(
              js_obj, entry,
              descs->GetKey(i), descs->GetValue(i));
          break;
        default:
          break;
      }
    }
  } else {
    NameDictionary* dictionary = js_obj->property_dictionary();
    int length = dictionary->Capacity();
    for (int i = 0; i < length; ++i) {
      Object* k = dictionary->KeyAt(i);
      if (dictionary->IsKey(k)) {
        Object* target = dictionary->ValueAt(i);
        // We assume that global objects can only have slow properties.
        Object* value = target->IsPropertyCell()
            ? PropertyCell::cast(target)->value()
            : target;
        if (k == heap_->hidden_string()) {
          TagObject(value, "(hidden properties)");
          SetInternalReference(js_obj, entry, "hidden_properties", value);
          continue;
        }
        if (ExtractAccessorPairProperty(js_obj, entry, k, value)) continue;
        SetPropertyReference(js_obj, entry, String::cast(k), value);
      }
    }
  }
}
bool V8HeapExplorer::ExtractAccessorPairProperty(
    JSObject* js_obj, int entry, Object* key, Object* callback_obj) {
  if (!callback_obj->IsAccessorPair()) return false;
  AccessorPair* accessors = AccessorPair::cast(callback_obj);
  Object* getter = accessors->getter();
  if (!getter->IsOddball()) {
    SetPropertyReference(js_obj, entry, String::cast(key), getter, "get %s");
  }
  Object* setter = accessors->setter();
  if (!setter->IsOddball()) {
    SetPropertyReference(js_obj, entry, String::cast(key), setter, "set %s");
  }
  return true;
}
void V8HeapExplorer::ExtractElementReferences(JSObject* js_obj, int entry) {
  if (js_obj->HasFastObjectElements()) {
    FixedArray* elements = FixedArray::cast(js_obj->elements());
    int length = js_obj->IsJSArray() ?
        Smi::cast(JSArray::cast(js_obj)->length())->value() :
        elements->length();
    for (int i = 0; i < length; ++i) {
      if (!elements->get(i)->IsTheHole()) {
        SetElementReference(js_obj, entry, i, elements->get(i));
      }
    }
  } else if (js_obj->HasDictionaryElements()) {
    SeededNumberDictionary* dictionary = js_obj->element_dictionary();
    int length = dictionary->Capacity();
    for (int i = 0; i < length; ++i) {
      Object* k = dictionary->KeyAt(i);
      if (dictionary->IsKey(k)) {
        ASSERT(k->IsNumber());
        uint32_t index = static_cast<uint32_t>(k->Number());
        SetElementReference(js_obj, entry, index, dictionary->ValueAt(i));
      }
    }
  }
}
void V8HeapExplorer::ExtractInternalReferences(JSObject* js_obj, int entry) {
  int length = js_obj->GetInternalFieldCount();
  for (int i = 0; i < length; ++i) {
    Object* o = js_obj->GetInternalField(i);
    SetInternalReference(
        js_obj, entry, i, o, js_obj->GetInternalFieldOffset(i));
  }
}
String* V8HeapExplorer::GetConstructorName(JSObject* object) {
  Heap* heap = object->GetHeap();
  if (object->IsJSFunction()) return heap->closure_string();
  String* constructor_name = object->constructor_name();
  if (constructor_name == heap->Object_string()) {
    // Look up an immediate "constructor" property; if it is a function,
    // return its name.
    LookupResult result(heap->isolate());
    object->LocalLookupRealNamedProperty(heap->constructor_string(), &result);
    if (!result.IsFound()) return object->constructor_name();

    Object* constructor_prop = result.GetLazyValue();
    if (constructor_prop->IsJSFunction()) {
      Object* maybe_name =
          JSFunction::cast(constructor_prop)->shared()->name();
      if (maybe_name->IsString()) {
        String* name = String::cast(maybe_name);
        if (name->length() > 0) return name;
      }
    }
  }
  return object->constructor_name();
}


HeapEntry* V8HeapExplorer::GetEntry(Object* obj) {
  if (!obj->IsHeapObject()) return NULL;
  return filler_->FindOrAddEntry(obj, this);
}
class RootsReferencesExtractor : public ObjectVisitor {
 private:
  struct IndexTag {
    IndexTag(int index, VisitorSynchronization::SyncTag tag)
        : index(index), tag(tag) { }
    int index;
    VisitorSynchronization::SyncTag tag;
  };

 public:
  explicit RootsReferencesExtractor(Heap* heap)
      : collecting_all_references_(false),
        previous_reference_count_(0),
        heap_(heap) {
  }

  void VisitPointers(Object** start, Object** end) {
    if (collecting_all_references_) {
      for (Object** p = start; p < end; p++) all_references_.Add(*p);
    } else {
      for (Object** p = start; p < end; p++) strong_references_.Add(*p);
    }
  }

  void SetCollectingAllReferences() { collecting_all_references_ = true; }

  void FillReferences(V8HeapExplorer* explorer) {
    ASSERT(strong_references_.length() <= all_references_.length());
    Builtins* builtins = heap_->isolate()->builtins();
    for (int i = 0; i < reference_tags_.length(); ++i) {
      explorer->SetGcRootsReference(reference_tags_[i].tag);
    }
    int strong_index = 0, all_index = 0, tags_index = 0, builtin_index = 0;
    while (all_index < all_references_.length()) {
      bool is_strong = strong_index < strong_references_.length()
          && strong_references_[strong_index] == all_references_[all_index];
      explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
                                      !is_strong,
                                      all_references_[all_index]);
      if (reference_tags_[tags_index].tag ==
          VisitorSynchronization::kBuiltins) {
        ASSERT(all_references_[all_index]->IsCode());
        explorer->TagBuiltinCodeObject(
            Code::cast(all_references_[all_index]),
            builtins->name(builtin_index++));
      }
      ++all_index;
      if (is_strong) ++strong_index;
      if (reference_tags_[tags_index].index == all_index) ++tags_index;
    }
  }

  void Synchronize(VisitorSynchronization::SyncTag tag) {
    if (collecting_all_references_ &&
        previous_reference_count_ != all_references_.length()) {
      previous_reference_count_ = all_references_.length();
      reference_tags_.Add(IndexTag(previous_reference_count_, tag));
    }
  }

 private:
  bool collecting_all_references_;
  List<Object*> strong_references_;
  List<Object*> all_references_;
  int previous_reference_count_;
  List<IndexTag> reference_tags_;
  Heap* heap_;
};
bool V8HeapExplorer::IterateAndExtractReferences(
    SnapshotFillerInterface* filler) {
  filler_ = filler;
  SetRootGcRootsReference();
  bool interrupted = false;
  HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);
  for (HeapObject* obj = iterator.next();
       obj != NULL && !interrupted;
       obj = iterator.next(), progress_->ProgressStep()) {
    ExtractReferences(obj);
    if (!progress_->ProgressReport(false)) interrupted = true;
  }
  filler_ = NULL;
  return progress_->ProgressReport(true);
}
bool V8HeapExplorer::IsEssentialObject(Object* object) {
  return object->IsHeapObject()
      && !object->IsOddball()
      && object != heap_->empty_byte_array()
      && object != heap_->empty_fixed_array()
      && object != heap_->empty_descriptor_array()
      && object != heap_->fixed_array_map()
      && object != heap_->cell_map()
      && object != heap_->global_property_cell_map()
      && object != heap_->shared_function_info_map()
      && object != heap_->free_space_map()
      && object != heap_->one_pointer_filler_map()
      && object != heap_->two_pointer_filler_map();
}
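// IsEssentialObject filters out oddballs and the read-only singleton maps and
// arrays listed above, so internal and hidden edges do not drag these trivial
// objects into every snapshot.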
void V8HeapExplorer::SetContextReference(HeapObject* parent_obj,
                                         int parent_entry,
                                         String* reference_name,
                                         Object* child_obj,
                                         int field_offset) {
  ASSERT(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    filler_->SetNamedReference(HeapGraphEdge::kContextVariable, parent_entry,
                               names_->GetName(reference_name), child_entry);
    IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
  }
}


void V8HeapExplorer::SetNativeBindReference(HeapObject* parent_obj,
                                            int parent_entry,
                                            const char* reference_name,
                                            Object* child_obj) {
  ASSERT(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    filler_->SetNamedReference(HeapGraphEdge::kShortcut, parent_entry,
                               reference_name, child_entry);
  }
}


void V8HeapExplorer::SetElementReference(HeapObject* parent_obj,
                                         int parent_entry,
                                         int index,
                                         Object* child_obj) {
  ASSERT(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    filler_->SetIndexedReference(HeapGraphEdge::kElement, parent_entry,
                                 index, child_entry);
  }
}


void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
                                          int parent_entry,
                                          const char* reference_name,
                                          Object* child_obj,
                                          int field_offset) {
  ASSERT(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry == NULL) return;
  if (IsEssentialObject(child_obj)) {
    filler_->SetNamedReference(HeapGraphEdge::kInternal, parent_entry,
                               reference_name, child_entry);
  }
  IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
}


void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
                                          int parent_entry,
                                          int index,
                                          Object* child_obj,
                                          int field_offset) {
  ASSERT(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry == NULL) return;
  if (IsEssentialObject(child_obj)) {
    filler_->SetNamedReference(HeapGraphEdge::kInternal, parent_entry,
                               names_->GetName(index), child_entry);
  }
  IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
}


void V8HeapExplorer::SetHiddenReference(HeapObject* parent_obj,
                                        int parent_entry,
                                        int index,
                                        Object* child_obj) {
  ASSERT(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL && IsEssentialObject(child_obj)) {
    filler_->SetIndexedReference(HeapGraphEdge::kHidden, parent_entry,
                                 index, child_entry);
  }
}


void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj,
                                      int parent_entry,
                                      const char* reference_name,
                                      Object* child_obj,
                                      int field_offset) {
  ASSERT(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry == NULL) return;
  if (IsEssentialObject(child_obj)) {
    filler_->SetNamedReference(HeapGraphEdge::kWeak, parent_entry,
                               reference_name, child_entry);
  }
  IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
}


void V8HeapExplorer::SetPropertyReference(HeapObject* parent_obj,
                                          int parent_entry,
                                          Name* reference_name,
                                          Object* child_obj,
                                          const char* name_format_string,
                                          int field_offset) {
  ASSERT(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    HeapGraphEdge::Type type =
        reference_name->IsSymbol() ||
        String::cast(reference_name)->length() > 0
            ? HeapGraphEdge::kProperty : HeapGraphEdge::kInternal;
    const char* name =
        name_format_string != NULL && reference_name->IsString()
        ? names_->GetFormatted(
              name_format_string,
              *String::cast(reference_name)->ToCString(
                  DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL)) :
        names_->GetName(reference_name);

    filler_->SetNamedReference(type, parent_entry, name, child_entry);
    IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
  }
}
void V8HeapExplorer::SetRootGcRootsReference() {
  filler_->SetIndexedAutoIndexReference(
      HeapGraphEdge::kElement,
      snapshot_->root()->index(),
      snapshot_->gc_roots());
}


void V8HeapExplorer::SetUserGlobalReference(Object* child_obj) {
  HeapEntry* child_entry = GetEntry(child_obj);
  ASSERT(child_entry != NULL);
  filler_->SetNamedAutoIndexReference(
      HeapGraphEdge::kShortcut,
      snapshot_->root()->index(),
      child_entry);
}


void V8HeapExplorer::SetGcSubrootReference(
    VisitorSynchronization::SyncTag tag, bool is_weak, Object* child_obj) {
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    const char* name = GetStrongGcSubrootName(child_obj);
    if (name != NULL) {
      filler_->SetNamedReference(
          HeapGraphEdge::kInternal,
          snapshot_->gc_subroot(tag)->index(), name, child_entry);
    } else {
      filler_->SetIndexedAutoIndexReference(
          is_weak ? HeapGraphEdge::kWeak : HeapGraphEdge::kElement,
          snapshot_->gc_subroot(tag)->index(), child_entry);
    }

    // Add a shortcut to the JS global object reference at the snapshot root.
    if (child_obj->IsNativeContext()) {
      Context* context = Context::cast(child_obj);
      GlobalObject* global = context->global_object();
      if (global->IsJSGlobalObject()) {
        bool is_debug_object = false;
#ifdef ENABLE_DEBUGGER_SUPPORT
        is_debug_object = heap_->isolate()->debug()->IsDebugGlobal(global);
#endif
        if (!is_debug_object && !user_roots_.Contains(global)) {
          user_roots_.Insert(global);
          SetUserGlobalReference(global);
        }
      }
    }
  }
}
const char* V8HeapExplorer::GetStrongGcSubrootName(Object* object) {
  if (strong_gc_subroot_names_.is_empty()) {
#define NAME_ENTRY(name) strong_gc_subroot_names_.SetTag(heap_->name(), #name);
#define ROOT_NAME(type, name, camel_name) NAME_ENTRY(name)
    STRONG_ROOT_LIST(ROOT_NAME)
#undef ROOT_NAME
#define STRUCT_MAP_NAME(NAME, Name, name) NAME_ENTRY(name##_map)
    STRUCT_LIST(STRUCT_MAP_NAME)
#undef STRUCT_MAP_NAME
#define STRING_NAME(name, str) NAME_ENTRY(name)
    INTERNALIZED_STRING_LIST(STRING_NAME)
#undef STRING_NAME
#undef NAME_ENTRY
    CHECK(!strong_gc_subroot_names_.is_empty());
  }
  return strong_gc_subroot_names_.GetTag(object);
}


void V8HeapExplorer::TagObject(Object* obj, const char* tag) {
  if (IsEssentialObject(obj)) {
    HeapEntry* entry = GetEntry(obj);
    if (entry->name()[0] == '\0') {
      entry->set_name(tag);
    }
  }
}
class GlobalObjectsEnumerator : public ObjectVisitor {
 public:
  virtual void VisitPointers(Object** start, Object** end) {
    for (Object** p = start; p < end; p++) {
      if ((*p)->IsNativeContext()) {
        Context* context = Context::cast(*p);
        JSObject* proxy = context->global_proxy();
        if (proxy->IsJSGlobalProxy()) {
          Object* global = proxy->map()->prototype();
          if (global->IsJSGlobalObject()) {
            objects_.Add(Handle<JSGlobalObject>(JSGlobalObject::cast(global)));
          }
        }
      }
    }
  }
  int count() { return objects_.length(); }
  Handle<JSGlobalObject>& at(int i) { return objects_[i]; }
 private:
  List<Handle<JSGlobalObject> > objects_;
};


// Modifies heap. Must not be run during heap traversal.
void V8HeapExplorer::TagGlobalObjects() {
  Isolate* isolate = heap_->isolate();
  HandleScope scope(isolate);
  GlobalObjectsEnumerator enumerator;
  isolate->global_handles()->IterateAllRoots(&enumerator);
  const char** urls = NewArray<const char*>(enumerator.count());
  for (int i = 0, l = enumerator.count(); i < l; ++i) {
    if (global_object_name_resolver_) {
      HandleScope scope(isolate);
      Handle<JSGlobalObject> global_obj = enumerator.at(i);
      urls[i] = global_object_name_resolver_->GetName(
          Utils::ToLocal(Handle<JSObject>::cast(global_obj)));
    } else {
      urls[i] = NULL;
    }
  }
  DisallowHeapAllocation no_allocation;
  for (int i = 0, l = enumerator.count(); i < l; ++i) {
    objects_tags_.SetTag(*enumerator.at(i), urls[i]);
  }
  DeleteArray(urls);
}
class GlobalHandlesExtractor : public ObjectVisitor {
 public:
  explicit GlobalHandlesExtractor(NativeObjectsExplorer* explorer)
      : explorer_(explorer) {}
  virtual void VisitPointers(Object** start, Object** end) { UNREACHABLE(); }
  virtual void VisitEmbedderReference(Object** p, uint16_t class_id) {
    explorer_->VisitSubtreeWrapper(p, class_id);
  }
 private:
  NativeObjectsExplorer* explorer_;
};


BasicHeapEntriesAllocator::BasicHeapEntriesAllocator(
    HeapSnapshot* snapshot, HeapEntry::Type entries_type)
    : snapshot_(snapshot),
      names_(snapshot_->profiler()->names()),
      heap_object_map_(snapshot_->profiler()->heap_object_map()),
      entries_type_(entries_type) {
}


HeapEntry* BasicHeapEntriesAllocator::AllocateEntry(HeapThing ptr) {
  v8::RetainedObjectInfo* info = reinterpret_cast<v8::RetainedObjectInfo*>(ptr);
  intptr_t elements = info->GetElementCount();
  intptr_t size = info->GetSizeInBytes();
  const char* name = elements != -1
      ? names_->GetFormatted(
            "%s / %" V8_PTR_PREFIX "d entries", info->GetLabel(), elements)
      : names_->GetCopy(info->GetLabel());
  return snapshot_->AddEntry(
      entries_type_,
      name,
      heap_object_map_->GenerateId(info),
      size != -1 ? static_cast<int>(size) : 0,
      0);
}
NativeObjectsExplorer::NativeObjectsExplorer(
    HeapSnapshot* snapshot, SnapshottingProgressReportingInterface* progress)
    : isolate_(snapshot->profiler()->heap_object_map()->heap()->isolate()),
      snapshot_(snapshot),
      names_(snapshot_->profiler()->names()),
      progress_(progress),
      embedder_queried_(false),
      objects_by_info_(RetainedInfosMatch),
      native_groups_(StringsMatch),
      filler_(NULL) {
  synthetic_entries_allocator_ =
      new BasicHeapEntriesAllocator(snapshot, HeapEntry::kSynthetic);
  native_entries_allocator_ =
      new BasicHeapEntriesAllocator(snapshot, HeapEntry::kNative);
}


NativeObjectsExplorer::~NativeObjectsExplorer() {
  for (HashMap::Entry* p = objects_by_info_.Start();
       p != NULL;
       p = objects_by_info_.Next(p)) {
    v8::RetainedObjectInfo* info =
        reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
    info->Dispose();
    List<HeapObject*>* objects =
        reinterpret_cast<List<HeapObject*>* >(p->value);
    delete objects;
  }
  for (HashMap::Entry* p = native_groups_.Start();
       p != NULL;
       p = native_groups_.Next(p)) {
    v8::RetainedObjectInfo* info =
        reinterpret_cast<v8::RetainedObjectInfo*>(p->value);
    info->Dispose();
  }
  delete synthetic_entries_allocator_;
  delete native_entries_allocator_;
}


int NativeObjectsExplorer::EstimateObjectsCount() {
  FillRetainedObjects();
  return objects_by_info_.occupancy();
}
void NativeObjectsExplorer::FillRetainedObjects() {
  if (embedder_queried_) return;
  Isolate* isolate = isolate_;
  List<ObjectGroup*>* groups = isolate->global_handles()->object_groups();
  for (int i = 0; i < groups->length(); ++i) {
    ObjectGroup* group = groups->at(i);
    if (group->info == NULL) continue;
    List<HeapObject*>* list = GetListMaybeDisposeInfo(group->info);
    for (size_t j = 0; j < group->length; ++j) {
      HeapObject* obj = HeapObject::cast(*group->objects[j]);
      list->Add(obj);
      in_groups_.Insert(obj);
    }
    group->info = NULL;  // Acquire info object ownership.
  }
  isolate->global_handles()->RemoveObjectGroups();
  embedder_queried_ = true;
}
void NativeObjectsExplorer::FillImplicitReferences() {
  Isolate* isolate = isolate_;
  List<ImplicitRefGroup*>* groups =
      isolate->global_handles()->implicit_ref_groups();
  for (int i = 0; i < groups->length(); ++i) {
    ImplicitRefGroup* group = groups->at(i);
    HeapObject* parent = *group->parent;
    int parent_entry =
        filler_->FindOrAddEntry(parent, native_entries_allocator_)->index();
    ASSERT(parent_entry != HeapEntry::kNoEntry);
    Object*** children = group->children;
    for (size_t j = 0; j < group->length; ++j) {
      Object* child = *children[j];
      HeapEntry* child_entry =
          filler_->FindOrAddEntry(child, native_entries_allocator_);
      filler_->SetNamedReference(
          HeapGraphEdge::kInternal, parent_entry, "native", child_entry);
    }
  }
  isolate->global_handles()->RemoveImplicitRefGroups();
}
List<HeapObject*>* NativeObjectsExplorer::GetListMaybeDisposeInfo(
    v8::RetainedObjectInfo* info) {
  HashMap::Entry* entry =
      objects_by_info_.Lookup(info, InfoHash(info), true);
  if (entry->value != NULL) {
    info->Dispose();
  } else {
    entry->value = new List<HeapObject*>(4);
  }
  return reinterpret_cast<List<HeapObject*>* >(entry->value);
}


bool NativeObjectsExplorer::IterateAndExtractReferences(
    SnapshotFillerInterface* filler) {
  filler_ = filler;
  FillRetainedObjects();
  FillImplicitReferences();
  if (EstimateObjectsCount() > 0) {
    for (HashMap::Entry* p = objects_by_info_.Start();
         p != NULL;
         p = objects_by_info_.Next(p)) {
      v8::RetainedObjectInfo* info =
          reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
      SetNativeRootReference(info);
      List<HeapObject*>* objects =
          reinterpret_cast<List<HeapObject*>* >(p->value);
      for (int i = 0; i < objects->length(); ++i) {
        SetWrapperNativeReferences(objects->at(i), info);
      }
    }
    SetRootNativeRootsReference();
  }
  filler_ = NULL;
  return true;
}
class NativeGroupRetainedObjectInfo : public v8::RetainedObjectInfo {
 public:
  explicit NativeGroupRetainedObjectInfo(const char* label)
      : disposed_(false),
        hash_(reinterpret_cast<intptr_t>(label)),
        label_(label) {
  }
  virtual void Dispose() {
    CHECK(!disposed_);
    disposed_ = true;
    delete this;
  }
  virtual bool IsEquivalent(RetainedObjectInfo* other) {
    return hash_ == other->GetHash() && !strcmp(label_, other->GetLabel());
  }
  virtual intptr_t GetHash() { return hash_; }
  virtual const char* GetLabel() { return label_; }

 private:
  bool disposed_;
  intptr_t hash_;
  const char* label_;
};


NativeGroupRetainedObjectInfo* NativeObjectsExplorer::FindOrAddGroupInfo(
    const char* label) {
  const char* label_copy = names_->GetCopy(label);
  uint32_t hash = StringHasher::HashSequentialString(
      label_copy,
      static_cast<int>(strlen(label_copy)),
      isolate_->heap()->HashSeed());
  HashMap::Entry* entry = native_groups_.Lookup(const_cast<char*>(label_copy),
                                                hash, true);
  if (entry->value == NULL) {
    entry->value = new NativeGroupRetainedObjectInfo(label);
  }
  return static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
}
void NativeObjectsExplorer::SetNativeRootReference(
    v8::RetainedObjectInfo* info) {
  HeapEntry* child_entry =
      filler_->FindOrAddEntry(info, native_entries_allocator_);
  ASSERT(child_entry != NULL);
  NativeGroupRetainedObjectInfo* group_info =
      FindOrAddGroupInfo(info->GetGroupLabel());
  HeapEntry* group_entry =
      filler_->FindOrAddEntry(group_info, synthetic_entries_allocator_);
  filler_->SetNamedAutoIndexReference(
      HeapGraphEdge::kInternal,
      group_entry->index(),
      child_entry);
}


void NativeObjectsExplorer::SetWrapperNativeReferences(
    HeapObject* wrapper, v8::RetainedObjectInfo* info) {
  HeapEntry* wrapper_entry = filler_->FindEntry(wrapper);
  ASSERT(wrapper_entry != NULL);
  HeapEntry* info_entry =
      filler_->FindOrAddEntry(info, native_entries_allocator_);
  ASSERT(info_entry != NULL);
  filler_->SetNamedReference(HeapGraphEdge::kInternal,
                             wrapper_entry->index(),
                             "native",
                             info_entry);
  filler_->SetIndexedAutoIndexReference(HeapGraphEdge::kElement,
                                        info_entry->index(),
                                        wrapper_entry);
}


void NativeObjectsExplorer::SetRootNativeRootsReference() {
  for (HashMap::Entry* entry = native_groups_.Start();
       entry != NULL;
       entry = native_groups_.Next(entry)) {
    NativeGroupRetainedObjectInfo* group_info =
        static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
    HeapEntry* group_entry =
        filler_->FindOrAddEntry(group_info, native_entries_allocator_);
    ASSERT(group_entry != NULL);
    filler_->SetIndexedAutoIndexReference(
        HeapGraphEdge::kElement,
        snapshot_->root()->index(),
        group_entry);
  }
}
void NativeObjectsExplorer::VisitSubtreeWrapper(Object** p,
                                                uint16_t class_id) {
  if (in_groups_.Contains(*p)) return;
  Isolate* isolate = isolate_;
  v8::RetainedObjectInfo* info =
      isolate->heap_profiler()->ExecuteWrapperClassCallback(class_id, p);
  if (info == NULL) return;
  GetListMaybeDisposeInfo(info)->Add(HeapObject::cast(*p));
}
HeapSnapshotGenerator::HeapSnapshotGenerator(
    HeapSnapshot* snapshot,
    v8::ActivityControl* control,
    v8::HeapProfiler::ObjectNameResolver* resolver,
    Heap* heap)
    : snapshot_(snapshot),
      control_(control),
      v8_heap_explorer_(snapshot_, this, resolver),
      dom_explorer_(snapshot_, this),
      heap_(heap) {
}


bool HeapSnapshotGenerator::GenerateSnapshot() {
  v8_heap_explorer_.TagGlobalObjects();

  // Run GC twice: the profiler assumes that any object still in the heap
  // after a full GC is reachable from a root.
  heap_->CollectAllGarbage(Heap::kMakeHeapIterableMask,
                           "HeapSnapshotGenerator::GenerateSnapshot");
  heap_->CollectAllGarbage(Heap::kMakeHeapIterableMask,
                           "HeapSnapshotGenerator::GenerateSnapshot");

#ifdef VERIFY_HEAP
  Heap* debug_heap = heap_;
  CHECK(!debug_heap->map_space()->was_swept_conservatively());
  debug_heap->Verify();
#endif

  SetProgressTotal(1);  // 1 pass.

#ifdef VERIFY_HEAP
  debug_heap->Verify();
#endif

  if (!FillReferences()) return false;

  snapshot_->FillChildren();
  snapshot_->RememberLastJSObjectId();

  progress_counter_ = progress_total_;
  if (!ProgressReport(true)) return false;
  return true;
}
void HeapSnapshotGenerator::ProgressStep() {
  ++progress_counter_;
}


bool HeapSnapshotGenerator::ProgressReport(bool force) {
  const int kProgressReportGranularity = 10000;
  if (control_ != NULL
      && (force || progress_counter_ % kProgressReportGranularity == 0)) {
    return control_->ReportProgressValue(progress_counter_, progress_total_) ==
        v8::ActivityControl::kContinue;
  }
  return true;
}


void HeapSnapshotGenerator::SetProgressTotal(int iterations_count) {
  if (control_ == NULL) return;
  HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);
  progress_total_ = iterations_count * (
      v8_heap_explorer_.EstimateObjectsCount(&iterator) +
      dom_explorer_.EstimateObjectsCount());
  progress_counter_ = 0;
}


bool HeapSnapshotGenerator::FillReferences() {
  SnapshotFiller filler(snapshot_, &entries_);
  v8_heap_explorer_.AddRootEntries(&filler);
  return v8_heap_explorer_.IterateAndExtractReferences(&filler)
      && dom_explorer_.IterateAndExtractReferences(&filler);
}
template<int bytes> struct MaxDecimalDigitsIn;
template<> struct MaxDecimalDigitsIn<4> {
  static const int kSigned = 11;
  static const int kUnsigned = 10;
};
template<> struct MaxDecimalDigitsIn<8> {
  static const int kSigned = 20;
  static const int kUnsigned = 20;
};


class OutputStreamWriter {
 public:
  explicit OutputStreamWriter(v8::OutputStream* stream)
      : stream_(stream),
        chunk_size_(stream->GetChunkSize()),
        chunk_(chunk_size_),
        chunk_pos_(0),
        aborted_(false) {
    ASSERT(chunk_size_ > 0);
  }
  bool aborted() { return aborted_; }
  void AddCharacter(char c) {
    ASSERT(c != '\0');
    ASSERT(chunk_pos_ < chunk_size_);
    chunk_[chunk_pos_++] = c;
    MaybeWriteChunk();
  }
  void AddString(const char* s) {
    AddSubstring(s, StrLength(s));
  }
  void AddSubstring(const char* s, int n) {
    if (n <= 0) return;
    ASSERT(static_cast<size_t>(n) <= strlen(s));
    const char* s_end = s + n;
    while (s < s_end) {
      int s_chunk_size =
          Min(chunk_size_ - chunk_pos_, static_cast<int>(s_end - s));
      ASSERT(s_chunk_size > 0);
      OS::MemCopy(chunk_.start() + chunk_pos_, s, s_chunk_size);
      s += s_chunk_size;
      chunk_pos_ += s_chunk_size;
      MaybeWriteChunk();
    }
  }
  void AddNumber(unsigned n) { AddNumberImpl<unsigned>(n, "%u"); }
  void Finalize() {
    if (aborted_) return;
    ASSERT(chunk_pos_ < chunk_size_);
    if (chunk_pos_ != 0) {
      WriteChunk();
    }
    stream_->EndOfStream();
  }

 private:
  template<typename T>
  void AddNumberImpl(T n, const char* format) {
    // Buffer for the longest value plus trailing \0.
    static const int kMaxNumberSize =
        MaxDecimalDigitsIn<sizeof(T)>::kUnsigned + 1;
    if (chunk_size_ - chunk_pos_ >= kMaxNumberSize) {
      int result = OS::SNPrintF(
          chunk_.SubVector(chunk_pos_, chunk_size_), format, n);
      ASSERT(result != -1);
      chunk_pos_ += result;
      MaybeWriteChunk();
    } else {
      EmbeddedVector<char, kMaxNumberSize> buffer;
      int result = OS::SNPrintF(buffer, format, n);
      USE(result);
      ASSERT(result != -1);
      AddString(buffer.start());
    }
  }
  void MaybeWriteChunk() {
    ASSERT(chunk_pos_ <= chunk_size_);
    if (chunk_pos_ == chunk_size_) {
      WriteChunk();
    }
  }
  void WriteChunk() {
    if (aborted_) return;
    if (stream_->WriteAsciiChunk(chunk_.start(), chunk_pos_) ==
        v8::OutputStream::kAbort) aborted_ = true;
    chunk_pos_ = 0;
  }

  v8::OutputStream* stream_;
  int chunk_size_;
  ScopedVector<char> chunk_;
  int chunk_pos_;
  bool aborted_;
};
// type, name|index, to_node.
const int HeapSnapshotJSONSerializer::kEdgeFieldsCount = 3;
// type, name, id, self_size, edge_count, trace_node_id.
const int HeapSnapshotJSONSerializer::kNodeFieldsCount = 6;

void HeapSnapshotJSONSerializer::Serialize(v8::OutputStream* stream) {
  if (AllocationTracker* allocation_tracker =
      snapshot_->profiler()->allocation_tracker()) {
    allocation_tracker->PrepareForSerialization();
  }
  ASSERT(writer_ == NULL);
  writer_ = new OutputStreamWriter(stream);
  SerializeImpl();
  delete writer_;
  writer_ = NULL;
}


void HeapSnapshotJSONSerializer::SerializeImpl() {
  ASSERT(0 == snapshot_->root()->index());
  writer_->AddCharacter('{');
  writer_->AddString("\"snapshot\":{");
  SerializeSnapshot();
  if (writer_->aborted()) return;
  writer_->AddString("},\n");
  writer_->AddString("\"nodes\":[");
  SerializeNodes();
  if (writer_->aborted()) return;
  writer_->AddString("],\n");
  writer_->AddString("\"edges\":[");
  SerializeEdges();
  if (writer_->aborted()) return;
  writer_->AddString("],\n");

  writer_->AddString("\"trace_function_infos\":[");
  SerializeTraceNodeInfos();
  if (writer_->aborted()) return;
  writer_->AddString("],\n");
  writer_->AddString("\"trace_tree\":[");
  SerializeTraceTree();
  if (writer_->aborted()) return;
  writer_->AddString("],\n");

  writer_->AddString("\"strings\":[");
  SerializeStrings();
  if (writer_->aborted()) return;
  writer_->AddCharacter(']');
  writer_->AddCharacter('}');
  writer_->Finalize();
}
int HeapSnapshotJSONSerializer::GetStringId(const char* s) {
  HashMap::Entry* cache_entry = strings_.Lookup(
      const_cast<char*>(s), StringHash(s), true);
  if (cache_entry->value == NULL) {
    cache_entry->value = reinterpret_cast<void*>(next_string_id_++);
  }
  return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
}
template<size_t size> struct ToUnsigned;

template<> struct ToUnsigned<4> {
  typedef uint32_t Type;
};

template<> struct ToUnsigned<8> {
  typedef uint64_t Type;
};


template<typename T>
static int utoa_impl(T value, const Vector<char>& buffer, int buffer_pos) {
  STATIC_CHECK(static_cast<T>(-1) > 0);  // Check that T is unsigned.
  int number_of_digits = 0;
  T t = value;
  do {
    ++number_of_digits;
  } while (t /= 10);

  buffer_pos += number_of_digits;
  int result = buffer_pos;
  do {
    int last_digit = static_cast<int>(value % 10);
    buffer[--buffer_pos] = '0' + last_digit;
    value /= 10;
  } while (value);
  return result;
}


template<typename T>
static int utoa(T value, const Vector<char>& buffer, int buffer_pos) {
  typename ToUnsigned<sizeof(value)>::Type unsigned_value = value;
  STATIC_CHECK(sizeof(value) == sizeof(unsigned_value));
  return utoa_impl(unsigned_value, buffer, buffer_pos);
}
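// utoa_impl writes the decimal digits of an unsigned value directly into
// `buffer` starting at `buffer_pos` (no terminating '\0') and returns the
// position just past the last digit; e.g. utoa(42u, buffer, 0) fills "42"
// and returns 2.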
void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge,
                                               bool first_edge) {
  // The buffer needs space for 3 unsigned ints, 3 commas, \n and \0.
  static const int kBufferSize =
      MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned * 3 + 3 + 2;
  EmbeddedVector<char, kBufferSize> buffer;
  int edge_name_or_index = edge->type() == HeapGraphEdge::kElement
      || edge->type() == HeapGraphEdge::kHidden
      ? edge->index() : GetStringId(edge->name());
  int buffer_pos = 0;
  if (!first_edge) {
    buffer[buffer_pos++] = ',';
  }
  buffer_pos = utoa(edge->type(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(edge_name_or_index, buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(entry_index(edge->to()), buffer, buffer_pos);
  buffer[buffer_pos++] = '\n';
  buffer[buffer_pos++] = '\0';
  writer_->AddString(buffer.start());
}


void HeapSnapshotJSONSerializer::SerializeEdges() {
  List<HeapGraphEdge*>& edges = snapshot_->children();
  for (int i = 0; i < edges.length(); ++i) {
    ASSERT(i == 0 ||
           edges[i - 1]->from()->index() <= edges[i]->from()->index());
    SerializeEdge(edges[i], i == 0);
    if (writer_->aborted()) return;
  }
}
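// Each serialized edge is the flat triple described by "edge_fields" in the
// snapshot meta: edge type, name-string id (or element index for indexed
// edges), and the target given as entry_index(to), i.e. the target node's
// offset into the flat "nodes" array.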
void HeapSnapshotJSONSerializer::SerializeNode(HeapEntry* entry) {
  // The buffer needs space for 5 unsigned ints, 1 size_t, 6 commas, \n and \0.
  static const int kBufferSize =
      5 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned
      + MaxDecimalDigitsIn<sizeof(size_t)>::kUnsigned
      + 6 + 1 + 1;
  EmbeddedVector<char, kBufferSize> buffer;
  int buffer_pos = 0;
  if (entry_index(entry) != 0) {
    buffer[buffer_pos++] = ',';
  }
  buffer_pos = utoa(entry->type(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(GetStringId(entry->name()), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(entry->id(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(entry->self_size(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(entry->children_count(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(entry->trace_node_id(), buffer, buffer_pos);
  buffer[buffer_pos++] = '\n';
  buffer[buffer_pos++] = '\0';
  writer_->AddString(buffer.start());
}


void HeapSnapshotJSONSerializer::SerializeNodes() {
  List<HeapEntry>& entries = snapshot_->entries();
  for (int i = 0; i < entries.length(); ++i) {
    SerializeNode(&entries[i]);
    if (writer_->aborted()) return;
  }
}
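// A node row therefore carries kNodeFieldsCount (6) values in order: type,
// name-string id, snapshot object id, self_size, edge_count and
// trace_node_id, matching the "node_fields" meta description emitted by
// SerializeSnapshot().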
void HeapSnapshotJSONSerializer::SerializeSnapshot() {
  writer_->AddString("\"title\":\"");
  writer_->AddString(snapshot_->title());
  writer_->AddString("\"");
  writer_->AddString(",\"uid\":");
  writer_->AddNumber(snapshot_->uid());
  writer_->AddString(",\"meta\":");
  // The object describing node serialization layout.
  // We use a set of macros to improve readability.
#define JSON_A(s) "[" s "]"
#define JSON_O(s) "{" s "}"
#define JSON_S(s) "\"" s "\""
  writer_->AddString(JSON_O(
    JSON_S("node_fields") ":" JSON_A(
        JSON_S("type") "," JSON_S("name") "," JSON_S("id") ","
        JSON_S("self_size") "," JSON_S("edge_count") ","
        JSON_S("trace_node_id")) ","
    JSON_S("node_types") ":" JSON_A(
        JSON_A(
            JSON_S("hidden") "," JSON_S("array") "," JSON_S("string") ","
            JSON_S("object") "," JSON_S("code") "," JSON_S("closure") ","
            JSON_S("regexp") "," JSON_S("number") "," JSON_S("native") ","
            JSON_S("synthetic") ","
            JSON_S("concatenated string") ","
            JSON_S("sliced string")) ","
        JSON_S("string") "," JSON_S("number") "," JSON_S("number") ","
        JSON_S("number") "," JSON_S("number") "," JSON_S("number")) ","
    JSON_S("edge_fields") ":" JSON_A(
        JSON_S("type") ","
        JSON_S("name_or_index") ","
        JSON_S("to_node")) ","
    JSON_S("edge_types") ":" JSON_A(
        JSON_A(
            JSON_S("context") "," JSON_S("element") "," JSON_S("property") ","
            JSON_S("internal") "," JSON_S("hidden") "," JSON_S("shortcut") ","
            JSON_S("weak")) ","
        JSON_S("string_or_number") ","
        JSON_S("node")) ","
    JSON_S("trace_function_info_fields") ":" JSON_A(
        JSON_S("function_id") ","
        JSON_S("name") ","
        JSON_S("script_name") ","
        JSON_S("script_id") "," JSON_S("line") "," JSON_S("column")) ","
    JSON_S("trace_node_fields") ":" JSON_A(
        JSON_S("id") ","
        JSON_S("function_info_index") ","
        JSON_S("count") "," JSON_S("size") "," JSON_S("children"))));
#undef JSON_S
#undef JSON_O
#undef JSON_A
  writer_->AddString(",\"node_count\":");
  writer_->AddNumber(snapshot_->entries().length());
  writer_->AddString(",\"edge_count\":");
  writer_->AddNumber(snapshot_->edges().length());
  writer_->AddString(",\"trace_function_count\":");
  uint32_t count = 0;
  AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
  if (tracker) {
    count = tracker->function_info_list().length();
  }
  writer_->AddNumber(count);
}
static void WriteUChar(OutputStreamWriter* w, unibrow::uchar u) {
  static const char hex_chars[] = "0123456789ABCDEF";
  w->AddString("\\u");
  w->AddCharacter(hex_chars[(u >> 12) & 0xf]);
  w->AddCharacter(hex_chars[(u >> 8) & 0xf]);
  w->AddCharacter(hex_chars[(u >> 4) & 0xf]);
  w->AddCharacter(hex_chars[u & 0xf]);
}


void HeapSnapshotJSONSerializer::SerializeTraceTree() {
  AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
  if (!tracker) return;
  AllocationTraceTree* traces = tracker->trace_tree();
  SerializeTraceNode(traces->root());
}
void HeapSnapshotJSONSerializer::SerializeTraceNode(AllocationTraceNode* node) {
  // The buffer needs space for 4 unsigned ints, 4 commas, [ and \0.
  const int kBufferSize =
      4 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned
      + 4 + 1 + 1;
  EmbeddedVector<char, kBufferSize> buffer;
  int buffer_pos = 0;
  buffer_pos = utoa(node->id(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(node->function_info_index(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(node->allocation_count(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(node->allocation_size(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer[buffer_pos++] = '[';
  buffer[buffer_pos++] = '\0';
  writer_->AddString(buffer.start());

  Vector<AllocationTraceNode*> children = node->children();
  for (int i = 0; i < children.length(); i++) {
    if (i > 0) {
      writer_->AddCharacter(',');
    }
    SerializeTraceNode(children[i]);
  }
  writer_->AddCharacter(']');
}


static int SerializePosition(int position, const Vector<char>& buffer,
                             int buffer_pos) {
  if (position == -1) {
    buffer[buffer_pos++] = '0';
  } else {
    ASSERT(position >= 0);
    buffer_pos = utoa(static_cast<unsigned>(position + 1), buffer, buffer_pos);
  }
  return buffer_pos;
}
void HeapSnapshotJSONSerializer::SerializeTraceNodeInfos() {
  AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
  if (!tracker) return;
  // The buffer needs space for 6 unsigned ints, 6 commas, \n and \0.
  const int kBufferSize =
      6 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned
      + 6 + 1 + 1;
  EmbeddedVector<char, kBufferSize> buffer;
  const List<AllocationTracker::FunctionInfo*>& list =
      tracker->function_info_list();
  bool first_entry = true;
  for (int i = 0; i < list.length(); i++) {
    AllocationTracker::FunctionInfo* info = list[i];
    int buffer_pos = 0;
    if (first_entry) {
      first_entry = false;
    } else {
      buffer[buffer_pos++] = ',';
    }
    buffer_pos = utoa(info->function_id, buffer, buffer_pos);
    buffer[buffer_pos++] = ',';
    buffer_pos = utoa(GetStringId(info->name), buffer, buffer_pos);
    buffer[buffer_pos++] = ',';
    buffer_pos = utoa(GetStringId(info->script_name), buffer, buffer_pos);
    buffer[buffer_pos++] = ',';
    // The cast is safe because the script id is a non-negative Smi.
    buffer_pos = utoa(static_cast<unsigned>(info->script_id), buffer,
                      buffer_pos);
    buffer[buffer_pos++] = ',';
    buffer_pos = SerializePosition(info->line, buffer, buffer_pos);
    buffer[buffer_pos++] = ',';
    buffer_pos = SerializePosition(info->column, buffer, buffer_pos);
    buffer[buffer_pos++] = '\n';
    buffer[buffer_pos++] = '\0';
    writer_->AddString(buffer.start());
  }
}
void HeapSnapshotJSONSerializer::SerializeString(const unsigned char* s) {
  writer_->AddCharacter('\n');
  writer_->AddCharacter('\"');
  for ( ; *s != '\0'; ++s) {
    switch (*s) {
      case '\b': writer_->AddString("\\b"); continue;
      case '\f': writer_->AddString("\\f"); continue;
      case '\n': writer_->AddString("\\n"); continue;
      case '\r': writer_->AddString("\\r"); continue;
      case '\t': writer_->AddString("\\t"); continue;
      case '\"':
      case '\\':
        writer_->AddCharacter('\\');
        writer_->AddCharacter(*s);
        continue;
      default:
        if (*s > 31 && *s < 128) {
          writer_->AddCharacter(*s);
        } else if (*s <= 31) {
          // Special character with no dedicated literal.
          WriteUChar(writer_, *s);
        } else {
          // Convert UTF-8 into a \u UTF-16 literal.
          unsigned length = 1, cursor = 0;
          for ( ; length <= 4 && *(s + length) != '\0'; ++length) { }
          unibrow::uchar c = unibrow::Utf8::CalculateValue(s, length, &cursor);
          if (c != unibrow::Utf8::kBadChar) {
            WriteUChar(writer_, c);
            ASSERT(cursor != 0);
            s += cursor - 1;
          } else {
            writer_->AddCharacter('?');
          }
        }
    }
  }
  writer_->AddCharacter('\"');
}
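// Strings are emitted as JSON string literals: printable ASCII goes out
// verbatim, control characters use their escape sequences or \uXXXX via
// WriteUChar(), and multi-byte UTF-8 sequences are decoded and re-encoded as
// \uXXXX escapes.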
void HeapSnapshotJSONSerializer::SerializeStrings() {
  ScopedVector<const unsigned char*> sorted_strings(
      strings_.occupancy() + 1);
  for (HashMap::Entry* entry = strings_.Start();
       entry != NULL;
       entry = strings_.Next(entry)) {
    int index = static_cast<int>(reinterpret_cast<uintptr_t>(entry->value));
    sorted_strings[index] = reinterpret_cast<const unsigned char*>(entry->key);
  }
  writer_->AddString("\"<dummy>\"");
  for (int i = 1; i < sorted_strings.length(); ++i) {
    writer_->AddCharacter(',');
    SerializeString(sorted_strings[i]);
    if (writer_->aborted()) return;
  }
}
void AddSubstring(const char *s, int n)
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter NULL
v8::RetainedObjectInfo * info
Object * type_feedback_info()
virtual bool ProgressReport(bool force)=0
void SetIndexedReference(HeapGraphEdge::Type type, int parent, int index, HeapEntry *child_entry)
static const int kWeakNextOffset
STATIC_CHECK((kStringRepresentationMask|kStringEncodingMask)==Internals::kFullStringRepresentationMask)
virtual HeapEntry * AllocateEntry(HeapThing ptr)=0
static const int kDefaultCacheOffset
OutputStreamWriter(v8::OutputStream *stream)
virtual ~V8HeapExplorer()
static const int kTypeOffset
void TagBuiltinCodeObject(Code *code, const char *name)
SnapshotFiller(HeapSnapshot *snapshot, HeapEntriesMap *entries)
void UpdateHeapObjectsMap()
size_t GetUsedMemorySize() const
static const int kCodeOffset
#define CHECK_EQ(expected, value)
static Object *& Object_at(Address addr)
static const SnapshotObjectId kGcRootsFirstSubrootId
#define NATIVE_CONTEXT_FIELDS(V)
static const int kGetterOffset
static const int kPrototypeOrInitialMapOffset
const char * ToCString(const v8::String::Utf8Value &value)
static const int kValueOffset
HeapObjectsMap * heap_object_map() const
static const int kBuiltinsOffset
virtual HeapEntry * AllocateEntry(HeapThing ptr)
HeapProfiler * profiler()
void PrintF(const char *format,...)
void CollectAllGarbage(int flags, const char *gc_reason=NULL, const GCCallbackFlags gc_callback_flags=kNoGCCallbackFlags)
virtual intptr_t GetHash()=0
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on stack replacement do not emit check maps for constant values that have a leaf map
bool was_swept_conservatively()
static const int kTransitionsOrBackPointerOffset
static String * cast(Object *obj)
HeapEntry * AddEntry(Address address, HeapEntry::Type type, const char *name, size_t size)
FindEntryById(SnapshotObjectId id)
GlobalHandlesExtractor(NativeObjectsExplorer *explorer)
static Object * GetObjectFromEntryAddress(Address location_of_address)
static const int kDependentCodeOffset
void VisitPointers(Object **start, Object **end)
static const int kOptimizedCodeMapOffset
static SnapshotObjectId GetNthGcSubrootId(int delta)
static HeapObject * cast(Object *obj)
static const int kGlobalReceiverOffset
static const int kDeoptimizationDataOffset
static AccessorPair * cast(Object *obj)
void Print(int max_depth)
void CallGCEpilogueCallbacks(GCType gc_type, GCCallbackFlags flags)
void TagCodeObject(Code *code)
static Map * cast(Object *obj)
SnapshotObjectId FindOrAddEntry(Address addr, unsigned int size, bool accessed=true)
BasicHeapEntriesAllocator(HeapSnapshot *snapshot, HeapEntry::Type entries_type)
void RemoveSnapshot(HeapSnapshot *snapshot)
static const int kSerializedDataOffset
JSArrayBufferDataEntryAllocator(size_t size, V8HeapExplorer *explorer)
Vector< T > SubVector(int from, int to)
TypeImpl< ZoneTypeConfig > Type
static AllocationSite * cast(Object *obj)
static const int kSetterOffset
size_t NumberToSize(Isolate *isolate, Object *number)
void UpdateObjectSize(Address addr, int size)
const char * GetName(Name *name)
virtual const char * GetName(Handle< Object > object)=0
bool IterateAndExtractReferences(SnapshotFiller *filler)
virtual bool IsEquivalent(RetainedObjectInfo *other)
void SetNamedAutoIndexReference(HeapGraphEdge::Type type, int parent, HeapEntry *child_entry)
size_t RawSnapshotSize() const
static const int kHandlerTableOffset
#define ASSERT(condition)
const char * GetFormatted(const char *format,...)
SnapshotObjectId PushHeapObjectsStats(OutputStream *stream)
static Script * cast(Object *obj)
V8HeapExplorer(HeapSnapshot *snapshot, SnapshottingProgressReportingInterface *progress, v8::HeapProfiler::ObjectNameResolver *resolver)
static const int kDebugInfoOffset
static JSRegExp * cast(Object *obj)
static const int kNativeContextOffset
#define STRONG_ROOT_LIST(V)
static Context * cast(Object *context)
static const int kInitialMapOffset
static SharedFunctionInfo * cast(Object *obj)
void SetTag(Object *obj, const char *tag)
static uchar CalculateValue(const byte *str, unsigned length, unsigned *cursor)
#define MAKE_STRING_MAP_CASE(instance_type, size, name, Name)
static const int kInstanceClassNameOffset
#define INTERNALIZED_STRING_LIST(V)
#define STRING_TYPE_LIST(V)
static const int kDescriptorsOffset
virtual HeapEntry * AllocateEntry(HeapThing ptr)
static const int kGlobalContextOffset
virtual ControlOption ReportProgressValue(int done, int total)=0
static const int kContextOffset
static Code * cast(Object *obj)
HeapEntry * FindOrAddEntry(HeapThing ptr, HeapEntriesAllocator *allocator)
static const int kHeaderSize
HeapEntry * AddEntry(HeapThing ptr, HeapEntriesAllocator *allocator)
void SetNamedReference(HeapGraphEdge::Type type, int parent, const char *reference_name, HeapEntry *child_entry)
int EstimateObjectsCount()
static const int kDependentCodeOffset
ConstantPoolArray * constant_pool()
static Smi * cast(Object *object)
int operator()(HeapEntry *const *entry)
static const SnapshotObjectId kGcRootsObjectId
void AddCharacter(char c)
SnapshotObjectId GenerateId(v8::RetainedObjectInfo *info)
void SetIndexedAutoIndexReference(HeapGraphEdge::Type type, int parent, HeapEntry *child_entry)
HeapEntry * AddGcRootsEntry()
static const int kFirstOffset
NativeGroupRetainedObjectInfo(const char *label)
static const int kWeakFirstViewOffset
uint32_t ComputePointerHash(void *ptr)
static const int kParentOffset
static const int kLiteralsOffset
static const int kNestedSiteOffset
static const int kSourceOffset
SnapshotObjectId last_assigned_id() const
bool MoveObject(Address from, Address to, int size)
#define STRING_NAME(name, str)
static JSGlobalProxy * cast(Object *obj)
NativeObjectsExplorer(HeapSnapshot *snapshot, SnapshottingProgressReportingInterface *progress)
List< HeapEntry > & entries()
static const int kGCMetadataOffset
const intptr_t kFailureTagMask
static Cell * cast(Object *obj)
const char * GetTag(Object *obj)
#define ROOT_NAME(type, name, camel_name)
static SlicedString * cast(Object *obj)
static void MemCopy(void *dest, const void *src, size_t size)
static const int kScopeInfoOffset
static const int kObjectIdStep
static Box * cast(Object *obj)
HeapSnapshotGenerator(HeapSnapshot *snapshot, v8::ActivityControl *control, v8::HeapProfiler::ObjectNameResolver *resolver, Heap *heap)
virtual int GetChunkSize()
static String * GetConstructorName(JSObject *object)
HeapEntry * gc_subroot(int index)
virtual const char * GetLabel()=0
static const int kBufferOffset
JSObject * global_proxy()
virtual const char * GetLabel()
PropertyCellSpace * property_cell_space()
static const int kTransitionInfoOffset
bool Contains(Object *object)
uint32_t occupancy() const
static HeapObject *const kInternalRootObject
virtual void ProgressStep()=0
GlobalHandles * global_handles()
void AddString(const char *s)
bool IterateAndExtractReferences(SnapshotFiller *filler)
HeapEntry * AddRootEntry()
virtual WriteResult WriteHeapStatsChunk(HeapStatsUpdate *data, int count)
Entry * Lookup(void *key, uint32_t hash, bool insert, AllocationPolicy allocator=AllocationPolicy())
static const char * Kind2String(Kind kind)
static const int kNameOffset
virtual ~GlobalHandlesExtractor()
OldSpace * old_pointer_space()
static const int kPropertiesOffset
static const SnapshotObjectId kFirstAvailableObjectId
List< HeapGraphEdge > & edges()
void IterateAllRoots(ObjectVisitor *v)
SnapshotObjectId FindEntry(Address addr)
static const int kMakeHeapIterableMask
const List< FunctionInfo * > & function_info_list() const
friend class GlobalHandlesExtractor
static const int kNextFunctionLinkOffset
static const int kLineEndsOffset
static const int kElementsOffset
static PropertyCell * cast(Object *obj)
HeapSnapshot(HeapProfiler *profiler, const char *title, unsigned uid)
HeapEntry * FindEntry(HeapThing ptr)
static const int kTypeFeedbackInfoOffset
virtual void VisitPointers(Object **start, Object **end)
void IterateAllRootsWithClassIds(ObjectVisitor *v)
static const int kRelocationInfoOffset
static const int kNonWeakFieldsEndOffset
int StrLength(const char *string)
static Local< Context > ToLocal(v8::internal::Handle< v8::internal::Context > obj)
static int OffsetOfElementAt(int index)
static const int kNextCodeLinkOffset
static JSArray * cast(Object *obj)
static void Print(const char *format,...)
#define T(name, string, precedence)
AllocationTraceTree * trace_tree()
HeapEntry * GetEntryById(SnapshotObjectId id)
V8_INLINE bool IsString() const
List< ObjectGroup * > * object_groups()
static int SNPrintF(Vector< char > str, const char *format,...)
void AddRootEntries(SnapshotFiller *filler)
HeapEntry * AddEntry(HeapEntry::Type type, const char *name, SnapshotObjectId id, size_t size, unsigned trace_node_id)
static const int kMapOffset
static const int kFunctionDataOffset
static const int kNormalTypeCacheOffset
void Serialize(v8::OutputStream *stream)
static uint32_t HashSequentialString(const schar *chars, int length, uint32_t seed)
void StopHeapObjectsTracking()
virtual WriteResult WriteAsciiChunk(char *data, int size)=0
void IterateRoots(ObjectVisitor *v, VisitMode mode)
void Sort(int(*cmp)(const T *x, const T *y))
int FindUntrackedObjects()
static const int kSecondOffset
uint32_t ComputeIntegerHash(uint32_t key, uint32_t seed)
static const SnapshotObjectId kInternalRootObjectId
virtual ~NativeObjectsExplorer()
virtual void VisitEmbedderReference(Object **p, uint16_t class_id)
AllocationTracker * allocation_tracker() const
List< HeapEntry * > * GetSortedEntriesList()
uint32_t SnapshotObjectId
friend class IndexedReferencesExtractor
static const int kEndOffset
void AddNumber(unsigned n)
static const int kInferredNameOffset
size_t GetMemoryUsedByList(const List< T, P > &list)
#define EXTRACT_CONTEXT_FIELD(index, type, name)
const char * GetCopy(const char *src)
void Pair(HeapThing thing, int entry)
HeapEntry * AddGcSubrootEntry(int tag)
void * Remove(void *key, uint32_t hash)
uint32_t capacity() const
static const int kNameOffset
virtual intptr_t GetElementCount()
InstanceType instance_type()
static const uchar kBadChar
static const int kConstructorOffset
int SortedListBSearch(const List< T > &list, P cmp)
Handle< JSGlobalObject > & at(int i)
static FixedArray * cast(Object *obj)
static const int kWeakNextOffset
static const int kHeaderSize
void Print(const v8::FunctionCallbackInfo< v8::Value > &args)
void RemoveObjectGroups()
HeapObjectsMap(Heap *heap)
static const int kBoundFunctionIndex
void RememberLastJSObjectId()
void Add(const T &element, AllocationPolicy allocator=AllocationPolicy())
static const int kScriptOffset
static const int kPrototypeOffset
void Synchronize(VisitorSynchronization::SyncTag tag)
static const int kWeakNextOffset
GcSubrootsEnumerator(SnapshotFiller *filler, V8HeapExplorer *explorer)
List< HeapGraphEdge * > & children()
virtual void VisitPointers(Object **start, Object **end)
static JSArrayBuffer * cast(Object *obj)
static const int kValueOffset
static const int kContextOffset
static const int kNativeContextOffset
int EstimateObjectsCount(HeapIterator *iterator)
virtual void EndOfStream()=0
static GlobalObject * cast(Object *obj)
static const int kBoundThisIndex
static const int kConstructStubOffset
#define STRUCT_MAP_NAME(NAME, Name, name)
void DeleteArray(T *array)
virtual ~NativeGroupRetainedObjectInfo()
static const int kSharedFunctionInfoOffset
static ConsString * cast(Object *obj)
static CodeCache * cast(Object *obj)
virtual intptr_t GetSizeInBytes()
virtual HeapEntry * AllocateEntry(HeapThing ptr)
static const int kCodeCacheOffset
static const int kConstantPoolOffset
static const int kBoundArgumentsStartIndex
static const int kDependentCodeOffset
#define MAKE_STRUCT_CASE(NAME, Name, name)
static JSArrayBufferView * cast(Object *obj)
virtual const char * GetGroupLabel()
static JSObject * cast(Object *obj)
OldSpace * old_data_space()
Entry * Next(Entry *p) const
static const char *const kTagNames[kNumberOfSyncTags]
virtual intptr_t GetHash()
void CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags)
String * constructor_name()
static JSGlobalObject * cast(Object *obj)
static JSFunction * cast(Object *obj)
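Most of the symbols indexed above are V8-internal, but a few of them (OutputStream::WriteAsciiChunk, OutputStream::EndOfStream, ActivityControl::ReportProgressValue, HeapSnapshot::Serialize) belong to the public v8-profiler.h interface that the snapshot generator and serializer drive. The following is a minimal sketch, assuming a V8 checkout from roughly the era documented here, of how an embedder might implement those hooks; the class names FileOutputStream and ProgressLogger are invented for illustration, and the exact TakeHeapSnapshot/Serialize signatures differ in later V8 versions.

    // Minimal sketch, assuming <v8.h> and <v8-profiler.h> on the include path.
    // FileOutputStream and ProgressLogger are illustrative names, not part of V8.
    #include <cstdio>
    #include <v8.h>
    #include <v8-profiler.h>

    // Receives the serialized snapshot in ASCII chunks and appends them to a FILE*.
    class FileOutputStream : public v8::OutputStream {
     public:
      explicit FileOutputStream(FILE* out) : out_(out) {}
      virtual int GetChunkSize() { return 64 * 1024; }   // preferred chunk size, queried once
      virtual void EndOfStream() { fflush(out_); }       // serializer finished successfully
      virtual WriteResult WriteAsciiChunk(char* data, int size) {
        size_t written = fwrite(data, 1, static_cast<size_t>(size), out_);
        return written == static_cast<size_t>(size) ? kContinue : kAbort;
      }
     private:
      FILE* out_;
    };

    // Lets the embedder observe or cancel snapshot generation; this is the public
    // side of the ProgressReport/ReportProgressValue calls listed above.
    class ProgressLogger : public v8::ActivityControl {
     public:
      virtual ControlOption ReportProgressValue(int done, int total) {
        fprintf(stderr, "heap snapshot: %d/%d\n", done, total);
        return kContinue;                                // kAbort cancels the snapshot
      }
    };

    void DumpHeapSnapshot(v8::Isolate* isolate, FILE* out) {
      v8::HandleScope handle_scope(isolate);
      ProgressLogger progress;
      // Signature is version-dependent; in this era TakeHeapSnapshot takes a title.
      const v8::HeapSnapshot* snapshot =
          isolate->GetHeapProfiler()->TakeHeapSnapshot(
              v8::String::NewFromUtf8(isolate, "dump"), &progress);
      FileOutputStream stream(out);
      snapshot->Serialize(&stream, v8::HeapSnapshot::kJSON);
    }

The serializer queries GetChunkSize once, streams the JSON document through WriteAsciiChunk, and finishes with EndOfStream; returning kAbort from either callback stops the corresponding phase early.

Similarly, RetainedObjectInfo (GetLabel, GetHash, IsEquivalent, GetElementCount, GetSizeInBytes) is the embedder-supplied description of native objects that NativeObjectsExplorer and GlobalHandlesExtractor fold into the snapshot. A rough sketch follows, again with invented names (MyNativeGroupInfo, MyWrapperInfo, kMyClassId) and a hypothetical byte count, wired up through HeapProfiler::SetWrapperClassInfoProvider.

    #include <cstring>

    // Sketch only: groups native objects under one snapshot node.
    class MyNativeGroupInfo : public v8::RetainedObjectInfo {
     public:
      explicit MyNativeGroupInfo(intptr_t bytes) : bytes_(bytes) {}
      virtual void Dispose() { delete this; }            // profiler releases it this way
      virtual bool IsEquivalent(v8::RetainedObjectInfo* other) {
        return strcmp(GetLabel(), other->GetLabel()) == 0;
      }
      virtual intptr_t GetHash() { return 1; }           // equal hash + IsEquivalent => merged node
      virtual const char* GetLabel() { return "MyEmbedderObjects"; }
      virtual intptr_t GetSizeInBytes() { return bytes_; }
     private:
      intptr_t bytes_;
    };

    // Invoked for each weak global handle tagged with kMyClassId via
    // SetWrapperClassId; the returned object feeds NativeObjectsExplorer.
    static v8::RetainedObjectInfo* MyWrapperInfo(uint16_t class_id,
                                                 v8::Handle<v8::Value> wrapper) {
      return new MyNativeGroupInfo(1024);                // hypothetical size
    }

    // Registration (once, at startup):
    //   isolate->GetHeapProfiler()->SetWrapperClassInfoProvider(kMyClassId, MyWrapperInfo);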