TokenEnumerator::TokenEnumerator()
    : token_locations_(4),
      token_removed_(4) {
}

TokenEnumerator::~TokenEnumerator() {
  Isolate* isolate = Isolate::Current();
  for (int i = 0; i < token_locations_.length(); ++i) {
    if (!token_removed_[i]) {
      isolate->global_handles()->ClearWeakness(token_locations_[i]);
      isolate->global_handles()->Destroy(token_locations_[i]);
    }
  }
}

int TokenEnumerator::GetTokenId(Object* token) {
  Isolate* isolate = Isolate::Current();
  if (token == NULL) return TokenEnumerator::kNoSecurityToken;
  for (int i = 0; i < token_locations_.length(); ++i) {
    if (*token_locations_[i] == token && !token_removed_[i]) return i;
  }
  Handle<Object> handle = isolate->global_handles()->Create(token);
  isolate->global_handles()->MakeWeak(handle.location(), this,
                                      TokenRemovedCallback);
  token_locations_.Add(handle.location());
  token_removed_.Add(false);
  return token_locations_.length() - 1;
}

void TokenEnumerator::TokenRemovedCallback(v8::Persistent<v8::Value> handle,
                                           void* parameter) {
  reinterpret_cast<TokenEnumerator*>(parameter)->TokenRemoved(
      Utils::OpenHandle(*handle).location());
  handle.Dispose();
}

void TokenEnumerator::TokenRemoved(Object** token_location) {
  for (int i = 0; i < token_locations_.length(); ++i) {
    if (token_locations_[i] == token_location && !token_removed_[i]) {
      token_removed_[i] = true;
      return;
    }
  }
}
StringsStorage::StringsStorage()
    : names_(StringsMatch) {
}

StringsStorage::~StringsStorage() {
  for (HashMap::Entry* p = names_.Start();
       p != NULL;
       p = names_.Next(p)) {
    DeleteArray(reinterpret_cast<const char*>(p->value));
  }
}

const char* StringsStorage::GetCopy(const char* src) {
  int len = static_cast<int>(strlen(src));
  // ... (copy src into a fresh buffer and hash it)
  return AddOrDisposeString(dst.start(), hash);
}

const char* StringsStorage::GetFormatted(const char* format, ...) {
  va_list args;
  va_start(args, format);
  const char* result = GetVFormatted(format, args);
  va_end(args);
  return result;
}

const char* StringsStorage::AddOrDisposeString(char* str, uint32_t hash) {
  HashMap::Entry* cache_entry = names_.Lookup(str, hash, true);
  if (cache_entry->value == NULL) {
    cache_entry->value = str;
  } else {
    DeleteArray(str);
  }
  return reinterpret_cast<const char*>(cache_entry->value);
}

const char* StringsStorage::GetVFormatted(const char* format, va_list args) {
  // ...
  return AddOrDisposeString(str.start(), hash);
}

const char* StringsStorage::GetName(String* name) {
  if (name->IsString()) {
    int length = Min(kMaxNameSize, name->length());
    // ... (convert to a C string and hash it)
    return AddOrDisposeString(data.Detach(), hash);
  }
  return "";
}

size_t StringsStorage::GetUsedMemorySize() const {
  size_t size = sizeof(*this);
  size += sizeof(HashMap::Entry) * names_.capacity();
  for (HashMap::Entry* p = names_.Start(); p != NULL; p = names_.Next(p)) {
    size += strlen(reinterpret_cast<const char*>(p->value)) + 1;
  }
  return size;
}
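
// Note (added): StringsStorage appears to act as an interning pool keyed by
// the string hash: AddOrDisposeString() either takes ownership of a new
// buffer or deletes it when an equal string is already cached, so callers can
// treat the returned pointer as owned by the pool. Illustrative usage sketch
// (hypothetical call site, not part of the original file):
//   StringsStorage names;
//   const char* a = names.GetCopy("foo");
//   const char* b = names.GetCopy("foo");
//   ASSERT(a == b);  // deduplicated into a single heap copy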
void CodeEntry::CopyData(const CodeEntry& source) {
  name_prefix_ = source.name_prefix_;
  name_ = source.name_;
  resource_name_ = source.resource_name_;
  line_number_ = source.line_number_;
}

uint32_t CodeEntry::GetCallUid() const {
  uint32_t hash = ComputeIntegerHash(tag_, v8::internal::kZeroHashSeed);
  if (shared_id_ != 0) {
    hash ^= ComputeIntegerHash(static_cast<uint32_t>(shared_id_),
                               v8::internal::kZeroHashSeed);
  } else {
    hash ^= ComputeIntegerHash(
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name_prefix_)),
        v8::internal::kZeroHashSeed);
    hash ^= ComputeIntegerHash(
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name_)),
        v8::internal::kZeroHashSeed);
    hash ^= ComputeIntegerHash(
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(resource_name_)),
        v8::internal::kZeroHashSeed);
    hash ^= ComputeIntegerHash(line_number_, v8::internal::kZeroHashSeed);
  }
  return hash;
}

bool CodeEntry::IsSameAs(CodeEntry* entry) const {
  return this == entry
      || (tag_ == entry->tag_
          && shared_id_ == entry->shared_id_
          && (shared_id_ != 0
              || (name_prefix_ == entry->name_prefix_
                  && name_ == entry->name_
                  && resource_name_ == entry->resource_name_
                  && line_number_ == entry->line_number_)));
}

ProfileNode* ProfileNode::FindChild(CodeEntry* entry) {
  HashMap::Entry* map_entry =
      children_.Lookup(entry, CodeEntryHash(entry), false);
  return map_entry != NULL ?
      reinterpret_cast<ProfileNode*>(map_entry->value) : NULL;
}

ProfileNode* ProfileNode::FindOrAddChild(CodeEntry* entry) {
  HashMap::Entry* map_entry =
      children_.Lookup(entry, CodeEntryHash(entry), true);
  if (map_entry->value == NULL) {
    ProfileNode* new_node = new ProfileNode(tree_, entry);
    map_entry->value = new_node;
    children_list_.Add(new_node);
  }
  return reinterpret_cast<ProfileNode*>(map_entry->value);
}

void ProfileNode::Print(int indent) {
  OS::Print("%5u %5u %*c %s%s [%d]",
            total_ticks_, self_ticks_,
            indent, ' ',
            entry_->name_prefix(),
            entry_->name(),
            entry_->security_token_id());
  if (entry_->resource_name()[0] != '\0')
    OS::Print(" %s:%d", entry_->resource_name(), entry_->line_number());
  OS::Print("\n");
  for (HashMap::Entry* p = children_.Start();
       p != NULL;
       p = children_.Next(p)) {
    reinterpret_cast<ProfileNode*>(p->value)->Print(indent + 2);
  }
}
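
// Note (added): ProfileNode keeps its children twice: in children_ (a HashMap
// keyed by the CodeEntry pointer via CodeEntryHash, giving cheap
// FindOrAddChild during tick recording) and in children_list_ (a List that
// preserves insertion order for iteration). Sketch of how one tick path grows
// the tree, assuming code entries e1/e2 already exist:
//   ProfileNode* n = root->FindOrAddChild(e1);  // creates node on first call
//   n = n->FindOrAddChild(e2);                  // reuses the cached child
//   n->IncrementSelfTicks();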
ProfileTree::ProfileTree()
    : root_entry_(Logger::FUNCTION_TAG, /* ... */),
      root_(new ProfileNode(this, &root_entry_)) {
}

ProfileTree::~ProfileTree() {
  DeleteNodesCallback cb;
  TraverseDepthFirst(&cb);
}

void ProfileTree::AddPathFromEnd(const Vector<CodeEntry*>& path) {
  ProfileNode* node = root_;
  for (CodeEntry** entry = path.start() + path.length() - 1;
       entry != path.start() - 1;
       --entry) {
    if (*entry != NULL) {
      node = node->FindOrAddChild(*entry);
    }
  }
  node->IncrementSelfTicks();
}

void ProfileTree::AddPathFromStart(const Vector<CodeEntry*>& path) {
  ProfileNode* node = root_;
  for (CodeEntry** entry = path.start();
       entry != path.start() + path.length();
       ++entry) {
    if (*entry != NULL) {
      node = node->FindOrAddChild(*entry);
    }
  }
  node->IncrementSelfTicks();
}

struct NodesPair {
  NodesPair(ProfileNode* src, ProfileNode* dst)
      : src(src), dst(dst) { }
  ProfileNode* src;
  ProfileNode* dst;
};

class FilteredCloneCallback {
 public:
  FilteredCloneCallback(ProfileNode* dst_root, int security_token_id)
      : stack_(10),
        security_token_id_(security_token_id) {
    stack_.Add(NodesPair(NULL, dst_root));
  }

  void BeforeTraversingChild(ProfileNode* parent, ProfileNode* child) {
    if (IsTokenAcceptable(child->entry()->security_token_id(),
                          parent->entry()->security_token_id())) {
      ProfileNode* clone = stack_.last().dst->FindOrAddChild(child->entry());
      clone->IncreaseSelfTicks(child->self_ticks());
      stack_.Add(NodesPair(child, clone));
    } else {
      // Attribute ticks to the parent node.
      stack_.last().dst->IncreaseSelfTicks(child->self_ticks());
    }
  }

  void AfterAllChildrenTraversed(ProfileNode* parent) { }

  void AfterChildTraversed(ProfileNode* parent, ProfileNode* child) {
    if (stack_.last().src == child) {
      stack_.RemoveLast();
    }
  }

 private:
  bool IsTokenAcceptable(int token, int parent_token) {
    if (token == TokenEnumerator::kNoSecurityToken
        || token == security_token_id_) return true;
    if (token == TokenEnumerator::kInheritsSecurityToken) {
      return parent_token == TokenEnumerator::kNoSecurityToken
          || parent_token == security_token_id_;
    }
    return false;
  }

  List<NodesPair> stack_;
  int security_token_id_;
};

void ProfileTree::FilteredClone(ProfileTree* src, int security_token_id) {
  ms_to_ticks_scale_ = src->ms_to_ticks_scale_;
  FilteredCloneCallback cb(root_, security_token_id);
  src->TraverseDepthFirst(&cb);
  CalculateTotalTicks();
}

void ProfileTree::SetTickRatePerMs(double ticks_per_ms) {
  ms_to_ticks_scale_ = ticks_per_ms > 0 ? 1.0 / ticks_per_ms : 1.0;
}
class Position {
 public:
  explicit Position(ProfileNode* node)
      : node(node), child_idx_(0) { }
  INLINE(ProfileNode* current_child()) {
    return node->children()->at(child_idx_);
  }
  INLINE(bool has_current_child()) {
    return child_idx_ < node->children()->length();
  }
  INLINE(void next_child()) { ++child_idx_; }

  ProfileNode* node;
 private:
  int child_idx_;
};

// Non-recursive implementation of a depth-first post-order tree traversal.
template <typename Callback>
void ProfileTree::TraverseDepthFirst(Callback* callback) {
  List<Position> stack(10);
  stack.Add(Position(root_));
  while (stack.length() > 0) {
    Position& current = stack.last();
    if (current.has_current_child()) {
      callback->BeforeTraversingChild(current.node, current.current_child());
      stack.Add(Position(current.current_child()));
    } else {
      callback->AfterAllChildrenTraversed(current.node);
      if (stack.length() > 1) {
        Position& parent = stack[stack.length() - 2];
        callback->AfterChildTraversed(parent.node, current.node);
        parent.next_child();
      }
      stack.RemoveLast();
    }
  }
}
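
// Note (added): TraverseDepthFirst avoids recursion by keeping an explicit
// stack of Position records (node plus index of the next child to visit).
// Any callback type works as long as it provides the three hooks used above.
// Minimal illustrative callback (not part of the original file):
//   struct CountNodesCallback {
//     CountNodesCallback() : count(0) { }
//     void BeforeTraversingChild(ProfileNode*, ProfileNode*) { }
//     void AfterAllChildrenTraversed(ProfileNode*) { ++count; }
//     void AfterChildTraversed(ProfileNode*, ProfileNode*) { }
//     int count;
//   };
//   CountNodesCallback cb;
//   tree->TraverseDepthFirst(&cb);  // cb.count == number of nodes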
class CalculateTotalTicksCallback {
 public:
  void BeforeTraversingChild(ProfileNode*, ProfileNode*) { }

  void AfterAllChildrenTraversed(ProfileNode* node) {
    node->IncreaseTotalTicks(node->self_ticks());
  }

  void AfterChildTraversed(ProfileNode* parent, ProfileNode* child) {
    parent->IncreaseTotalTicks(child->total_ticks());
  }
};

void ProfileTree::CalculateTotalTicks() {
  CalculateTotalTicksCallback cb;
  TraverseDepthFirst(&cb);
}

void ProfileTree::ShortPrint() {
  OS::Print("root: %u %u %.2fms %.2fms\n",
            root_->total_ticks(), root_->self_ticks(),
            root_->GetTotalMillis(), root_->GetSelfMillis());
}

CpuProfile* CpuProfile::FilteredClone(int security_token_id) {
  ASSERT(security_token_id != TokenEnumerator::kNoSecurityToken);
  CpuProfile* clone = new CpuProfile(title_, uid_);
  clone->top_down_.FilteredClone(&top_down_, security_token_id);
  clone->bottom_up_.FilteredClone(&bottom_up_, security_token_id);
  return clone;
}
const CodeMap::CodeTreeConfig::Key CodeMap::CodeTreeConfig::kNoKey = NULL;

void CodeMap::AddCode(Address addr, CodeEntry* entry, unsigned size) {
  DeleteAllCoveredCode(addr, addr + size);
  CodeTree::Locator locator;
  tree_.Insert(addr, &locator);
  locator.set_value(CodeEntryInfo(entry, size));
}

void CodeMap::DeleteAllCoveredCode(Address start, Address end) {
  List<Address> to_delete;
  Address addr = end - 1;
  while (addr >= start) {
    CodeTree::Locator locator;
    if (!tree_.FindGreatestLessThan(addr, &locator)) break;
    Address start2 = locator.key(), end2 = start2 + locator.value().size;
    if (start2 < end && start < end2) to_delete.Add(start2);
    addr = start2 - 1;
  }
  for (int i = 0; i < to_delete.length(); ++i) tree_.Remove(to_delete[i]);
}

CodeEntry* CodeMap::FindEntry(Address addr) {
  CodeTree::Locator locator;
  if (tree_.FindGreatestLessThan(addr, &locator)) {
    // locator.key() <= addr; check that addr falls within the entry's size.
    const CodeEntryInfo& entry = locator.value();
    if (addr < (locator.key() + entry.size))
      return entry.entry;
  }
  return NULL;
}

int CodeMap::GetSharedId(Address addr) {
  CodeTree::Locator locator;
  // For shared function entries, 'size' field is used to store their IDs.
  if (tree_.Find(addr, &locator)) {
    const CodeEntryInfo& entry = locator.value();
    ASSERT(entry.entry == kSharedFunctionCodeEntry);
    return entry.size;
  } else {
    tree_.Insert(addr, &locator);
    int id = next_shared_id_++;
    locator.set_value(CodeEntryInfo(kSharedFunctionCodeEntry, id));
    return id;
  }
}

void CodeMap::MoveCode(Address from, Address to) {
  if (from == to) return;
  CodeTree::Locator locator;
  if (!tree_.Find(from, &locator)) return;
  CodeEntryInfo entry = locator.value();
  tree_.Remove(from);
  AddCode(to, entry.entry, entry.size);
}

void CodeMap::CodeTreePrinter::Call(
    const Address& key, const CodeMap::CodeEntryInfo& value) {
  OS::Print("%p %5d %s\n", key, value.size, value.entry->name());
}

void CodeMap::Print() {
  CodeTreePrinter printer;
  tree_.ForEach(&printer);
}
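
// Note (added): CodeMap keeps [start address -> (CodeEntry*, size)] in a
// splay tree, so FindEntry() can resolve an arbitrary pc by taking the
// greatest key <= pc and checking that pc lies within that entry's size.
// Illustrative lookup (hypothetical values, not part of the original file):
//   code_map.AddCode(start, entry, 0x100);
//   CodeEntry* a = code_map.FindEntry(start + 0x40);   // returns 'entry'
//   CodeEntry* b = code_map.FindEntry(start + 0x200);  // outside -> NULL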
CpuProfilesCollection::CpuProfilesCollection()
    : profiles_uids_(UidsMatch),
      current_profiles_semaphore_(OS::CreateSemaphore(1)) {
  // ...
}

static void DeleteCodeEntry(CodeEntry** entry_ptr) {
  delete *entry_ptr;
}

static void DeleteCpuProfile(CpuProfile** profile_ptr) {
  delete *profile_ptr;
}

static void DeleteProfilesList(List<CpuProfile*>** list_ptr) {
  if (*list_ptr != NULL) {
    (*list_ptr)->Iterate(DeleteCpuProfile);
    delete *list_ptr;
  }
}

CpuProfilesCollection::~CpuProfilesCollection() {
  delete current_profiles_semaphore_;
  current_profiles_.Iterate(DeleteCpuProfile);
  detached_profiles_.Iterate(DeleteCpuProfile);
  profiles_by_token_.Iterate(DeleteProfilesList);
  code_entries_.Iterate(DeleteCodeEntry);
}

bool CpuProfilesCollection::StartProfiling(const char* title, unsigned uid) {
  ASSERT(uid > 0);
  current_profiles_semaphore_->Wait();
  if (current_profiles_.length() >= kMaxSimultaneousProfiles) {
    current_profiles_semaphore_->Signal();
    return false;
  }
  for (int i = 0; i < current_profiles_.length(); ++i) {
    if (strcmp(current_profiles_[i]->title(), title) == 0) {
      // Ignore attempts to start profile with the same title.
      current_profiles_semaphore_->Signal();
      return false;
    }
  }
  current_profiles_.Add(new CpuProfile(title, uid));
  current_profiles_semaphore_->Signal();
  return true;
}

CpuProfile* CpuProfilesCollection::StopProfiling(int security_token_id,
                                                 const char* title,
                                                 double actual_sampling_rate) {
  const int title_len = StrLength(title);
  CpuProfile* profile = NULL;
  current_profiles_semaphore_->Wait();
  for (int i = current_profiles_.length() - 1; i >= 0; --i) {
    if (title_len == 0 || strcmp(current_profiles_[i]->title(), title) == 0) {
      profile = current_profiles_.Remove(i);
      break;
    }
  }
  current_profiles_semaphore_->Signal();

  if (profile != NULL) {
    // ... (finalize ticks and locate the unabridged profiles list)
    unabridged_list->Add(profile);
    HashMap::Entry* entry =
        profiles_uids_.Lookup(reinterpret_cast<void*>(profile->uid()),
                              static_cast<uint32_t>(profile->uid()),
                              true);
    entry->value = reinterpret_cast<void*>(unabridged_list->length() - 1);
    return GetProfile(security_token_id, profile->uid());
  }
  return NULL;
}
CpuProfile* CpuProfilesCollection::GetProfile(int security_token_id,
                                              unsigned uid) {
  int index = GetProfileIndex(uid);
  if (index < 0) return NULL;
  List<CpuProfile*>* unabridged_list =
      profiles_by_token_[TokenToIndex(TokenEnumerator::kNoSecurityToken)];
  if (security_token_id == TokenEnumerator::kNoSecurityToken) {
    return unabridged_list->at(index);
  }
  List<CpuProfile*>* list = GetProfilesList(security_token_id);
  if (list->at(index) == NULL) {
    (*list)[index] =
        unabridged_list->at(index)->FilteredClone(security_token_id);
  }
  return list->at(index);
}

int CpuProfilesCollection::GetProfileIndex(unsigned uid) {
  HashMap::Entry* entry = profiles_uids_.Lookup(reinterpret_cast<void*>(uid),
                                                static_cast<uint32_t>(uid),
                                                false);
  return entry != NULL ?
      static_cast<int>(reinterpret_cast<intptr_t>(entry->value)) : -1;
}

bool CpuProfilesCollection::IsLastProfile(const char* title) {
  if (current_profiles_.length() != 1) return false;
  return StrLength(title) == 0
      || strcmp(current_profiles_[0]->title(), title) == 0;
}

void CpuProfilesCollection::RemoveProfile(CpuProfile* profile) {
  unsigned uid = profile->uid();
  int index = GetProfileIndex(uid);
  if (index < 0) {
    detached_profiles_.RemoveElement(profile);
    return;
  }
  profiles_uids_.Remove(reinterpret_cast<void*>(uid),
                        static_cast<uint32_t>(uid));
  // Decrement all indexes above the removed one.
  for (HashMap::Entry* p = profiles_uids_.Start();
       p != NULL;
       p = profiles_uids_.Next(p)) {
    intptr_t p_index = reinterpret_cast<intptr_t>(p->value);
    if (p_index > index) {
      p->value = reinterpret_cast<void*>(p_index - 1);
    }
  }
  for (int i = 0; i < profiles_by_token_.length(); ++i) {
    List<CpuProfile*>* list = profiles_by_token_[i];
    if (list != NULL && index < list->length()) {
      CpuProfile* cloned_profile = list->Remove(index);
      if (cloned_profile != NULL && cloned_profile != profile) {
        detached_profiles_.Add(cloned_profile);
      }
    }
  }
}

int CpuProfilesCollection::TokenToIndex(int security_token_id) {
  ASSERT(TokenEnumerator::kNoSecurityToken == -1);
  return security_token_id + 1;
}

List<CpuProfile*>* CpuProfilesCollection::GetProfilesList(
    int security_token_id) {
  const int index = TokenToIndex(security_token_id);
  const int lists_to_add = index - profiles_by_token_.length() + 1;
  if (lists_to_add > 0) profiles_by_token_.AddBlock(NULL, lists_to_add);
  List<CpuProfile*>* unabridged_list =
      profiles_by_token_[TokenToIndex(TokenEnumerator::kNoSecurityToken)];
  const int current_count = unabridged_list->length();
  if (profiles_by_token_[index] == NULL) {
    profiles_by_token_[index] = new List<CpuProfile*>(current_count);
  }
  List<CpuProfile*>* list = profiles_by_token_[index];
  const int profiles_to_add = current_count - list->length();
  if (profiles_to_add > 0) list->AddBlock(NULL, profiles_to_add);
  return list;
}

List<CpuProfile*>* CpuProfilesCollection::Profiles(int security_token_id) {
  List<CpuProfile*>* unabridged_list =
      profiles_by_token_[TokenToIndex(TokenEnumerator::kNoSecurityToken)];
  if (security_token_id == TokenEnumerator::kNoSecurityToken) {
    return unabridged_list;
  }
  List<CpuProfile*>* list = GetProfilesList(security_token_id);
  const int current_count = unabridged_list->length();
  for (int i = 0; i < current_count; ++i) {
    if (list->at(i) == NULL) {
      (*list)[i] = unabridged_list->at(i)->FilteredClone(security_token_id);
    }
  }
  return list;
}
CodeEntry* CpuProfilesCollection::NewCodeEntry(Logger::LogEventsAndTags tag,
                                               String* name,
                                               String* resource_name,
                                               int line_number) {
  CodeEntry* entry = new CodeEntry(tag,
                                   CodeEntry::kEmptyNamePrefix,
                                   GetFunctionName(name),
                                   GetName(resource_name),
                                   line_number,
                                   TokenEnumerator::kNoSecurityToken);
  code_entries_.Add(entry);
  return entry;
}

CodeEntry* CpuProfilesCollection::NewCodeEntry(Logger::LogEventsAndTags tag,
                                               const char* name) {
  CodeEntry* entry = new CodeEntry(tag,
                                   CodeEntry::kEmptyNamePrefix,
                                   GetFunctionName(name),
                                   "",
                                   v8::CpuProfileNode::kNoLineNumberInfo,
                                   TokenEnumerator::kNoSecurityToken);
  code_entries_.Add(entry);
  return entry;
}

CodeEntry* CpuProfilesCollection::NewCodeEntry(Logger::LogEventsAndTags tag,
                                               const char* name_prefix,
                                               String* name) {
  CodeEntry* entry = new CodeEntry(tag,
                                   name_prefix,
                                   GetName(name),
                                   "",
                                   v8::CpuProfileNode::kNoLineNumberInfo,
                                   TokenEnumerator::kInheritsSecurityToken);
  code_entries_.Add(entry);
  return entry;
}

CodeEntry* CpuProfilesCollection::NewCodeEntry(Logger::LogEventsAndTags tag,
                                               int args_count) {
  CodeEntry* entry = new CodeEntry(tag,
                                   "args_count: ",
                                   GetName(args_count),
                                   "",
                                   v8::CpuProfileNode::kNoLineNumberInfo,
                                   TokenEnumerator::kInheritsSecurityToken);
  code_entries_.Add(entry);
  return entry;
}
void CpuProfilesCollection::AddPathToCurrentProfiles(
    const Vector<CodeEntry*>& path) {
  current_profiles_semaphore_->Wait();
  for (int i = 0; i < current_profiles_.length(); ++i) {
    current_profiles_[i]->AddPath(path);
  }
  current_profiles_semaphore_->Signal();
}

void SampleRateCalculator::Tick() {
  if (--wall_time_query_countdown_ == 0)
    UpdateMeasurements(OS::TimeCurrentMillis());
}

void SampleRateCalculator::UpdateMeasurements(double current_time) {
  if (measurements_count_++ != 0) {
    const double measured_ticks_per_ms =
        (kWallTimeQueryIntervalMs * ticks_per_ms_) /
        (current_time - last_wall_time_);
    // Update the average value.
    ticks_per_ms_ +=
        (measured_ticks_per_ms - ticks_per_ms_) / measurements_count_;
    // Update the externally accessible result.
    result_ = static_cast<AtomicWord>(ticks_per_ms_ * kResultScale);
  }
  last_wall_time_ = current_time;
  wall_time_query_countdown_ =
      static_cast<unsigned>(kWallTimeQueryIntervalMs * ticks_per_ms_);
}
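
// Note (added): UpdateMeasurements maintains a running (cumulative) average:
// after the n-th measurement, ticks_per_ms_ += (measured - ticks_per_ms_) / n,
// which is the standard incremental mean update. The scaled value is then
// published through result_ (an AtomicWord) so the sampler thread can read it
// without taking a lock.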
874 "(anonymous function)";
878 "(garbage collector)";
882 : profiles_(profiles),
884 profiles->NewCodeEntry(
Logger::FUNCTION_TAG, kProgramEntryName)),
886 profiles->NewCodeEntry(
Logger::BUILTIN_TAG,
887 kGarbageCollectorEntryName)) {
897 memset(entry, 0, entries.
length() *
sizeof(*entry));
907 }
else if (sample.
tos !=
NULL) {
911 if (*entry !=
NULL && !(*entry)->is_js_function()) {
919 stack_pos != stack_end;
921 *entry++ = code_map_.
FindEntry(*stack_pos);
925 if (FLAG_prof_browser_mode) {
926 bool no_symbolized_entries =
true;
929 no_symbolized_entries =
false;
934 if (no_symbolized_entries) {
935 *entry++ = EntryForVMState(sample.
state);
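
// Note (added): RecordTickSample turns one TickSample into a vector of
// CodeEntry pointers (the pc entry, an optional top-of-stack entry, then one
// entry per captured stack frame); unresolved addresses stay NULL and are
// skipped when the path is added to every active profile via
// AddPathToCurrentProfiles(). Rough data flow, with made-up names:
//   sample.pc       -> code_map_.FindEntry(pc)        -> entries[0]
//   sample.stack[i] -> code_map_.FindEntry(stack[i])  -> later slots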
HeapGraphEdge::HeapGraphEdge(Type type, const char* name, int from, int to)
    : type_(type),
      from_index_(from),
      to_index_(to),
      name_(name) {
  ASSERT(type == kContextVariable
      || type == kProperty
      || type == kInternal
      || type == kShortcut);
}

HeapGraphEdge::HeapGraphEdge(Type type, int index, int from, int to)
    : type_(type),
      from_index_(from),
      to_index_(to),
      index_(index) {
  ASSERT(type == kElement || type == kHidden || type == kWeak);
}

void HeapGraphEdge::ReplaceToIndexWithEntry(HeapSnapshot* snapshot) {
  to_entry_ = &snapshot->entries()[to_index_];
}

const int HeapEntry::kNoEntry = -1;

HeapEntry::HeapEntry(HeapSnapshot* snapshot,
                     Type type,
                     const char* name,
                     SnapshotObjectId id,
                     int self_size)
    : type_(type),
      self_size_(self_size),
      id_(id),
      snapshot_(snapshot),
      name_(name) {
}

void HeapEntry::SetNamedReference(HeapGraphEdge::Type type,
                                  const char* name,
                                  HeapEntry* entry) {
  HeapGraphEdge edge(type, name, this->index(), entry->index());
  snapshot_->edges().Add(edge);
  ++children_count_;
}

void HeapEntry::SetIndexedReference(HeapGraphEdge::Type type,
                                    int index,
                                    HeapEntry* entry) {
  HeapGraphEdge edge(type, index, this->index(), entry->index());
  snapshot_->edges().Add(edge);
  ++children_count_;
}

Handle<HeapObject> HeapEntry::GetHeapObject() {
  return snapshot_->collection()->FindHeapObjectById(id());
}

void HeapEntry::Print(
    const char* prefix, const char* edge_name, int max_depth, int indent) {
  OS::Print("%6d @%6u %*c %s%s: ",
            self_size(), id(), indent, ' ', prefix, edge_name);
  if (type() != kString) {
    OS::Print("%s %.40s\n", TypeAsString(), name_);
  } else {
    OS::Print("\"");
    const char* c = name_;
    while (*c && (c - name_) <= 40) {
      // ... (print the character, escaping newlines)
      ++c;
    }
    OS::Print("\"\n");
  }
  if (--max_depth == 0) return;
  Vector<HeapGraphEdge*> ch = children();
  for (int i = 0; i < ch.length(); ++i) {
    HeapGraphEdge& edge = *ch[i];
    const char* edge_prefix = "";
    EmbeddedVector<char, 64> index;
    const char* edge_name = index.start();
    switch (edge.type()) {
      case HeapGraphEdge::kContextVariable:
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kInternal:
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kProperty:
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kShortcut:
        edge_name = edge.name();
        break;
      // ... (kElement, kHidden and kWeak edges print their index instead)
      default:
        OS::SNPrintF(index, "!!! unknown edge type: %d ", edge.type());
    }
    edge.to()->Print(edge_prefix, edge_name, max_depth, indent + 2);
  }
}
const char* HeapEntry::TypeAsString() {
  switch (type()) {
    case kHidden: return "/hidden/";
    case kObject: return "/object/";
    case kClosure: return "/closure/";
    case kString: return "/string/";
    case kCode: return "/code/";
    case kArray: return "/array/";
    case kRegExp: return "/regexp/";
    case kHeapNumber: return "/number/";
    case kNative: return "/native/";
    case kSynthetic: return "/synthetic/";
    default: return "???";
  }
}
template <size_t ptr_size> struct SnapshotSizeConstants;

template <> struct SnapshotSizeConstants<4> {
  static const int kExpectedHeapGraphEdgeSize = 12;
  static const int kExpectedHeapEntrySize = 24;
  static const int kExpectedHeapSnapshotsCollectionSize = 96;
  static const int kExpectedHeapSnapshotSize = 136;
  static const size_t kMaxSerializableSnapshotRawSize = 256 * MB;
};

template <> struct SnapshotSizeConstants<8> {
  static const int kExpectedHeapGraphEdgeSize = 24;
  static const int kExpectedHeapEntrySize = 32;
  static const int kExpectedHeapSnapshotsCollectionSize = 144;
  static const int kExpectedHeapSnapshotSize = 168;
  static const uint64_t kMaxSerializableSnapshotRawSize =
      static_cast<uint64_t>(6000) * MB;
};
HeapSnapshot::HeapSnapshot(HeapSnapshotsCollection* collection,
                           HeapSnapshot::Type type,
                           const char* title,
                           unsigned uid)
    : collection_(collection),
      type_(type),
      title_(title),
      uid_(uid),
      root_index_(HeapEntry::kNoEntry),
      gc_roots_index_(HeapEntry::kNoEntry),
      natives_root_index_(HeapEntry::kNoEntry),
      max_snapshot_js_object_id_(0) {
  STATIC_CHECK(
      sizeof(HeapGraphEdge) ==
      SnapshotSizeConstants<kPointerSize>::kExpectedHeapGraphEdgeSize);
  STATIC_CHECK(
      sizeof(HeapEntry) ==
      SnapshotSizeConstants<kPointerSize>::kExpectedHeapEntrySize);
  for (int i = 0; i < VisitorSynchronization::kNumberOfSyncTags; ++i) {
    gc_subroot_indexes_[i] = HeapEntry::kNoEntry;
  }
}

HeapEntry* HeapSnapshot::AddRootEntry() {
  ASSERT(root_index_ == HeapEntry::kNoEntry);
  ASSERT(entries_.is_empty());  // Root entry must be the first one.
  HeapEntry* entry = AddEntry(HeapEntry::kObject, /* ... */);
  root_index_ = entry->index();
  ASSERT(root_index_ == 0);
  return entry;
}

HeapEntry* HeapSnapshot::AddGcRootsEntry() {
  ASSERT(gc_roots_index_ == HeapEntry::kNoEntry);
  HeapEntry* entry = AddEntry(HeapEntry::kObject, /* ... */);
  gc_roots_index_ = entry->index();
  return entry;
}

HeapEntry* HeapSnapshot::AddGcSubrootEntry(int tag) {
  ASSERT(gc_subroot_indexes_[tag] == HeapEntry::kNoEntry);
  HeapEntry* entry = AddEntry(HeapEntry::kObject, /* ... */);
  gc_subroot_indexes_[tag] = entry->index();
  return entry;
}

HeapEntry* HeapSnapshot::AddEntry(HeapEntry::Type type,
                                  const char* name,
                                  SnapshotObjectId id,
                                  int size) {
  HeapEntry entry(this, type, name, id, size);
  entries_.Add(entry);
  return &entries_.last();
}

void HeapSnapshot::FillChildren() {
  ASSERT(children().is_empty());
  children().Allocate(edges().length());
  int children_index = 0;
  for (int i = 0; i < entries().length(); ++i) {
    HeapEntry* entry = &entries()[i];
    children_index = entry->set_children_index(children_index);
  }
  ASSERT(edges().length() == children_index);
  for (int i = 0; i < edges().length(); ++i) {
    HeapGraphEdge* edge = &edges()[i];
    edge->ReplaceToIndexWithEntry(this);
    edge->from()->add_child(edge);
  }
}
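
// Note (added): FillChildren works in two passes. The first pass walks the
// entries and gives each one a contiguous slice of the shared children()
// array via set_children_index(); the second pass rewires every edge from a
// to-index to a direct HeapEntry pointer and appends it to its source
// entry's slice. This keeps all child edges in one flat array instead of a
// per-node list, which is what the size STATIC_CHECKs above rely on.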
class FindEntryById {
 public:
  explicit FindEntryById(SnapshotObjectId id) : id_(id) { }
  int operator()(HeapEntry* const* entry) {
    if ((*entry)->id() == id_) return 0;
    return (*entry)->id() < id_ ? -1 : 1;
  }
 private:
  SnapshotObjectId id_;
};

HeapEntry* HeapSnapshot::GetEntryById(SnapshotObjectId id) {
  List<HeapEntry*>* entries_by_id = GetSortedEntriesList();
  // ... (binary search by id)
  return entries_by_id->at(index);
}

template<class T>
static int SortByIds(const T* entry1_ptr,
                     const T* entry2_ptr) {
  if ((*entry1_ptr)->id() == (*entry2_ptr)->id()) return 0;
  return (*entry1_ptr)->id() < (*entry2_ptr)->id() ? -1 : 1;
}

List<HeapEntry*>* HeapSnapshot::GetSortedEntriesList() {
  if (sorted_entries_.is_empty()) {
    sorted_entries_.Allocate(entries_.length());
    for (int i = 0; i < entries_.length(); ++i) {
      sorted_entries_[i] = &entries_[i];
    }
    sorted_entries_.Sort(SortByIds);
  }
  return &sorted_entries_;
}

void HeapSnapshot::Print(int max_depth) {
  root()->Print("", "", max_depth, 0);
}

template<typename T, class P>
static size_t GetMemoryUsedByList(const List<T, P>& list) {
  return list.length() * sizeof(T) + sizeof(list);
}

size_t HeapSnapshot::RawSnapshotSize() const {
  STATIC_CHECK(SnapshotSizeConstants<kPointerSize>::kExpectedHeapSnapshotSize ==
               sizeof(HeapSnapshot));  // NOLINT
  return
      sizeof(*this) +
      GetMemoryUsedByList(entries_) +
      GetMemoryUsedByList(edges_) +
      GetMemoryUsedByList(children_) +
      GetMemoryUsedByList(sorted_entries_);
}
HeapObjectsMap::HeapObjectsMap()
    : next_id_(kFirstAvailableObjectId),
      entries_map_(AddressesMatch) {
  // A dummy first entry guarantees that every real entries_map_ value
  // is greater than zero.
  entries_.Add(EntryInfo(0, NULL, 0));
}

void HeapObjectsMap::SnapshotGenerationFinished() {
  RemoveDeadEntries();
}

void HeapObjectsMap::MoveObject(Address from, Address to) {
  if (from == to) return;
  void* from_value = entries_map_.Remove(from, AddressHash(from));
  if (from_value == NULL) return;
  int from_entry_info_index =
      static_cast<int>(reinterpret_cast<intptr_t>(from_value));
  entries_.at(from_entry_info_index).addr = to;
  HashMap::Entry* to_entry = entries_map_.Lookup(to, AddressHash(to), true);
  if (to_entry->value != NULL) {
    int to_entry_info_index =
        static_cast<int>(reinterpret_cast<intptr_t>(to_entry->value));
    // Clear the old address so RemoveDeadEntries does not remove the
    // freshly moved entry together with the stale one.
    entries_.at(to_entry_info_index).addr = NULL;
  }
  to_entry->value = reinterpret_cast<void*>(from_entry_info_index);
}

SnapshotObjectId HeapObjectsMap::FindEntry(Address addr) {
  HashMap::Entry* entry = entries_map_.Lookup(addr, AddressHash(addr), false);
  if (entry == NULL) return 0;
  int entry_index = static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
  EntryInfo& entry_info = entries_.at(entry_index);
  ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
  return entry_info.id;
}

SnapshotObjectId HeapObjectsMap::FindOrAddEntry(Address addr,
                                                unsigned int size) {
  ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
  HashMap::Entry* entry = entries_map_.Lookup(addr, AddressHash(addr), true);
  if (entry->value != NULL) {
    int entry_index =
        static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
    EntryInfo& entry_info = entries_.at(entry_index);
    entry_info.accessed = true;
    entry_info.size = size;
    return entry_info.id;
  }
  entry->value = reinterpret_cast<void*>(entries_.length());
  SnapshotObjectId id = next_id_;
  next_id_ += kObjectIdStep;
  entries_.Add(EntryInfo(id, addr, size));
  ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
  return id;
}
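
// Note (added): HeapObjectsMap gives every heap address a stable snapshot
// object id: entries_map_ maps address -> index into entries_, and each
// EntryInfo holds (id, addr, size, accessed). MoveObject() only re-points the
// address key, so an object keeps its id across GC moves. Sketch
// (hypothetical addresses, not part of the original file):
//   SnapshotObjectId id = map.FindOrAddEntry(addr, size);
//   map.MoveObject(addr, new_addr);
//   ASSERT(map.FindEntry(new_addr) == id);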
void HeapObjectsMap::StopHeapObjectsTracking() {
  time_intervals_.Clear();
}

void HeapObjectsMap::UpdateHeapObjectsMap() {
  HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask,
                          "HeapSnapshotsCollection::UpdateHeapObjectsMap");
  HeapIterator iterator;
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next()) {
    FindOrAddEntry(obj->address(), obj->Size());
  }
  RemoveDeadEntries();
}

SnapshotObjectId HeapObjectsMap::PushHeapObjectsStats(OutputStream* stream) {
  UpdateHeapObjectsMap();
  time_intervals_.Add(TimeInterval(next_id_));
  int prefered_chunk_size = stream->GetChunkSize();
  List<v8::HeapStatsUpdate> stats_buffer;
  ASSERT(!entries_.is_empty());
  EntryInfo* entry_info = &entries_.first();
  EntryInfo* end_entry_info = &entries_.last() + 1;
  for (int time_interval_index = 0;
       time_interval_index < time_intervals_.length();
       ++time_interval_index) {
    TimeInterval& time_interval = time_intervals_[time_interval_index];
    SnapshotObjectId time_interval_id = time_interval.id;
    uint32_t entries_size = 0;
    EntryInfo* start_entry_info = entry_info;
    while (entry_info < end_entry_info && entry_info->id < time_interval_id) {
      entries_size += entry_info->size;
      ++entry_info;
    }
    uint32_t entries_count =
        static_cast<uint32_t>(entry_info - start_entry_info);
    if (time_interval.count != entries_count ||
        time_interval.size != entries_size) {
      stats_buffer.Add(v8::HeapStatsUpdate(
          time_interval_index,
          time_interval.count = entries_count,
          time_interval.size = entries_size));
      if (stats_buffer.length() >= prefered_chunk_size) {
        OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
            &stats_buffer.first(), stats_buffer.length());
        if (result == OutputStream::kAbort) return last_assigned_id();
        stats_buffer.Clear();
      }
    }
  }
  ASSERT(entry_info == end_entry_info);
  if (!stats_buffer.is_empty()) {
    OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
        &stats_buffer.first(), stats_buffer.length());
    if (result == OutputStream::kAbort) return last_assigned_id();
  }
  stream->EndOfStream();
  return last_assigned_id();
}

void HeapObjectsMap::RemoveDeadEntries() {
  ASSERT(entries_.length() > 0 &&
         entries_.at(0).id == 0 &&
         entries_.at(0).addr == NULL);
  int first_free_entry = 1;
  for (int i = 1; i < entries_.length(); ++i) {
    EntryInfo& entry_info = entries_.at(i);
    if (entry_info.accessed) {
      if (first_free_entry != i) {
        entries_.at(first_free_entry) = entry_info;
      }
      entries_.at(first_free_entry).accessed = false;
      HashMap::Entry* entry = entries_map_.Lookup(
          entry_info.addr, AddressHash(entry_info.addr), false);
      ASSERT(entry);
      entry->value = reinterpret_cast<void*>(first_free_entry);
      ++first_free_entry;
    } else {
      if (entry_info.addr) {
        entries_map_.Remove(entry_info.addr, AddressHash(entry_info.addr));
      }
    }
  }
  entries_.Rewind(first_free_entry);
  ASSERT(static_cast<uint32_t>(entries_.length()) - 1 ==
         entries_map_.occupancy());
}

SnapshotObjectId HeapObjectsMap::GenerateId(v8::RetainedObjectInfo* info) {
  SnapshotObjectId id = static_cast<SnapshotObjectId>(info->GetHash());
  const char* label = info->GetLabel();
  id ^= HashSequentialString(label,
                             static_cast<int>(strlen(label)),
                             HEAP->HashSeed());
  intptr_t element_count = info->GetElementCount();
  if (element_count != -1)
    id ^= ComputeIntegerHash(static_cast<uint32_t>(element_count),
                             v8::internal::kZeroHashSeed);
  return id << 1;
}

size_t HeapObjectsMap::GetUsedMemorySize() const {
  return
      sizeof(*this) +
      sizeof(HashMap::Entry) * entries_map_.capacity() +
      GetMemoryUsedByList(entries_) +
      GetMemoryUsedByList(time_intervals_);
}
HeapSnapshotsCollection::HeapSnapshotsCollection()
    : is_tracking_objects_(false),
      snapshots_uids_(HeapSnapshotsMatch),
      token_enumerator_(new TokenEnumerator()) {
}

static void DeleteHeapSnapshot(HeapSnapshot** snapshot_ptr) {
  delete *snapshot_ptr;
}

HeapSnapshotsCollection::~HeapSnapshotsCollection() {
  delete token_enumerator_;
  snapshots_.Iterate(DeleteHeapSnapshot);
}

HeapSnapshot* HeapSnapshotsCollection::NewSnapshot(HeapSnapshot::Type type,
                                                   const char* name,
                                                   unsigned uid) {
  is_tracking_objects_ = true;  // Start watching for heap object moves.
  return new HeapSnapshot(this, type, name, uid);
}

void HeapSnapshotsCollection::SnapshotGenerationFinished(
    HeapSnapshot* snapshot) {
  ids_.SnapshotGenerationFinished();
  if (snapshot != NULL) {
    snapshots_.Add(snapshot);
    HashMap::Entry* entry =
        snapshots_uids_.Lookup(reinterpret_cast<void*>(snapshot->uid()),
                               static_cast<uint32_t>(snapshot->uid()),
                               true);
    ASSERT(entry->value == NULL);
    entry->value = snapshot;
  }
}

HeapSnapshot* HeapSnapshotsCollection::GetSnapshot(unsigned uid) {
  HashMap::Entry* entry = snapshots_uids_.Lookup(reinterpret_cast<void*>(uid),
                                                 static_cast<uint32_t>(uid),
                                                 false);
  return entry != NULL ? reinterpret_cast<HeapSnapshot*>(entry->value) : NULL;
}

void HeapSnapshotsCollection::RemoveSnapshot(HeapSnapshot* snapshot) {
  snapshots_.RemoveElement(snapshot);
  unsigned uid = snapshot->uid();
  snapshots_uids_.Remove(reinterpret_cast<void*>(uid),
                         static_cast<uint32_t>(uid));
}

Handle<HeapObject> HeapSnapshotsCollection::FindHeapObjectById(
    SnapshotObjectId id) {
  // First perform a full GC in order to avoid dead objects.
  HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask,
                          "HeapSnapshotsCollection::FindHeapObjectById");
  AssertNoAllocation no_allocation;
  HeapObject* object = NULL;
  HeapIterator iterator(HeapIterator::kFilterUnreachable);
  // Make sure that the object with the given id is still reachable.
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next()) {
    if (ids_.FindEntry(obj->address()) == id) {
      ASSERT(object == NULL);
      object = obj;
      // Can't break here because of the kFilterUnreachable filter.
    }
  }
  return Handle<HeapObject>(object);
}

size_t HeapSnapshotsCollection::GetUsedMemorySize() const {
  STATIC_CHECK(SnapshotSizeConstants<kPointerSize>::
               kExpectedHeapSnapshotsCollectionSize ==
               sizeof(HeapSnapshotsCollection));  // NOLINT
  size_t size = sizeof(*this);
  size += names_.GetUsedMemorySize();
  size += ids_.GetUsedMemorySize();
  size += sizeof(HashMap::Entry) * snapshots_uids_.capacity();
  size += GetMemoryUsedByList(snapshots_);
  for (int i = 0; i < snapshots_.length(); ++i) {
    size += snapshots_[i]->RawSnapshotSize();
  }
  return size;
}

HeapEntriesMap::HeapEntriesMap()
    : entries_(HeapThingsMatch) {
}

int HeapEntriesMap::Map(HeapThing thing) {
  HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), false);
  if (cache_entry == NULL) return HeapEntry::kNoEntry;
  return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
}

void HeapEntriesMap::Pair(HeapThing thing, int entry) {
  HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), true);
  ASSERT(cache_entry->value == NULL);
  cache_entry->value = reinterpret_cast<void*>(static_cast<intptr_t>(entry));
}
bool HeapObjectsSet::Contains(Object* obj) {
  if (!obj->IsHeapObject()) return false;
  HeapObject* object = HeapObject::cast(obj);
  return entries_.Lookup(object, HeapEntriesMap::Hash(object), false) != NULL;
}

void HeapObjectsSet::Insert(Object* obj) {
  if (!obj->IsHeapObject()) return;
  HeapObject* object = HeapObject::cast(obj);
  entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
}

const char* HeapObjectsSet::GetTag(Object* obj) {
  HeapObject* object = HeapObject::cast(obj);
  HashMap::Entry* cache_entry =
      entries_.Lookup(object, HeapEntriesMap::Hash(object), false);
  return cache_entry != NULL
      ? reinterpret_cast<const char*>(cache_entry->value)
      : NULL;
}

void HeapObjectsSet::SetTag(Object* obj, const char* tag) {
  if (!obj->IsHeapObject()) return;
  HeapObject* object = HeapObject::cast(obj);
  HashMap::Entry* cache_entry =
      entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
  cache_entry->value = const_cast<char*>(tag);
}

HeapObject* const V8HeapExplorer::kGcRootsObject =
    reinterpret_cast<HeapObject*>(
        static_cast<intptr_t>(HeapObjectsMap::kGcRootsObjectId));
HeapObject* const V8HeapExplorer::kFirstGcSubrootObject =
    reinterpret_cast<HeapObject*>(
        static_cast<intptr_t>(HeapObjectsMap::kGcRootsFirstSubrootId));
HeapObject* const V8HeapExplorer::kLastGcSubrootObject =
    reinterpret_cast<HeapObject*>(
        static_cast<intptr_t>(HeapObjectsMap::kFirstAvailableObjectId));

V8HeapExplorer::V8HeapExplorer(
    HeapSnapshot* snapshot,
    SnapshottingProgressReportingInterface* progress)
    : heap_(Isolate::Current()->heap()),
      snapshot_(snapshot),
      collection_(snapshot_->collection()),
      progress_(progress),
      filler_(NULL) {
}
HeapEntry* V8HeapExplorer::AllocateEntry(HeapThing ptr) {
  return AddEntry(reinterpret_cast<HeapObject*>(ptr));
}

HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object) {
  if (object == kInternalRootObject) {
    snapshot_->AddRootEntry();
    return snapshot_->root();
  } else if (object == kGcRootsObject) {
    HeapEntry* entry = snapshot_->AddGcRootsEntry();
    return entry;
  } else if (object >= kFirstGcSubrootObject &&
             object < kLastGcSubrootObject) {
    HeapEntry* entry = snapshot_->AddGcSubrootEntry(GetGcSubrootOrder(object));
    return entry;
  } else if (object->IsJSFunction()) {
    JSFunction* func = JSFunction::cast(object);
    SharedFunctionInfo* shared = func->shared();
    const char* name = shared->bound() ? "native_bind" :
        collection_->names()->GetName(String::cast(shared->name()));
    return AddEntry(object, HeapEntry::kClosure, name);
  } else if (object->IsJSRegExp()) {
    JSRegExp* re = JSRegExp::cast(object);
    return AddEntry(object,
                    HeapEntry::kRegExp,
                    collection_->names()->GetName(re->Pattern()));
  } else if (object->IsJSObject()) {
    const char* name = collection_->names()->GetName(
        GetConstructorName(JSObject::cast(object)));
    if (object->IsJSGlobalObject()) {
      const char* tag = objects_tags_.GetTag(object);
      if (tag != NULL) {
        name = collection_->names()->GetFormatted("%s / %s", name, tag);
      }
    }
    return AddEntry(object, HeapEntry::kObject, name);
  } else if (object->IsString()) {
    return AddEntry(object,
                    HeapEntry::kString,
                    collection_->names()->GetName(String::cast(object)));
  } else if (object->IsCode()) {
    return AddEntry(object, HeapEntry::kCode, "");
  } else if (object->IsSharedFunctionInfo()) {
    String* name = String::cast(SharedFunctionInfo::cast(object)->name());
    return AddEntry(object,
                    HeapEntry::kCode,
                    collection_->names()->GetName(name));
  } else if (object->IsScript()) {
    Object* name = Script::cast(object)->name();
    return AddEntry(object,
                    HeapEntry::kCode,
                    name->IsString()
                        ? collection_->names()->GetName(String::cast(name))
                        : "");
  } else if (object->IsNativeContext()) {
    return AddEntry(object, HeapEntry::kHidden, "system / NativeContext");
  } else if (object->IsContext()) {
    return AddEntry(object, HeapEntry::kHidden, "system / Context");
  } else if (object->IsFixedArray() ||
             object->IsFixedDoubleArray() ||
             object->IsByteArray() ||
             object->IsExternalArray()) {
    return AddEntry(object, HeapEntry::kArray, "");
  } else if (object->IsHeapNumber()) {
    return AddEntry(object, HeapEntry::kHeapNumber, "number");
  }
  return AddEntry(object, HeapEntry::kHidden, GetSystemEntryName(object));
}

HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object,
                                    HeapEntry::Type type,
                                    const char* name) {
  int object_size = object->Size();
  SnapshotObjectId object_id =
      collection_->GetObjectId(object->address(), object_size);
  return snapshot_->AddEntry(type, name, object_id, object_size);
}
class GcSubrootsEnumerator : public ObjectVisitor {
 public:
  GcSubrootsEnumerator(
      SnapshotFillerInterface* filler, V8HeapExplorer* explorer)
      : filler_(filler),
        explorer_(explorer),
        previous_object_count_(0),
        object_count_(0) {
  }
  void VisitPointers(Object** start, Object** end) {
    object_count_ += end - start;
  }
  void Synchronize(VisitorSynchronization::SyncTag tag) {
    // Skip empty subroots.
    if (previous_object_count_ != object_count_) {
      previous_object_count_ = object_count_;
      filler_->AddEntry(V8HeapExplorer::GetNthGcSubrootObject(tag), explorer_);
    }
  }
 private:
  SnapshotFillerInterface* filler_;
  V8HeapExplorer* explorer_;
  intptr_t previous_object_count_;
  intptr_t object_count_;
};

void V8HeapExplorer::AddRootEntries(SnapshotFillerInterface* filler) {
  filler->AddEntry(kInternalRootObject, this);
  filler->AddEntry(kGcRootsObject, this);
  GcSubrootsEnumerator enumerator(filler, this);
  heap_->IterateRoots(&enumerator, VISIT_ALL);
}

const char* V8HeapExplorer::GetSystemEntryName(HeapObject* object) {
  switch (object->map()->instance_type()) {
    case MAP_TYPE: return "system / Map";
    // ...
#define MAKE_STRUCT_CASE(NAME, Name, name) \
    case NAME##_TYPE: return "system / "#Name;
  STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    default: return "system";
  }
}

int V8HeapExplorer::EstimateObjectsCount(HeapIterator* iterator) {
  int objects_count = 0;
  for (HeapObject* obj = iterator->next();
       obj != NULL;
       obj = iterator->next()) {
    objects_count++;
  }
  return objects_count;
}
class IndexedReferencesExtractor : public ObjectVisitor {
 public:
  IndexedReferencesExtractor(V8HeapExplorer* generator,
                             HeapObject* parent_obj,
                             int parent)
      : generator_(generator),
        parent_obj_(parent_obj),
        parent_(parent),
        next_index_(1) {
  }
  void VisitPointers(Object** start, Object** end) {
    for (Object** p = start; p < end; p++) {
      if (CheckVisitedAndUnmark(p)) continue;
      generator_->SetHiddenReference(parent_obj_, parent_, next_index_++, *p);
    }
  }
  static void MarkVisitedField(HeapObject* obj, int offset) {
    if (offset < 0) return;
    // ... (tag the field so the indexed pass skips it)
  }

 private:
  bool CheckVisitedAndUnmark(Object** field) {
    if ((*field)->IsFailure()) {
      intptr_t untagged =
          reinterpret_cast<intptr_t>(*field) & ~kFailureTagMask;
      *field = reinterpret_cast<Object*>(untagged | kHeapObjectTag);
      ASSERT((*field)->IsHeapObject());
      return true;
    }
    return false;
  }
  V8HeapExplorer* generator_;
  HeapObject* parent_obj_;
  int parent_;
  int next_index_;
};

void V8HeapExplorer::ExtractReferences(HeapObject* obj) {
  HeapEntry* heap_entry = GetEntry(obj);
  if (heap_entry == NULL) return;  // No interest in this object.
  int entry = heap_entry->index();

  bool extract_indexed_refs = true;
  if (obj->IsJSGlobalProxy()) {
    ExtractJSGlobalProxyReferences(JSGlobalProxy::cast(obj));
  } else if (obj->IsJSObject()) {
    ExtractJSObjectReferences(entry, JSObject::cast(obj));
  } else if (obj->IsString()) {
    ExtractStringReferences(entry, String::cast(obj));
    extract_indexed_refs = false;
  } else if (obj->IsContext()) {
    ExtractContextReferences(entry, Context::cast(obj));
  } else if (obj->IsMap()) {
    ExtractMapReferences(entry, Map::cast(obj));
  } else if (obj->IsSharedFunctionInfo()) {
    ExtractSharedFunctionInfoReferences(entry, SharedFunctionInfo::cast(obj));
  } else if (obj->IsScript()) {
    ExtractScriptReferences(entry, Script::cast(obj));
  } else if (obj->IsCodeCache()) {
    ExtractCodeCacheReferences(entry, CodeCache::cast(obj));
  } else if (obj->IsCode()) {
    ExtractCodeReferences(entry, Code::cast(obj));
  } else if (obj->IsJSGlobalPropertyCell()) {
    ExtractJSGlobalPropertyCellReferences(
        entry, JSGlobalPropertyCell::cast(obj));
    extract_indexed_refs = false;
  }
  if (extract_indexed_refs) {
    // ...
    IndexedReferencesExtractor refs_extractor(this, obj, entry);
    obj->Iterate(&refs_extractor);
  }
}

void V8HeapExplorer::ExtractJSGlobalProxyReferences(JSGlobalProxy* proxy) {
  // We reference JS global objects from the snapshot's root via the proxy,
  // because that is what the embedder uses as the global object.
  Object* object = proxy->map()->prototype();
  bool is_debug_object = false;
#ifdef ENABLE_DEBUGGER_SUPPORT
  is_debug_object = object->IsGlobalObject() &&
      Isolate::Current()->debug()->IsDebugGlobal(GlobalObject::cast(object));
#endif
  if (!is_debug_object) {
    SetUserGlobalReference(object);
  }
}
void V8HeapExplorer::ExtractJSObjectReferences(
    int entry, JSObject* js_obj) {
  HeapObject* obj = js_obj;
  ExtractClosureReferences(js_obj, entry);
  ExtractPropertyReferences(js_obj, entry);
  ExtractElementReferences(js_obj, entry);
  ExtractInternalReferences(js_obj, entry);
  SetPropertyReference(
      obj, entry, heap_->Proto_symbol(), js_obj->GetPrototype());
  if (obj->IsJSFunction()) {
    JSFunction* js_fun = JSFunction::cast(js_obj);
    Object* proto_or_map = js_fun->prototype_or_initial_map();
    if (!proto_or_map->IsTheHole()) {
      if (!proto_or_map->IsMap()) {
        SetPropertyReference(
            obj, entry,
            heap_->prototype_symbol(), proto_or_map,
            NULL,
            JSFunction::kPrototypeOrInitialMapOffset);
      } else {
        SetPropertyReference(
            obj, entry,
            heap_->prototype_symbol(), js_fun->prototype());
      }
    }
    SharedFunctionInfo* shared_info = js_fun->shared();
    bool bound = shared_info->bound();
    TagObject(js_fun->literals_or_bindings(),
              bound ? "(function bindings)" : "(function literals)");
    SetInternalReference(js_fun, entry,
                         bound ? "bindings" : "literals",
                         js_fun->literals_or_bindings(),
                         JSFunction::kLiteralsOffset);
    TagObject(shared_info, "(shared function info)");
    SetInternalReference(js_fun, entry,
                         "shared", shared_info,
                         JSFunction::kSharedFunctionInfoOffset);
    TagObject(js_fun->unchecked_context(), "(context)");
    SetInternalReference(js_fun, entry,
                         "context", js_fun->unchecked_context(),
                         JSFunction::kContextOffset);
    // ...
  } else if (obj->IsGlobalObject()) {
    GlobalObject* global_obj = GlobalObject::cast(obj);
    SetInternalReference(global_obj, entry,
                         "builtins", global_obj->builtins(),
                         GlobalObject::kBuiltinsOffset);
    SetInternalReference(global_obj, entry,
                         "native_context", global_obj->native_context(),
                         GlobalObject::kNativeContextOffset);
    SetInternalReference(global_obj, entry,
                         "global_receiver", global_obj->global_receiver(),
                         GlobalObject::kGlobalReceiverOffset);
  }
  TagObject(js_obj->properties(), "(object properties)");
  SetInternalReference(obj, entry,
                       "properties", js_obj->properties(),
                       JSObject::kPropertiesOffset);
  TagObject(js_obj->elements(), "(object elements)");
  SetInternalReference(obj, entry,
                       "elements", js_obj->elements(),
                       JSObject::kElementsOffset);
}

void V8HeapExplorer::ExtractStringReferences(int entry, String* string) {
  if (string->IsConsString()) {
    ConsString* cs = ConsString::cast(string);
    SetInternalReference(cs, entry, "first", cs->first());
    SetInternalReference(cs, entry, "second", cs->second());
  } else if (string->IsSlicedString()) {
    SlicedString* ss = SlicedString::cast(string);
    SetInternalReference(ss, entry, "parent", ss->parent());
  }
}

void V8HeapExplorer::ExtractContextReferences(int entry, Context* context) {
#define EXTRACT_CONTEXT_FIELD(index, type, name) \
  SetInternalReference(context, entry, #name, context->get(Context::index), \
      FixedArray::OffsetOfElementAt(Context::index));
  EXTRACT_CONTEXT_FIELD(CLOSURE_INDEX, JSFunction, closure);
  // ...
  if (context->IsNativeContext()) {
    TagObject(context->jsfunction_result_caches(),
              "(context func. result caches)");
    TagObject(context->normalized_map_cache(), "(context norm. map cache)");
    TagObject(context->runtime_context(), "(runtime context)");
    TagObject(context->data(), "(context data)");
    NATIVE_CONTEXT_FIELDS(EXTRACT_CONTEXT_FIELD);
#undef EXTRACT_CONTEXT_FIELD
    for (int i = Context::FIRST_WEAK_SLOT;
         i < Context::NATIVE_CONTEXT_SLOTS;
         ++i) {
      SetWeakReference(context, entry, i, context->get(i),
          FixedArray::OffsetOfElementAt(i));
    }
  }
}
void V8HeapExplorer::ExtractMapReferences(int entry, Map* map) {
  SetInternalReference(map, entry,
                       "prototype", map->prototype(), Map::kPrototypeOffset);
  SetInternalReference(map, entry,
                       "constructor", map->constructor(),
                       Map::kConstructorOffset);
  if (map->HasTransitionArray()) {
    TransitionArray* transitions = map->transitions();

    Object* back_pointer = transitions->back_pointer_storage();
    TagObject(transitions->back_pointer_storage(), "(back pointer)");
    SetInternalReference(transitions, entry,
                         "backpointer", back_pointer,
                         TransitionArray::kBackPointerStorageOffset);
    IndexedReferencesExtractor transitions_refs(this, transitions, entry);
    transitions->Iterate(&transitions_refs);

    TagObject(transitions, "(transition array)");
    SetInternalReference(map, entry,
                         "transitions", transitions,
                         Map::kTransitionsOrBackPointerOffset);
  } else {
    Object* back_pointer = map->GetBackPointer();
    TagObject(back_pointer, "(back pointer)");
    SetInternalReference(map, entry,
                         "backpointer", back_pointer,
                         Map::kTransitionsOrBackPointerOffset);
  }
  DescriptorArray* descriptors = map->instance_descriptors();
  TagObject(descriptors, "(map descriptors)");
  SetInternalReference(map, entry,
                       "descriptors", descriptors,
                       Map::kDescriptorsOffset);
  SetInternalReference(map, entry,
                       "code_cache", map->code_cache(),
                       Map::kCodeCacheOffset);
}

void V8HeapExplorer::ExtractSharedFunctionInfoReferences(
    int entry, SharedFunctionInfo* shared) {
  HeapObject* obj = shared;
  SetInternalReference(obj, entry,
                       "name", shared->name(),
                       SharedFunctionInfo::kNameOffset);
  TagObject(shared->code(), "(code)");
  SetInternalReference(obj, entry,
                       "code", shared->code(),
                       SharedFunctionInfo::kCodeOffset);
  TagObject(shared->scope_info(), "(function scope info)");
  SetInternalReference(obj, entry,
                       "scope_info", shared->scope_info(),
                       SharedFunctionInfo::kScopeInfoOffset);
  SetInternalReference(obj, entry,
                       "instance_class_name", shared->instance_class_name(),
                       SharedFunctionInfo::kInstanceClassNameOffset);
  SetInternalReference(obj, entry,
                       "script", shared->script(),
                       SharedFunctionInfo::kScriptOffset);
  TagObject(shared->construct_stub(), "(code)");
  SetInternalReference(obj, entry,
                       "construct_stub", shared->construct_stub(),
                       SharedFunctionInfo::kConstructStubOffset);
  SetInternalReference(obj, entry,
                       "function_data", shared->function_data(),
                       SharedFunctionInfo::kFunctionDataOffset);
  SetInternalReference(obj, entry,
                       "debug_info", shared->debug_info(),
                       SharedFunctionInfo::kDebugInfoOffset);
  SetInternalReference(obj, entry,
                       "inferred_name", shared->inferred_name(),
                       SharedFunctionInfo::kInferredNameOffset);
  SetInternalReference(obj, entry,
                       "this_property_assignments",
                       shared->this_property_assignments(),
                       SharedFunctionInfo::kThisPropertyAssignmentsOffset);
  SetWeakReference(obj, entry,
                   1, shared->initial_map(),
                   SharedFunctionInfo::kInitialMapOffset);
}
void V8HeapExplorer::ExtractScriptReferences(int entry, Script* script) {
  HeapObject* obj = script;
  SetInternalReference(obj, entry,
                       "source", script->source(),
                       Script::kSourceOffset);
  SetInternalReference(obj, entry,
                       "name", script->name(),
                       Script::kNameOffset);
  SetInternalReference(obj, entry,
                       "data", script->data(),
                       Script::kDataOffset);
  SetInternalReference(obj, entry,
                       "context_data", script->context_data(),
                       Script::kContextOffset);
  TagObject(script->line_ends(), "(script line ends)");
  SetInternalReference(obj, entry,
                       "line_ends", script->line_ends(),
                       Script::kLineEndsOffset);
}

void V8HeapExplorer::ExtractCodeCacheReferences(
    int entry, CodeCache* code_cache) {
  TagObject(code_cache->default_cache(), "(default code cache)");
  SetInternalReference(code_cache, entry,
                       "default_cache", code_cache->default_cache(),
                       CodeCache::kDefaultCacheOffset);
  TagObject(code_cache->normal_type_cache(), "(code type cache)");
  SetInternalReference(code_cache, entry,
                       "type_cache", code_cache->normal_type_cache(),
                       CodeCache::kNormalTypeCacheOffset);
}

void V8HeapExplorer::ExtractCodeReferences(int entry, Code* code) {
  TagObject(code->relocation_info(), "(code relocation info)");
  SetInternalReference(code, entry,
                       "relocation_info", code->relocation_info(),
                       Code::kRelocationInfoOffset);
  SetInternalReference(code, entry,
                       "handler_table", code->handler_table(),
                       Code::kHandlerTableOffset);
  TagObject(code->deoptimization_data(), "(code deopt data)");
  SetInternalReference(code, entry,
                       "deoptimization_data", code->deoptimization_data(),
                       Code::kDeoptimizationDataOffset);
  SetInternalReference(code, entry,
                       "type_feedback_info", code->type_feedback_info(),
                       Code::kTypeFeedbackInfoOffset);
  SetInternalReference(code, entry,
                       "gc_metadata", code->gc_metadata(),
                       Code::kGCMetadataOffset);
}

void V8HeapExplorer::ExtractJSGlobalPropertyCellReferences(
    int entry, JSGlobalPropertyCell* cell) {
  SetInternalReference(cell, entry, "value", cell->value());
}
void V8HeapExplorer::ExtractClosureReferences(JSObject* js_obj, int entry) {
  if (!js_obj->IsJSFunction()) return;

  JSFunction* func = JSFunction::cast(js_obj);
  if (func->shared()->bound()) {
    FixedArray* bindings = func->function_bindings();
    SetNativeBindReference(js_obj, entry, "bound_this",
                           bindings->get(JSFunction::kBoundThisIndex));
    SetNativeBindReference(js_obj, entry, "bound_function",
                           bindings->get(JSFunction::kBoundFunctionIndex));
    for (int i = JSFunction::kBoundArgumentsStartIndex;
         i < bindings->length(); i++) {
      const char* reference_name = collection_->names()->GetFormatted(
          "bound_argument_%d",
          i - JSFunction::kBoundArgumentsStartIndex);
      SetNativeBindReference(js_obj, entry, reference_name,
                             bindings->get(i));
    }
  } else {
    Context* context = func->context()->declaration_context();
    ScopeInfo* scope_info = context->closure()->shared()->scope_info();
    // Add context allocated locals.
    int context_locals = scope_info->ContextLocalCount();
    for (int i = 0; i < context_locals; ++i) {
      String* local_name = scope_info->ContextLocalName(i);
      int idx = Context::MIN_CONTEXT_SLOTS + i;
      SetClosureReference(js_obj, entry, local_name, context->get(idx));
    }

    // Add the function variable.
    if (scope_info->HasFunctionName()) {
      String* name = scope_info->FunctionName();
      VariableMode mode;
      int idx = scope_info->FunctionContextSlotIndex(name, &mode);
      if (idx >= 0) {
        SetClosureReference(js_obj, entry, name, context->get(idx));
      }
    }
  }
}

void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj, int entry) {
  if (js_obj->HasFastProperties()) {
    DescriptorArray* descs = js_obj->map()->instance_descriptors();
    int real_size = js_obj->map()->NumberOfOwnDescriptors();
    for (int i = 0; i < descs->number_of_descriptors(); i++) {
      if (descs->GetDetails(i).descriptor_index() > real_size) continue;
      switch (descs->GetType(i)) {
        case FIELD: {
          int index = descs->GetFieldIndex(i);

          String* k = descs->GetKey(i);
          if (index < js_obj->map()->inobject_properties()) {
            Object* value = js_obj->InObjectPropertyAt(index);
            if (k != heap_->hidden_symbol()) {
              SetPropertyReference(
                  js_obj, entry,
                  k, value,
                  NULL,
                  js_obj->GetInObjectPropertyOffset(index));
            } else {
              TagObject(value, "(hidden properties)");
              SetInternalReference(
                  js_obj, entry,
                  "hidden_properties", value,
                  js_obj->GetInObjectPropertyOffset(index));
            }
          } else {
            Object* value = js_obj->FastPropertyAt(index);
            if (k != heap_->hidden_symbol()) {
              SetPropertyReference(js_obj, entry, k, value);
            } else {
              TagObject(value, "(hidden properties)");
              SetInternalReference(js_obj, entry, "hidden_properties", value);
            }
          }
          break;
        }
        case CONSTANT_FUNCTION:
          SetPropertyReference(
              js_obj, entry,
              descs->GetKey(i), descs->GetConstantFunction(i));
          break;
        case CALLBACKS: {
          Object* callback_obj = descs->GetValue(i);
          if (callback_obj->IsAccessorPair()) {
            AccessorPair* accessors = AccessorPair::cast(callback_obj);
            if (Object* getter = accessors->getter()) {
              SetPropertyReference(js_obj, entry, descs->GetKey(i),
                                   getter, "get-%s");
            }
            if (Object* setter = accessors->setter()) {
              SetPropertyReference(js_obj, entry, descs->GetKey(i),
                                   setter, "set-%s");
            }
          }
          break;
        }
        // ...
        default: break;
      }
    }
  } else {
    StringDictionary* dictionary = js_obj->property_dictionary();
    int length = dictionary->Capacity();
    for (int i = 0; i < length; ++i) {
      Object* k = dictionary->KeyAt(i);
      if (dictionary->IsKey(k)) {
        Object* target = dictionary->ValueAt(i);
        Object* value = target->IsJSGlobalPropertyCell()
            ? JSGlobalPropertyCell::cast(target)->value()
            : target;
        if (k != heap_->hidden_symbol()) {
          SetPropertyReference(js_obj, entry, String::cast(k), value);
        } else {
          TagObject(value, "(hidden properties)");
          SetInternalReference(js_obj, entry, "hidden_properties", value);
        }
      }
    }
  }
}
void V8HeapExplorer::ExtractElementReferences(JSObject* js_obj, int entry) {
  if (js_obj->HasFastObjectElements()) {
    FixedArray* elements = FixedArray::cast(js_obj->elements());
    int length = js_obj->IsJSArray() ?
        Smi::cast(JSArray::cast(js_obj)->length())->value() :
        elements->length();
    for (int i = 0; i < length; ++i) {
      if (!elements->get(i)->IsTheHole()) {
        SetElementReference(js_obj, entry, i, elements->get(i));
      }
    }
  } else if (js_obj->HasDictionaryElements()) {
    SeededNumberDictionary* dictionary = js_obj->element_dictionary();
    int length = dictionary->Capacity();
    for (int i = 0; i < length; ++i) {
      Object* k = dictionary->KeyAt(i);
      if (dictionary->IsKey(k)) {
        ASSERT(k->IsNumber());
        uint32_t index = static_cast<uint32_t>(k->Number());
        SetElementReference(js_obj, entry, index, dictionary->ValueAt(i));
      }
    }
  }
}

void V8HeapExplorer::ExtractInternalReferences(JSObject* js_obj, int entry) {
  int length = js_obj->GetInternalFieldCount();
  for (int i = 0; i < length; ++i) {
    Object* o = js_obj->GetInternalField(i);
    SetInternalReference(
        js_obj, entry, i, o, js_obj->GetInternalFieldOffset(i));
  }
}

String* V8HeapExplorer::GetConstructorName(JSObject* object) {
  Heap* heap = object->GetHeap();
  if (object->IsJSFunction()) return heap->closure_symbol();
  String* constructor_name = object->constructor_name();
  if (constructor_name == heap->Object_symbol()) {
    // Look up an immediate "constructor" property; if it is a function,
    // return its name.
    LookupResult result(heap->isolate());
    object->LocalLookupRealNamedProperty(heap->constructor_symbol(), &result);
    if (result.IsFound()) {
      Object* constructor_prop = NULL;
      constructor_prop = result.GetLazyValue();
      if (constructor_prop->IsJSFunction()) {
        Object* maybe_name =
            JSFunction::cast(constructor_prop)->shared()->name();
        if (maybe_name->IsString()) {
          String* name = String::cast(maybe_name);
          if (name->length() > 0) return name;
        }
      }
    }
  }
  return object->constructor_name();
}

HeapEntry* V8HeapExplorer::GetEntry(Object* obj) {
  if (!obj->IsHeapObject()) return NULL;
  return filler_->FindOrAddEntry(obj, this);
}
class RootsReferencesExtractor : public ObjectVisitor {
 private:
  struct IndexTag {
    IndexTag(int index, VisitorSynchronization::SyncTag tag)
        : index(index), tag(tag) { }
    int index;
    VisitorSynchronization::SyncTag tag;
  };

 public:
  RootsReferencesExtractor()
      : collecting_all_references_(false),
        previous_reference_count_(0) {
  }

  void VisitPointers(Object** start, Object** end) {
    if (collecting_all_references_) {
      for (Object** p = start; p < end; p++) all_references_.Add(*p);
    } else {
      for (Object** p = start; p < end; p++) strong_references_.Add(*p);
    }
  }

  void SetCollectingAllReferences() { collecting_all_references_ = true; }

  void FillReferences(V8HeapExplorer* explorer) {
    ASSERT(strong_references_.length() <= all_references_.length());
    for (int i = 0; i < reference_tags_.length(); ++i) {
      explorer->SetGcRootsReference(reference_tags_[i].tag);
    }
    int strong_index = 0, all_index = 0, tags_index = 0;
    while (all_index < all_references_.length()) {
      if (strong_index < strong_references_.length() &&
          strong_references_[strong_index] == all_references_[all_index]) {
        explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
                                        false,
                                        all_references_[all_index++]);
        ++strong_index;
      } else {
        explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
                                        true,
                                        all_references_[all_index++]);
      }
      if (reference_tags_[tags_index].index == all_index) ++tags_index;
    }
  }

  void Synchronize(VisitorSynchronization::SyncTag tag) {
    if (collecting_all_references_ &&
        previous_reference_count_ != all_references_.length()) {
      previous_reference_count_ = all_references_.length();
      reference_tags_.Add(IndexTag(previous_reference_count_, tag));
    }
  }

 private:
  bool collecting_all_references_;
  List<Object*> strong_references_;
  List<Object*> all_references_;
  int previous_reference_count_;
  List<IndexTag> reference_tags_;
};

bool V8HeapExplorer::IterateAndExtractReferences(
    SnapshotFillerInterface* filler) {
  HeapIterator iterator(HeapIterator::kFilterUnreachable);

  filler_ = filler;
  bool interrupted = false;

  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next()) {
    if (!interrupted) {
      ExtractReferences(obj);
      if (!progress_->ProgressReport(false)) interrupted = true;
    }
  }
  // ...
  SetRootGcRootsReference();
  // ...
  filler_ = NULL;
  return progress_->ProgressReport(true);
}

bool V8HeapExplorer::IsEssentialObject(Object* object) {
  return object->IsHeapObject()
      && !object->IsOddball()
      && object != heap_->raw_unchecked_empty_byte_array()
      && object != heap_->raw_unchecked_empty_fixed_array()
      && object != heap_->raw_unchecked_empty_descriptor_array()
      && object != heap_->raw_unchecked_fixed_array_map()
      && object != heap_->raw_unchecked_global_property_cell_map()
      && object != heap_->raw_unchecked_shared_function_info_map()
      && object != heap_->raw_unchecked_free_space_map()
      && object != heap_->raw_unchecked_one_pointer_filler_map()
      && object != heap_->raw_unchecked_two_pointer_filler_map();
}
void V8HeapExplorer::SetClosureReference(HeapObject* parent_obj,
                                         int parent_entry,
                                         String* reference_name,
                                         Object* child_obj) {
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    filler_->SetNamedReference(HeapGraphEdge::kContextVariable,
                               parent_entry,
                               collection_->names()->GetName(reference_name),
                               child_entry);
  }
}

void V8HeapExplorer::SetNativeBindReference(HeapObject* parent_obj,
                                            int parent_entry,
                                            const char* reference_name,
                                            Object* child_obj) {
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    filler_->SetNamedReference(HeapGraphEdge::kShortcut,
                               parent_entry,
                               reference_name,
                               child_entry);
  }
}

void V8HeapExplorer::SetElementReference(HeapObject* parent_obj,
                                         int parent_entry,
                                         int index,
                                         Object* child_obj) {
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    filler_->SetIndexedReference(HeapGraphEdge::kElement,
                                 parent_entry,
                                 index,
                                 child_entry);
  }
}

void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
                                          int parent_entry,
                                          const char* reference_name,
                                          Object* child_obj,
                                          int field_offset) {
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry == NULL) return;
  if (IsEssentialObject(child_obj)) {
    filler_->SetNamedReference(HeapGraphEdge::kInternal,
                               parent_entry,
                               reference_name,
                               child_entry);
  }
  IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
}

void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
                                          int parent_entry,
                                          int index,
                                          Object* child_obj,
                                          int field_offset) {
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry == NULL) return;
  if (IsEssentialObject(child_obj)) {
    filler_->SetNamedReference(HeapGraphEdge::kInternal,
                               parent_entry,
                               collection_->names()->GetName(index),
                               child_entry);
  }
  IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
}

void V8HeapExplorer::SetHiddenReference(HeapObject* parent_obj,
                                        int parent_entry,
                                        int index,
                                        Object* child_obj) {
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL && IsEssentialObject(child_obj)) {
    filler_->SetIndexedReference(HeapGraphEdge::kHidden,
                                 parent_entry,
                                 index,
                                 child_entry);
  }
}

void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj,
                                      int parent_entry,
                                      int index,
                                      Object* child_obj,
                                      int field_offset) {
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    filler_->SetIndexedReference(HeapGraphEdge::kWeak,
                                 parent_entry,
                                 index,
                                 child_entry);
    IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
  }
}

void V8HeapExplorer::SetPropertyReference(HeapObject* parent_obj,
                                          int parent_entry,
                                          String* reference_name,
                                          Object* child_obj,
                                          const char* name_format_string,
                                          int field_offset) {
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    HeapGraphEdge::Type type = reference_name->length() > 0 ?
        HeapGraphEdge::kProperty : HeapGraphEdge::kInternal;
    const char* name = name_format_string != NULL ?
        collection_->names()->GetFormatted(name_format_string, /* ... */) :
        collection_->names()->GetName(reference_name);
    filler_->SetNamedReference(type, parent_entry, name, child_entry);
    IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
  }
}

void V8HeapExplorer::SetRootGcRootsReference() {
  filler_->SetIndexedAutoIndexReference(
      HeapGraphEdge::kElement,
      snapshot_->root()->index(),
      snapshot_->gc_roots());
}

void V8HeapExplorer::SetUserGlobalReference(Object* child_obj) {
  HeapEntry* child_entry = GetEntry(child_obj);
  ASSERT(child_entry != NULL);
  filler_->SetNamedAutoIndexReference(
      HeapGraphEdge::kShortcut,
      snapshot_->root()->index(),
      child_entry);
}

void V8HeapExplorer::SetGcSubrootReference(
    VisitorSynchronization::SyncTag tag, bool is_weak, Object* child_obj) {
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    const char* name = GetStrongGcSubrootName(child_obj);
    // ... (add a named or indexed edge from the subroot entry)
  }
}

const char* V8HeapExplorer::GetStrongGcSubrootName(Object* object) {
  if (strong_gc_subroot_names_.is_empty()) {
#define NAME_ENTRY(name) strong_gc_subroot_names_.SetTag(heap_->name(), #name);
#define ROOT_NAME(type, name, camel_name) NAME_ENTRY(name)
    STRONG_ROOT_LIST(ROOT_NAME)
#undef ROOT_NAME
#define STRUCT_MAP_NAME(NAME, Name, name) NAME_ENTRY(name##_map)
    STRUCT_LIST(STRUCT_MAP_NAME)
#undef STRUCT_MAP_NAME
#define SYMBOL_NAME(name, str) NAME_ENTRY(name)
    SYMBOL_LIST(SYMBOL_NAME)
#undef SYMBOL_NAME
#undef NAME_ENTRY
  }
  return strong_gc_subroot_names_.GetTag(object);
}

void V8HeapExplorer::TagObject(Object* obj, const char* tag) {
  if (IsEssentialObject(obj)) {
    HeapEntry* entry = GetEntry(obj);
    if (entry->name()[0] == '\0') {
      entry->set_name(tag);
    }
  }
}
class GlobalObjectsEnumerator : public ObjectVisitor {
 public:
  virtual void VisitPointers(Object** start, Object** end) {
    for (Object** p = start; p < end; p++) {
      if ((*p)->IsNativeContext()) {
        Context* context = Context::cast(*p);
        JSObject* proxy = context->global_proxy();
        if (proxy->IsJSGlobalProxy()) {
          Object* global = proxy->map()->prototype();
          if (global->IsJSGlobalObject()) {
            objects_.Add(Handle<JSGlobalObject>(JSGlobalObject::cast(global)));
          }
        }
      }
    }
  }
  int count() { return objects_.length(); }
  Handle<JSGlobalObject>& at(int i) { return objects_[i]; }

 private:
  List<Handle<JSGlobalObject> > objects_;
};

// Modifies the heap. Must not be run during heap traversal.
void V8HeapExplorer::TagGlobalObjects() {
  Isolate* isolate = Isolate::Current();
  GlobalObjectsEnumerator enumerator;
  isolate->global_handles()->IterateAllRoots(&enumerator);
  // ... (build "document" / "URL" lookup strings)
  const char** urls = NewArray<const char*>(enumerator.count());
  for (int i = 0, l = enumerator.count(); i < l; ++i) {
    urls[i] = NULL;
    // ...
    if (global_obj->GetProperty(*document_string)->ToObject(&obj_document) &&
        obj_document->IsJSObject()) {
      // FIXME: Workaround: a SharedWorker's current Isolate has a NULL
      // context, so GetProperty(*url_string) would crash.
      if (!Isolate::Current()->context() && obj_document->IsJSGlobalProxy())
        continue;
      JSObject* document = JSObject::cast(obj_document);
      if (document->GetProperty(*url_string)->ToObject(&obj_url) &&
          obj_url->IsString()) {
        urls[i] = collection_->names()->GetName(String::cast(obj_url));
      }
    }
  }

  AssertNoAllocation no_allocation;
  for (int i = 0, l = enumerator.count(); i < l; ++i) {
    objects_tags_.SetTag(*enumerator.at(i), urls[i]);
  }

  DeleteArray(urls);
}

class GlobalHandlesExtractor : public ObjectVisitor {
 public:
  explicit GlobalHandlesExtractor(NativeObjectsExplorer* explorer)
      : explorer_(explorer) {}
  virtual ~GlobalHandlesExtractor() {}
  virtual void VisitPointers(Object** start, Object** end) {
    UNREACHABLE();
  }
  virtual void VisitEmbedderReference(Object** p, uint16_t class_id) {
    explorer_->VisitSubtreeWrapper(p, class_id);
  }
 private:
  NativeObjectsExplorer* explorer_;
};

class BasicHeapEntriesAllocator : public HeapEntriesAllocator {
 public:
  BasicHeapEntriesAllocator(
      HeapSnapshot* snapshot,
      HeapEntry::Type entries_type)
    : snapshot_(snapshot),
      collection_(snapshot_->collection()),
      entries_type_(entries_type) {
  }
  virtual HeapEntry* AllocateEntry(HeapThing ptr);
 private:
  HeapSnapshot* snapshot_;
  HeapSnapshotsCollection* collection_;
  HeapEntry::Type entries_type_;
};

HeapEntry* BasicHeapEntriesAllocator::AllocateEntry(HeapThing ptr) {
  v8::RetainedObjectInfo* info = reinterpret_cast<v8::RetainedObjectInfo*>(ptr);
  intptr_t elements = info->GetElementCount();
  intptr_t size = info->GetSizeInBytes();
  const char* name = elements != -1
      ? collection_->names()->GetFormatted(/* ... */)
      : collection_->names()->GetCopy(info->GetLabel());
  return snapshot_->AddEntry(
      entries_type_,
      name,
      HeapObjectsMap::GenerateId(info),
      size != -1 ? static_cast<int>(size) : 0);
}

NativeObjectsExplorer::NativeObjectsExplorer(
    HeapSnapshot* snapshot, SnapshottingProgressReportingInterface* progress)
    : snapshot_(snapshot),
      collection_(snapshot_->collection()),
      progress_(progress),
      embedder_queried_(false),
      objects_by_info_(RetainedInfosMatch),
      native_groups_(StringsMatch),
      filler_(NULL) {
  synthetic_entries_allocator_ =
      new BasicHeapEntriesAllocator(snapshot, HeapEntry::kSynthetic);
  native_entries_allocator_ =
      new BasicHeapEntriesAllocator(snapshot, HeapEntry::kNative);
}

NativeObjectsExplorer::~NativeObjectsExplorer() {
  for (HashMap::Entry* p = objects_by_info_.Start();
       p != NULL;
       p = objects_by_info_.Next(p)) {
    v8::RetainedObjectInfo* info =
        reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
    info->Dispose();
    List<HeapObject*>* objects =
        reinterpret_cast<List<HeapObject*>* >(p->value);
    delete objects;
  }
  for (HashMap::Entry* p = native_groups_.Start();
       p != NULL;
       p = native_groups_.Next(p)) {
    v8::RetainedObjectInfo* info =
        reinterpret_cast<v8::RetainedObjectInfo*>(p->value);
    info->Dispose();
  }
  delete synthetic_entries_allocator_;
  delete native_entries_allocator_;
}
int NativeObjectsExplorer::EstimateObjectsCount() {
  FillRetainedObjects();
  return objects_by_info_.occupancy();
}

void NativeObjectsExplorer::FillRetainedObjects() {
  if (embedder_queried_) return;
  Isolate* isolate = Isolate::Current();
  // Record objects that are joined into ObjectGroups.
  List<ObjectGroup*>* groups = isolate->global_handles()->object_groups();
  for (int i = 0; i < groups->length(); ++i) {
    ObjectGroup* group = groups->at(i);
    // ...
    for (size_t j = 0; j < group->length_; ++j) {
      // ... (add each wrapped object to the group's list)
    }
  }
  // ...
  embedder_queried_ = true;
}

void NativeObjectsExplorer::FillImplicitReferences() {
  Isolate* isolate = Isolate::Current();
  List<ImplicitRefGroup*>* groups =
      isolate->global_handles()->implicit_ref_groups();
  for (int i = 0; i < groups->length(); ++i) {
    ImplicitRefGroup* group = groups->at(i);
    HeapObject* parent = *group->parent_;
    int parent_entry =
        filler_->FindOrAddEntry(parent, native_entries_allocator_)->index();
    ASSERT(parent_entry != HeapEntry::kNoEntry);
    Object*** children = group->children_;
    for (size_t j = 0; j < group->length_; ++j) {
      Object* child = *children[j];
      HeapEntry* child_entry =
          filler_->FindOrAddEntry(child, native_entries_allocator_);
      filler_->SetNamedReference(
          HeapGraphEdge::kInternal,
          parent_entry,
          "native",
          child_entry);
    }
  }
}

List<HeapObject*>* NativeObjectsExplorer::GetListMaybeDisposeInfo(
    v8::RetainedObjectInfo* info) {
  HashMap::Entry* entry =
      objects_by_info_.Lookup(info, InfoHash(info), true);
  if (entry->value != NULL) {
    info->Dispose();
  } else {
    entry->value = new List<HeapObject*>(4);
  }
  return reinterpret_cast<List<HeapObject*>* >(entry->value);
}

bool NativeObjectsExplorer::IterateAndExtractReferences(
    SnapshotFillerInterface* filler) {
  filler_ = filler;
  FillRetainedObjects();
  FillImplicitReferences();
  if (EstimateObjectsCount() > 0) {
    for (HashMap::Entry* p = objects_by_info_.Start();
         p != NULL;
         p = objects_by_info_.Next(p)) {
      v8::RetainedObjectInfo* info =
          reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
      SetNativeRootReference(info);
      List<HeapObject*>* objects =
          reinterpret_cast<List<HeapObject*>* >(p->value);
      for (int i = 0; i < objects->length(); ++i) {
        SetWrapperNativeReferences(objects->at(i), info);
      }
    }
    SetRootNativeRootsReference();
  }
  filler_ = NULL;
  return true;
}

class NativeGroupRetainedObjectInfo : public v8::RetainedObjectInfo {
 public:
  explicit NativeGroupRetainedObjectInfo(const char* label)
      : disposed_(false),
        hash_(reinterpret_cast<intptr_t>(label)),
        label_(label) {
  }

  virtual ~NativeGroupRetainedObjectInfo() {}
  virtual void Dispose() {
    CHECK(!disposed_);
    disposed_ = true;
    delete this;
  }
  virtual bool IsEquivalent(RetainedObjectInfo* other) {
    return hash_ == other->GetHash() && !strcmp(label_, other->GetLabel());
  }
  virtual intptr_t GetHash() { return hash_; }
  virtual const char* GetLabel() { return label_; }

 private:
  bool disposed_;
  intptr_t hash_;
  const char* label_;
};

NativeGroupRetainedObjectInfo* NativeObjectsExplorer::FindOrAddGroupInfo(
    const char* label) {
  const char* label_copy = collection_->names()->GetCopy(label);
  uint32_t hash = HashSequentialString(label_copy,
                                       static_cast<int>(strlen(label_copy)),
                                       HEAP->HashSeed());
  HashMap::Entry* entry = native_groups_.Lookup(const_cast<char*>(label_copy),
                                                hash, true);
  if (entry->value == NULL) {
    entry->value = new NativeGroupRetainedObjectInfo(label);
  }
  return static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
}

void NativeObjectsExplorer::SetNativeRootReference(
    v8::RetainedObjectInfo* info) {
  HeapEntry* child_entry =
      filler_->FindOrAddEntry(info, native_entries_allocator_);
  ASSERT(child_entry != NULL);
  NativeGroupRetainedObjectInfo* group_info =
      FindOrAddGroupInfo(info->GetGroupLabel());
  HeapEntry* group_entry =
      filler_->FindOrAddEntry(group_info, synthetic_entries_allocator_);
  filler_->SetNamedAutoIndexReference(
      HeapGraphEdge::kInternal,
      group_entry->index(),
      child_entry);
}

void NativeObjectsExplorer::SetWrapperNativeReferences(
    HeapObject* wrapper, v8::RetainedObjectInfo* info) {
  HeapEntry* wrapper_entry = filler_->FindEntry(wrapper);
  ASSERT(wrapper_entry != NULL);
  HeapEntry* info_entry =
      filler_->FindOrAddEntry(info, native_entries_allocator_);
  ASSERT(info_entry != NULL);
  filler_->SetNamedReference(HeapGraphEdge::kInternal,
                             wrapper_entry->index(),
                             "native",
                             info_entry);
  filler_->SetIndexedAutoIndexReference(HeapGraphEdge::kElement,
                                        info_entry->index(),
                                        wrapper_entry);
}

void NativeObjectsExplorer::SetRootNativeRootsReference() {
  for (HashMap::Entry* entry = native_groups_.Start();
       entry != NULL;
       entry = native_groups_.Next(entry)) {
    NativeGroupRetainedObjectInfo* group_info =
        static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
    HeapEntry* group_entry =
        filler_->FindOrAddEntry(group_info, native_entries_allocator_);
    ASSERT(group_entry != NULL);
    filler_->SetIndexedAutoIndexReference(
        HeapGraphEdge::kShortcut,
        snapshot_->root()->index(),
        group_entry);
  }
}

void NativeObjectsExplorer::VisitSubtreeWrapper(Object** p,
                                                uint16_t class_id) {
  if (in_groups_.Contains(*p)) return;
  Isolate* isolate = Isolate::Current();
  v8::RetainedObjectInfo* info =
      isolate->heap_profiler()->ExecuteWrapperClassCallback(class_id, p);
  if (info == NULL) return;
  GetListMaybeDisposeInfo(info)->Add(HeapObject::cast(*p));
}
class SnapshotFiller : public SnapshotFillerInterface {
 public:
  explicit SnapshotFiller(HeapSnapshot* snapshot, HeapEntriesMap* entries)
      : snapshot_(snapshot),
        collection_(snapshot->collection()),
        entries_(entries) { }
  HeapEntry* AddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
    HeapEntry* entry = allocator->AllocateEntry(ptr);
    entries_->Pair(ptr, entry->index());
    return entry;
  }
  HeapEntry* FindEntry(HeapThing ptr) {
    int index = entries_->Map(ptr);
    return index != HeapEntry::kNoEntry ? &snapshot_->entries()[index] : NULL;
  }
  HeapEntry* FindOrAddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
    HeapEntry* entry = FindEntry(ptr);
    return entry != NULL ? entry : AddEntry(ptr, allocator);
  }
  void SetIndexedReference(HeapGraphEdge::Type type,
                           int parent,
                           int index,
                           HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    parent_entry->SetIndexedReference(type, index, child_entry);
  }
  void SetIndexedAutoIndexReference(HeapGraphEdge::Type type,
                                    int parent,
                                    HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    int index = parent_entry->children_count() + 1;
    parent_entry->SetIndexedReference(type, index, child_entry);
  }
  void SetNamedReference(HeapGraphEdge::Type type,
                         int parent,
                         const char* reference_name,
                         HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    parent_entry->SetNamedReference(type, reference_name, child_entry);
  }
  void SetNamedAutoIndexReference(HeapGraphEdge::Type type,
                                  int parent,
                                  HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    int index = parent_entry->children_count() + 1;
    parent_entry->SetNamedReference(
        type,
        collection_->names()->GetName(index),
        child_entry);
  }

 private:
  HeapSnapshot* snapshot_;
  HeapSnapshotsCollection* collection_;
  HeapEntriesMap* entries_;
};

HeapSnapshotGenerator::HeapSnapshotGenerator(HeapSnapshot* snapshot,
                                             v8::ActivityControl* control)
    : snapshot_(snapshot),
      control_(control),
      v8_heap_explorer_(snapshot_, this),
      dom_explorer_(snapshot_, this) {
}

bool HeapSnapshotGenerator::GenerateSnapshot() {
  v8_heap_explorer_.TagGlobalObjects();

  // As a temporary solution we call GC twice.
  Isolate::Current()->heap()->CollectAllGarbage(
      Heap::kMakeHeapIterableMask,
      "HeapSnapshotGenerator::GenerateSnapshot");
  Isolate::Current()->heap()->CollectAllGarbage(
      Heap::kMakeHeapIterableMask,
      "HeapSnapshotGenerator::GenerateSnapshot");

#ifdef DEBUG
  Heap* debug_heap = Isolate::Current()->heap();
  // ...
  debug_heap->Verify();
#endif

  SetProgressTotal(1);  // 1 pass.

#ifdef DEBUG
  debug_heap->Verify();
#endif

  if (!FillReferences()) return false;

  snapshot_->FillChildren();
  // ...

  progress_counter_ = progress_total_;
  if (!ProgressReport(true)) return false;
  return true;
}

void HeapSnapshotGenerator::ProgressStep() {
  ++progress_counter_;
}

bool HeapSnapshotGenerator::ProgressReport(bool force) {
  const int kProgressReportGranularity = 10000;
  if (control_ != NULL
      && (force || progress_counter_ % kProgressReportGranularity == 0)) {
    return
        control_->ReportProgressValue(progress_counter_, progress_total_) ==
        v8::ActivityControl::kContinue;
  }
  return true;
}

void HeapSnapshotGenerator::SetProgressTotal(int iterations_count) {
  if (control_ == NULL) return;
  HeapIterator iterator(HeapIterator::kFilterUnreachable);
  progress_total_ = iterations_count * (
      v8_heap_explorer_.EstimateObjectsCount(&iterator) +
      dom_explorer_.EstimateObjectsCount());
  progress_counter_ = 0;
}

bool HeapSnapshotGenerator::FillReferences() {
  SnapshotFiller filler(snapshot_, &entries_);
  v8_heap_explorer_.AddRootEntries(&filler);
  return v8_heap_explorer_.IterateAndExtractReferences(&filler)
      && dom_explorer_.IterateAndExtractReferences(&filler);
}
template<int bytes> struct MaxDecimalDigitsIn;
template<> struct MaxDecimalDigitsIn<4> {
  static const int kSigned = 11;
  static const int kUnsigned = 10;
};
template<> struct MaxDecimalDigitsIn<8> {
  static const int kSigned = 20;
  static const int kUnsigned = 20;
};

class OutputStreamWriter {
 public:
  explicit OutputStreamWriter(v8::OutputStream* stream)
      : stream_(stream),
        chunk_size_(stream->GetChunkSize()),
        chunk_(chunk_size_),
        chunk_pos_(0),
        aborted_(false) {
    ASSERT(chunk_size_ > 0);
  }
  bool aborted() { return aborted_; }
  void AddCharacter(char c) {
    ASSERT(c != '\0');
    ASSERT(chunk_pos_ < chunk_size_);
    chunk_[chunk_pos_++] = c;
    MaybeWriteChunk();
  }
  void AddString(const char* s) {
    AddSubstring(s, StrLength(s));
  }
  void AddSubstring(const char* s, int n) {
    if (n <= 0) return;
    ASSERT(static_cast<size_t>(n) <= strlen(s));
    const char* s_end = s + n;
    while (s < s_end) {
      int s_chunk_size = Min(
          chunk_size_ - chunk_pos_, static_cast<int>(s_end - s));
      ASSERT(s_chunk_size > 0);
      memcpy(chunk_.start() + chunk_pos_, s, s_chunk_size);
      s += s_chunk_size;
      chunk_pos_ += s_chunk_size;
      MaybeWriteChunk();
    }
  }
  void AddNumber(unsigned n) { AddNumberImpl<unsigned>(n, "%u"); }
  void Finalize() {
    if (aborted_) return;
    ASSERT(chunk_pos_ < chunk_size_);
    if (chunk_pos_ != 0) {
      WriteChunk();
    }
    stream_->EndOfStream();
  }

 private:
  template<typename T>
  void AddNumberImpl(T n, const char* format) {
    // Buffer for the longest value plus trailing \0.
    static const int kMaxNumberSize =
        MaxDecimalDigitsIn<sizeof(T)>::kSigned + 1;
    if (chunk_size_ - chunk_pos_ >= kMaxNumberSize) {
      int result = OS::SNPrintF(
          chunk_.SubVector(chunk_pos_, chunk_size_), format, n);
      ASSERT(result != -1);
      chunk_pos_ += result;
      MaybeWriteChunk();
    } else {
      EmbeddedVector<char, kMaxNumberSize> buffer;
      int result = OS::SNPrintF(buffer, format, n);
      USE(result);
      ASSERT(result != -1);
      AddString(buffer.start());
    }
  }
  void MaybeWriteChunk() {
    ASSERT(chunk_pos_ <= chunk_size_);
    if (chunk_pos_ == chunk_size_) {
      WriteChunk();
    }
  }
  void WriteChunk() {
    if (aborted_) return;
    if (stream_->WriteAsciiChunk(chunk_.start(), chunk_pos_) ==
        v8::OutputStream::kAbort) aborted_ = true;
    chunk_pos_ = 0;
  }

  v8::OutputStream* stream_;
  int chunk_size_;
  ScopedVector<char> chunk_;
  int chunk_pos_;
  bool aborted_;
};
const int HeapSnapshotJSONSerializer::kEdgeFieldsCount = 3;
const int HeapSnapshotJSONSerializer::kNodeFieldsCount = 5;

void HeapSnapshotJSONSerializer::Serialize(v8::OutputStream* stream) {
  ASSERT(writer_ == NULL);
  writer_ = new OutputStreamWriter(stream);

  HeapSnapshot* original_snapshot = NULL;
  if (snapshot_->RawSnapshotSize() >=
      SnapshotSizeConstants<kPointerSize>::kMaxSerializableSnapshotRawSize) {
    // The snapshot is too big. Serialize a fake snapshot.
    original_snapshot = snapshot_;
    snapshot_ = CreateFakeSnapshot();
  }

  SerializeImpl();

  delete writer_;
  writer_ = NULL;

  if (original_snapshot != NULL) {
    delete snapshot_;
    snapshot_ = original_snapshot;
  }
}

HeapSnapshot* HeapSnapshotJSONSerializer::CreateFakeSnapshot() {
  HeapSnapshot* result = new HeapSnapshot(snapshot_->collection(),
                                          HeapSnapshot::kFull,
                                          snapshot_->title(),
                                          snapshot_->uid());
  result->AddRootEntry();
  const char* text = snapshot_->collection()->names()->GetFormatted(
      "The snapshot is too big. "
      "Maximum snapshot size is %" V8_PTR_PREFIX "u MB. "
      "Actual snapshot size is %" V8_PTR_PREFIX "u MB.",
      SnapshotSizeConstants<kPointerSize>::kMaxSerializableSnapshotRawSize / MB,
      (snapshot_->RawSnapshotSize() + MB - 1) / MB);
  HeapEntry* message = result->AddEntry(HeapEntry::kString, text, 0, 4);
  result->root()->SetIndexedReference(HeapGraphEdge::kElement, 1, message);
  result->FillChildren();
  return result;
}

void HeapSnapshotJSONSerializer::SerializeImpl() {
  // ... (open the JSON object and the "snapshot" section)
  SerializeSnapshot();
  if (writer_->aborted()) return;
  // ... ("nodes" section)
  SerializeNodes();
  if (writer_->aborted()) return;
  // ... ("edges" section)
  SerializeEdges();
  if (writer_->aborted()) return;
  // ... ("strings" section)
  SerializeStrings();
  if (writer_->aborted()) return;
  // ... (close the JSON object)
  writer_->Finalize();
}

int HeapSnapshotJSONSerializer::GetStringId(const char* s) {
  HashMap::Entry* cache_entry = strings_.Lookup(
      const_cast<char*>(s), ObjectHash(s), true);
  if (cache_entry->value == NULL) {
    cache_entry->value = reinterpret_cast<void*>(next_string_id_++);
  }
  return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
}

static int utoa(unsigned value, const Vector<char>& buffer, int buffer_pos) {
  int number_of_digits = 0;
  unsigned t = value;
  do {
    ++number_of_digits;
  } while (t /= 10);

  buffer_pos += number_of_digits;
  int result = buffer_pos;
  do {
    int last_digit = value % 10;
    buffer[--buffer_pos] = '0' + last_digit;
    value /= 10;
  } while (value);
  return result;
}
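
// Note (added): utoa() writes the decimal digits of 'value' right-to-left
// into buffer[buffer_pos .. buffer_pos + digits) and returns the position
// just past the last digit. For example, utoa(307, buffer, 4) stores
// '3','0','7' at indexes 4..6 and returns 7, so callers can append the next
// token (a comma or newline) at the returned position.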
void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge,
                                               bool first_edge) {
  // The buffer needs space for 3 unsigned ints, 3 commas, \n and \0.
  static const int kBufferSize =
      MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned * 3 + 3 + 2;  // NOLINT
  EmbeddedVector<char, kBufferSize> buffer;
  int edge_name_or_index = edge->type() == HeapGraphEdge::kElement
      || edge->type() == HeapGraphEdge::kHidden
      || edge->type() == HeapGraphEdge::kWeak
      ? edge->index() : GetStringId(edge->name());
  int buffer_pos = 0;
  if (!first_edge) {
    buffer[buffer_pos++] = ',';
  }
  buffer_pos = utoa(edge->type(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(edge_name_or_index, buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(entry_index(edge->to()), buffer, buffer_pos);
  buffer[buffer_pos++] = '\n';
  buffer[buffer_pos++] = '\0';
  writer_->AddString(buffer.start());
}

void HeapSnapshotJSONSerializer::SerializeEdges() {
  List<HeapGraphEdge*>& edges = snapshot_->children();
  for (int i = 0; i < edges.length(); ++i) {
    ASSERT(i == 0 ||
           edges[i - 1]->from()->index() <= edges[i]->from()->index());
    SerializeEdge(edges[i], i == 0);
    if (writer_->aborted()) return;
  }
}
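
// Note (added): each edge is serialized as a bare "type,name_or_index,to_node"
// triplet (kEdgeFieldsCount == 3): named edge types store a string-table id in
// the middle field, element/hidden/weak edges store their numeric index, and
// the last field is entry_index(edge->to()). With illustrative numbers only,
// one edge record looks like:
//   2,7,12
// and every edge after the first gets a leading ','.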
void HeapSnapshotJSONSerializer::SerializeNode(HeapEntry* entry) {
  // The buffer needs space for 5 unsigned ints, 5 commas, \n and \0.
  static const int kBufferSize =
      5 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned  // NOLINT
      + 5 + 1 + 1;
  EmbeddedVector<char, kBufferSize> buffer;
  int buffer_pos = 0;
  if (entry_index(entry) != 0) {
    buffer[buffer_pos++] = ',';
  }
  buffer_pos = utoa(entry->type(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(GetStringId(entry->name()), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(entry->id(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(entry->self_size(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(entry->children_count(), buffer, buffer_pos);
  buffer[buffer_pos++] = '\n';
  buffer[buffer_pos++] = '\0';
  writer_->AddString(buffer.start());
}

void HeapSnapshotJSONSerializer::SerializeNodes() {
  List<HeapEntry>& entries = snapshot_->entries();
  for (int i = 0; i < entries.length(); ++i) {
    SerializeNode(&entries[i]);
    if (writer_->aborted()) return;
  }
}
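
// Note (added): each node is serialized as the 5 fields declared in the meta
// description (kNodeFieldsCount == 5): type, name string id, snapshot object
// id, self size and edge count, comma-separated and newline-terminated,
// e.g. (illustrative numbers only): 3,45,1027,64,2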

void HeapSnapshotJSONSerializer::SerializeSnapshot() {
  // Writes the snapshot header plus a "meta" descriptor that spells out the
  // record layouts: the node fields end with "edge_count", the edge fields
  // include "name_or_index", and the edge name column is described as
  // "string_or_number". The descriptor JSON is pasted together at compile
  // time from these string-literal macros:
#define JSON_A(s) "[" s "]"
#define JSON_O(s) "{" s "}"
#define JSON_S(s) "\"" s "\""
  // (descriptor body omitted here)
#undef JSON_S
#undef JSON_O
#undef JSON_A
}

static void WriteUChar(OutputStreamWriter* w, unibrow::uchar u) {
  static const char hex_chars[] = "0123456789ABCDEF";
  w->AddString("\\u");
  w->AddCharacter(hex_chars[(u >> 12) & 0xf]);
  w->AddCharacter(hex_chars[(u >> 8) & 0xf]);
  w->AddCharacter(hex_chars[(u >> 4) & 0xf]);
  w->AddCharacter(hex_chars[u & 0xf]);
}

void HeapSnapshotJSONSerializer::SerializeString(const unsigned char* s) {
  writer_->AddCharacter('"');
  for ( ; *s != '\0'; ++s) {
    // (dedicated escapes such as \n, \t, \" and \\ elided)
    if (*s > 31 && *s < 128) {
      writer_->AddCharacter(*s);  // printable ASCII passes through as-is
    } else if (*s <= 31) {
      WriteUChar(writer_, *s);  // control character: emit a \uXXXX literal
    } else {
      // Multi-byte UTF-8 sequence: decode it and emit a \uXXXX literal.
      unsigned length = 1, cursor = 0;
      for ( ; length <= 4 && *(s + length) != '\0'; ++length) { }
      unibrow::uchar c = unibrow::Utf8::CalculateValue(s, length, &cursor);
      if (c != unibrow::Utf8::kBadChar) {
        WriteUChar(writer_, c);
        s += cursor - 1;
      } else {
        writer_->AddCharacter('?');
      }
    }
  }
  writer_->AddCharacter('"');
}
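
// Together, WriteUChar and SerializeString apply the usual JSON string rules:
// printable ASCII is copied through, everything else is written as a \uXXXX
// escape. A standalone sketch of that policy for single-byte input (not part
// of this file; UTF-8 decoding is deliberately omitted).
#include <cstdio>
#include <string>

static void AppendJsonEscaped(std::string* out, unsigned char c) {
  static const char kHex[] = "0123456789ABCDEF";
  if (c > 31 && c < 128 && c != '"' && c != '\\') {
    out->push_back(static_cast<char>(c));  // printable ASCII: copy as-is
  } else {
    // Everything else becomes \u00XY, built from a hex digit table.
    out->append("\\u00");
    out->push_back(kHex[(c >> 4) & 0xf]);
    out->push_back(kHex[c & 0xf]);
  }
}

int main() {
  std::string out = "\"";
  for (unsigned char c : std::string("tab\there")) AppendJsonEscaped(&out, c);
  out.push_back('"');
  std::printf("%s\n", out.c_str());  // prints: "tab\u0009here"
  return 0;
}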

void HeapSnapshotJSONSerializer::SerializeStrings() {
  List<HashMap::Entry*> sorted_strings;
  SortHashMap(&strings_, &sorted_strings);
  writer_->AddString("\"<dummy>\"");  // placeholder so string ids start at 1
  for (int i = 0; i < sorted_strings.length(); ++i) {
    writer_->AddCharacter(',');
    SerializeString(
        reinterpret_cast<const unsigned char*>(sorted_strings[i]->key));
    if (writer_->aborted()) return;
  }
}

template<typename T>
inline static int SortUsingEntryValue(const T* x, const T* y) {
  uintptr_t x_uint = reinterpret_cast<uintptr_t>((*x)->value);
  uintptr_t y_uint = reinterpret_cast<uintptr_t>((*y)->value);
  if (x_uint > y_uint) {
    return 1;
  } else if (x_uint == y_uint) {
    return 0;
  } else {
    return -1;
  }
}

void HeapSnapshotJSONSerializer::SortHashMap(
    HashMap* map, List<HashMap::Entry*>* sorted_entries) {
  for (HashMap::Entry* p = map->Start(); p != NULL; p = map->Next(p))
    sorted_entries->Add(p);
  sorted_entries->Sort(SortUsingEntryValue);
}
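
// The strings table is keyed by pointer, so before emitting it the serializer
// copies the entries into a list and sorts them by the id stored in each
// entry's value; that way string N in the output array really is the string
// with id N. A standalone sketch of the same step (not part of this file),
// using std::unordered_map and std::sort.
#include <algorithm>
#include <cstdio>
#include <string>
#include <unordered_map>
#include <utility>
#include <vector>

int main() {
  // Interned strings: name -> id, with ids handed out in first-seen order.
  std::unordered_map<std::string, int> strings{
      {"Window", 2}, {"foo", 1}, {"bar", 3}};

  // The hash map's own iteration order is arbitrary, so sort a copy by id.
  std::vector<std::pair<std::string, int> > sorted(strings.begin(),
                                                   strings.end());
  std::sort(sorted.begin(), sorted.end(),
            [](const std::pair<std::string, int>& a,
               const std::pair<std::string, int>& b) {
              return a.second < b.second;
            });

  for (size_t i = 0; i < sorted.size(); ++i)
    std::printf("%d: %s\n", sorted[i].second, sorted[i].first.c_str());
  // Prints ids 1, 2, 3 in order, regardless of hash-map layout.
  return 0;
}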