  : token_locations_(4),
Isolate* isolate = Isolate::Current();
for (int i = 0; i < token_locations_.length(); ++i) {
  if (!token_removed_[i]) {
Isolate* isolate = Isolate::Current();
for (int i = 0; i < token_locations_.length(); ++i) {
  if (*token_locations_[i] == token && !token_removed_[i]) return i;
                 TokenRemovedCallback);
token_locations_.Add(handle.location());
token_removed_.Add(false);
return token_locations_.length() - 1;
void TokenEnumerator::TokenRemoved(Object** token_location) {
  for (int i = 0; i < token_locations_.length(); ++i) {
    if (token_locations_[i] == token_location && !token_removed_[i]) {
      token_removed_[i] = true;
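// StringsStorage interns dynamically created names (formatted strings and
// copied function names) in a HashMap so that each distinct string is stored
// once and can be referred to by a stable const char* pointer.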
  : names_(StringsMatch) {
for (HashMap::Entry* p = names_.Start();
     p != NULL;
     p = names_.Next(p)) {
  DeleteArray(reinterpret_cast<const char*>(p->value));
int len = static_cast<int>(strlen(src));
return AddOrDisposeString(dst.start(), hash);
va_start(args, format);
const char* StringsStorage::AddOrDisposeString(char* str, uint32_t hash) {
  HashMap::Entry* cache_entry = names_.Lookup(str, hash, true);
  if (cache_entry->value == NULL) {
    cache_entry->value = str;
  return reinterpret_cast<const char*>(cache_entry->value);
return AddOrDisposeString(str.start(), hash);
if (name->IsString()) {
  int length = Min(kMaxNameSize, name->length());
  return AddOrDisposeString(data.Detach(), hash);
size_t size = sizeof(*this);
size += sizeof(HashMap::Entry) * names_.capacity();
for (HashMap::Entry* p = names_.Start(); p != NULL; p = names_.Next(p)) {
  size += strlen(reinterpret_cast<const char*>(p->value)) + 1;
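// CodeEntry describes one unit of generated code (a function, builtin or
// stub) by tag, name prefix, name, resource name and line number.
// GetCallUid() hashes these fields and IsSameAs() compares them, so that
// equivalent entries can share a single ProfileNode in the call tree.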
name_prefix_ = source.name_prefix_;
name_ = source.name_;
resource_name_ = source.resource_name_;
line_number_ = source.line_number_;
if (shared_id_ != 0) {
                      v8::internal::kZeroHashSeed);
    static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name_prefix_)),
    v8::internal::kZeroHashSeed);
    static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name_)),
    v8::internal::kZeroHashSeed);
    static_cast<uint32_t>(reinterpret_cast<uintptr_t>(resource_name_)),
    v8::internal::kZeroHashSeed);
    || (tag_ == entry->tag_
        && shared_id_ == entry->shared_id_
        || (name_prefix_ == entry->name_prefix_
            && name_ == entry->name_
            && resource_name_ == entry->resource_name_
            && line_number_ == entry->line_number_)));
HashMap::Entry* map_entry =
    children_.Lookup(entry, CodeEntryHash(entry), false);
return map_entry != NULL ?
HashMap::Entry* map_entry =
    children_.Lookup(entry, CodeEntryHash(entry), true);
if (map_entry->value == NULL) {
  map_entry->value = new_node;
  children_list_.Add(new_node);
return reinterpret_cast<ProfileNode*>(map_entry->value);
          total_ticks_, self_ticks_,
          entry_->name_prefix(),
          entry_->security_token_id());
if (entry_->resource_name()[0] != '\0')
  OS::Print(" %s:%d", entry_->resource_name(), entry_->line_number());
for (HashMap::Entry* p = children_.Start();
     p != NULL;
     p = children_.Next(p)) {
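// ProfileTree stores the sampled call tree. AddPath()/AddPathFromEnd() walk
// a vector of CodeEntry* and bump self ticks on the final node, while
// TraverseDepthFirst() drives the generic Callback objects used for
// total-tick calculation, security-token filtering and printing.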
  : root_entry_(Logger::FUNCTION_TAG,
TraverseDepthFirst(&cb);
     entry != path.start() - 1;
  if (*entry != NULL) {
node->IncrementSelfTicks();
  if (*entry != NULL) {
node->IncrementSelfTicks();
  : src(src), dst(dst) { }
      security_token_id_(security_token_id) {
if (IsTokenAcceptable(child->entry()->security_token_id(),
                      parent->entry()->security_token_id())) {
  ProfileNode* clone = stack_.last().dst->FindOrAddChild(child->entry());
  clone->IncreaseSelfTicks(child->self_ticks());
  stack_.last().dst->IncreaseSelfTicks(child->self_ticks());
if (stack_.last().src == child) {
bool IsTokenAcceptable(int token, int parent_token) {
      || token == security_token_id_) return true;
      || parent_token == security_token_id_;
List<NodesPair> stack_;
int security_token_id_;
ms_to_ticks_scale_ = src->ms_to_ticks_scale_;
src->TraverseDepthFirst(&cb);
ms_to_ticks_scale_ = ticks_per_ms > 0 ? 1.0 / ticks_per_ms : 1.0;
  : node(node), child_idx_(0) { }
return node->children()->at(child_idx_);
return child_idx_ < node->children()->length();
INLINE(void next_child()) { ++child_idx_; }
template <typename Callback>
void ProfileTree::TraverseDepthFirst(Callback* callback) {
  while (stack.length() > 0) {
    if (current.has_current_child()) {
      callback->BeforeTraversingChild(current.node, current.current_child());
      callback->AfterAllChildrenTraversed(current.node);
      if (stack.length() > 1) {
        Position& parent = stack[stack.length() - 2];
        callback->AfterChildTraversed(parent.node, current.node);
node->IncreaseTotalTicks(node->self_ticks());
parent->IncreaseTotalTicks(child->total_ticks());
TraverseDepthFirst(&cb);
          root_->total_ticks(), root_->self_ticks(),
clone->top_down_.FilteredClone(&top_down_, security_token_id);
clone->bottom_up_.FilteredClone(&bottom_up_, security_token_id);
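// CodeMap maps code object start addresses to CodeEntryInfo records using a
// splay tree; MoveCode() and DeleteAllCoveredCode() keep the address ranges
// consistent when the GC moves or overwrites generated code.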
const CodeMap::CodeTreeConfig::Key CodeMap::CodeTreeConfig::kNoKey = NULL;
DeleteAllCoveredCode(addr, addr + size);
CodeTree::Locator locator;
tree_.Insert(addr, &locator);
locator.set_value(CodeEntryInfo(entry, size));
while (addr >= start) {
  CodeTree::Locator locator;
  Address start2 = locator.key(), end2 = start2 + locator.value().size;
  if (start2 < end && start < end2) to_delete.Add(start2);
for (int i = 0; i < to_delete.length(); ++i) tree_.Remove(to_delete[i]);
CodeTree::Locator locator;
const CodeEntryInfo& entry = locator.value();
if (addr < (locator.key() + entry.size))
CodeTree::Locator locator;
if (tree_.Find(addr, &locator)) {
  const CodeEntryInfo& entry = locator.value();
  ASSERT(entry.entry == kSharedFunctionCodeEntry);
tree_.Insert(addr, &locator);
int id = next_shared_id_++;
locator.set_value(CodeEntryInfo(kSharedFunctionCodeEntry, id));
if (from == to) return;
CodeTree::Locator locator;
if (!tree_.Find(from, &locator)) return;
CodeEntryInfo entry = locator.value();
AddCode(to, entry.entry, entry.size);
void CodeMap::CodeTreePrinter::Call(
    const Address& key, const CodeMap::CodeEntryInfo& value) {
  OS::Print("%p %5d %s\n", key, value.size, value.entry->name());
CodeTreePrinter printer;
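// CpuProfilesCollection owns all CpuProfile objects and their CodeEntries.
// current_profiles_ is guarded by a semaphore because sampled paths are
// appended from the profiler processor thread while profiles are started
// and stopped on the VM thread.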
  : profiles_uids_(UidsMatch),
    current_profiles_semaphore_(OS::CreateSemaphore(1)) {
static void DeleteCodeEntry(CodeEntry** entry_ptr) {
static void DeleteCpuProfile(CpuProfile** profile_ptr) {
static void DeleteProfilesList(List<CpuProfile*>** list_ptr) {
  if (*list_ptr != NULL) {
    (*list_ptr)->Iterate(DeleteCpuProfile);
delete current_profiles_semaphore_;
current_profiles_.Iterate(DeleteCpuProfile);
detached_profiles_.Iterate(DeleteCpuProfile);
profiles_by_token_.Iterate(DeleteProfilesList);
code_entries_.Iterate(DeleteCodeEntry);
current_profiles_semaphore_->Wait();
current_profiles_semaphore_->Signal();
for (int i = 0; i < current_profiles_.length(); ++i) {
  if (strcmp(current_profiles_[i]->title(), title) == 0) {
    current_profiles_semaphore_->Signal();
current_profiles_.Add(new CpuProfile(title, uid));
current_profiles_semaphore_->Signal();
    double actual_sampling_rate) {
current_profiles_semaphore_->Wait();
for (int i = current_profiles_.length() - 1; i >= 0; --i) {
  if (title_len == 0 || strcmp(current_profiles_[i]->title(), title) == 0) {
    profile = current_profiles_.Remove(i);
current_profiles_semaphore_->Signal();
if (profile != NULL) {
  unabridged_list->Add(profile);
  HashMap::Entry* entry =
      profiles_uids_.Lookup(reinterpret_cast<void*>(profile->uid()),
                            static_cast<uint32_t>(profile->uid()),
  entry->value = reinterpret_cast<void*>(unabridged_list->length() - 1);
  return GetProfile(security_token_id, profile->uid());
int index = GetProfileIndex(uid);
if (index < 0) return NULL;
return unabridged_list->at(index);
if (list->at(index) == NULL) {
  unabridged_list->at(index)->FilteredClone(security_token_id);
return list->at(index);
int CpuProfilesCollection::GetProfileIndex(unsigned uid) {
  HashMap::Entry* entry = profiles_uids_.Lookup(reinterpret_cast<void*>(uid),
                                                static_cast<uint32_t>(uid),
  return entry != NULL ?
      static_cast<int>(reinterpret_cast<intptr_t>(entry->value)) : -1;
if (current_profiles_.length() != 1) return false;
    || strcmp(current_profiles_[0]->title(), title) == 0;
unsigned uid = profile->uid();
int index = GetProfileIndex(uid);
detached_profiles_.RemoveElement(profile);
profiles_uids_.Remove(reinterpret_cast<void*>(uid),
                      static_cast<uint32_t>(uid));
for (HashMap::Entry* p = profiles_uids_.Start();
     p != NULL;
     p = profiles_uids_.Next(p)) {
  intptr_t p_index = reinterpret_cast<intptr_t>(p->value);
  if (p_index > index) {
    p->value = reinterpret_cast<void*>(p_index - 1);
for (int i = 0; i < profiles_by_token_.length(); ++i) {
  if (list != NULL && index < list->length()) {
    if (cloned_profile != NULL && cloned_profile != profile) {
      detached_profiles_.Add(cloned_profile);
int CpuProfilesCollection::TokenToIndex(int security_token_id) {
  return security_token_id + 1;
List<CpuProfile*>* CpuProfilesCollection::GetProfilesList(
    int security_token_id) {
  const int index = TokenToIndex(security_token_id);
  const int lists_to_add = index - profiles_by_token_.length() + 1;
  if (lists_to_add > 0) profiles_by_token_.AddBlock(NULL, lists_to_add);
  List<CpuProfile*>* unabridged_list =
  const int current_count = unabridged_list->length();
  if (profiles_by_token_[index] == NULL) {
    profiles_by_token_[index] = new List<CpuProfile*>(current_count);
  List<CpuProfile*>* list = profiles_by_token_[index];
  const int profiles_to_add = current_count - list->length();
  if (profiles_to_add > 0) list->AddBlock(NULL, profiles_to_add);
  return unabridged_list;
const int current_count = unabridged_list->length();
for (int i = 0; i < current_count; ++i) {
  if (list->at(i) == NULL) {
    (*list)[i] = unabridged_list->at(i)->FilteredClone(security_token_id);
    GetFunctionName(name),
code_entries_.Add(entry);
    GetFunctionName(name),
code_entries_.Add(entry);
    const char* name_prefix,
code_entries_.Add(entry);
code_entries_.Add(entry);
current_profiles_semaphore_->Wait();
for (int i = 0; i < current_profiles_.length(); ++i) {
  current_profiles_[i]->AddPath(path);
current_profiles_semaphore_->Signal();
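// SampleRateCalculator estimates the effective ticks-per-millisecond rate of
// the profiler by periodically comparing the tick counter against wall-clock
// time and folding each measurement into a running average.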
if (--wall_time_query_countdown_ == 0)
if (measurements_count_++ != 0) {
  const double measured_ticks_per_ms =
      (current_time - last_wall_time_);
      (measured_ticks_per_ms - ticks_per_ms_) / measurements_count_;
  result_ = static_cast<AtomicWord>(ticks_per_ms_ * kResultScale);
last_wall_time_ = current_time;
wall_time_query_countdown_ =
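// ProfileGenerator turns raw TickSample stack traces into ProfileTree paths:
// each sampled address is resolved to a CodeEntry through the CodeMap, and
// samples with no symbolized frames are attributed to a synthetic entry for
// the current VM state (for example the "(garbage collector)" entry).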
874 "(anonymous function)";
878 "(garbage collector)";
882 : profiles_(profiles),
884 profiles->NewCodeEntry(
Logger::FUNCTION_TAG, kProgramEntryName)),
886 profiles->NewCodeEntry(
Logger::BUILTIN_TAG,
887 kGarbageCollectorEntryName)) {
897 memset(entry, 0, entries.
length() *
sizeof(*entry));
907 }
else if (sample.
tos !=
NULL) {
911 if (*entry !=
NULL && !(*entry)->is_js_function()) {
919 stack_pos != stack_end;
921 *entry++ = code_map_.
FindEntry(*stack_pos);
925 if (FLAG_prof_browser_mode) {
926 bool no_symbolized_entries =
true;
929 no_symbolized_entries =
false;
934 if (no_symbolized_entries) {
935 *entry++ = EntryForVMState(sample.
state);
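// Heap snapshot data model: a HeapGraphEdge is either named (context
// variable, property, internal, shortcut) or indexed (element, hidden,
// weak), and a HeapEntry records the type, name, id and self size of one
// node; edges are stored in the snapshot and later rewired from to-indices
// to entry pointers.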
HeapGraphEdge::HeapGraphEdge(Type type, const char* name, int from, int to)
ASSERT(type == kContextVariable
       || type == kShortcut);
HeapGraphEdge::HeapGraphEdge(Type type, int index, int from, int to)
ASSERT(type == kElement || type == kHidden || type == kWeak);
void HeapGraphEdge::ReplaceToIndexWithEntry(HeapSnapshot* snapshot) {
  to_entry_ = &snapshot->entries()[to_index_];
const int HeapEntry::kNoEntry = -1;
HeapEntry::HeapEntry(HeapSnapshot* snapshot,
      self_size_(self_size),
HeapGraphEdge edge(type, name, this->index(), entry->index());
snapshot_->edges().Add(edge);
HeapGraphEdge edge(type, index, this->index(), entry->index());
snapshot_->edges().Add(edge);
Handle<HeapObject> HeapEntry::GetHeapObject() {
  return snapshot_->collection()->FindHeapObjectById(id());
    const char* prefix, const char* edge_name, int max_depth, int indent) {
  self_size(), id(), indent, ' ', prefix, edge_name);
if (type() != kString) {
  const char* c = name_;
  while (*c && (c - name_) <= 40) {
if (--max_depth == 0) return;
Vector<HeapGraphEdge*> ch = children();
for (int i = 0; i < ch.length(); ++i) {
  HeapGraphEdge& edge = *ch[i];
  const char* edge_prefix = "";
  EmbeddedVector<char, 64> index;
  const char* edge_name = index.start();
  switch (edge.type()) {
      edge_name = edge.name();
      edge_name = edge.name();
      edge_name = edge.name();
      edge_name = edge.name();
      OS::SNPrintF(index, "!!! unknown edge type: %d ", edge.type());
  edge.to()->Print(edge_prefix, edge_name, max_depth, indent + 2);
const char* HeapEntry::TypeAsString() {
  case kHidden: return "/hidden/";
  case kObject: return "/object/";
  case kClosure: return "/closure/";
  case kString: return "/string/";
  case kCode: return "/code/";
  case kArray: return "/array/";
  case kRegExp: return "/regexp/";
  case kHeapNumber: return "/number/";
  case kNative: return "/native/";
  case kSynthetic: return "/synthetic/";
  default: return "???";
template <size_t ptr_size> struct SnapshotSizeConstants;

template <> struct SnapshotSizeConstants<4> {
  static const int kExpectedHeapGraphEdgeSize = 12;
  static const int kExpectedHeapEntrySize = 24;
  static const int kExpectedHeapSnapshotsCollectionSize = 96;
  static const int kExpectedHeapSnapshotSize = 136;
  static const size_t kMaxSerializableSnapshotRawSize = 256 * MB;

template <> struct SnapshotSizeConstants<8> {
  static const int kExpectedHeapGraphEdgeSize = 24;
  static const int kExpectedHeapEntrySize = 32;
  static const int kExpectedHeapSnapshotsCollectionSize = 144;
  static const int kExpectedHeapSnapshotSize = 168;
  static const uint64_t kMaxSerializableSnapshotRawSize =
      static_cast<uint64_t>(6000) * MB;

  : collection_(collection),
    root_index_(HeapEntry::kNoEntry),
    gc_roots_index_(HeapEntry::kNoEntry),
    natives_root_index_(HeapEntry::kNoEntry),
    max_snapshot_js_object_id_(0) {
             SnapshotSizeConstants<kPointerSize>::kExpectedHeapGraphEdgeSize);
sizeof(HeapEntry) ==
    SnapshotSizeConstants<kPointerSize>::kExpectedHeapEntrySize);
gc_subroot_indexes_[i] = HeapEntry::kNoEntry;
ASSERT(root_index_ == HeapEntry::kNoEntry);
ASSERT(entries_.is_empty());
HeapEntry* entry = AddEntry(HeapEntry::kObject,
root_index_ = entry->index();
ASSERT(root_index_ == 0);
ASSERT(gc_roots_index_ == HeapEntry::kNoEntry);
HeapEntry* entry = AddEntry(HeapEntry::kObject,
gc_roots_index_ = entry->index();
ASSERT(gc_subroot_indexes_[tag] == HeapEntry::kNoEntry);
gc_subroot_indexes_[tag] = entry->index();
HeapEntry entry(this, type, name, id, size);
entries_.Add(entry);
return &entries_.last();
int children_index = 0;
for (int i = 0; i < entries().length(); ++i) {
  HeapEntry* entry = &entries()[i];
  children_index = entry->set_children_index(children_index);
for (int i = 0; i < edges().length(); ++i) {
  edge->ReplaceToIndexWithEntry(this);
  edge->from()->add_child(edge);
if ((*entry)->id() == id_) return 0;
return (*entry)->id() < id_ ? -1 : 1;
return entries_by_id->at(index);
static int SortByIds(const T* entry1_ptr,
                     const T* entry2_ptr) {
  if ((*entry1_ptr)->id() == (*entry2_ptr)->id()) return 0;
  return (*entry1_ptr)->id() < (*entry2_ptr)->id() ? -1 : 1;
if (sorted_entries_.is_empty()) {
  sorted_entries_.Allocate(entries_.length());
  for (int i = 0; i < entries_.length(); ++i) {
    sorted_entries_[i] = &entries_[i];
  sorted_entries_.Sort(SortByIds);
return &sorted_entries_;
root()->Print("", "", max_depth, 0);
template<typename T, class P>
static size_t GetMemoryUsedByList(const List<T, P>& list) {
  return list.length() * sizeof(T) + sizeof(list);
STATIC_CHECK(SnapshotSizeConstants<kPointerSize>::kExpectedHeapSnapshotSize ==
       GetMemoryUsedByList(entries_) +
       GetMemoryUsedByList(edges_) +
       GetMemoryUsedByList(children_) +
       GetMemoryUsedByList(sorted_entries_);
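// HeapObjectsMap assigns stable SnapshotObjectIds to heap object addresses.
// MoveObject() follows objects when the GC relocates them, FindOrAddEntry()
// hands out new ids, and RemoveDeadEntries() compacts the table after
// marking which entries were touched by the latest heap walk.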
  : next_id_(kFirstAvailableObjectId),
    entries_map_(AddressesMatch) {
entries_.Add(EntryInfo(0, NULL, 0));
RemoveDeadEntries();
if (from == to) return;
void* from_value = entries_map_.Remove(from, AddressHash(from));
if (from_value == NULL) return;
int from_entry_info_index =
    static_cast<int>(reinterpret_cast<intptr_t>(from_value));
entries_.at(from_entry_info_index).addr = to;
HashMap::Entry* to_entry = entries_map_.Lookup(to, AddressHash(to), true);
if (to_entry->value != NULL) {
  int to_entry_info_index =
      static_cast<int>(reinterpret_cast<intptr_t>(to_entry->value));
  entries_.at(to_entry_info_index).addr = NULL;
to_entry->value = reinterpret_cast<void*>(from_entry_info_index);
HashMap::Entry* entry = entries_map_.Lookup(addr, AddressHash(addr), false);
if (entry == NULL) return 0;
int entry_index = static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
EntryInfo& entry_info = entries_.at(entry_index);
ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
return entry_info.id;
    unsigned int size) {
ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
HashMap::Entry* entry = entries_map_.Lookup(addr, AddressHash(addr), true);
if (entry->value != NULL) {
  int entry_index =
      static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
  EntryInfo& entry_info = entries_.at(entry_index);
  entry_info.accessed = true;
  entry_info.size = size;
  return entry_info.id;
entry->value = reinterpret_cast<void*>(entries_.length());
entries_.Add(EntryInfo(id, addr, size));
ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
time_intervals_.Clear();
void HeapObjectsMap::UpdateHeapObjectsMap() {
             "HeapSnapshotsCollection::UpdateHeapObjectsMap");
HeapIterator iterator;
     obj = iterator.next()) {
RemoveDeadEntries();
UpdateHeapObjectsMap();
time_intervals_.Add(TimeInterval(next_id_));
ASSERT(!entries_.is_empty());
EntryInfo* entry_info = &entries_.first();
EntryInfo* end_entry_info = &entries_.last() + 1;
for (int time_interval_index = 0;
     time_interval_index < time_intervals_.length();
     ++time_interval_index) {
  TimeInterval& time_interval = time_intervals_[time_interval_index];
  uint32_t entries_size = 0;
  EntryInfo* start_entry_info = entry_info;
  while (entry_info < end_entry_info && entry_info->id < time_interval_id) {
    entries_size += entry_info->size;
  uint32_t entries_count =
      static_cast<uint32_t>(entry_info - start_entry_info);
  if (time_interval.count != entries_count ||
      time_interval.size != entries_size) {
        time_interval_index,
        time_interval.count = entries_count,
        time_interval.size = entries_size));
    if (stats_buffer.length() >= prefered_chunk_size) {
          &stats_buffer.first(), stats_buffer.length());
      stats_buffer.Clear();
ASSERT(entry_info == end_entry_info);
if (!stats_buffer.is_empty()) {
      &stats_buffer.first(), stats_buffer.length());
void HeapObjectsMap::RemoveDeadEntries() {
  ASSERT(entries_.length() > 0 &&
         entries_.at(0).id == 0 &&
         entries_.at(0).addr == NULL);
  int first_free_entry = 1;
  for (int i = 1; i < entries_.length(); ++i) {
    EntryInfo& entry_info = entries_.at(i);
    if (entry_info.accessed) {
      if (first_free_entry != i) {
        entries_.at(first_free_entry) = entry_info;
      entries_.at(first_free_entry).accessed = false;
      HashMap::Entry* entry = entries_map_.Lookup(
          entry_info.addr, AddressHash(entry_info.addr), false);
      entry->value = reinterpret_cast<void*>(first_free_entry);
      if (entry_info.addr) {
        entries_map_.Remove(entry_info.addr, AddressHash(entry_info.addr));
  entries_.Rewind(first_free_entry);
  ASSERT(static_cast<uint32_t>(entries_.length()) - 1 ==
const char* label = info->GetLabel();
             static_cast<int>(strlen(label)),
if (element_count != -1)
    v8::internal::kZeroHashSeed);
sizeof(HashMap::Entry) * entries_map_.capacity() +
    GetMemoryUsedByList(entries_) +
    GetMemoryUsedByList(time_intervals_);
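// HeapSnapshotsCollection owns every HeapSnapshot taken for an isolate,
// indexes them by uid, and shares a single HeapObjectsMap (ids_) so that the
// same object keeps the same id across consecutive snapshots.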
  : is_tracking_objects_(false),
    snapshots_uids_(HeapSnapshotsMatch),
static void DeleteHeapSnapshot(HeapSnapshot** snapshot_ptr) {
  delete *snapshot_ptr;
delete token_enumerator_;
snapshots_.Iterate(DeleteHeapSnapshot);
is_tracking_objects_ = true;
if (snapshot != NULL) {
  snapshots_.Add(snapshot);
  HashMap::Entry* entry =
      snapshots_uids_.Lookup(reinterpret_cast<void*>(snapshot->uid()),
                             static_cast<uint32_t>(snapshot->uid()),
  entry->value = snapshot;
HashMap::Entry* entry = snapshots_uids_.Lookup(reinterpret_cast<void*>(uid),
                                               static_cast<uint32_t>(uid),
snapshots_.RemoveElement(snapshot);
unsigned uid = snapshot->uid();
snapshots_uids_.Remove(reinterpret_cast<void*>(uid),
                       static_cast<uint32_t>(uid));
             "HeapSnapshotsCollection::FindHeapObjectById");
HeapIterator iterator(HeapIterator::kFilterUnreachable);
     obj = iterator.next()) {
  if (ids_.FindEntry(obj->address()) == id) {
             kExpectedHeapSnapshotsCollectionSize ==
size_t size = sizeof(*this);
size += sizeof(HashMap::Entry) * snapshots_uids_.capacity();
size += GetMemoryUsedByList(snapshots_);
for (int i = 0; i < snapshots_.length(); ++i) {
  size += snapshots_[i]->RawSnapshotSize();
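// HeapEntriesMap maps arbitrary HeapThing pointers to entry indices inside
// the snapshot being built, and HeapObjectsSet is a small wrapper used both
// as a visited-set and as an object-to-tag map during snapshot generation.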
  : entries_(HeapThingsMatch) {
HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), false);
if (cache_entry == NULL) return HeapEntry::kNoEntry;
return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), true);
cache_entry->value = reinterpret_cast<void*>(static_cast<intptr_t>(entry));
if (!obj->IsHeapObject()) return false;
return entries_.Lookup(object, HeapEntriesMap::Hash(object), false) != NULL;
if (!obj->IsHeapObject()) return;
entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
HashMap::Entry* cache_entry =
    entries_.Lookup(object, HeapEntriesMap::Hash(object), false);
return cache_entry != NULL
    ? reinterpret_cast<const char*>(cache_entry->value)
if (!obj->IsHeapObject()) return;
HashMap::Entry* cache_entry =
    entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
cache_entry->value = const_cast<char*>(tag);
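// V8HeapExplorer walks the V8 heap itself. AddEntry() classifies each
// HeapObject into a HeapEntry type, the Extract*References() family records
// typed edges for the interesting object kinds, and the Set*Reference()
// helpers funnel everything through the SnapshotFiller interface.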
HeapObject* const V8HeapExplorer::kGcRootsObject =
HeapObject* const V8HeapExplorer::kFirstGcSubrootObject =
HeapObject* const V8HeapExplorer::kLastGcSubrootObject =
  : heap_(Isolate::Current()->heap()),
    snapshot_(snapshot),
    collection_(snapshot_->collection()),
    progress_(progress),
return AddEntry(reinterpret_cast<HeapObject*>(ptr));
HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object) {
    return snapshot_->root();
  } else if (object == kGcRootsObject) {
  } else if (object >= kFirstGcSubrootObject &&
             object < kLastGcSubrootObject) {
  } else if (object->IsJSFunction()) {
    SharedFunctionInfo* shared = func->shared();
    const char* name = shared->bound() ? "native_bind" :
    return AddEntry(object, HeapEntry::kClosure, name);
  } else if (object->IsJSRegExp()) {
    return AddEntry(object,
  } else if (object->IsJSObject()) {
    if (object->IsJSGlobalObject()) {
      const char* tag = objects_tags_.GetTag(object);
    return AddEntry(object, HeapEntry::kObject, name);
  } else if (object->IsString()) {
    return AddEntry(object,
  } else if (object->IsCode()) {
    return AddEntry(object, HeapEntry::kCode, "");
  } else if (object->IsSharedFunctionInfo()) {
    return AddEntry(object,
  } else if (object->IsScript()) {
    return AddEntry(object,
  } else if (object->IsGlobalContext()) {
    return AddEntry(object, HeapEntry::kHidden, "system / GlobalContext");
  } else if (object->IsContext()) {
    return AddEntry(object, HeapEntry::kHidden, "system / Context");
  } else if (object->IsFixedArray() ||
             object->IsFixedDoubleArray() ||
             object->IsByteArray() ||
             object->IsExternalArray()) {
    return AddEntry(object, HeapEntry::kArray, "");
  } else if (object->IsHeapNumber()) {
    return AddEntry(object, HeapEntry::kHeapNumber, "number");
  return AddEntry(object, HeapEntry::kHidden, GetSystemEntryName(object));
HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object,
                                    HeapEntry::Type type,
  int object_size = object->Size();
      collection_->GetObjectId(object->address(), object_size);
  return snapshot_->AddEntry(type, name, object_id, object_size);
      explorer_(explorer),
      previous_object_count_(0),
object_count_ += end - start;
if (previous_object_count_ != object_count_) {
  previous_object_count_ = object_count_;
  filler_->AddEntry(V8HeapExplorer::GetNthGcSubrootObject(tag), explorer_);
intptr_t previous_object_count_;
intptr_t object_count_;
filler->AddEntry(kGcRootsObject, this);
const char* V8HeapExplorer::GetSystemEntryName(HeapObject* object) {
    case MAP_TYPE: return "system / Map";
#define MAKE_STRUCT_CASE(NAME, Name, name) \
    case NAME##_TYPE: return "system / "#Name;
#undef MAKE_STRUCT_CASE
    default: return "system";
int objects_count = 0;
     obj = iterator->next()) {
return objects_count;
      : generator_(generator),
        parent_obj_(parent_obj),
for (Object** p = start; p < end; p++) {
  if (CheckVisitedAndUnmark(p)) continue;
  generator_->SetHiddenReference(parent_obj_, parent_, next_index_++, *p);
if (offset < 0) return;
bool CheckVisitedAndUnmark(Object** field) {
  if ((*field)->IsFailure()) {
    intptr_t untagged =
        reinterpret_cast<intptr_t>(*field) & ~kFailureTagMask;
    ASSERT((*field)->IsHeapObject());
V8HeapExplorer* generator_;
HeapObject* parent_obj_;
void V8HeapExplorer::ExtractReferences(HeapObject* obj) {
  HeapEntry* heap_entry = GetEntry(obj);
  if (heap_entry == NULL) return;
  int entry = heap_entry->index();
  bool extract_indexed_refs = true;
  if (obj->IsJSGlobalProxy()) {
  } else if (obj->IsJSObject()) {
  } else if (obj->IsString()) {
    extract_indexed_refs = false;
  } else if (obj->IsContext()) {
  } else if (obj->IsMap()) {
    ExtractMapReferences(entry, Map::cast(obj));
  } else if (obj->IsSharedFunctionInfo()) {
  } else if (obj->IsScript()) {
  } else if (obj->IsCodeCache()) {
  } else if (obj->IsCode()) {
    ExtractCodeReferences(entry, Code::cast(obj));
  } else if (obj->IsJSGlobalPropertyCell()) {
    ExtractJSGlobalPropertyCellReferences(
    extract_indexed_refs = false;
  if (extract_indexed_refs) {
    obj->Iterate(&refs_extractor);
void V8HeapExplorer::ExtractJSGlobalProxyReferences(JSGlobalProxy* proxy) {
  Object* object = proxy->map()->prototype();
  bool is_debug_object = false;
#ifdef ENABLE_DEBUGGER_SUPPORT
  is_debug_object = object->IsGlobalObject() &&
  if (!is_debug_object) {
    SetUserGlobalReference(object);
void V8HeapExplorer::ExtractJSObjectReferences(
    int entry, JSObject* js_obj) {
  HeapObject* obj = js_obj;
  ExtractClosureReferences(js_obj, entry);
  ExtractPropertyReferences(js_obj, entry);
  ExtractElementReferences(js_obj, entry);
  ExtractInternalReferences(js_obj, entry);
  SetPropertyReference(
      obj, entry, heap_->Proto_symbol(), js_obj->GetPrototype());
  if (obj->IsJSFunction()) {
    Object* proto_or_map = js_fun->prototype_or_initial_map();
    if (!proto_or_map->IsTheHole()) {
      if (!proto_or_map->IsMap()) {
        SetPropertyReference(
            heap_->prototype_symbol(), proto_or_map,
        SetPropertyReference(
            heap_->prototype_symbol(), js_fun->prototype());
    SharedFunctionInfo* shared_info = js_fun->shared();
    bool bound = shared_info->bound();
    TagObject(js_fun->literals_or_bindings(),
              bound ? "(function bindings)" : "(function literals)");
    SetInternalReference(js_fun, entry,
                         bound ? "bindings" : "literals",
                         js_fun->literals_or_bindings(),
    TagObject(shared_info, "(shared function info)");
    SetInternalReference(js_fun, entry,
                         "shared", shared_info,
    TagObject(js_fun->unchecked_context(), "(context)");
    SetInternalReference(js_fun, entry,
                         "context", js_fun->unchecked_context(),
  } else if (obj->IsGlobalObject()) {
    SetInternalReference(global_obj, entry,
                         "builtins", global_obj->builtins(),
    SetInternalReference(global_obj, entry,
                         "global_context", global_obj->global_context(),
    SetInternalReference(global_obj, entry,
                         "global_receiver", global_obj->global_receiver(),
  TagObject(js_obj->properties(), "(object properties)");
  SetInternalReference(obj, entry,
                       "properties", js_obj->properties(),
  TagObject(js_obj->elements(), "(object elements)");
  SetInternalReference(obj, entry,
                       "elements", js_obj->elements(),
void V8HeapExplorer::ExtractStringReferences(int entry, String* string) {
  if (string->IsConsString()) {
    SetInternalReference(cs, entry, "first", cs->first());
    SetInternalReference(cs, entry, "second", cs->second());
  } else if (string->IsSlicedString()) {
    SetInternalReference(ss, entry, "parent", ss->parent());
void V8HeapExplorer::ExtractContextReferences(int entry, Context* context) {
#define EXTRACT_CONTEXT_FIELD(index, type, name) \
  SetInternalReference(context, entry, #name, context->get(Context::index), \
                       FixedArray::OffsetOfElementAt(Context::index));
  if (context->IsGlobalContext()) {
    TagObject(context->jsfunction_result_caches(),
              "(context func. result caches)");
    TagObject(context->normalized_map_cache(), "(context norm. map cache)");
    TagObject(context->runtime_context(), "(runtime context)");
    TagObject(context->data(), "(context data)");
#undef EXTRACT_CONTEXT_FIELD
    SetWeakReference(context, entry, i, context->get(i),
void V8HeapExplorer::ExtractMapReferences(int entry, Map* map) {
  SetInternalReference(map, entry,
  SetInternalReference(map, entry,
                       "constructor", map->constructor(),
  if (!map->instance_descriptors()->IsEmpty()) {
    TagObject(map->instance_descriptors(), "(map descriptors)");
    SetInternalReference(map, entry,
                         "descriptors", map->instance_descriptors(),
  if (map->unchecked_prototype_transitions()->IsFixedArray()) {
    TagObject(map->prototype_transitions(), "(prototype transitions)");
    SetInternalReference(map, entry,
                         "prototype_transitions",
                         map->prototype_transitions(),
  SetInternalReference(map, entry,
                       "back_pointer", map->GetBackPointer(),
  SetInternalReference(map, entry,
                       "code_cache", map->code_cache(),
void V8HeapExplorer::ExtractSharedFunctionInfoReferences(
    int entry, SharedFunctionInfo* shared) {
  HeapObject* obj = shared;
  SetInternalReference(obj, entry,
                       "name", shared->name(),
  TagObject(shared->code(), "(code)");
  SetInternalReference(obj, entry,
                       "code", shared->code(),
  TagObject(shared->scope_info(), "(function scope info)");
  SetInternalReference(obj, entry,
                       "scope_info", shared->scope_info(),
  SetInternalReference(obj, entry,
                       "instance_class_name", shared->instance_class_name(),
  SetInternalReference(obj, entry,
                       "script", shared->script(),
  TagObject(shared->construct_stub(), "(code)");
  SetInternalReference(obj, entry,
                       "construct_stub", shared->construct_stub(),
  SetInternalReference(obj, entry,
                       "function_data", shared->function_data(),
  SetInternalReference(obj, entry,
                       "debug_info", shared->debug_info(),
  SetInternalReference(obj, entry,
                       "inferred_name", shared->inferred_name(),
  SetInternalReference(obj, entry,
                       "this_property_assignments",
                       shared->this_property_assignments(),
  SetWeakReference(obj, entry,
                   1, shared->initial_map(),
void V8HeapExplorer::ExtractScriptReferences(int entry, Script* script) {
  HeapObject* obj = script;
  SetInternalReference(obj, entry,
                       "source", script->source(),
  SetInternalReference(obj, entry,
                       "name", script->name(),
  SetInternalReference(obj, entry,
                       "data", script->data(),
  SetInternalReference(obj, entry,
                       "context_data", script->context_data(),
  TagObject(script->line_ends(), "(script line ends)");
  SetInternalReference(obj, entry,
                       "line_ends", script->line_ends(),
void V8HeapExplorer::ExtractCodeCacheReferences(
    int entry, CodeCache* code_cache) {
  TagObject(code_cache->default_cache(), "(default code cache)");
  SetInternalReference(code_cache, entry,
                       "default_cache", code_cache->default_cache(),
  TagObject(code_cache->normal_type_cache(), "(code type cache)");
  SetInternalReference(code_cache, entry,
                       "type_cache", code_cache->normal_type_cache(),
void V8HeapExplorer::ExtractCodeReferences(int entry, Code* code) {
  TagObject(code->relocation_info(), "(code relocation info)");
  SetInternalReference(code, entry,
                       "relocation_info", code->relocation_info(),
  SetInternalReference(code, entry,
                       "handler_table", code->handler_table(),
  TagObject(code->deoptimization_data(), "(code deopt data)");
  SetInternalReference(code, entry,
                       "deoptimization_data", code->deoptimization_data(),
  SetInternalReference(code, entry,
                       "type_feedback_info", code->type_feedback_info(),
  SetInternalReference(code, entry,
                       "gc_metadata", code->gc_metadata(),
void V8HeapExplorer::ExtractJSGlobalPropertyCellReferences(
    int entry, JSGlobalPropertyCell* cell) {
  SetInternalReference(cell, entry, "value", cell->value());
void V8HeapExplorer::ExtractClosureReferences(JSObject* js_obj, int entry) {
  if (!js_obj->IsJSFunction()) return;
  if (func->shared()->bound()) {
    FixedArray* bindings = func->function_bindings();
    SetNativeBindReference(js_obj, entry, "bound_this",
    SetNativeBindReference(js_obj, entry, "bound_function",
         i < bindings->length(); i++) {
                    "bound_argument_%d",
      SetNativeBindReference(js_obj, entry, reference_name,
    Context* context = func->context()->declaration_context();
    ScopeInfo* scope_info = context->closure()->shared()->scope_info();
    int context_locals = scope_info->ContextLocalCount();
    for (int i = 0; i < context_locals; ++i) {
      String* local_name = scope_info->ContextLocalName(i);
      SetClosureReference(js_obj, entry, local_name, context->get(idx));
    if (scope_info->HasFunctionName()) {
      String* name = scope_info->FunctionName();
      int idx = scope_info->FunctionContextSlotIndex(name, &mode);
      SetClosureReference(js_obj, entry, name, context->get(idx));
void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj, int entry) {
  if (js_obj->HasFastProperties()) {
    DescriptorArray* descs = js_obj->map()->instance_descriptors();
    for (int i = 0; i < descs->number_of_descriptors(); i++) {
      switch (descs->GetType(i)) {
          int index = descs->GetFieldIndex(i);
          if (index < js_obj->map()->inobject_properties()) {
            SetPropertyReference(
                descs->GetKey(i), js_obj->InObjectPropertyAt(index),
                js_obj->GetInObjectPropertyOffset(index));
            SetPropertyReference(
                descs->GetKey(i), js_obj->FastPropertyAt(index));
          SetPropertyReference(
              descs->GetKey(i), descs->GetConstantFunction(i));
          Object* callback_obj = descs->GetValue(i);
          if (callback_obj->IsAccessorPair()) {
            if (Object* getter = accessors->getter()) {
              SetPropertyReference(js_obj, entry, descs->GetKey(i),
            if (Object* setter = accessors->setter()) {
              SetPropertyReference(js_obj, entry, descs->GetKey(i),
    StringDictionary* dictionary = js_obj->property_dictionary();
    int length = dictionary->Capacity();
    for (int i = 0; i < length; ++i) {
      Object* k = dictionary->KeyAt(i);
      if (dictionary->IsKey(k)) {
        Object* target = dictionary->ValueAt(i);
        Object* value = target->IsJSGlobalPropertyCell()
          SetPropertyReference(js_obj, entry, String::cast(k), value);
          TagObject(value, "(hidden properties)");
          SetInternalReference(js_obj, entry, "hidden_properties", value);
void V8HeapExplorer::ExtractElementReferences(JSObject* js_obj, int entry) {
  if (js_obj->HasFastObjectElements()) {
    int length = js_obj->IsJSArray() ?
    for (int i = 0; i < length; ++i) {
      if (!elements->get(i)->IsTheHole()) {
        SetElementReference(js_obj, entry, i, elements->get(i));
  } else if (js_obj->HasDictionaryElements()) {
    SeededNumberDictionary* dictionary = js_obj->element_dictionary();
    int length = dictionary->Capacity();
    for (int i = 0; i < length; ++i) {
      Object* k = dictionary->KeyAt(i);
      if (dictionary->IsKey(k)) {
        uint32_t index = static_cast<uint32_t>(k->Number());
        SetElementReference(js_obj, entry, index, dictionary->ValueAt(i));
void V8HeapExplorer::ExtractInternalReferences(JSObject* js_obj, int entry) {
  int length = js_obj->GetInternalFieldCount();
  for (int i = 0; i < length; ++i) {
    Object* o = js_obj->GetInternalField(i);
    SetInternalReference(
        js_obj, entry, i, o, js_obj->GetInternalFieldOffset(i));
Heap* heap = object->GetHeap();
if (object->IsJSFunction()) return heap->closure_symbol();
String* constructor_name = object->constructor_name();
if (constructor_name == heap->Object_symbol()) {
  LookupResult result(heap->isolate());
  object->LocalLookupRealNamedProperty(heap->constructor_symbol(), &result);
  if (result.IsProperty()) {
    constructor_prop = result.GetLazyValue();
  if (constructor_prop->IsJSFunction()) {
    if (maybe_name->IsString()) {
      if (name->length() > 0) return name;
return object->constructor_name();
HeapEntry* V8HeapExplorer::GetEntry(Object* obj) {
  if (!obj->IsHeapObject()) return NULL;
    : index(index), tag(tag) { }
    : collecting_all_references_(false),
      previous_reference_count_(0) {
if (collecting_all_references_) {
  for (Object** p = start; p < end; p++) all_references_.Add(*p);
  for (Object** p = start; p < end; p++) strong_references_.Add(*p);
ASSERT(strong_references_.length() <= all_references_.length());
for (int i = 0; i < reference_tags_.length(); ++i) {
  explorer->SetGcRootsReference(reference_tags_[i].tag);
int strong_index = 0, all_index = 0, tags_index = 0;
while (all_index < all_references_.length()) {
  if (strong_index < strong_references_.length() &&
      strong_references_[strong_index] == all_references_[all_index]) {
    explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
                                    all_references_[all_index++]);
    explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
                                    all_references_[all_index++]);
  if (reference_tags_[tags_index].index == all_index) ++tags_index;
if (collecting_all_references_ &&
    previous_reference_count_ != all_references_.length()) {
  previous_reference_count_ = all_references_.length();
  reference_tags_.Add(IndexTag(previous_reference_count_, tag));
bool collecting_all_references_;
int previous_reference_count_;
HeapIterator iterator(HeapIterator::kFilterUnreachable);
bool interrupted = false;
ExtractReferences(obj);
SetRootGcRootsReference();
bool V8HeapExplorer::IsEssentialObject(Object* object) {
  return object->IsHeapObject()
      && !object->IsOddball()
      && object != heap_->raw_unchecked_empty_byte_array()
      && object != heap_->raw_unchecked_empty_fixed_array()
      && object != heap_->raw_unchecked_empty_descriptor_array()
      && object != heap_->raw_unchecked_fixed_array_map()
      && object != heap_->raw_unchecked_global_property_cell_map()
      && object != heap_->raw_unchecked_shared_function_info_map()
      && object != heap_->raw_unchecked_free_space_map()
      && object != heap_->raw_unchecked_one_pointer_filler_map()
      && object != heap_->raw_unchecked_two_pointer_filler_map();
void V8HeapExplorer::SetClosureReference(HeapObject* parent_obj,
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
void V8HeapExplorer::SetNativeBindReference(HeapObject* parent_obj,
                                            const char* reference_name,
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
void V8HeapExplorer::SetElementReference(HeapObject* parent_obj,
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
                                          const char* reference_name,
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry == NULL) return;
  if (IsEssentialObject(child_obj)) {
void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry == NULL) return;
  if (IsEssentialObject(child_obj)) {
void V8HeapExplorer::SetHiddenReference(HeapObject* parent_obj,
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL && IsEssentialObject(child_obj)) {
void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj,
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
void V8HeapExplorer::SetPropertyReference(HeapObject* parent_obj,
                                          String* reference_name,
                                          const char* name_format_string,
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    const char* name = name_format_string != NULL ?
void V8HeapExplorer::SetPropertyShortcutReference(HeapObject* parent_obj,
                                                  String* reference_name,
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
void V8HeapExplorer::SetRootGcRootsReference() {
                           snapshot_->root()->index(),
void V8HeapExplorer::SetUserGlobalReference(Object* child_obj) {
  HeapEntry* child_entry = GetEntry(child_obj);
                           snapshot_->root()->index(),
void V8HeapExplorer::SetGcSubrootReference(
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    const char* name = GetStrongGcSubrootName(child_obj);
const char* V8HeapExplorer::GetStrongGcSubrootName(Object* object) {
  if (strong_gc_subroot_names_.is_empty()) {
#define NAME_ENTRY(name) strong_gc_subroot_names_.SetTag(heap_->name(), #name);
#define ROOT_NAME(type, name, camel_name) NAME_ENTRY(name)
#define STRUCT_MAP_NAME(NAME, Name, name) NAME_ENTRY(name##_map)
#undef STRUCT_MAP_NAME
#define SYMBOL_NAME(name, str) NAME_ENTRY(name)
  return strong_gc_subroot_names_.GetTag(object);
void V8HeapExplorer::TagObject(Object* obj, const char* tag) {
  if (IsEssentialObject(obj)) {
    HeapEntry* entry = GetEntry(obj);
    if (entry->name()[0] == '\0') {
      entry->set_name(tag);
for (Object** p = start; p < end; p++) {
  if ((*p)->IsGlobalContext()) {
    if (proxy->IsJSGlobalProxy()) {
      Object* global = proxy->map()->prototype();
      if (global->IsJSGlobalObject()) {
int count() { return objects_.length(); }
Isolate* isolate = Isolate::Current();
const char** urls = NewArray<const char*>(enumerator.count());
for (int i = 0, l = enumerator.count(); i < l; ++i) {
  if (global_obj->GetProperty(*document_string)->ToObject(&obj_document) &&
      obj_document->IsJSObject()) {
    if (document->GetProperty(*url_string)->ToObject(&obj_url) &&
        obj_url->IsString()) {
for (int i = 0, l = enumerator.count(); i < l; ++i) {
  objects_tags_.SetTag(*enumerator.at(i), urls[i]);
  : explorer_(explorer) {}
explorer_->VisitSubtreeWrapper(p, class_id);
                          HeapEntry::Type entries_type)
  : snapshot_(snapshot),
    collection_(snapshot_->collection()),
    entries_type_(entries_type) {
HeapEntry::Type entries_type_;
const char* name = elements != -1
    size != -1 ? static_cast<int>(size) : 0);
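// NativeObjectsExplorer adds the embedder's side of the graph: it queries
// RetainedObjectInfo from the global handles (object groups and implicit
// reference groups), creates synthetic group entries, and links wrappers to
// the native objects that retain them.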
  : snapshot_(snapshot),
    collection_(snapshot_->collection()),
    progress_(progress),
    embedder_queried_(false),
    objects_by_info_(RetainedInfosMatch),
    native_groups_(StringsMatch),
synthetic_entries_allocator_ =
native_entries_allocator_ =
for (HashMap::Entry* p = objects_by_info_.Start();
     p != NULL;
     p = objects_by_info_.Next(p)) {
for (HashMap::Entry* p = native_groups_.Start();
     p != NULL;
     p = native_groups_.Next(p)) {
delete synthetic_entries_allocator_;
delete native_entries_allocator_;
FillRetainedObjects();
void NativeObjectsExplorer::FillRetainedObjects() {
  if (embedder_queried_) return;
  Isolate* isolate = Isolate::Current();
  for (int i = 0; i < groups->length(); ++i) {
    for (size_t j = 0; j < group->length_; ++j) {
  embedder_queried_ = true;
void NativeObjectsExplorer::FillImplicitReferences() {
  Isolate* isolate = Isolate::Current();
  List<ImplicitRefGroup*>* groups =
      isolate->global_handles()->implicit_ref_groups();
  for (int i = 0; i < groups->length(); ++i) {
    ImplicitRefGroup* group = groups->at(i);
    HeapObject* parent = *group->parent_;
        filler_->FindOrAddEntry(parent, native_entries_allocator_)->index();
    ASSERT(parent_entry != HeapEntry::kNoEntry);
    Object*** children = group->children_;
    for (size_t j = 0; j < group->length_; ++j) {
      Object* child = *children[j];
      HeapEntry* child_entry =
List<HeapObject*>* NativeObjectsExplorer::GetListMaybeDisposeInfo(
  HashMap::Entry* entry =
      objects_by_info_.Lookup(info, InfoHash(info), true);
  if (entry->value != NULL) {
    entry->value = new List<HeapObject*>(4);
  return reinterpret_cast<List<HeapObject*>*>(entry->value);
FillRetainedObjects();
FillImplicitReferences();
for (HashMap::Entry* p = objects_by_info_.Start();
     p != NULL;
     p = objects_by_info_.Next(p)) {
  SetNativeRootReference(info);
  for (int i = 0; i < objects->length(); ++i) {
    SetWrapperNativeReferences(objects->at(i), info);
SetRootNativeRootsReference();
      hash_(reinterpret_cast<intptr_t>(label)),
return hash_ == other->GetHash() && !strcmp(label_, other->GetLabel());
NativeGroupRetainedObjectInfo* NativeObjectsExplorer::FindOrAddGroupInfo(
    const char* label) {
  const char* label_copy = collection_->names()->GetCopy(label);
                 static_cast<int>(strlen(label_copy)),
  HashMap::Entry* entry = native_groups_.Lookup(const_cast<char*>(label_copy),
  if (entry->value == NULL) {
    entry->value = new NativeGroupRetainedObjectInfo(label);
  return static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
void NativeObjectsExplorer::SetNativeRootReference(
  HeapEntry* child_entry =
  NativeGroupRetainedObjectInfo* group_info =
  HeapEntry* group_entry =
      filler_->FindOrAddEntry(group_info, synthetic_entries_allocator_);
                           group_entry->index(),
void NativeObjectsExplorer::SetWrapperNativeReferences(
  HeapEntry* wrapper_entry = filler_->FindEntry(wrapper);
  HeapEntry* info_entry =
                           wrapper_entry->index(),
                           info_entry->index(),
void NativeObjectsExplorer::SetRootNativeRootsReference() {
  for (HashMap::Entry* entry = native_groups_.Start();
       entry != NULL;
       entry = native_groups_.Next(entry)) {
    NativeGroupRetainedObjectInfo* group_info =
        static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
    HeapEntry* group_entry =
                             snapshot_->root()->index(),
void NativeObjectsExplorer::VisitSubtreeWrapper(Object** p,
                                                uint16_t class_id) {
  if (in_groups_.Contains(*p)) return;
  Isolate* isolate = Isolate::Current();
      isolate->heap_profiler()->ExecuteWrapperClassCallback(class_id, p);
  if (info == NULL) return;
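// SnapshotFiller implements the filler interface shared by both explorers:
// it keeps the HeapThing-to-entry-index mapping in a HeapEntriesMap and
// forwards reference requests to the HeapEntry objects that own them.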
  : snapshot_(snapshot),
    collection_(snapshot->collection()),
    entries_(entries) { }
entries_->Pair(ptr, entry->index());
int index = entries_->Map(ptr);
return index != HeapEntry::kNoEntry ? &snapshot_->entries()[index] : NULL;
return entry != NULL ? entry : AddEntry(ptr, allocator);
                          HeapEntry* child_entry) {
  HeapEntry* parent_entry = &snapshot_->entries()[parent];
  parent_entry->SetIndexedReference(type, index, child_entry);
                          HeapEntry* child_entry) {
  HeapEntry* parent_entry = &snapshot_->entries()[parent];
  int index = parent_entry->children_count() + 1;
  parent_entry->SetIndexedReference(type, index, child_entry);
                          const char* reference_name,
                          HeapEntry* child_entry) {
  HeapEntry* parent_entry = &snapshot_->entries()[parent];
  parent_entry->SetNamedReference(type, reference_name, child_entry);
                          HeapEntry* child_entry) {
  HeapEntry* parent_entry = &snapshot_->entries()[parent];
  int index = parent_entry->children_count() + 1;
  parent_entry->SetNamedReference(
  : snapshot_(snapshot),
    v8_heap_explorer_(snapshot_, this),
    dom_explorer_(snapshot_, this) {
Isolate::Current()->heap()->CollectAllGarbage(
    "HeapSnapshotGenerator::GenerateSnapshot");
Isolate::Current()->heap()->CollectAllGarbage(
    "HeapSnapshotGenerator::GenerateSnapshot");
Heap* debug_heap = Isolate::Current()->heap();
debug_heap->Verify();
SetProgressTotal(1);
debug_heap->Verify();
if (!FillReferences()) return false;
progress_counter_ = progress_total_;
if (!ProgressReport(true)) return false;
void HeapSnapshotGenerator::ProgressStep() {
  ++progress_counter_;
bool HeapSnapshotGenerator::ProgressReport(bool force) {
  const int kProgressReportGranularity = 10000;
  if (control_ != NULL
      && (force || progress_counter_ % kProgressReportGranularity == 0)) {
void HeapSnapshotGenerator::SetProgressTotal(int iterations_count) {
  if (control_ == NULL) return;
  HeapIterator iterator(HeapIterator::kFilterUnreachable);
  progress_total_ = iterations_count * (
  progress_counter_ = 0;
bool HeapSnapshotGenerator::FillReferences() {
  SnapshotFiller filler(snapshot_, &entries_);
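// MaxDecimalDigitsIn<> bounds the textual size of serialized numbers, and
// OutputStreamWriter buffers characters into fixed-size chunks before
// handing them to the embedder-provided v8::OutputStream.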
static const int kSigned = 11;
static const int kUnsigned = 10;
static const int kSigned = 20;
static const int kUnsigned = 20;
      chunk_size_(stream->GetChunkSize()),
      chunk_(chunk_size_),
ASSERT(chunk_pos_ < chunk_size_);
chunk_[chunk_pos_++] = c;
ASSERT(static_cast<size_t>(n) <= strlen(s));
const char* s_end = s + n;
int s_chunk_size = Min(
    chunk_size_ - chunk_pos_, static_cast<int>(s_end - s));
ASSERT(s_chunk_size > 0);
memcpy(chunk_.start() + chunk_pos_, s, s_chunk_size);
chunk_pos_ += s_chunk_size;
void AddNumber(unsigned n) { AddNumberImpl<unsigned>(n, "%u"); }
if (aborted_) return;
ASSERT(chunk_pos_ < chunk_size_);
if (chunk_pos_ != 0) {
template<typename T>
void AddNumberImpl(T n, const char* format) {
  static const int kMaxNumberSize =
  if (chunk_size_ - chunk_pos_ >= kMaxNumberSize) {
        chunk_.SubVector(chunk_pos_, chunk_size_), format, n);
    chunk_pos_ += result;
  EmbeddedVector<char, kMaxNumberSize> buffer;
void MaybeWriteChunk() {
  ASSERT(chunk_pos_ <= chunk_size_);
  if (chunk_pos_ == chunk_size_) {
if (aborted_) return;
ScopedVector<char> chunk_;
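// HeapSnapshotJSONSerializer writes the snapshot in the heap profiler's JSON
// format: a meta section describing node and edge field layouts, flat arrays
// of node and edge fields, and a deduplicated string table. Snapshots above
// kMaxSerializableSnapshotRawSize are replaced with a tiny fake snapshot
// carrying an explanatory message.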
3243 const int HeapSnapshotJSONSerializer::kEdgeFieldsCount = 3;
3245 const int HeapSnapshotJSONSerializer::kNodeFieldsCount = 5;
3253 SnapshotSizeConstants<kPointerSize>::kMaxSerializableSnapshotRawSize) {
3255 original_snapshot = snapshot_;
3256 snapshot_ = CreateFakeSnapshot();
3264 if (original_snapshot !=
NULL) {
3266 snapshot_ = original_snapshot;
3271 HeapSnapshot* HeapSnapshotJSONSerializer::CreateFakeSnapshot() {
3278 "The snapshot is too big. "
3281 SnapshotSizeConstants<kPointerSize>::kMaxSerializableSnapshotRawSize /
MB,
3283 HeapEntry* message = result->
AddEntry(HeapEntry::kString, text, 0, 4);
3290 void HeapSnapshotJSONSerializer::SerializeImpl() {
3294 SerializeSnapshot();
3295 if (writer_->
aborted())
return;
3299 if (writer_->
aborted())
return;
3303 if (writer_->
aborted())
return;
3307 if (writer_->
aborted())
return;
3314 int HeapSnapshotJSONSerializer::GetStringId(
const char* s) {
3315 HashMap::Entry* cache_entry = strings_.
Lookup(
3316 const_cast<char*>(s), ObjectHash(s),
true);
3317 if (cache_entry->value ==
NULL) {
3318 cache_entry->value =
reinterpret_cast<void*
>(next_string_id_++);
3320 return static_cast<int>(
reinterpret_cast<intptr_t
>(cache_entry->value));
3324 static int utoa(
unsigned value,
const Vector<char>& buffer,
int buffer_pos) {
3325 int number_of_digits = 0;
3331 buffer_pos += number_of_digits;
3332 int result = buffer_pos;
3334 int last_digit = value % 10;
3335 buffer[--buffer_pos] =
'0' + last_digit;
3342 void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge,
3345 static const int kBufferSize =
3346 MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned * 3 + 3 + 1;
3347 EmbeddedVector<char, kBufferSize> buffer;
3351 ? edge->index() : GetStringId(edge->name());
3354 buffer[buffer_pos++] =
',';
3356 buffer_pos = utoa(edge->type(), buffer, buffer_pos);
3357 buffer[buffer_pos++] =
',';
3358 buffer_pos = utoa(edge_name_or_index, buffer, buffer_pos);
3359 buffer[buffer_pos++] =
',';
3360 buffer_pos = utoa(entry_index(edge->to()), buffer, buffer_pos);
3361 buffer[buffer_pos++] =
'\0';
3366 void HeapSnapshotJSONSerializer::SerializeEdges() {
3367 List<HeapGraphEdge*>& edges = snapshot_->
children();
3368 for (
int i = 0; i < edges.length(); ++i) {
3370 edges[i - 1]->from()->index() <= edges[i]->from()->index());
3371 SerializeEdge(edges[i], i == 0);
3372 if (writer_->
aborted())
return;
3377 void HeapSnapshotJSONSerializer::SerializeNode(HeapEntry* entry) {
3379 static const int kBufferSize =
3380 5 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned
3382 EmbeddedVector<char, kBufferSize> buffer;
3384 if (entry_index(entry) != 0) {
3385 buffer[buffer_pos++] =
',';
3387 buffer_pos = utoa(entry->type(), buffer, buffer_pos);
3388 buffer[buffer_pos++] =
',';
3389 buffer_pos = utoa(GetStringId(entry->name()), buffer, buffer_pos);
3390 buffer[buffer_pos++] =
',';
3391 buffer_pos = utoa(entry->id(), buffer, buffer_pos);
3392 buffer[buffer_pos++] =
',';
3393 buffer_pos = utoa(entry->self_size(), buffer, buffer_pos);
3394 buffer[buffer_pos++] =
',';
3395 buffer_pos = utoa(entry->children_count(), buffer, buffer_pos);
3396 buffer[buffer_pos++] =
'\n';
3397 buffer[buffer_pos++] =
'\0';
3402 void HeapSnapshotJSONSerializer::SerializeNodes() {
3403 List<HeapEntry>& entries = snapshot_->
entries();
3404 for (
int i = 0; i < entries.length(); ++i) {
3405 SerializeNode(&entries[i]);
3406 if (writer_->
aborted())
return;
3411 void HeapSnapshotJSONSerializer::SerializeSnapshot() {
3420 #define JSON_A(s) "[" s "]"
3421 #define JSON_O(s) "{" s "}"
3422 #define JSON_S(s) "\"" s "\""
3429 JSON_S(
"edge_count"))
","
3450 JSON_S(
"name_or_index")
","
3461 JSON_S(
"string_or_number")
","
3473 static void WriteUChar(OutputStreamWriter* w,
unibrow::uchar u) {
3474 static const char hex_chars[] =
"0123456789ABCDEF";
3475 w->AddString(
"\\u");
3476 w->AddCharacter(hex_chars[(u >> 12) & 0xf]);
3477 w->AddCharacter(hex_chars[(u >> 8) & 0xf]);
3478 w->AddCharacter(hex_chars[(u >> 4) & 0xf]);
3479 w->AddCharacter(hex_chars[u & 0xf]);
3482 void HeapSnapshotJSONSerializer::SerializeString(const unsigned char* s) {
3485 for ( ; *s != '\0'; ++s) {
3508 if (*s > 31 && *s < 128) {
3510 } else if (*s <= 31) {
3512 WriteUChar(writer_, *s);
3515 unsigned length = 1, cursor = 0;
3516 for ( ; length <= 4 && *(s + length) != '\0'; ++length) { }
3519 WriteUChar(writer_, c);
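The loop classifies each byte: printable ASCII (32..127) is copied through (the elided lines before 3508 presumably handle the characters JSON requires to be escaped, such as the quote and the backslash), control characters (<= 31) are escaped with WriteUChar, and anything else is treated as the lead byte of a UTF-8 sequence whose length is probed (up to four bytes) before it is decoded to a code point c and escaped the same way. A minimal sketch of the lead-byte classification this relies on (an assumption, not the original decoding code):

// How many bytes a UTF-8 sequence occupies, judged from its lead byte.
static unsigned Utf8SequenceLength(unsigned char lead) {
  if (lead < 0x80) return 1;            // plain ASCII
  if ((lead & 0xE0) == 0xC0) return 2;  // 110xxxxx
  if ((lead & 0xF0) == 0xE0) return 3;  // 1110xxxx
  if ((lead & 0xF8) == 0xF0) return 4;  // 11110xxx
  return 1;                             // invalid lead byte: treat as one
}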
3532 void HeapSnapshotJSONSerializer::SerializeStrings() {
3533 List<HashMap::Entry*> sorted_strings;
3534 SortHashMap(&strings_, &sorted_strings);
3536 for (int i = 0; i < sorted_strings.length(); ++i) {
3539     reinterpret_cast<const unsigned char*>(sorted_strings[i]->key));
3540 if (writer_->aborted()) return;
3545 template<typename T>
3546 inline static int SortUsingEntryValue(const T* x, const T* y) {
3547 uintptr_t x_uint = reinterpret_cast<uintptr_t>((*x)->value);
3548 uintptr_t y_uint = reinterpret_cast<uintptr_t>((*y)->value);
3549 if (x_uint > y_uint) {
3551 } else if (x_uint == y_uint) {
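The return statements are elided in the fragment above; a comparator of this shape orders hash map entries by the integer stored in their value slot, i.e. by the string id assigned in GetStringId (a sketch, not necessarily the original bodies):

#include <cstdint>

template <typename T>
static int SortUsingEntryValueSketch(const T* x, const T* y) {
  std::uintptr_t x_uint = reinterpret_cast<std::uintptr_t>((*x)->value);
  std::uintptr_t y_uint = reinterpret_cast<std::uintptr_t>((*y)->value);
  if (x_uint > y_uint) return 1;
  if (x_uint == y_uint) return 0;
  return -1;
}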
3559 void HeapSnapshotJSONSerializer::SortHashMap(
3560 HashMap* map, List<HashMap::Entry*>* sorted_entries) {
3561 for (HashMap::Entry* p = map->Start(); p != NULL; p = map->Next(p))
3562 sorted_entries->Add(p);
3563 sorted_entries->Sort(SortUsingEntryValue);
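SortHashMap flattens the map into a list and sorts it with the comparator above, so SerializeStrings writes the strings in ascending id order, matching the order in which GetStringId handed the ids out. The same idea with standard containers (illustration only):

#include <algorithm>
#include <string>
#include <utility>
#include <vector>

// Sort (string, id) pairs by id so the string table can be emitted in the
// order the ids were assigned.
static void SortByAssignedId(std::vector<std::pair<std::string, int>>* entries) {
  std::sort(entries->begin(), entries->end(),
            [](const std::pair<std::string, int>& a,
               const std::pair<std::string, int>& b) { return a.second < b.second; });
}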