v8 3.14.5 (node 0.10.28)
V8 is Google's open source JavaScript engine
profile-generator.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #include "profile-generator-inl.h"
31 
32 #include "global-handles.h"
33 #include "heap-profiler.h"
34 #include "scopeinfo.h"
35 #include "unicode.h"
36 #include "zone-inl.h"
37 #include "debug.h"
38 
39 namespace v8 {
40 namespace internal {
41 
42 
43 TokenEnumerator::TokenEnumerator()
44  : token_locations_(4),
45  token_removed_(4) {
46 }
47 
48 
49 TokenEnumerator::~TokenEnumerator() {
50  Isolate* isolate = Isolate::Current();
51  for (int i = 0; i < token_locations_.length(); ++i) {
52  if (!token_removed_[i]) {
53  isolate->global_handles()->ClearWeakness(token_locations_[i]);
54  isolate->global_handles()->Destroy(token_locations_[i]);
55  }
56  }
57 }
58 
59 
60 int TokenEnumerator::GetTokenId(Object* token) {
61  Isolate* isolate = Isolate::Current();
62  if (token == NULL) return TokenEnumerator::kNoSecurityToken;
63  for (int i = 0; i < token_locations_.length(); ++i) {
64  if (*token_locations_[i] == token && !token_removed_[i]) return i;
65  }
66  Handle<Object> handle = isolate->global_handles()->Create(token);
67  // handle.location() points to a memory cell holding a pointer
68  // to a token object in V8's heap.
69  isolate->global_handles()->MakeWeak(handle.location(), this,
70  TokenRemovedCallback);
71  token_locations_.Add(handle.location());
72  token_removed_.Add(false);
73  return token_locations_.length() - 1;
74 }
75 
76 
77 void TokenEnumerator::TokenRemovedCallback(v8::Persistent<v8::Value> handle,
78  void* parameter) {
79  reinterpret_cast<TokenEnumerator*>(parameter)->TokenRemoved(
80  Utils::OpenHandle(*handle).location());
81  handle.Dispose();
82 }
83 
84 
85 void TokenEnumerator::TokenRemoved(Object** token_location) {
86  for (int i = 0; i < token_locations_.length(); ++i) {
87  if (token_locations_[i] == token_location && !token_removed_[i]) {
88  token_removed_[i] = true;
89  return;
90  }
91  }
92 }
93 
94 
95 StringsStorage::StringsStorage()
96  : names_(StringsMatch) {
97 }
98 
99 
100 StringsStorage::~StringsStorage() {
101  for (HashMap::Entry* p = names_.Start();
102  p != NULL;
103  p = names_.Next(p)) {
104  DeleteArray(reinterpret_cast<const char*>(p->value));
105  }
106 }
107 
108 
109 const char* StringsStorage::GetCopy(const char* src) {
110  int len = static_cast<int>(strlen(src));
111  Vector<char> dst = Vector<char>::New(len + 1);
112  OS::StrNCpy(dst, src, len);
113  dst[len] = '\0';
114  uint32_t hash =
115  HashSequentialString(dst.start(), len, HEAP->HashSeed());
116  return AddOrDisposeString(dst.start(), hash);
117 }
118 
119 
120 const char* StringsStorage::GetFormatted(const char* format, ...) {
121  va_list args;
122  va_start(args, format);
123  const char* result = GetVFormatted(format, args);
124  va_end(args);
125  return result;
126 }
127 
128 
129 const char* StringsStorage::AddOrDisposeString(char* str, uint32_t hash) {
130  HashMap::Entry* cache_entry = names_.Lookup(str, hash, true);
131  if (cache_entry->value == NULL) {
132  // New entry added.
133  cache_entry->value = str;
134  } else {
135  DeleteArray(str);
136  }
137  return reinterpret_cast<const char*>(cache_entry->value);
138 }
139 
140 
141 const char* StringsStorage::GetVFormatted(const char* format, va_list args) {
142  Vector<char> str = Vector<char>::New(1024);
143  int len = OS::VSNPrintF(str, format, args);
144  if (len == -1) {
145  DeleteArray(str.start());
146  return format;
147  }
148  uint32_t hash = HashSequentialString(
149  str.start(), len, HEAP->HashSeed());
150  return AddOrDisposeString(str.start(), hash);
151 }
152 
153 
154 const char* StringsStorage::GetName(String* name) {
155  if (name->IsString()) {
156  int length = Min(kMaxNameSize, name->length());
157  SmartArrayPointer<char> data =
158  name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL, 0, length);
159  uint32_t hash =
160  HashSequentialString(*data, length, name->GetHeap()->HashSeed());
161  return AddOrDisposeString(data.Detach(), hash);
162  }
163  return "";
164 }
165 
166 
167 const char* StringsStorage::GetName(int index) {
168  return GetFormatted("%d", index);
169 }
170 
171 
172 size_t StringsStorage::GetUsedMemorySize() const {
173  size_t size = sizeof(*this);
174  size += sizeof(HashMap::Entry) * names_.capacity();
175  for (HashMap::Entry* p = names_.Start(); p != NULL; p = names_.Next(p)) {
176  size += strlen(reinterpret_cast<const char*>(p->value)) + 1;
177  }
178  return size;
179 }
180 
181 const char* const CodeEntry::kEmptyNamePrefix = "";
182 
183 
184 void CodeEntry::CopyData(const CodeEntry& source) {
185  tag_ = source.tag_;
186  name_prefix_ = source.name_prefix_;
187  name_ = source.name_;
188  resource_name_ = source.resource_name_;
189  line_number_ = source.line_number_;
190 }
191 
192 
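// Note: entries with a non-zero shared_id_ (shared function entries) hash by
// that id alone; all other entries hash by tag, name prefix, name, resource
// name and line number, mirroring the equality check in IsSameAs() below.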
193 uint32_t CodeEntry::GetCallUid() const {
194  uint32_t hash = ComputeIntegerHash(tag_, v8::internal::kZeroHashSeed);
195  if (shared_id_ != 0) {
196  hash ^= ComputeIntegerHash(static_cast<uint32_t>(shared_id_),
197  v8::internal::kZeroHashSeed);
198  } else {
199  hash ^= ComputeIntegerHash(
200  static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name_prefix_)),
201  v8::internal::kZeroHashSeed);
202  hash ^= ComputeIntegerHash(
203  static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name_)),
204  v8::internal::kZeroHashSeed);
205  hash ^= ComputeIntegerHash(
206  static_cast<uint32_t>(reinterpret_cast<uintptr_t>(resource_name_)),
207  v8::internal::kZeroHashSeed);
208  hash ^= ComputeIntegerHash(line_number_, v8::internal::kZeroHashSeed);
209  }
210  return hash;
211 }
212 
213 
214 bool CodeEntry::IsSameAs(CodeEntry* entry) const {
215  return this == entry
216  || (tag_ == entry->tag_
217  && shared_id_ == entry->shared_id_
218  && (shared_id_ != 0
219  || (name_prefix_ == entry->name_prefix_
220  && name_ == entry->name_
221  && resource_name_ == entry->resource_name_
222  && line_number_ == entry->line_number_)));
223 }
224 
225 
226 ProfileNode* ProfileNode::FindChild(CodeEntry* entry) {
227  HashMap::Entry* map_entry =
228  children_.Lookup(entry, CodeEntryHash(entry), false);
229  return map_entry != NULL ?
230  reinterpret_cast<ProfileNode*>(map_entry->value) : NULL;
231 }
232 
233 
234 ProfileNode* ProfileNode::FindOrAddChild(CodeEntry* entry) {
235  HashMap::Entry* map_entry =
236  children_.Lookup(entry, CodeEntryHash(entry), true);
237  if (map_entry->value == NULL) {
238  // New node added.
239  ProfileNode* new_node = new ProfileNode(tree_, entry);
240  map_entry->value = new_node;
241  children_list_.Add(new_node);
242  }
243  return reinterpret_cast<ProfileNode*>(map_entry->value);
244 }
245 
246 
247 double ProfileNode::GetSelfMillis() const {
248  return tree_->TicksToMillis(self_ticks_);
249 }
250 
251 
252 double ProfileNode::GetTotalMillis() const {
253  return tree_->TicksToMillis(total_ticks_);
254 }
255 
256 
257 void ProfileNode::Print(int indent) {
258  OS::Print("%5u %5u %*c %s%s [%d]",
259  total_ticks_, self_ticks_,
260  indent, ' ',
261  entry_->name_prefix(),
262  entry_->name(),
263  entry_->security_token_id());
264  if (entry_->resource_name()[0] != '\0')
265  OS::Print(" %s:%d", entry_->resource_name(), entry_->line_number());
266  OS::Print("\n");
267  for (HashMap::Entry* p = children_.Start();
268  p != NULL;
269  p = children_.Next(p)) {
270  reinterpret_cast<ProfileNode*>(p->value)->Print(indent + 2);
271  }
272 }
273 
274 
275 class DeleteNodesCallback {
276  public:
277  void BeforeTraversingChild(ProfileNode*, ProfileNode*) { }
278 
279  void AfterAllChildrenTraversed(ProfileNode* node) {
280  delete node;
281  }
282 
283  void AfterChildTraversed(ProfileNode*, ProfileNode*) { }
284 };
285 
286 
287 ProfileTree::ProfileTree()
288  : root_entry_(Logger::FUNCTION_TAG,
289  "",
290  "(root)",
291  "",
292  0,
293  TokenEnumerator::kNoSecurityToken),
294  root_(new ProfileNode(this, &root_entry_)) {
295 }
296 
297 
298 ProfileTree::~ProfileTree() {
299  DeleteNodesCallback cb;
300  TraverseDepthFirst(&cb);
301 }
302 
303 
304 void ProfileTree::AddPathFromEnd(const Vector<CodeEntry*>& path) {
305  ProfileNode* node = root_;
306  for (CodeEntry** entry = path.start() + path.length() - 1;
307  entry != path.start() - 1;
308  --entry) {
309  if (*entry != NULL) {
310  node = node->FindOrAddChild(*entry);
311  }
312  }
313  node->IncrementSelfTicks();
314 }
315 
316 
317 void ProfileTree::AddPathFromStart(const Vector<CodeEntry*>& path) {
318  ProfileNode* node = root_;
319  for (CodeEntry** entry = path.start();
320  entry != path.start() + path.length();
321  ++entry) {
322  if (*entry != NULL) {
323  node = node->FindOrAddChild(*entry);
324  }
325  }
326  node->IncrementSelfTicks();
327 }
328 
329 
330 struct NodesPair {
331  NodesPair(ProfileNode* src, ProfileNode* dst)
332  : src(src), dst(dst) { }
333  ProfileNode* src;
334  ProfileNode* dst;
335 };
336 
337 
338 class FilteredCloneCallback {
339  public:
340  FilteredCloneCallback(ProfileNode* dst_root, int security_token_id)
341  : stack_(10),
342  security_token_id_(security_token_id) {
343  stack_.Add(NodesPair(NULL, dst_root));
344  }
345 
346  void BeforeTraversingChild(ProfileNode* parent, ProfileNode* child) {
347  if (IsTokenAcceptable(child->entry()->security_token_id(),
348  parent->entry()->security_token_id())) {
349  ProfileNode* clone = stack_.last().dst->FindOrAddChild(child->entry());
350  clone->IncreaseSelfTicks(child->self_ticks());
351  stack_.Add(NodesPair(child, clone));
352  } else {
353  // Attribute ticks to parent node.
354  stack_.last().dst->IncreaseSelfTicks(child->self_ticks());
355  }
356  }
357 
358  void AfterAllChildrenTraversed(ProfileNode* parent) { }
359 
360  void AfterChildTraversed(ProfileNode* parent, ProfileNode* child) {
361  if (stack_.last().src == child) {
362  stack_.RemoveLast();
363  }
364  }
365 
366  private:
367  bool IsTokenAcceptable(int token, int parent_token) {
368  if (token == TokenEnumerator::kNoSecurityToken
369  || token == security_token_id_) return true;
370  if (token == TokenEnumerator::kInheritsSecurityToken) {
371  ASSERT(parent_token != TokenEnumerator::kInheritsSecurityToken);
372  return parent_token == TokenEnumerator::kNoSecurityToken
373  || parent_token == security_token_id_;
374  }
375  return false;
376  }
377 
378  List<NodesPair> stack_;
379  int security_token_id_;
380 };
381 
382 void ProfileTree::FilteredClone(ProfileTree* src, int security_token_id) {
383  ms_to_ticks_scale_ = src->ms_to_ticks_scale_;
384  FilteredCloneCallback cb(root_, security_token_id);
385  src->TraverseDepthFirst(&cb);
386  CalculateTotalTicks();
387 }
388 
389 
390 void ProfileTree::SetTickRatePerMs(double ticks_per_ms) {
391  ms_to_ticks_scale_ = ticks_per_ms > 0 ? 1.0 / ticks_per_ms : 1.0;
392 }
393 
394 
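// Helper cursor over a ProfileNode's children, used by the iterative
// depth-first traversal below instead of recursion.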
395 class Position {
396  public:
397  explicit Position(ProfileNode* node)
398  : node(node), child_idx_(0) { }
399  INLINE(ProfileNode* current_child()) {
400  return node->children()->at(child_idx_);
401  }
402  INLINE(bool has_current_child()) {
403  return child_idx_ < node->children()->length();
404  }
405  INLINE(void next_child()) { ++child_idx_; }
406 
407  ProfileNode* node;
408  private:
409  int child_idx_;
410 };
411 
412 
413 // Non-recursive implementation of a depth-first post-order tree traversal.
414 template <typename Callback>
415 void ProfileTree::TraverseDepthFirst(Callback* callback) {
416  List<Position> stack(10);
417  stack.Add(Position(root_));
418  while (stack.length() > 0) {
419  Position& current = stack.last();
420  if (current.has_current_child()) {
421  callback->BeforeTraversingChild(current.node, current.current_child());
422  stack.Add(Position(current.current_child()));
423  } else {
424  callback->AfterAllChildrenTraversed(current.node);
425  if (stack.length() > 1) {
426  Position& parent = stack[stack.length() - 2];
427  callback->AfterChildTraversed(parent.node, current.node);
428  parent.next_child();
429  }
430  // Remove child from the stack.
431  stack.RemoveLast();
432  }
433  }
434 }
435 
436 
437 class CalculateTotalTicksCallback {
438  public:
439  void BeforeTraversingChild(ProfileNode*, ProfileNode*) { }
440 
441  void AfterAllChildrenTraversed(ProfileNode* node) {
442  node->IncreaseTotalTicks(node->self_ticks());
443  }
444 
445  void AfterChildTraversed(ProfileNode* parent, ProfileNode* child) {
446  parent->IncreaseTotalTicks(child->total_ticks());
447  }
448 };
449 
450 
451 void ProfileTree::CalculateTotalTicks() {
452  CalculateTotalTicksCallback cb;
453  TraverseDepthFirst(&cb);
454 }
455 
456 
457 void ProfileTree::ShortPrint() {
458  OS::Print("root: %u %u %.2fms %.2fms\n",
459  root_->total_ticks(), root_->self_ticks(),
460  root_->GetTotalMillis(), root_->GetSelfMillis());
461 }
462 
463 
464 void CpuProfile::AddPath(const Vector<CodeEntry*>& path) {
465  top_down_.AddPathFromEnd(path);
466  bottom_up_.AddPathFromStart(path);
467 }
468 
469 
470 void CpuProfile::CalculateTotalTicks() {
471  top_down_.CalculateTotalTicks();
472  bottom_up_.CalculateTotalTicks();
473 }
474 
475 
476 void CpuProfile::SetActualSamplingRate(double actual_sampling_rate) {
477  top_down_.SetTickRatePerMs(actual_sampling_rate);
478  bottom_up_.SetTickRatePerMs(actual_sampling_rate);
479 }
480 
481 
482 CpuProfile* CpuProfile::FilteredClone(int security_token_id) {
483  ASSERT(security_token_id != TokenEnumerator::kNoSecurityToken);
484  CpuProfile* clone = new CpuProfile(title_, uid_);
485  clone->top_down_.FilteredClone(&top_down_, security_token_id);
486  clone->bottom_up_.FilteredClone(&bottom_up_, security_token_id);
487  return clone;
488 }
489 
490 
491 void CpuProfile::ShortPrint() {
492  OS::Print("top down ");
493  top_down_.ShortPrint();
494  OS::Print("bottom up ");
495  bottom_up_.ShortPrint();
496 }
497 
498 
499 void CpuProfile::Print() {
500  OS::Print("[Top down]:\n");
501  top_down_.Print();
502  OS::Print("[Bottom up]:\n");
503  bottom_up_.Print();
504 }
505 
506 
507 CodeEntry* const CodeMap::kSharedFunctionCodeEntry = NULL;
508 const CodeMap::CodeTreeConfig::Key CodeMap::CodeTreeConfig::kNoKey = NULL;
509 
510 
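// Registers the code range [addr, addr + size); any previously added ranges
// that overlap it are removed first so that lookups stay unambiguous.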
511 void CodeMap::AddCode(Address addr, CodeEntry* entry, unsigned size) {
512  DeleteAllCoveredCode(addr, addr + size);
513  CodeTree::Locator locator;
514  tree_.Insert(addr, &locator);
515  locator.set_value(CodeEntryInfo(entry, size));
516 }
517 
518 
519 void CodeMap::DeleteAllCoveredCode(Address start, Address end) {
520  List<Address> to_delete;
521  Address addr = end - 1;
522  while (addr >= start) {
523  CodeTree::Locator locator;
524  if (!tree_.FindGreatestLessThan(addr, &locator)) break;
525  Address start2 = locator.key(), end2 = start2 + locator.value().size;
526  if (start2 < end && start < end2) to_delete.Add(start2);
527  addr = start2 - 1;
528  }
529  for (int i = 0; i < to_delete.length(); ++i) tree_.Remove(to_delete[i]);
530 }
531 
532 
533 CodeEntry* CodeMap::FindEntry(Address addr) {
534  CodeTree::Locator locator;
535  if (tree_.FindGreatestLessThan(addr, &locator)) {
536  // locator.key() <= addr. Need to check that addr is within entry.
537  const CodeEntryInfo& entry = locator.value();
538  if (addr < (locator.key() + entry.size))
539  return entry.entry;
540  }
541  return NULL;
542 }
543 
544 
545 int CodeMap::GetSharedId(Address addr) {
546  CodeTree::Locator locator;
547  // For shared function entries, 'size' field is used to store their IDs.
548  if (tree_.Find(addr, &locator)) {
549  const CodeEntryInfo& entry = locator.value();
550  ASSERT(entry.entry == kSharedFunctionCodeEntry);
551  return entry.size;
552  } else {
553  tree_.Insert(addr, &locator);
554  int id = next_shared_id_++;
555  locator.set_value(CodeEntryInfo(kSharedFunctionCodeEntry, id));
556  return id;
557  }
558 }
559 
560 
561 void CodeMap::MoveCode(Address from, Address to) {
562  if (from == to) return;
563  CodeTree::Locator locator;
564  if (!tree_.Find(from, &locator)) return;
565  CodeEntryInfo entry = locator.value();
566  tree_.Remove(from);
567  AddCode(to, entry.entry, entry.size);
568 }
569 
570 
571 void CodeMap::CodeTreePrinter::Call(
572  const Address& key, const CodeMap::CodeEntryInfo& value) {
573  OS::Print("%p %5d %s\n", key, value.size, value.entry->name());
574 }
575 
576 
577 void CodeMap::Print() {
578  CodeTreePrinter printer;
579  tree_.ForEach(&printer);
580 }
581 
582 
583 CpuProfilesCollection::CpuProfilesCollection()
584  : profiles_uids_(UidsMatch),
585  current_profiles_semaphore_(OS::CreateSemaphore(1)) {
586  // Create list of unabridged profiles.
587  profiles_by_token_.Add(new List<CpuProfile*>());
588 }
589 
590 
591 static void DeleteCodeEntry(CodeEntry** entry_ptr) {
592  delete *entry_ptr;
593 }
594 
595 static void DeleteCpuProfile(CpuProfile** profile_ptr) {
596  delete *profile_ptr;
597 }
598 
599 static void DeleteProfilesList(List<CpuProfile*>** list_ptr) {
600  if (*list_ptr != NULL) {
601  (*list_ptr)->Iterate(DeleteCpuProfile);
602  delete *list_ptr;
603  }
604 }
605 
606 CpuProfilesCollection::~CpuProfilesCollection() {
607  delete current_profiles_semaphore_;
608  current_profiles_.Iterate(DeleteCpuProfile);
609  detached_profiles_.Iterate(DeleteCpuProfile);
610  profiles_by_token_.Iterate(DeleteProfilesList);
611  code_entries_.Iterate(DeleteCodeEntry);
612 }
613 
614 
615 bool CpuProfilesCollection::StartProfiling(const char* title, unsigned uid) {
616  ASSERT(uid > 0);
617  current_profiles_semaphore_->Wait();
618  if (current_profiles_.length() >= kMaxSimultaneousProfiles) {
619  current_profiles_semaphore_->Signal();
620  return false;
621  }
622  for (int i = 0; i < current_profiles_.length(); ++i) {
623  if (strcmp(current_profiles_[i]->title(), title) == 0) {
624  // Ignore attempts to start profile with the same title.
625  current_profiles_semaphore_->Signal();
626  return false;
627  }
628  }
629  current_profiles_.Add(new CpuProfile(title, uid));
630  current_profiles_semaphore_->Signal();
631  return true;
632 }
633 
634 
635 bool CpuProfilesCollection::StartProfiling(String* title, unsigned uid) {
636  return StartProfiling(GetName(title), uid);
637 }
638 
639 
640 CpuProfile* CpuProfilesCollection::StopProfiling(int security_token_id,
641  const char* title,
642  double actual_sampling_rate) {
643  const int title_len = StrLength(title);
644  CpuProfile* profile = NULL;
645  current_profiles_semaphore_->Wait();
646  for (int i = current_profiles_.length() - 1; i >= 0; --i) {
647  if (title_len == 0 || strcmp(current_profiles_[i]->title(), title) == 0) {
648  profile = current_profiles_.Remove(i);
649  break;
650  }
651  }
652  current_profiles_semaphore_->Signal();
653 
654  if (profile != NULL) {
655  profile->CalculateTotalTicks();
656  profile->SetActualSamplingRate(actual_sampling_rate);
657  List<CpuProfile*>* unabridged_list =
658  profiles_by_token_[TokenToIndex(TokenEnumerator::kNoSecurityToken)];
659  unabridged_list->Add(profile);
660  HashMap::Entry* entry =
661  profiles_uids_.Lookup(reinterpret_cast<void*>(profile->uid()),
662  static_cast<uint32_t>(profile->uid()),
663  true);
664  ASSERT(entry->value == NULL);
665  entry->value = reinterpret_cast<void*>(unabridged_list->length() - 1);
666  return GetProfile(security_token_id, profile->uid());
667  }
668  return NULL;
669 }
670 
671 
672 CpuProfile* CpuProfilesCollection::GetProfile(int security_token_id,
673  unsigned uid) {
674  int index = GetProfileIndex(uid);
675  if (index < 0) return NULL;
676  List<CpuProfile*>* unabridged_list =
677  profiles_by_token_[TokenToIndex(TokenEnumerator::kNoSecurityToken)];
678  if (security_token_id == TokenEnumerator::kNoSecurityToken) {
679  return unabridged_list->at(index);
680  }
681  List<CpuProfile*>* list = GetProfilesList(security_token_id);
682  if (list->at(index) == NULL) {
683  (*list)[index] =
684  unabridged_list->at(index)->FilteredClone(security_token_id);
685  }
686  return list->at(index);
687 }
688 
689 
690 int CpuProfilesCollection::GetProfileIndex(unsigned uid) {
691  HashMap::Entry* entry = profiles_uids_.Lookup(reinterpret_cast<void*>(uid),
692  static_cast<uint32_t>(uid),
693  false);
694  return entry != NULL ?
695  static_cast<int>(reinterpret_cast<intptr_t>(entry->value)) : -1;
696 }
697 
698 
699 bool CpuProfilesCollection::IsLastProfile(const char* title) {
700  // Called from VM thread, and only it can mutate the list,
701  // so no locking is needed here.
702  if (current_profiles_.length() != 1) return false;
703  return StrLength(title) == 0
704  || strcmp(current_profiles_[0]->title(), title) == 0;
705 }
706 
707 
708 void CpuProfilesCollection::RemoveProfile(CpuProfile* profile) {
709  // Called from VM thread for a completed profile.
710  unsigned uid = profile->uid();
711  int index = GetProfileIndex(uid);
712  if (index < 0) {
713  detached_profiles_.RemoveElement(profile);
714  return;
715  }
716  profiles_uids_.Remove(reinterpret_cast<void*>(uid),
717  static_cast<uint32_t>(uid));
718  // Decrement all indexes above the deleted one.
719  for (HashMap::Entry* p = profiles_uids_.Start();
720  p != NULL;
721  p = profiles_uids_.Next(p)) {
722  intptr_t p_index = reinterpret_cast<intptr_t>(p->value);
723  if (p_index > index) {
724  p->value = reinterpret_cast<void*>(p_index - 1);
725  }
726  }
727  for (int i = 0; i < profiles_by_token_.length(); ++i) {
728  List<CpuProfile*>* list = profiles_by_token_[i];
729  if (list != NULL && index < list->length()) {
730  // Move all filtered clones into detached_profiles_,
731  // so we can know that they are still in use.
732  CpuProfile* cloned_profile = list->Remove(index);
733  if (cloned_profile != NULL && cloned_profile != profile) {
734  detached_profiles_.Add(cloned_profile);
735  }
736  }
737  }
738 }
739 
740 
741 int CpuProfilesCollection::TokenToIndex(int security_token_id) {
742  ASSERT(TokenEnumerator::kNoSecurityToken == -1);
743  return security_token_id + 1; // kNoSecurityToken -> 0, 0 -> 1, ...
744 }
745 
746 
747 List<CpuProfile*>* CpuProfilesCollection::GetProfilesList(
748  int security_token_id) {
749  const int index = TokenToIndex(security_token_id);
750  const int lists_to_add = index - profiles_by_token_.length() + 1;
751  if (lists_to_add > 0) profiles_by_token_.AddBlock(NULL, lists_to_add);
752  List<CpuProfile*>* unabridged_list =
753  profiles_by_token_[TokenToIndex(TokenEnumerator::kNoSecurityToken)];
754  const int current_count = unabridged_list->length();
755  if (profiles_by_token_[index] == NULL) {
756  profiles_by_token_[index] = new List<CpuProfile*>(current_count);
757  }
758  List<CpuProfile*>* list = profiles_by_token_[index];
759  const int profiles_to_add = current_count - list->length();
760  if (profiles_to_add > 0) list->AddBlock(NULL, profiles_to_add);
761  return list;
762 }
763 
764 
765 List<CpuProfile*>* CpuProfilesCollection::Profiles(int security_token_id) {
766  List<CpuProfile*>* unabridged_list =
767  profiles_by_token_[TokenToIndex(TokenEnumerator::kNoSecurityToken)];
768  if (security_token_id == TokenEnumerator::kNoSecurityToken) {
769  return unabridged_list;
770  }
771  List<CpuProfile*>* list = GetProfilesList(security_token_id);
772  const int current_count = unabridged_list->length();
773  for (int i = 0; i < current_count; ++i) {
774  if (list->at(i) == NULL) {
775  (*list)[i] = unabridged_list->at(i)->FilteredClone(security_token_id);
776  }
777  }
778  return list;
779 }
780 
781 
782 CodeEntry* CpuProfilesCollection::NewCodeEntry(Logger::LogEventsAndTags tag,
783  String* name,
784  String* resource_name,
785  int line_number) {
786  CodeEntry* entry = new CodeEntry(tag,
787  CodeEntry::kEmptyNamePrefix,
788  GetFunctionName(name),
789  GetName(resource_name),
790  line_number,
791  TokenEnumerator::kNoSecurityToken);
792  code_entries_.Add(entry);
793  return entry;
794 }
795 
796 
797 CodeEntry* CpuProfilesCollection::NewCodeEntry(Logger::LogEventsAndTags tag,
798  const char* name) {
799  CodeEntry* entry = new CodeEntry(tag,
800  CodeEntry::kEmptyNamePrefix,
801  GetFunctionName(name),
802  "",
803  v8::CpuProfileNode::kNoLineNumberInfo,
804  TokenEnumerator::kNoSecurityToken);
805  code_entries_.Add(entry);
806  return entry;
807 }
808 
809 
810 CodeEntry* CpuProfilesCollection::NewCodeEntry(Logger::LogEventsAndTags tag,
811  const char* name_prefix,
812  String* name) {
813  CodeEntry* entry = new CodeEntry(tag,
814  name_prefix,
815  GetName(name),
816  "",
817  v8::CpuProfileNode::kNoLineNumberInfo,
818  TokenEnumerator::kInheritsSecurityToken);
819  code_entries_.Add(entry);
820  return entry;
821 }
822 
823 
824 CodeEntry* CpuProfilesCollection::NewCodeEntry(Logger::LogEventsAndTags tag,
825  int args_count) {
826  CodeEntry* entry = new CodeEntry(tag,
827  "args_count: ",
828  GetName(args_count),
829  "",
830  v8::CpuProfileNode::kNoLineNumberInfo,
831  TokenEnumerator::kInheritsSecurityToken);
832  code_entries_.Add(entry);
833  return entry;
834 }
835 
836 
837 void CpuProfilesCollection::AddPathToCurrentProfiles(
838  const Vector<CodeEntry*>& path) {
839  // As starting / stopping profiles is rare relative to this
840  // method, we don't bother minimizing the duration of lock holding,
841  // e.g. by copying the contents of the list to a local vector.
842  current_profiles_semaphore_->Wait();
843  for (int i = 0; i < current_profiles_.length(); ++i) {
844  current_profiles_[i]->AddPath(path);
845  }
846  current_profiles_semaphore_->Signal();
847 }
848 
849 
850 void SampleRateCalculator::Tick() {
851  if (--wall_time_query_countdown_ == 0)
852  UpdateMeasurements(OS::TimeCurrentMillis());
853 }
854 
855 
856 void SampleRateCalculator::UpdateMeasurements(double current_time) {
857  if (measurements_count_++ != 0) {
858  const double measured_ticks_per_ms =
859  (kWallTimeQueryIntervalMs * ticks_per_ms_) /
860  (current_time - last_wall_time_);
861  // Update the average value.
862  ticks_per_ms_ +=
863  (measured_ticks_per_ms - ticks_per_ms_) / measurements_count_;
864  // Update the externally accessible result.
865  result_ = static_cast<AtomicWord>(ticks_per_ms_ * kResultScale);
866  }
867  last_wall_time_ = current_time;
868  wall_time_query_countdown_ =
869  static_cast<unsigned>(kWallTimeQueryIntervalMs * ticks_per_ms_);
870 }
871 
872 
873 const char* const ProfileGenerator::kAnonymousFunctionName =
874  "(anonymous function)";
875 const char* const ProfileGenerator::kProgramEntryName =
876  "(program)";
877 const char* const ProfileGenerator::kGarbageCollectorEntryName =
878  "(garbage collector)";
879 
880 
881 ProfileGenerator::ProfileGenerator(CpuProfilesCollection* profiles)
882  : profiles_(profiles),
883  program_entry_(
884  profiles->NewCodeEntry(Logger::FUNCTION_TAG, kProgramEntryName)),
885  gc_entry_(
886  profiles->NewCodeEntry(Logger::BUILTIN_TAG,
887  kGarbageCollectorEntryName)) {
888 }
889 
890 
891 void ProfileGenerator::RecordTickSample(const TickSample& sample) {
892  // Allocate space for stack frames + pc + function + vm-state.
893  ScopedVector<CodeEntry*> entries(sample.frames_count + 3);
894  // As the actual number of decoded code entries may vary, initialize
895  // the entries vector with NULL values.
896  CodeEntry** entry = entries.start();
897  memset(entry, 0, entries.length() * sizeof(*entry));
898  if (sample.pc != NULL) {
899  *entry++ = code_map_.FindEntry(sample.pc);
900 
901  if (sample.has_external_callback) {
902  // Don't use PC when in external callback code, as it can point
903  // inside callback's code, and we will erroneously report
904  // that a callback calls itself.
905  *(entries.start()) = NULL;
906  *entry++ = code_map_.FindEntry(sample.external_callback);
907  } else if (sample.tos != NULL) {
908  // Find out if the top of the stack was pointing inside a JS function,
909  // meaning that we have encountered a frameless invocation.
910  *entry = code_map_.FindEntry(sample.tos);
911  if (*entry != NULL && !(*entry)->is_js_function()) {
912  *entry = NULL;
913  }
914  entry++;
915  }
916 
917  for (const Address* stack_pos = sample.stack,
918  *stack_end = stack_pos + sample.frames_count;
919  stack_pos != stack_end;
920  ++stack_pos) {
921  *entry++ = code_map_.FindEntry(*stack_pos);
922  }
923  }
924 
925  if (FLAG_prof_browser_mode) {
926  bool no_symbolized_entries = true;
927  for (CodeEntry** e = entries.start(); e != entry; ++e) {
928  if (*e != NULL) {
929  no_symbolized_entries = false;
930  break;
931  }
932  }
933  // If no frames were symbolized, put the VM state entry in.
934  if (no_symbolized_entries) {
935  *entry++ = EntryForVMState(sample.state);
936  }
937  }
938 
939  profiles_->AddPathToCurrentProfiles(entries);
940 }
941 
942 
943 HeapGraphEdge::HeapGraphEdge(Type type, const char* name, int from, int to)
944  : type_(type),
945  from_index_(from),
946  to_index_(to),
947  name_(name) {
948  ASSERT(type == kContextVariable
949  || type == kProperty
950  || type == kInternal
951  || type == kShortcut);
952 }
953 
954 
955 HeapGraphEdge::HeapGraphEdge(Type type, int index, int from, int to)
956  : type_(type),
957  from_index_(from),
958  to_index_(to),
959  index_(index) {
960  ASSERT(type == kElement || type == kHidden || type == kWeak);
961 }
962 
963 
964 void HeapGraphEdge::ReplaceToIndexWithEntry(HeapSnapshot* snapshot) {
965  to_entry_ = &snapshot->entries()[to_index_];
966 }
967 
968 
969 const int HeapEntry::kNoEntry = -1;
970 
971 HeapEntry::HeapEntry(HeapSnapshot* snapshot,
972  Type type,
973  const char* name,
974  SnapshotObjectId id,
975  int self_size)
976  : type_(type),
977  children_count_(0),
978  children_index_(-1),
979  self_size_(self_size),
980  id_(id),
981  snapshot_(snapshot),
982  name_(name) { }
983 
984 
985 void HeapEntry::SetNamedReference(HeapGraphEdge::Type type,
986  const char* name,
987  HeapEntry* entry) {
988  HeapGraphEdge edge(type, name, this->index(), entry->index());
989  snapshot_->edges().Add(edge);
990  ++children_count_;
991 }
992 
993 
994 void HeapEntry::SetIndexedReference(HeapGraphEdge::Type type,
995  int index,
996  HeapEntry* entry) {
997  HeapGraphEdge edge(type, index, this->index(), entry->index());
998  snapshot_->edges().Add(edge);
999  ++children_count_;
1000 }
1001 
1002 
1003 Handle<HeapObject> HeapEntry::GetHeapObject() {
1004  return snapshot_->collection()->FindHeapObjectById(id());
1005 }
1006 
1007 
1008 void HeapEntry::Print(
1009  const char* prefix, const char* edge_name, int max_depth, int indent) {
1010  STATIC_CHECK(sizeof(unsigned) == sizeof(id()));
1011  OS::Print("%6d @%6u %*c %s%s: ",
1012  self_size(), id(), indent, ' ', prefix, edge_name);
1013  if (type() != kString) {
1014  OS::Print("%s %.40s\n", TypeAsString(), name_);
1015  } else {
1016  OS::Print("\"");
1017  const char* c = name_;
1018  while (*c && (c - name_) <= 40) {
1019  if (*c != '\n')
1020  OS::Print("%c", *c);
1021  else
1022  OS::Print("\\n");
1023  ++c;
1024  }
1025  OS::Print("\"\n");
1026  }
1027  if (--max_depth == 0) return;
1028  Vector<HeapGraphEdge*> ch = children();
1029  for (int i = 0; i < ch.length(); ++i) {
1030  HeapGraphEdge& edge = *ch[i];
1031  const char* edge_prefix = "";
1032  EmbeddedVector<char, 64> index;
1033  const char* edge_name = index.start();
1034  switch (edge.type()) {
1035  case HeapGraphEdge::kContextVariable:
1036  edge_prefix = "#";
1037  edge_name = edge.name();
1038  break;
1039  case HeapGraphEdge::kElement:
1040  OS::SNPrintF(index, "%d", edge.index());
1041  break;
1042  case HeapGraphEdge::kInternal:
1043  edge_prefix = "$";
1044  edge_name = edge.name();
1045  break;
1046  case HeapGraphEdge::kProperty:
1047  edge_name = edge.name();
1048  break;
1049  case HeapGraphEdge::kHidden:
1050  edge_prefix = "$";
1051  OS::SNPrintF(index, "%d", edge.index());
1052  break;
1053  case HeapGraphEdge::kShortcut:
1054  edge_prefix = "^";
1055  edge_name = edge.name();
1056  break;
1057  case HeapGraphEdge::kWeak:
1058  edge_prefix = "w";
1059  OS::SNPrintF(index, "%d", edge.index());
1060  break;
1061  default:
1062  OS::SNPrintF(index, "!!! unknown edge type: %d ", edge.type());
1063  }
1064  edge.to()->Print(edge_prefix, edge_name, max_depth, indent + 2);
1065  }
1066 }
1067 
1068 
1069 const char* HeapEntry::TypeAsString() {
1070  switch (type()) {
1071  case kHidden: return "/hidden/";
1072  case kObject: return "/object/";
1073  case kClosure: return "/closure/";
1074  case kString: return "/string/";
1075  case kCode: return "/code/";
1076  case kArray: return "/array/";
1077  case kRegExp: return "/regexp/";
1078  case kHeapNumber: return "/number/";
1079  case kNative: return "/native/";
1080  case kSynthetic: return "/synthetic/";
1081  default: return "???";
1082  }
1083 }
1084 
1085 
1086 // It is very important to keep objects that form a heap snapshot
1087 // as small as possible.
1088 namespace { // Avoid littering the global namespace.
1089 
1090 template <size_t ptr_size> struct SnapshotSizeConstants;
1091 
1092 template <> struct SnapshotSizeConstants<4> {
1093  static const int kExpectedHeapGraphEdgeSize = 12;
1094  static const int kExpectedHeapEntrySize = 24;
1095  static const int kExpectedHeapSnapshotsCollectionSize = 96;
1096  static const int kExpectedHeapSnapshotSize = 136;
1097  static const size_t kMaxSerializableSnapshotRawSize = 256 * MB;
1098 };
1099 
1100 template <> struct SnapshotSizeConstants<8> {
1101  static const int kExpectedHeapGraphEdgeSize = 24;
1102  static const int kExpectedHeapEntrySize = 32;
1103  static const int kExpectedHeapSnapshotsCollectionSize = 144;
1104  static const int kExpectedHeapSnapshotSize = 168;
1105  static const uint64_t kMaxSerializableSnapshotRawSize =
1106  static_cast<uint64_t>(6000) * MB;
1107 };
1108 
1109 } // namespace
1110 
1111 HeapSnapshot::HeapSnapshot(HeapSnapshotsCollection* collection,
1112  HeapSnapshot::Type type,
1113  const char* title,
1114  unsigned uid)
1115  : collection_(collection),
1116  type_(type),
1117  title_(title),
1118  uid_(uid),
1119  root_index_(HeapEntry::kNoEntry),
1120  gc_roots_index_(HeapEntry::kNoEntry),
1121  natives_root_index_(HeapEntry::kNoEntry),
1122  max_snapshot_js_object_id_(0) {
1123  STATIC_CHECK(
1124  sizeof(HeapGraphEdge) ==
1125  SnapshotSizeConstants<kPointerSize>::kExpectedHeapGraphEdgeSize);
1126  STATIC_CHECK(
1127  sizeof(HeapEntry) ==
1128  SnapshotSizeConstants<kPointerSize>::kExpectedHeapEntrySize);
1129  for (int i = 0; i < VisitorSynchronization::kNumberOfSyncTags; ++i) {
1130  gc_subroot_indexes_[i] = HeapEntry::kNoEntry;
1131  }
1132 }
1133 
1134 
1135 void HeapSnapshot::Delete() {
1136  collection_->RemoveSnapshot(this);
1137  delete this;
1138 }
1139 
1140 
1141 void HeapSnapshot::RememberLastJSObjectId() {
1142  max_snapshot_js_object_id_ = collection_->last_assigned_id();
1143 }
1144 
1145 
1146 HeapEntry* HeapSnapshot::AddRootEntry() {
1147  ASSERT(root_index_ == HeapEntry::kNoEntry);
1148  ASSERT(entries_.is_empty()); // Root entry must be the first one.
1149  HeapEntry* entry = AddEntry(HeapEntry::kObject,
1150  "",
1151  HeapObjectsMap::kInternalRootObjectId,
1152  0);
1153  root_index_ = entry->index();
1154  ASSERT(root_index_ == 0);
1155  return entry;
1156 }
1157 
1158 
1159 HeapEntry* HeapSnapshot::AddGcRootsEntry() {
1160  ASSERT(gc_roots_index_ == HeapEntry::kNoEntry);
1161  HeapEntry* entry = AddEntry(HeapEntry::kObject,
1162  "(GC roots)",
1163  HeapObjectsMap::kGcRootsObjectId,
1164  0);
1165  gc_roots_index_ = entry->index();
1166  return entry;
1167 }
1168 
1169 
1170 HeapEntry* HeapSnapshot::AddGcSubrootEntry(int tag) {
1171  ASSERT(gc_subroot_indexes_[tag] == HeapEntry::kNoEntry);
1172  ASSERT(0 <= tag && tag < VisitorSynchronization::kNumberOfSyncTags);
1173  HeapEntry* entry = AddEntry(
1174  HeapEntry::kObject,
1175  VisitorSynchronization::kTagNames[tag],
1176  HeapObjectsMap::GetNthGcSubrootId(tag),
1177  0);
1178  gc_subroot_indexes_[tag] = entry->index();
1179  return entry;
1180 }
1181 
1182 
1183 HeapEntry* HeapSnapshot::AddEntry(HeapEntry::Type type,
1184  const char* name,
1185  SnapshotObjectId id,
1186  int size) {
1187  HeapEntry entry(this, type, name, id, size);
1188  entries_.Add(entry);
1189  return &entries_.last();
1190 }
1191 
1192 
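// Edges are accumulated in one flat list while the snapshot is filled. This
// pass gives each entry its slice of the children() vector and rewires every
// edge from a 'to' index to a direct entry pointer.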
1193 void HeapSnapshot::FillChildren() {
1194  ASSERT(children().is_empty());
1195  children().Allocate(edges().length());
1196  int children_index = 0;
1197  for (int i = 0; i < entries().length(); ++i) {
1198  HeapEntry* entry = &entries()[i];
1199  children_index = entry->set_children_index(children_index);
1200  }
1201  ASSERT(edges().length() == children_index);
1202  for (int i = 0; i < edges().length(); ++i) {
1203  HeapGraphEdge* edge = &edges()[i];
1204  edge->ReplaceToIndexWithEntry(this);
1205  edge->from()->add_child(edge);
1206  }
1207 }
1208 
1209 
1210 class FindEntryById {
1211  public:
1212  explicit FindEntryById(SnapshotObjectId id) : id_(id) { }
1213  int operator()(HeapEntry* const* entry) {
1214  if ((*entry)->id() == id_) return 0;
1215  return (*entry)->id() < id_ ? -1 : 1;
1216  }
1217  private:
1218  SnapshotObjectId id_;
1219 };
1220 
1221 
1222 HeapEntry* HeapSnapshot::GetEntryById(SnapshotObjectId id) {
1223  List<HeapEntry*>* entries_by_id = GetSortedEntriesList();
1224  // Perform a binary search by id.
1225  int index = SortedListBSearch(*entries_by_id, FindEntryById(id));
1226  if (index == -1)
1227  return NULL;
1228  return entries_by_id->at(index);
1229 }
1230 
1231 
1232 template<class T>
1233 static int SortByIds(const T* entry1_ptr,
1234  const T* entry2_ptr) {
1235  if ((*entry1_ptr)->id() == (*entry2_ptr)->id()) return 0;
1236  return (*entry1_ptr)->id() < (*entry2_ptr)->id() ? -1 : 1;
1237 }
1238 
1239 
1240 List<HeapEntry*>* HeapSnapshot::GetSortedEntriesList() {
1241  if (sorted_entries_.is_empty()) {
1242  sorted_entries_.Allocate(entries_.length());
1243  for (int i = 0; i < entries_.length(); ++i) {
1244  sorted_entries_[i] = &entries_[i];
1245  }
1246  sorted_entries_.Sort(SortByIds);
1247  }
1248  return &sorted_entries_;
1249 }
1250 
1251 
1252 void HeapSnapshot::Print(int max_depth) {
1253  root()->Print("", "", max_depth, 0);
1254 }
1255 
1256 
1257 template<typename T, class P>
1258 static size_t GetMemoryUsedByList(const List<T, P>& list) {
1259  return list.length() * sizeof(T) + sizeof(list);
1260 }
1261 
1262 
1263 size_t HeapSnapshot::RawSnapshotSize() const {
1264  STATIC_CHECK(SnapshotSizeConstants<kPointerSize>::kExpectedHeapSnapshotSize ==
1265  sizeof(HeapSnapshot)); // NOLINT
1266  return
1267  sizeof(*this) +
1268  GetMemoryUsedByList(entries_) +
1269  GetMemoryUsedByList(edges_) +
1270  GetMemoryUsedByList(children_) +
1271  GetMemoryUsedByList(sorted_entries_);
1272 }
1273 
1274 
1275 // We split IDs on evens for embedder objects (see
1276 // HeapObjectsMap::GenerateId) and odds for native objects.
1277 const SnapshotObjectId HeapObjectsMap::kInternalRootObjectId = 1;
1278 const SnapshotObjectId HeapObjectsMap::kGcRootsObjectId =
1279  HeapObjectsMap::kInternalRootObjectId + HeapObjectsMap::kObjectIdStep;
1280 const SnapshotObjectId HeapObjectsMap::kGcRootsFirstSubrootId =
1281  HeapObjectsMap::kGcRootsObjectId + HeapObjectsMap::kObjectIdStep;
1282 const SnapshotObjectId HeapObjectsMap::kFirstAvailableObjectId =
1283  HeapObjectsMap::kGcRootsFirstSubrootId +
1284  VisitorSynchronization::kNumberOfSyncTags * HeapObjectsMap::kObjectIdStep;
1285 
1286 HeapObjectsMap::HeapObjectsMap()
1287  : next_id_(kFirstAvailableObjectId),
1288  entries_map_(AddressesMatch) {
1289  // This dummy element solves a problem with entries_map_.
1290  // When we do lookup in HashMap we see no difference between two cases:
1291  // it has an entry with NULL as the value or it has created
1292  // a new entry on the fly with NULL as the default value.
1293  // With such a dummy element we have a guarantee that all entries_map_
1294  // entries will have the value field greater than 0.
1295  // This fact is used in the MoveObject method.
1296  entries_.Add(EntryInfo(0, NULL, 0));
1297 }
1298 
1299 
1300 void HeapObjectsMap::SnapshotGenerationFinished() {
1301  RemoveDeadEntries();
1302 }
1303 
1304 
1305 void HeapObjectsMap::MoveObject(Address from, Address to) {
1306  ASSERT(to != NULL);
1307  ASSERT(from != NULL);
1308  if (from == to) return;
1309  void* from_value = entries_map_.Remove(from, AddressHash(from));
1310  if (from_value == NULL) return;
1311  int from_entry_info_index =
1312  static_cast<int>(reinterpret_cast<intptr_t>(from_value));
1313  entries_.at(from_entry_info_index).addr = to;
1314  HashMap::Entry* to_entry = entries_map_.Lookup(to, AddressHash(to), true);
1315  if (to_entry->value != NULL) {
1316  int to_entry_info_index =
1317  static_cast<int>(reinterpret_cast<intptr_t>(to_entry->value));
1318  // Without this operation we would have two EntryInfos with the same
1319  // value in the addr field. That is bad because later, in RemoveDeadEntries,
1320  // one of these entries would be removed together with the corresponding
1321  // entries_map_ entry.
1322  entries_.at(to_entry_info_index).addr = NULL;
1323  }
1324  to_entry->value = reinterpret_cast<void*>(from_entry_info_index);
1325 }
1326 
1327 
1328 SnapshotObjectId HeapObjectsMap::FindEntry(Address addr) {
1329  HashMap::Entry* entry = entries_map_.Lookup(addr, AddressHash(addr), false);
1330  if (entry == NULL) return 0;
1331  int entry_index = static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
1332  EntryInfo& entry_info = entries_.at(entry_index);
1333  ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
1334  return entry_info.id;
1335 }
1336 
1337 
1338 SnapshotObjectId HeapObjectsMap::FindOrAddEntry(Address addr,
1339  unsigned int size) {
1340  ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
1341  HashMap::Entry* entry = entries_map_.Lookup(addr, AddressHash(addr), true);
1342  if (entry->value != NULL) {
1343  int entry_index =
1344  static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
1345  EntryInfo& entry_info = entries_.at(entry_index);
1346  entry_info.accessed = true;
1347  entry_info.size = size;
1348  return entry_info.id;
1349  }
1350  entry->value = reinterpret_cast<void*>(entries_.length());
1351  SnapshotObjectId id = next_id_;
1352  next_id_ += kObjectIdStep;
1353  entries_.Add(EntryInfo(id, addr, size));
1354  ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
1355  return id;
1356 }
1357 
1358 
1359 void HeapObjectsMap::StopHeapObjectsTracking() {
1360  time_intervals_.Clear();
1361 }
1362 
1363 void HeapObjectsMap::UpdateHeapObjectsMap() {
1364  HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask,
1365  "HeapSnapshotsCollection::UpdateHeapObjectsMap");
1366  HeapIterator iterator;
1367  for (HeapObject* obj = iterator.next();
1368  obj != NULL;
1369  obj = iterator.next()) {
1370  FindOrAddEntry(obj->address(), obj->Size());
1371  }
1372  RemoveDeadEntries();
1373 }
1374 
1375 
1376 SnapshotObjectId HeapObjectsMap::PushHeapObjectsStats(OutputStream* stream) {
1377  UpdateHeapObjectsMap();
1378  time_intervals_.Add(TimeInterval(next_id_));
1379  int prefered_chunk_size = stream->GetChunkSize();
1380  List<v8::HeapStatsUpdate> stats_buffer;
1381  ASSERT(!entries_.is_empty());
1382  EntryInfo* entry_info = &entries_.first();
1383  EntryInfo* end_entry_info = &entries_.last() + 1;
1384  for (int time_interval_index = 0;
1385  time_interval_index < time_intervals_.length();
1386  ++time_interval_index) {
1387  TimeInterval& time_interval = time_intervals_[time_interval_index];
1388  SnapshotObjectId time_interval_id = time_interval.id;
1389  uint32_t entries_size = 0;
1390  EntryInfo* start_entry_info = entry_info;
1391  while (entry_info < end_entry_info && entry_info->id < time_interval_id) {
1392  entries_size += entry_info->size;
1393  ++entry_info;
1394  }
1395  uint32_t entries_count =
1396  static_cast<uint32_t>(entry_info - start_entry_info);
1397  if (time_interval.count != entries_count ||
1398  time_interval.size != entries_size) {
1399  stats_buffer.Add(v8::HeapStatsUpdate(
1400  time_interval_index,
1401  time_interval.count = entries_count,
1402  time_interval.size = entries_size));
1403  if (stats_buffer.length() >= prefered_chunk_size) {
1404  OutputStream::WriteResult result = stream->WriteHeapStatsUpdate(
1405  &stats_buffer.first(), stats_buffer.length());
1406  if (result == OutputStream::kAbort) return last_assigned_id();
1407  stats_buffer.Clear();
1408  }
1409  }
1410  }
1411  ASSERT(entry_info == end_entry_info);
1412  if (!stats_buffer.is_empty()) {
1413  OutputStream::WriteResult result = stream->WriteHeapStatsUpdate(
1414  &stats_buffer.first(), stats_buffer.length());
1415  if (result == OutputStream::kAbort) return last_assigned_id();
1416  }
1417  stream->EndOfStream();
1418  return last_assigned_id();
1419 }
1420 
1421 
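// Compacts entries_: entries whose objects were not marked as accessed during
// the last heap traversal are dropped together with their entries_map_ slots,
// and the surviving entries get their map indices updated.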
1422 void HeapObjectsMap::RemoveDeadEntries() {
1423  ASSERT(entries_.length() > 0 &&
1424  entries_.at(0).id == 0 &&
1425  entries_.at(0).addr == NULL);
1426  int first_free_entry = 1;
1427  for (int i = 1; i < entries_.length(); ++i) {
1428  EntryInfo& entry_info = entries_.at(i);
1429  if (entry_info.accessed) {
1430  if (first_free_entry != i) {
1431  entries_.at(first_free_entry) = entry_info;
1432  }
1433  entries_.at(first_free_entry).accessed = false;
1434  HashMap::Entry* entry = entries_map_.Lookup(
1435  entry_info.addr, AddressHash(entry_info.addr), false);
1436  ASSERT(entry);
1437  entry->value = reinterpret_cast<void*>(first_free_entry);
1438  ++first_free_entry;
1439  } else {
1440  if (entry_info.addr) {
1441  entries_map_.Remove(entry_info.addr, AddressHash(entry_info.addr));
1442  }
1443  }
1444  }
1445  entries_.Rewind(first_free_entry);
1446  ASSERT(static_cast<uint32_t>(entries_.length()) - 1 ==
1447  entries_map_.occupancy());
1448 }
1449 
1450 
1451 SnapshotObjectId HeapObjectsMap::GenerateId(v8::RetainedObjectInfo* info) {
1452  SnapshotObjectId id = static_cast<SnapshotObjectId>(info->GetHash());
1453  const char* label = info->GetLabel();
1454  id ^= HashSequentialString(label,
1455  static_cast<int>(strlen(label)),
1456  HEAP->HashSeed());
1457  intptr_t element_count = info->GetElementCount();
1458  if (element_count != -1)
1459  id ^= ComputeIntegerHash(static_cast<uint32_t>(element_count),
1460  v8::internal::kZeroHashSeed);
1461  return id << 1;
1462 }
1463 
1464 
1465 size_t HeapObjectsMap::GetUsedMemorySize() const {
1466  return
1467  sizeof(*this) +
1468  sizeof(HashMap::Entry) * entries_map_.capacity() +
1469  GetMemoryUsedByList(entries_) +
1470  GetMemoryUsedByList(time_intervals_);
1471 }
1472 
1473 
1474 HeapSnapshotsCollection::HeapSnapshotsCollection()
1475  : is_tracking_objects_(false),
1476  snapshots_uids_(HeapSnapshotsMatch),
1477  token_enumerator_(new TokenEnumerator()) {
1478 }
1479 
1480 
1481 static void DeleteHeapSnapshot(HeapSnapshot** snapshot_ptr) {
1482  delete *snapshot_ptr;
1483 }
1484 
1485 
1486 HeapSnapshotsCollection::~HeapSnapshotsCollection() {
1487  delete token_enumerator_;
1488  snapshots_.Iterate(DeleteHeapSnapshot);
1489 }
1490 
1491 
1492 HeapSnapshot* HeapSnapshotsCollection::NewSnapshot(HeapSnapshot::Type type,
1493  const char* name,
1494  unsigned uid) {
1495  is_tracking_objects_ = true; // Start watching for heap objects moves.
1496  return new HeapSnapshot(this, type, name, uid);
1497 }
1498 
1499 
1500 void HeapSnapshotsCollection::SnapshotGenerationFinished(
1501  HeapSnapshot* snapshot) {
1502  ids_.SnapshotGenerationFinished();
1503  if (snapshot != NULL) {
1504  snapshots_.Add(snapshot);
1505  HashMap::Entry* entry =
1506  snapshots_uids_.Lookup(reinterpret_cast<void*>(snapshot->uid()),
1507  static_cast<uint32_t>(snapshot->uid()),
1508  true);
1509  ASSERT(entry->value == NULL);
1510  entry->value = snapshot;
1511  }
1512 }
1513 
1514 
1515 HeapSnapshot* HeapSnapshotsCollection::GetSnapshot(unsigned uid) {
1516  HashMap::Entry* entry = snapshots_uids_.Lookup(reinterpret_cast<void*>(uid),
1517  static_cast<uint32_t>(uid),
1518  false);
1519  return entry != NULL ? reinterpret_cast<HeapSnapshot*>(entry->value) : NULL;
1520 }
1521 
1522 
1523 void HeapSnapshotsCollection::RemoveSnapshot(HeapSnapshot* snapshot) {
1524  snapshots_.RemoveElement(snapshot);
1525  unsigned uid = snapshot->uid();
1526  snapshots_uids_.Remove(reinterpret_cast<void*>(uid),
1527  static_cast<uint32_t>(uid));
1528 }
1529 
1530 
1531 Handle<HeapObject> HeapSnapshotsCollection::FindHeapObjectById(
1532  SnapshotObjectId id) {
1533  // First perform a full GC in order to avoid dead objects.
1534  HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask,
1535  "HeapSnapshotsCollection::FindHeapObjectById");
1536  AssertNoAllocation no_allocation;
1537  HeapObject* object = NULL;
1538  HeapIterator iterator(HeapIterator::kFilterUnreachable);
1539  // Make sure that object with the given id is still reachable.
1540  for (HeapObject* obj = iterator.next();
1541  obj != NULL;
1542  obj = iterator.next()) {
1543  if (ids_.FindEntry(obj->address()) == id) {
1544  ASSERT(object == NULL);
1545  object = obj;
1546  // Can't break -- kFilterUnreachable requires full heap traversal.
1547  }
1548  }
1549  return object != NULL ? Handle<HeapObject>(object) : Handle<HeapObject>();
1550 }
1551 
1552 
1553 size_t HeapSnapshotsCollection::GetUsedMemorySize() const {
1554  STATIC_CHECK(SnapshotSizeConstants<kPointerSize>::
1555  kExpectedHeapSnapshotsCollectionSize ==
1556  sizeof(HeapSnapshotsCollection)); // NOLINT
1557  size_t size = sizeof(*this);
1558  size += names_.GetUsedMemorySize();
1559  size += ids_.GetUsedMemorySize();
1560  size += sizeof(HashMap::Entry) * snapshots_uids_.capacity();
1561  size += GetMemoryUsedByList(snapshots_);
1562  for (int i = 0; i < snapshots_.length(); ++i) {
1563  size += snapshots_[i]->RawSnapshotSize();
1564  }
1565  return size;
1566 }
1567 
1568 
1569 HeapEntriesMap::HeapEntriesMap()
1570  : entries_(HeapThingsMatch) {
1571 }
1572 
1573 
1574 int HeapEntriesMap::Map(HeapThing thing) {
1575  HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), false);
1576  if (cache_entry == NULL) return HeapEntry::kNoEntry;
1577  return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
1578 }
1579 
1580 
1581 void HeapEntriesMap::Pair(HeapThing thing, int entry) {
1582  HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), true);
1583  ASSERT(cache_entry->value == NULL);
1584  cache_entry->value = reinterpret_cast<void*>(static_cast<intptr_t>(entry));
1585 }
1586 
1587 
1588 HeapObjectsSet::HeapObjectsSet()
1589  : entries_(HeapEntriesMap::HeapThingsMatch) {
1590 }
1591 
1592 
1593 void HeapObjectsSet::Clear() {
1594  entries_.Clear();
1595 }
1596 
1597 
1598 bool HeapObjectsSet::Contains(Object* obj) {
1599  if (!obj->IsHeapObject()) return false;
1600  HeapObject* object = HeapObject::cast(obj);
1601  return entries_.Lookup(object, HeapEntriesMap::Hash(object), false) != NULL;
1602 }
1603 
1604 
1605 void HeapObjectsSet::Insert(Object* obj) {
1606  if (!obj->IsHeapObject()) return;
1607  HeapObject* object = HeapObject::cast(obj);
1608  entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
1609 }
1610 
1611 
1612 const char* HeapObjectsSet::GetTag(Object* obj) {
1613  HeapObject* object = HeapObject::cast(obj);
1614  HashMap::Entry* cache_entry =
1615  entries_.Lookup(object, HeapEntriesMap::Hash(object), false);
1616  return cache_entry != NULL
1617  ? reinterpret_cast<const char*>(cache_entry->value)
1618  : NULL;
1619 }
1620 
1621 
1622 void HeapObjectsSet::SetTag(Object* obj, const char* tag) {
1623  if (!obj->IsHeapObject()) return;
1624  HeapObject* object = HeapObject::cast(obj);
1625  HashMap::Entry* cache_entry =
1626  entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
1627  cache_entry->value = const_cast<char*>(tag);
1628 }
1629 
1630 
1631 HeapObject* const V8HeapExplorer::kInternalRootObject =
1632  reinterpret_cast<HeapObject*>(
1633  static_cast<intptr_t>(HeapObjectsMap::kInternalRootObjectId));
1634 HeapObject* const V8HeapExplorer::kGcRootsObject =
1635  reinterpret_cast<HeapObject*>(
1636  static_cast<intptr_t>(HeapObjectsMap::kGcRootsObjectId));
1637 HeapObject* const V8HeapExplorer::kFirstGcSubrootObject =
1638  reinterpret_cast<HeapObject*>(
1639  static_cast<intptr_t>(HeapObjectsMap::kGcRootsFirstSubrootId));
1640 HeapObject* const V8HeapExplorer::kLastGcSubrootObject =
1641  reinterpret_cast<HeapObject*>(
1642  static_cast<intptr_t>(HeapObjectsMap::kFirstAvailableObjectId));
1643 
1644 
1645 V8HeapExplorer::V8HeapExplorer(
1646  HeapSnapshot* snapshot,
1647  SnapshottingProgressReportingInterface* progress)
1648  : heap_(Isolate::Current()->heap()),
1649  snapshot_(snapshot),
1650  collection_(snapshot_->collection()),
1651  progress_(progress),
1652  filler_(NULL) {
1653 }
1654 
1655 
1656 V8HeapExplorer::~V8HeapExplorer() {
1657 }
1658 
1659 
1660 HeapEntry* V8HeapExplorer::AllocateEntry(HeapThing ptr) {
1661  return AddEntry(reinterpret_cast<HeapObject*>(ptr));
1662 }
1663 
1664 
1665 HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object) {
1666  if (object == kInternalRootObject) {
1667  snapshot_->AddRootEntry();
1668  return snapshot_->root();
1669  } else if (object == kGcRootsObject) {
1670  HeapEntry* entry = snapshot_->AddGcRootsEntry();
1671  return entry;
1672  } else if (object >= kFirstGcSubrootObject && object < kLastGcSubrootObject) {
1673  HeapEntry* entry = snapshot_->AddGcSubrootEntry(GetGcSubrootOrder(object));
1674  return entry;
1675  } else if (object->IsJSFunction()) {
1676  JSFunction* func = JSFunction::cast(object);
1677  SharedFunctionInfo* shared = func->shared();
1678  const char* name = shared->bound() ? "native_bind" :
1679  collection_->names()->GetName(String::cast(shared->name()));
1680  return AddEntry(object, HeapEntry::kClosure, name);
1681  } else if (object->IsJSRegExp()) {
1682  JSRegExp* re = JSRegExp::cast(object);
1683  return AddEntry(object,
1684  HeapEntry::kRegExp,
1685  collection_->names()->GetName(re->Pattern()));
1686  } else if (object->IsJSObject()) {
1687  const char* name = collection_->names()->GetName(
1688  GetConstructorName(JSObject::cast(object)));
1689  if (object->IsJSGlobalObject()) {
1690  const char* tag = objects_tags_.GetTag(object);
1691  if (tag != NULL) {
1692  name = collection_->names()->GetFormatted("%s / %s", name, tag);
1693  }
1694  }
1695  return AddEntry(object, HeapEntry::kObject, name);
1696  } else if (object->IsString()) {
1697  return AddEntry(object,
1698  HeapEntry::kString,
1699  collection_->names()->GetName(String::cast(object)));
1700  } else if (object->IsCode()) {
1701  return AddEntry(object, HeapEntry::kCode, "");
1702  } else if (object->IsSharedFunctionInfo()) {
1703  String* name = String::cast(SharedFunctionInfo::cast(object)->name());
1704  return AddEntry(object,
1705  HeapEntry::kCode,
1706  collection_->names()->GetName(name));
1707  } else if (object->IsScript()) {
1708  Object* name = Script::cast(object)->name();
1709  return AddEntry(object,
1710  HeapEntry::kCode,
1711  name->IsString()
1712  ? collection_->names()->GetName(String::cast(name))
1713  : "");
1714  } else if (object->IsNativeContext()) {
1715  return AddEntry(object, HeapEntry::kHidden, "system / NativeContext");
1716  } else if (object->IsContext()) {
1717  return AddEntry(object, HeapEntry::kHidden, "system / Context");
1718  } else if (object->IsFixedArray() ||
1719  object->IsFixedDoubleArray() ||
1720  object->IsByteArray() ||
1721  object->IsExternalArray()) {
1722  return AddEntry(object, HeapEntry::kArray, "");
1723  } else if (object->IsHeapNumber()) {
1724  return AddEntry(object, HeapEntry::kHeapNumber, "number");
1725  }
1726  return AddEntry(object, HeapEntry::kHidden, GetSystemEntryName(object));
1727 }
1728 
1729 
1730 HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object,
1731  HeapEntry::Type type,
1732  const char* name) {
1733  int object_size = object->Size();
1734  SnapshotObjectId object_id =
1735  collection_->GetObjectId(object->address(), object_size);
1736  return snapshot_->AddEntry(type, name, object_id, object_size);
1737 }
1738 
1739 
1740 class GcSubrootsEnumerator : public ObjectVisitor {
1741  public:
1742  GcSubrootsEnumerator(
1743  SnapshotFillerInterface* filler, V8HeapExplorer* explorer)
1744  : filler_(filler),
1745  explorer_(explorer),
1746  previous_object_count_(0),
1747  object_count_(0) {
1748  }
1749  void VisitPointers(Object** start, Object** end) {
1750  object_count_ += end - start;
1751  }
1752  void Synchronize(VisitorSynchronization::SyncTag tag) {
1753  // Skip empty subroots.
1754  if (previous_object_count_ != object_count_) {
1755  previous_object_count_ = object_count_;
1756  filler_->AddEntry(V8HeapExplorer::GetNthGcSubrootObject(tag), explorer_);
1757  }
1758  }
1759  private:
1760  SnapshotFillerInterface* filler_;
1761  V8HeapExplorer* explorer_;
1762  intptr_t previous_object_count_;
1763  intptr_t object_count_;
1764 };
1765 
1766 
1767 void V8HeapExplorer::AddRootEntries(SnapshotFillerInterface* filler) {
1768  filler->AddEntry(kInternalRootObject, this);
1769  filler->AddEntry(kGcRootsObject, this);
1770  GcSubrootsEnumerator enumerator(filler, this);
1771  heap_->IterateRoots(&enumerator, VISIT_ALL);
1772 }
1773 
1774 
1775 const char* V8HeapExplorer::GetSystemEntryName(HeapObject* object) {
1776  switch (object->map()->instance_type()) {
1777  case MAP_TYPE: return "system / Map";
1778  case JS_GLOBAL_PROPERTY_CELL_TYPE: return "system / JSGlobalPropertyCell";
1779  case FOREIGN_TYPE: return "system / Foreign";
1780  case ODDBALL_TYPE: return "system / Oddball";
1781 #define MAKE_STRUCT_CASE(NAME, Name, name) \
1782  case NAME##_TYPE: return "system / "#Name;
1783  STRUCT_LIST(MAKE_STRUCT_CASE)
1784 #undef MAKE_STRUCT_CASE
1785  default: return "system";
1786  }
1787 }
1788 
1789 
1790 int V8HeapExplorer::EstimateObjectsCount(HeapIterator* iterator) {
1791  int objects_count = 0;
1792  for (HeapObject* obj = iterator->next();
1793  obj != NULL;
1794  obj = iterator->next()) {
1795  objects_count++;
1796  }
1797  return objects_count;
1798 }
1799 
1800 
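// Reports every pointer field of an object as a hidden (indexed) reference,
// skipping fields already reported as named references. Such fields are
// temporarily tagged via MarkVisitedField() and untagged during this visit.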
1801 class IndexedReferencesExtractor : public ObjectVisitor {
1802  public:
1803  IndexedReferencesExtractor(V8HeapExplorer* generator,
1804  HeapObject* parent_obj,
1805  int parent)
1806  : generator_(generator),
1807  parent_obj_(parent_obj),
1808  parent_(parent),
1809  next_index_(1) {
1810  }
1811  void VisitPointers(Object** start, Object** end) {
1812  for (Object** p = start; p < end; p++) {
1813  if (CheckVisitedAndUnmark(p)) continue;
1814  generator_->SetHiddenReference(parent_obj_, parent_, next_index_++, *p);
1815  }
1816  }
1817  static void MarkVisitedField(HeapObject* obj, int offset) {
1818  if (offset < 0) return;
1819  Address field = obj->address() + offset;
1820  ASSERT(!Memory::Object_at(field)->IsFailure());
1821  ASSERT(Memory::Object_at(field)->IsHeapObject());
1822  *field |= kFailureTag;
1823  }
1824 
1825  private:
1826  bool CheckVisitedAndUnmark(Object** field) {
1827  if ((*field)->IsFailure()) {
1828  intptr_t untagged = reinterpret_cast<intptr_t>(*field) & ~kFailureTagMask;
1829  *field = reinterpret_cast<Object*>(untagged | kHeapObjectTag);
1830  ASSERT((*field)->IsHeapObject());
1831  return true;
1832  }
1833  return false;
1834  }
1835  V8HeapExplorer* generator_;
1836  HeapObject* parent_obj_;
1837  int parent_;
1838  int next_index_;
1839 };
1840 
1841 
1842 void V8HeapExplorer::ExtractReferences(HeapObject* obj) {
1843  HeapEntry* heap_entry = GetEntry(obj);
1844  if (heap_entry == NULL) return; // No interest in this object.
1845  int entry = heap_entry->index();
1846 
1847  bool extract_indexed_refs = true;
1848  if (obj->IsJSGlobalProxy()) {
1849  ExtractJSGlobalProxyReferences(JSGlobalProxy::cast(obj));
1850  } else if (obj->IsJSObject()) {
1851  ExtractJSObjectReferences(entry, JSObject::cast(obj));
1852  } else if (obj->IsString()) {
1853  ExtractStringReferences(entry, String::cast(obj));
1854  extract_indexed_refs = false;
1855  } else if (obj->IsContext()) {
1856  ExtractContextReferences(entry, Context::cast(obj));
1857  } else if (obj->IsMap()) {
1858  ExtractMapReferences(entry, Map::cast(obj));
1859  } else if (obj->IsSharedFunctionInfo()) {
1860  ExtractSharedFunctionInfoReferences(entry, SharedFunctionInfo::cast(obj));
1861  } else if (obj->IsScript()) {
1862  ExtractScriptReferences(entry, Script::cast(obj));
1863  } else if (obj->IsCodeCache()) {
1864  ExtractCodeCacheReferences(entry, CodeCache::cast(obj));
1865  } else if (obj->IsCode()) {
1866  ExtractCodeReferences(entry, Code::cast(obj));
1867  } else if (obj->IsJSGlobalPropertyCell()) {
1868  ExtractJSGlobalPropertyCellReferences(
1869  entry, JSGlobalPropertyCell::cast(obj));
1870  extract_indexed_refs = false;
1871  }
1872  if (extract_indexed_refs) {
1873  SetInternalReference(obj, entry, "map", obj->map(), HeapObject::kMapOffset);
1874  IndexedReferencesExtractor refs_extractor(this, obj, entry);
1875  obj->Iterate(&refs_extractor);
1876  }
1877 }
1878 
1879 
1880 void V8HeapExplorer::ExtractJSGlobalProxyReferences(JSGlobalProxy* proxy) {
1881  // We need to reference JS global objects from snapshot's root.
1882  // We use JSGlobalProxy because this is what the embedder (e.g. a browser)
1883  // uses for the global object.
1884  Object* object = proxy->map()->prototype();
1885  bool is_debug_object = false;
1886 #ifdef ENABLE_DEBUGGER_SUPPORT
1887  is_debug_object = object->IsGlobalObject() &&
1888  Isolate::Current()->debug()->IsDebugGlobal(GlobalObject::cast(object));
1889 #endif
1890  if (!is_debug_object) {
1891  SetUserGlobalReference(object);
1892  }
1893 }
1894 
1895 
1896 void V8HeapExplorer::ExtractJSObjectReferences(
1897  int entry, JSObject* js_obj) {
1898  HeapObject* obj = js_obj;
1899  ExtractClosureReferences(js_obj, entry);
1900  ExtractPropertyReferences(js_obj, entry);
1901  ExtractElementReferences(js_obj, entry);
1902  ExtractInternalReferences(js_obj, entry);
1903  SetPropertyReference(
1904  obj, entry, heap_->Proto_symbol(), js_obj->GetPrototype());
1905  if (obj->IsJSFunction()) {
1906  JSFunction* js_fun = JSFunction::cast(js_obj);
1907  Object* proto_or_map = js_fun->prototype_or_initial_map();
1908  if (!proto_or_map->IsTheHole()) {
1909  if (!proto_or_map->IsMap()) {
1910  SetPropertyReference(
1911  obj, entry,
1912  heap_->prototype_symbol(), proto_or_map,
1913  NULL,
1914  JSFunction::kPrototypeOrInitialMapOffset);
1915  } else {
1916  SetPropertyReference(
1917  obj, entry,
1918  heap_->prototype_symbol(), js_fun->prototype());
1919  }
1920  }
1921  SharedFunctionInfo* shared_info = js_fun->shared();
1922  // JSFunction has either bindings or literals and never both.
1923  bool bound = shared_info->bound();
1924  TagObject(js_fun->literals_or_bindings(),
1925  bound ? "(function bindings)" : "(function literals)");
1926  SetInternalReference(js_fun, entry,
1927  bound ? "bindings" : "literals",
1928  js_fun->literals_or_bindings(),
1929  JSFunction::kLiteralsOffset);
1930  TagObject(shared_info, "(shared function info)");
1931  SetInternalReference(js_fun, entry,
1932  "shared", shared_info,
1933  JSFunction::kSharedFunctionInfoOffset);
1934  TagObject(js_fun->unchecked_context(), "(context)");
1935  SetInternalReference(js_fun, entry,
1936  "context", js_fun->unchecked_context(),
1937  JSFunction::kContextOffset);
1938  for (int i = JSFunction::kNonWeakFieldsEndOffset;
1939  i < JSFunction::kSize;
1940  i += kPointerSize) {
1941  SetWeakReference(js_fun, entry, i, *HeapObject::RawField(js_fun, i), i);
1942  }
1943  } else if (obj->IsGlobalObject()) {
1944  GlobalObject* global_obj = GlobalObject::cast(obj);
1945  SetInternalReference(global_obj, entry,
1946  "builtins", global_obj->builtins(),
1947  GlobalObject::kBuiltinsOffset);
1948  SetInternalReference(global_obj, entry,
1949  "native_context", global_obj->native_context(),
1950  GlobalObject::kNativeContextOffset);
1951  SetInternalReference(global_obj, entry,
1952  "global_receiver", global_obj->global_receiver(),
1953  GlobalObject::kGlobalReceiverOffset);
1954  }
1955  TagObject(js_obj->properties(), "(object properties)");
1956  SetInternalReference(obj, entry,
1957  "properties", js_obj->properties(),
1958  JSObject::kPropertiesOffset);
1959  TagObject(js_obj->elements(), "(object elements)");
1960  SetInternalReference(obj, entry,
1961  "elements", js_obj->elements(),
1962  JSObject::kElementsOffset);
1963 }
1964 
1965 
1966 void V8HeapExplorer::ExtractStringReferences(int entry, String* string) {
1967  if (string->IsConsString()) {
1968  ConsString* cs = ConsString::cast(string);
1969  SetInternalReference(cs, entry, "first", cs->first());
1970  SetInternalReference(cs, entry, "second", cs->second());
1971  } else if (string->IsSlicedString()) {
1972  SlicedString* ss = SlicedString::cast(string);
1973  SetInternalReference(ss, entry, "parent", ss->parent());
1974  }
1975 }
1976 
1977 
1978 void V8HeapExplorer::ExtractContextReferences(int entry, Context* context) {
1979 #define EXTRACT_CONTEXT_FIELD(index, type, name) \
1980  SetInternalReference(context, entry, #name, context->get(Context::index), \
1981  FixedArray::OffsetOfElementAt(Context::index));
1982  EXTRACT_CONTEXT_FIELD(CLOSURE_INDEX, JSFunction, closure);
1983  EXTRACT_CONTEXT_FIELD(PREVIOUS_INDEX, Context, previous);
1984  EXTRACT_CONTEXT_FIELD(EXTENSION_INDEX, Object, extension);
1985  EXTRACT_CONTEXT_FIELD(GLOBAL_OBJECT_INDEX, GlobalObject, global);
1986  if (context->IsNativeContext()) {
1987  TagObject(context->jsfunction_result_caches(),
1988  "(context func. result caches)");
1989  TagObject(context->normalized_map_cache(), "(context norm. map cache)");
1990  TagObject(context->runtime_context(), "(runtime context)");
1991  TagObject(context->data(), "(context data)");
1992  NATIVE_CONTEXT_FIELDS(EXTRACT_CONTEXT_FIELD);
1993 #undef EXTRACT_CONTEXT_FIELD
1994  for (int i = Context::FIRST_WEAK_SLOT;
1995  i < Context::NATIVE_CONTEXT_SLOTS;
1996  ++i) {
1997  SetWeakReference(context, entry, i, context->get(i),
1998  FixedArray::OffsetOfElementAt(i));
1999  }
2000  }
2001 }
2002 
2003 
2004 void V8HeapExplorer::ExtractMapReferences(int entry, Map* map) {
2005  SetInternalReference(map, entry,
2006  "prototype", map->prototype(), Map::kPrototypeOffset);
2007  SetInternalReference(map, entry,
2008  "constructor", map->constructor(),
2009  Map::kConstructorOffset);
2010  if (map->HasTransitionArray()) {
2011  TransitionArray* transitions = map->transitions();
2012 
2013  Object* back_pointer = transitions->back_pointer_storage();
2014  TagObject(transitions->back_pointer_storage(), "(back pointer)");
2015  SetInternalReference(transitions, entry,
2016  "backpointer", back_pointer,
2017  Map::kTransitionsOrBackPointerOffset);
2018  IndexedReferencesExtractor transitions_refs(this, transitions, entry);
2019  transitions->Iterate(&transitions_refs);
2020 
2021  TagObject(transitions, "(transition array)");
2022  SetInternalReference(map, entry,
2023  "transitions", transitions,
2024  Map::kTransitionsOrBackPointerOffset);
2025  } else {
2026  Object* back_pointer = map->GetBackPointer();
2027  TagObject(back_pointer, "(back pointer)");
2028  SetInternalReference(map, entry,
2029  "backpointer", back_pointer,
2030  Map::kTransitionsOrBackPointerOffset);
2031  }
2032  DescriptorArray* descriptors = map->instance_descriptors();
2033  TagObject(descriptors, "(map descriptors)");
2034  SetInternalReference(map, entry,
2035  "descriptors", descriptors,
2036  Map::kDescriptorsOffset);
2037 
2038  SetInternalReference(map, entry,
2039  "code_cache", map->code_cache(),
2040  Map::kCodeCacheOffset);
2041 }
2042 
2043 
2044 void V8HeapExplorer::ExtractSharedFunctionInfoReferences(
2045  int entry, SharedFunctionInfo* shared) {
2046  HeapObject* obj = shared;
2047  SetInternalReference(obj, entry,
2048  "name", shared->name(),
2049  SharedFunctionInfo::kNameOffset);
2050  TagObject(shared->code(), "(code)");
2051  SetInternalReference(obj, entry,
2052  "code", shared->code(),
2053  SharedFunctionInfo::kCodeOffset);
2054  TagObject(shared->scope_info(), "(function scope info)");
2055  SetInternalReference(obj, entry,
2056  "scope_info", shared->scope_info(),
2057  SharedFunctionInfo::kScopeInfoOffset);
2058  SetInternalReference(obj, entry,
2059  "instance_class_name", shared->instance_class_name(),
2060  SharedFunctionInfo::kInstanceClassNameOffset);
2061  SetInternalReference(obj, entry,
2062  "script", shared->script(),
2063  SharedFunctionInfo::kScriptOffset);
2064  TagObject(shared->construct_stub(), "(code)");
2065  SetInternalReference(obj, entry,
2066  "construct_stub", shared->construct_stub(),
2067  SharedFunctionInfo::kConstructStubOffset);
2068  SetInternalReference(obj, entry,
2069  "function_data", shared->function_data(),
2070  SharedFunctionInfo::kFunctionDataOffset);
2071  SetInternalReference(obj, entry,
2072  "debug_info", shared->debug_info(),
2073  SharedFunctionInfo::kDebugInfoOffset);
2074  SetInternalReference(obj, entry,
2075  "inferred_name", shared->inferred_name(),
2076  SharedFunctionInfo::kInferredNameOffset);
2077  SetInternalReference(obj, entry,
2078  "this_property_assignments",
2079  shared->this_property_assignments(),
2080  SharedFunctionInfo::kThisPropertyAssignmentsOffset);
2081  SetWeakReference(obj, entry,
2082  1, shared->initial_map(),
2083  SharedFunctionInfo::kInitialMapOffset);
2084 }
2085 
2086 
2087 void V8HeapExplorer::ExtractScriptReferences(int entry, Script* script) {
2088  HeapObject* obj = script;
2089  SetInternalReference(obj, entry,
2090  "source", script->source(),
2091  Script::kSourceOffset);
2092  SetInternalReference(obj, entry,
2093  "name", script->name(),
2094  Script::kNameOffset);
2095  SetInternalReference(obj, entry,
2096  "data", script->data(),
2097  Script::kDataOffset);
2098  SetInternalReference(obj, entry,
2099  "context_data", script->context_data(),
2100  Script::kContextOffset);
2101  TagObject(script->line_ends(), "(script line ends)");
2102  SetInternalReference(obj, entry,
2103  "line_ends", script->line_ends(),
2104  Script::kLineEndsOffset);
2105 }
2106 
2107 
2108 void V8HeapExplorer::ExtractCodeCacheReferences(
2109  int entry, CodeCache* code_cache) {
2110  TagObject(code_cache->default_cache(), "(default code cache)");
2111  SetInternalReference(code_cache, entry,
2112  "default_cache", code_cache->default_cache(),
2113  CodeCache::kDefaultCacheOffset);
2114  TagObject(code_cache->normal_type_cache(), "(code type cache)");
2115  SetInternalReference(code_cache, entry,
2116  "type_cache", code_cache->normal_type_cache(),
2117  CodeCache::kNormalTypeCacheOffset);
2118 }
2119 
2120 
2121 void V8HeapExplorer::ExtractCodeReferences(int entry, Code* code) {
2122  TagObject(code->relocation_info(), "(code relocation info)");
2123  SetInternalReference(code, entry,
2124  "relocation_info", code->relocation_info(),
2125  Code::kRelocationInfoOffset);
2126  SetInternalReference(code, entry,
2127  "handler_table", code->handler_table(),
2128  Code::kHandlerTableOffset);
2129  TagObject(code->deoptimization_data(), "(code deopt data)");
2130  SetInternalReference(code, entry,
2131  "deoptimization_data", code->deoptimization_data(),
2132  Code::kDeoptimizationDataOffset);
2133  SetInternalReference(code, entry,
2134  "type_feedback_info", code->type_feedback_info(),
2135  Code::kTypeFeedbackInfoOffset);
2136  SetInternalReference(code, entry,
2137  "gc_metadata", code->gc_metadata(),
2138  Code::kGCMetadataOffset);
2139 }
2140 
2141 
2142 void V8HeapExplorer::ExtractJSGlobalPropertyCellReferences(
2143  int entry, JSGlobalPropertyCell* cell) {
2144  SetInternalReference(cell, entry, "value", cell->value());
2145 }
2146 
2147 
2148 void V8HeapExplorer::ExtractClosureReferences(JSObject* js_obj, int entry) {
2149  if (!js_obj->IsJSFunction()) return;
2150 
2151  JSFunction* func = JSFunction::cast(js_obj);
2152  if (func->shared()->bound()) {
2153  FixedArray* bindings = func->function_bindings();
2154  SetNativeBindReference(js_obj, entry, "bound_this",
2155  bindings->get(JSFunction::kBoundThisIndex));
2156  SetNativeBindReference(js_obj, entry, "bound_function",
2157  bindings->get(JSFunction::kBoundFunctionIndex));
2158  for (int i = JSFunction::kBoundArgumentsStartIndex;
2159  i < bindings->length(); i++) {
2160  const char* reference_name = collection_->names()->GetFormatted(
2161  "bound_argument_%d",
2162  i - JSFunction::kBoundArgumentsStartIndex);
2163  SetNativeBindReference(js_obj, entry, reference_name,
2164  bindings->get(i));
2165  }
2166  } else {
2167  Context* context = func->context()->declaration_context();
2168  ScopeInfo* scope_info = context->closure()->shared()->scope_info();
2169  // Add context allocated locals.
2170  int context_locals = scope_info->ContextLocalCount();
2171  for (int i = 0; i < context_locals; ++i) {
2172  String* local_name = scope_info->ContextLocalName(i);
2173  int idx = Context::MIN_CONTEXT_SLOTS + i;
2174  SetClosureReference(js_obj, entry, local_name, context->get(idx));
2175  }
2176 
2177  // Add function variable.
2178  if (scope_info->HasFunctionName()) {
2179  String* name = scope_info->FunctionName();
2180  VariableMode mode;
2181  int idx = scope_info->FunctionContextSlotIndex(name, &mode);
2182  if (idx >= 0) {
2183  SetClosureReference(js_obj, entry, name, context->get(idx));
2184  }
2185  }
2186  }
2187 }
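// Example (sketch): for a bound function created via f.bind(obj, 1, 2) the
// first branch above emits references named "bound_this", "bound_function",
// "bound_argument_0" and "bound_argument_1"; for an ordinary closure the
// second branch emits one reference per context-allocated local, named
// after the variable.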
2188 
2189 
2190 void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj, int entry) {
2191  if (js_obj->HasFastProperties()) {
2192  DescriptorArray* descs = js_obj->map()->instance_descriptors();
2193  int real_size = js_obj->map()->NumberOfOwnDescriptors();
2194  for (int i = 0; i < descs->number_of_descriptors(); i++) {
2195  if (descs->GetDetails(i).descriptor_index() > real_size) continue;
2196  switch (descs->GetType(i)) {
2197  case FIELD: {
2198  int index = descs->GetFieldIndex(i);
2199 
2200  String* k = descs->GetKey(i);
2201  if (index < js_obj->map()->inobject_properties()) {
2202  Object* value = js_obj->InObjectPropertyAt(index);
2203  if (k != heap_->hidden_symbol()) {
2204  SetPropertyReference(
2205  js_obj, entry,
2206  k, value,
2207  NULL,
2208  js_obj->GetInObjectPropertyOffset(index));
2209  } else {
2210  TagObject(value, "(hidden properties)");
2211  SetInternalReference(
2212  js_obj, entry,
2213  "hidden_properties", value,
2214  js_obj->GetInObjectPropertyOffset(index));
2215  }
2216  } else {
2217  Object* value = js_obj->FastPropertyAt(index);
2218  if (k != heap_->hidden_symbol()) {
2219  SetPropertyReference(js_obj, entry, k, value);
2220  } else {
2221  TagObject(value, "(hidden properties)");
2222  SetInternalReference(js_obj, entry, "hidden_properties", value);
2223  }
2224  }
2225  break;
2226  }
2227  case CONSTANT_FUNCTION:
2228  SetPropertyReference(
2229  js_obj, entry,
2230  descs->GetKey(i), descs->GetConstantFunction(i));
2231  break;
2232  case CALLBACKS: {
2233  Object* callback_obj = descs->GetValue(i);
2234  if (callback_obj->IsAccessorPair()) {
2235  AccessorPair* accessors = AccessorPair::cast(callback_obj);
2236  if (Object* getter = accessors->getter()) {
2237  SetPropertyReference(js_obj, entry, descs->GetKey(i),
2238  getter, "get-%s");
2239  }
2240  if (Object* setter = accessors->setter()) {
2241  SetPropertyReference(js_obj, entry, descs->GetKey(i),
2242  setter, "set-%s");
2243  }
2244  }
2245  break;
2246  }
2247  case NORMAL: // only in slow mode
2248  case HANDLER: // only in lookup results, not in descriptors
2249  case INTERCEPTOR: // only in lookup results, not in descriptors
2250  break;
2251  case TRANSITION:
2252  case NONEXISTENT:
2253  UNREACHABLE();
2254  break;
2255  }
2256  }
2257  } else {
2258  StringDictionary* dictionary = js_obj->property_dictionary();
2259  int length = dictionary->Capacity();
2260  for (int i = 0; i < length; ++i) {
2261  Object* k = dictionary->KeyAt(i);
2262  if (dictionary->IsKey(k)) {
2263  Object* target = dictionary->ValueAt(i);
2264  // We assume that global objects can only have slow properties.
2265  Object* value = target->IsJSGlobalPropertyCell()
2266  ? JSGlobalPropertyCell::cast(target)->value()
2267  : target;
2268  if (k != heap_->hidden_symbol()) {
2269  SetPropertyReference(js_obj, entry, String::cast(k), value);
2270  } else {
2271  TagObject(value, "(hidden properties)");
2272  SetInternalReference(js_obj, entry, "hidden_properties", value);
2273  }
2274  }
2275  }
2276  }
2277 }
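// Example (sketch): a fast in-object field `x` becomes a property edge named
// "x"; an accessor installed with Object.defineProperty(o, "y", {get: f})
// shows up as a property edge named "get-y"; values stored under the hidden
// symbol are reported as the internal reference "hidden_properties" instead.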
2278 
2279 
2280 void V8HeapExplorer::ExtractElementReferences(JSObject* js_obj, int entry) {
2281  if (js_obj->HasFastObjectElements()) {
2282  FixedArray* elements = FixedArray::cast(js_obj->elements());
2283  int length = js_obj->IsJSArray() ?
2284  Smi::cast(JSArray::cast(js_obj)->length())->value() :
2285  elements->length();
2286  for (int i = 0; i < length; ++i) {
2287  if (!elements->get(i)->IsTheHole()) {
2288  SetElementReference(js_obj, entry, i, elements->get(i));
2289  }
2290  }
2291  } else if (js_obj->HasDictionaryElements()) {
2292  SeededNumberDictionary* dictionary = js_obj->element_dictionary();
2293  int length = dictionary->Capacity();
2294  for (int i = 0; i < length; ++i) {
2295  Object* k = dictionary->KeyAt(i);
2296  if (dictionary->IsKey(k)) {
2297  ASSERT(k->IsNumber());
2298  uint32_t index = static_cast<uint32_t>(k->Number());
2299  SetElementReference(js_obj, entry, index, dictionary->ValueAt(i));
2300  }
2301  }
2302  }
2303 }
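// Example (sketch): a dense array such as [10, 20, 30] yields element
// references 0, 1 and 2 from the fast-elements branch; holes are skipped,
// and objects with dictionary elements report only the numeric keys that
// are actually present.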
2304 
2305 
2306 void V8HeapExplorer::ExtractInternalReferences(JSObject* js_obj, int entry) {
2307  int length = js_obj->GetInternalFieldCount();
2308  for (int i = 0; i < length; ++i) {
2309  Object* o = js_obj->GetInternalField(i);
2310  SetInternalReference(
2311  js_obj, entry, i, o, js_obj->GetInternalFieldOffset(i));
2312  }
2313 }
2314 
2315 
2316 String* V8HeapExplorer::GetConstructorName(JSObject* object) {
2317  Heap* heap = object->GetHeap();
2318  if (object->IsJSFunction()) return heap->closure_symbol();
2319  String* constructor_name = object->constructor_name();
2320  if (constructor_name == heap->Object_symbol()) {
2321  // Look up an immediate "constructor" property; if it is a function,
2322  // return its name. This is for instances of binding objects, which
2323  // have prototype constructor type "Object".
2324  Object* constructor_prop = NULL;
2325  LookupResult result(heap->isolate());
2326  object->LocalLookupRealNamedProperty(heap->constructor_symbol(), &result);
2327  if (!result.IsFound()) return object->constructor_name();
2328 
2329  constructor_prop = result.GetLazyValue();
2330  if (constructor_prop->IsJSFunction()) {
2331  Object* maybe_name =
2332  JSFunction::cast(constructor_prop)->shared()->name();
2333  if (maybe_name->IsString()) {
2334  String* name = String::cast(maybe_name);
2335  if (name->length() > 0) return name;
2336  }
2337  }
2338  }
2339  return object->constructor_name();
2340 }
2341 
2342 
2343 HeapEntry* V8HeapExplorer::GetEntry(Object* obj) {
2344  if (!obj->IsHeapObject()) return NULL;
2345  return filler_->FindOrAddEntry(obj, this);
2346 }
2347 
2348 
2349 class RootsReferencesExtractor : public ObjectVisitor {
2350  private:
2351  struct IndexTag {
2352  IndexTag(int index, VisitorSynchronization::SyncTag tag)
2353  : index(index), tag(tag) { }
2354  int index;
2355  VisitorSynchronization::SyncTag tag;
2356  };
2357 
2358  public:
2359  RootsReferencesExtractor()
2360  : collecting_all_references_(false),
2361  previous_reference_count_(0) {
2362  }
2363 
2364  void VisitPointers(Object** start, Object** end) {
2365  if (collecting_all_references_) {
2366  for (Object** p = start; p < end; p++) all_references_.Add(*p);
2367  } else {
2368  for (Object** p = start; p < end; p++) strong_references_.Add(*p);
2369  }
2370  }
2371 
2372  void SetCollectingAllReferences() { collecting_all_references_ = true; }
2373 
2374  void FillReferences(V8HeapExplorer* explorer) {
2375  ASSERT(strong_references_.length() <= all_references_.length());
2376  for (int i = 0; i < reference_tags_.length(); ++i) {
2377  explorer->SetGcRootsReference(reference_tags_[i].tag);
2378  }
2379  int strong_index = 0, all_index = 0, tags_index = 0;
2380  while (all_index < all_references_.length()) {
2381  if (strong_index < strong_references_.length() &&
2382  strong_references_[strong_index] == all_references_[all_index]) {
2383  explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
2384  false,
2385  all_references_[all_index++]);
2386  ++strong_index;
2387  } else {
2388  explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
2389  true,
2390  all_references_[all_index++]);
2391  }
2392  if (reference_tags_[tags_index].index == all_index) ++tags_index;
2393  }
2394  }
2395 
2396  void Synchronize(VisitorSynchronization::SyncTag tag) {
2397  if (collecting_all_references_ &&
2398  previous_reference_count_ != all_references_.length()) {
2399  previous_reference_count_ = all_references_.length();
2400  reference_tags_.Add(IndexTag(previous_reference_count_, tag));
2401  }
2402  }
2403 
2404  private:
2405  bool collecting_all_references_;
2406  List<Object*> strong_references_;
2407  List<Object*> all_references_;
2408  int previous_reference_count_;
2409  List<IndexTag> reference_tags_;
2410 };
2411 
2412 
2413 bool V8HeapExplorer::IterateAndExtractReferences(
2414  SnapshotFillerInterface* filler) {
2415  HeapIterator iterator(HeapIterator::kFilterUnreachable);
2416 
2417  filler_ = filler;
2418  bool interrupted = false;
2419 
2420  // Heap iteration with filtering must be run to completion in any case.
2421  for (HeapObject* obj = iterator.next();
2422  obj != NULL;
2423  obj = iterator.next(), progress_->ProgressStep()) {
2424  if (!interrupted) {
2425  ExtractReferences(obj);
2426  if (!progress_->ProgressReport(false)) interrupted = true;
2427  }
2428  }
2429  if (interrupted) {
2430  filler_ = NULL;
2431  return false;
2432  }
2433 
2434  SetRootGcRootsReference();
2435  RootsReferencesExtractor extractor;
2436  heap_->IterateRoots(&extractor, VISIT_ONLY_STRONG);
2437  extractor.SetCollectingAllReferences();
2438  heap_->IterateRoots(&extractor, VISIT_ALL);
2439  extractor.FillReferences(this);
2440  filler_ = NULL;
2441  return progress_->ProgressReport(true);
2442 }
2443 
2444 
2445 bool V8HeapExplorer::IsEssentialObject(Object* object) {
2446  // We have to use raw_unchecked_* versions because checked versions
2447  // would fail during iteration over object properties.
2448  return object->IsHeapObject()
2449  && !object->IsOddball()
2450  && object != heap_->raw_unchecked_empty_byte_array()
2451  && object != heap_->raw_unchecked_empty_fixed_array()
2452  && object != heap_->raw_unchecked_empty_descriptor_array()
2453  && object != heap_->raw_unchecked_fixed_array_map()
2454  && object != heap_->raw_unchecked_global_property_cell_map()
2455  && object != heap_->raw_unchecked_shared_function_info_map()
2456  && object != heap_->raw_unchecked_free_space_map()
2457  && object != heap_->raw_unchecked_one_pointer_filler_map()
2458  && object != heap_->raw_unchecked_two_pointer_filler_map();
2459 }
2460 
2461 
2462 void V8HeapExplorer::SetClosureReference(HeapObject* parent_obj,
2463  int parent_entry,
2464  String* reference_name,
2465  Object* child_obj) {
2466  HeapEntry* child_entry = GetEntry(child_obj);
2467  if (child_entry != NULL) {
2468  filler_->SetNamedReference(HeapGraphEdge::kContextVariable,
2469  parent_entry,
2470  collection_->names()->GetName(reference_name),
2471  child_entry);
2472  }
2473 }
2474 
2475 
2476 void V8HeapExplorer::SetNativeBindReference(HeapObject* parent_obj,
2477  int parent_entry,
2478  const char* reference_name,
2479  Object* child_obj) {
2480  HeapEntry* child_entry = GetEntry(child_obj);
2481  if (child_entry != NULL) {
2482  filler_->SetNamedReference(HeapGraphEdge::kShortcut,
2483  parent_entry,
2484  reference_name,
2485  child_entry);
2486  }
2487 }
2488 
2489 
2490 void V8HeapExplorer::SetElementReference(HeapObject* parent_obj,
2491  int parent_entry,
2492  int index,
2493  Object* child_obj) {
2494  HeapEntry* child_entry = GetEntry(child_obj);
2495  if (child_entry != NULL) {
2496  filler_->SetIndexedReference(HeapGraphEdge::kElement,
2497  parent_entry,
2498  index,
2499  child_entry);
2500  }
2501 }
2502 
2503 
2504 void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
2505  int parent_entry,
2506  const char* reference_name,
2507  Object* child_obj,
2508  int field_offset) {
2509  HeapEntry* child_entry = GetEntry(child_obj);
2510  if (child_entry == NULL) return;
2511  if (IsEssentialObject(child_obj)) {
2512  filler_->SetNamedReference(HeapGraphEdge::kInternal,
2513  parent_entry,
2514  reference_name,
2515  child_entry);
2516  }
2517  IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
2518 }
2519 
2520 
2521 void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
2522  int parent_entry,
2523  int index,
2524  Object* child_obj,
2525  int field_offset) {
2526  HeapEntry* child_entry = GetEntry(child_obj);
2527  if (child_entry == NULL) return;
2528  if (IsEssentialObject(child_obj)) {
2529  filler_->SetNamedReference(HeapGraphEdge::kInternal,
2530  parent_entry,
2531  collection_->names()->GetName(index),
2532  child_entry);
2533  }
2534  IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
2535 }
2536 
2537 
2538 void V8HeapExplorer::SetHiddenReference(HeapObject* parent_obj,
2539  int parent_entry,
2540  int index,
2541  Object* child_obj) {
2542  HeapEntry* child_entry = GetEntry(child_obj);
2543  if (child_entry != NULL && IsEssentialObject(child_obj)) {
2544  filler_->SetIndexedReference(HeapGraphEdge::kHidden,
2545  parent_entry,
2546  index,
2547  child_entry);
2548  }
2549 }
2550 
2551 
2552 void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj,
2553  int parent_entry,
2554  int index,
2555  Object* child_obj,
2556  int field_offset) {
2557  HeapEntry* child_entry = GetEntry(child_obj);
2558  if (child_entry != NULL) {
2559  filler_->SetIndexedReference(HeapGraphEdge::kWeak,
2560  parent_entry,
2561  index,
2562  child_entry);
2563  IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
2564  }
2565 }
2566 
2567 
2568 void V8HeapExplorer::SetPropertyReference(HeapObject* parent_obj,
2569  int parent_entry,
2570  String* reference_name,
2571  Object* child_obj,
2572  const char* name_format_string,
2573  int field_offset) {
2574  HeapEntry* child_entry = GetEntry(child_obj);
2575  if (child_entry != NULL) {
2576  HeapGraphEdge::Type type = reference_name->length() > 0 ?
2577  HeapGraphEdge::kProperty : HeapGraphEdge::kInternal;
2578  const char* name = name_format_string != NULL ?
2579  collection_->names()->GetFormatted(
2580  name_format_string,
2581  *reference_name->ToCString(DISALLOW_NULLS,
2582  ROBUST_STRING_TRAVERSAL)) :
2583  collection_->names()->GetName(reference_name);
2584 
2585  filler_->SetNamedReference(type,
2586  parent_entry,
2587  name,
2588  child_entry);
2589  IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
2590  }
2591 }
2592 
2593 
2594 void V8HeapExplorer::SetRootGcRootsReference() {
2595  filler_->SetIndexedAutoIndexReference(
2596  HeapGraphEdge::kElement,
2597  snapshot_->root()->index(),
2598  snapshot_->gc_roots());
2599 }
2600 
2601 
2602 void V8HeapExplorer::SetUserGlobalReference(Object* child_obj) {
2603  HeapEntry* child_entry = GetEntry(child_obj);
2604  ASSERT(child_entry != NULL);
2605  filler_->SetNamedAutoIndexReference(
2606  HeapGraphEdge::kShortcut,
2607  snapshot_->root()->index(),
2608  child_entry);
2609 }
2610 
2611 
2612 void V8HeapExplorer::SetGcRootsReference(VisitorSynchronization::SyncTag tag) {
2613  filler_->SetIndexedAutoIndexReference(
2614  HeapGraphEdge::kElement,
2615  snapshot_->gc_roots()->index(),
2616  snapshot_->gc_subroot(tag));
2617 }
2618 
2619 
2620 void V8HeapExplorer::SetGcSubrootReference(
2621  VisitorSynchronization::SyncTag tag, bool is_weak, Object* child_obj) {
2622  HeapEntry* child_entry = GetEntry(child_obj);
2623  if (child_entry != NULL) {
2624  const char* name = GetStrongGcSubrootName(child_obj);
2625  if (name != NULL) {
2626  filler_->SetNamedReference(
2627  HeapGraphEdge::kInternal,
2628  snapshot_->gc_subroot(tag)->index(),
2629  name,
2630  child_entry);
2631  } else {
2632  filler_->SetIndexedAutoIndexReference(
2633  is_weak ? HeapGraphEdge::kWeak : HeapGraphEdge::kElement,
2634  snapshot_->gc_subroot(tag)->index(),
2635  child_entry);
2636  }
2637  }
2638 }
2639 
2640 
2641 const char* V8HeapExplorer::GetStrongGcSubrootName(Object* object) {
2642  if (strong_gc_subroot_names_.is_empty()) {
2643 #define NAME_ENTRY(name) strong_gc_subroot_names_.SetTag(heap_->name(), #name);
2644 #define ROOT_NAME(type, name, camel_name) NAME_ENTRY(name)
2645  STRONG_ROOT_LIST(ROOT_NAME)
2646 #undef ROOT_NAME
2647 #define STRUCT_MAP_NAME(NAME, Name, name) NAME_ENTRY(name##_map)
2648  STRUCT_LIST(STRUCT_MAP_NAME)
2649 #undef STRUCT_MAP_NAME
2650 #define SYMBOL_NAME(name, str) NAME_ENTRY(name)
2651  SYMBOL_LIST(SYMBOL_NAME)
2652 #undef SYMBOL_NAME
2653 #undef NAME_ENTRY
2654  CHECK(!strong_gc_subroot_names_.is_empty());
2655  }
2656  return strong_gc_subroot_names_.GetTag(object);
2657 }
2658 
2659 
2660 void V8HeapExplorer::TagObject(Object* obj, const char* tag) {
2661  if (IsEssentialObject(obj)) {
2662  HeapEntry* entry = GetEntry(obj);
2663  if (entry->name()[0] == '\0') {
2664  entry->set_name(tag);
2665  }
2666  }
2667 }
2668 
2669 
2670 class GlobalObjectsEnumerator : public ObjectVisitor {
2671  public:
2672  virtual void VisitPointers(Object** start, Object** end) {
2673  for (Object** p = start; p < end; p++) {
2674  if ((*p)->IsNativeContext()) {
2675  Context* context = Context::cast(*p);
2676  JSObject* proxy = context->global_proxy();
2677  if (proxy->IsJSGlobalProxy()) {
2678  Object* global = proxy->map()->prototype();
2679  if (global->IsJSGlobalObject()) {
2680  objects_.Add(Handle<JSGlobalObject>(JSGlobalObject::cast(global)));
2681  }
2682  }
2683  }
2684  }
2685  }
2686  int count() { return objects_.length(); }
2687  Handle<JSGlobalObject>& at(int i) { return objects_[i]; }
2688 
2689  private:
2690  List<Handle<JSGlobalObject> > objects_;
2691 };
2692 
2693 
2694 // Modifies heap. Must not be run during heap traversal.
2695 void V8HeapExplorer::TagGlobalObjects() {
2696  HandleScope scope;
2697  Isolate* isolate = Isolate::Current();
2698  GlobalObjectsEnumerator enumerator;
2699  isolate->global_handles()->IterateAllRoots(&enumerator);
2700  Handle<String> document_string =
2701  isolate->factory()->NewStringFromAscii(CStrVector("document"));
2702  Handle<String> url_string =
2703  isolate->factory()->NewStringFromAscii(CStrVector("URL"));
2704  const char** urls = NewArray<const char*>(enumerator.count());
2705  for (int i = 0, l = enumerator.count(); i < l; ++i) {
2706  urls[i] = NULL;
2707  HandleScope scope;
2708  Handle<JSGlobalObject> global_obj = enumerator.at(i);
2709  Object* obj_document;
2710  if (global_obj->GetProperty(*document_string)->ToObject(&obj_document) &&
2711  obj_document->IsJSObject()) {
2712  // FixMe: Workaround: SharedWorker's current Isolate has a NULL context.
2713  // As a result, GetProperty(*url_string) will crash.
2714  if (!Isolate::Current()->context() && obj_document->IsJSGlobalProxy())
2715  continue;
2716  JSObject* document = JSObject::cast(obj_document);
2717  Object* obj_url;
2718  if (document->GetProperty(*url_string)->ToObject(&obj_url) &&
2719  obj_url->IsString()) {
2720  urls[i] = collection_->names()->GetName(String::cast(obj_url));
2721  }
2722  }
2723  }
2724 
2725  AssertNoAllocation no_allocation;
2726  for (int i = 0, l = enumerator.count(); i < l; ++i) {
2727  objects_tags_.SetTag(*enumerator.at(i), urls[i]);
2728  }
2729 
2730  DeleteArray(urls);
2731 }
2732 
2733 
2734 class GlobalHandlesExtractor : public ObjectVisitor {
2735  public:
2736  explicit GlobalHandlesExtractor(NativeObjectsExplorer* explorer)
2737  : explorer_(explorer) {}
2738  virtual ~GlobalHandlesExtractor() {}
2739  virtual void VisitPointers(Object** start, Object** end) {
2740  UNREACHABLE();
2741  }
2742  virtual void VisitEmbedderReference(Object** p, uint16_t class_id) {
2743  explorer_->VisitSubtreeWrapper(p, class_id);
2744  }
2745  private:
2746  NativeObjectsExplorer* explorer_;
2747 };
2748 
2749 
2750 class BasicHeapEntriesAllocator : public HeapEntriesAllocator {
2751  public:
2752  BasicHeapEntriesAllocator(
2753  HeapSnapshot* snapshot,
2754  HeapEntry::Type entries_type)
2755  : snapshot_(snapshot),
2756  collection_(snapshot_->collection()),
2757  entries_type_(entries_type) {
2758  }
2759  virtual HeapEntry* AllocateEntry(HeapThing ptr);
2760  private:
2761  HeapSnapshot* snapshot_;
2762  HeapSnapshotsCollection* collection_;
2763  HeapEntry::Type entries_type_;
2764 };
2765 
2766 
2767 HeapEntry* BasicHeapEntriesAllocator::AllocateEntry(HeapThing ptr) {
2768  v8::RetainedObjectInfo* info = reinterpret_cast<v8::RetainedObjectInfo*>(ptr);
2769  intptr_t elements = info->GetElementCount();
2770  intptr_t size = info->GetSizeInBytes();
2771  const char* name = elements != -1
2772  ? collection_->names()->GetFormatted(
2773  "%s / %" V8_PTR_PREFIX "d entries", info->GetLabel(), elements)
2774  : collection_->names()->GetCopy(info->GetLabel());
2775  return snapshot_->AddEntry(
2776  entries_type_,
2777  name,
2778  HeapObjectsMap::GenerateId(info),
2779  size != -1 ? static_cast<int>(size) : 0);
2780 }
2781 
2782 
2783 NativeObjectsExplorer::NativeObjectsExplorer(
2784  HeapSnapshot* snapshot, SnapshottingProgressReportingInterface* progress)
2785  : snapshot_(snapshot),
2786  collection_(snapshot_->collection()),
2787  progress_(progress),
2788  embedder_queried_(false),
2789  objects_by_info_(RetainedInfosMatch),
2790  native_groups_(StringsMatch),
2791  filler_(NULL) {
2792  synthetic_entries_allocator_ =
2793  new BasicHeapEntriesAllocator(snapshot, HeapEntry::kSynthetic);
2794  native_entries_allocator_ =
2795  new BasicHeapEntriesAllocator(snapshot, HeapEntry::kNative);
2796 }
2797 
2798 
2799 NativeObjectsExplorer::~NativeObjectsExplorer() {
2800  for (HashMap::Entry* p = objects_by_info_.Start();
2801  p != NULL;
2802  p = objects_by_info_.Next(p)) {
2803  v8::RetainedObjectInfo* info =
2804  reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
2805  info->Dispose();
2806  List<HeapObject*>* objects =
2807  reinterpret_cast<List<HeapObject*>* >(p->value);
2808  delete objects;
2809  }
2810  for (HashMap::Entry* p = native_groups_.Start();
2811  p != NULL;
2812  p = native_groups_.Next(p)) {
2813  v8::RetainedObjectInfo* info =
2814  reinterpret_cast<v8::RetainedObjectInfo*>(p->value);
2815  info->Dispose();
2816  }
2817  delete synthetic_entries_allocator_;
2818  delete native_entries_allocator_;
2819 }
2820 
2821 
2822 int NativeObjectsExplorer::EstimateObjectsCount() {
2823  FillRetainedObjects();
2824  return objects_by_info_.occupancy();
2825 }
2826 
2827 
2828 void NativeObjectsExplorer::FillRetainedObjects() {
2829  if (embedder_queried_) return;
2830  Isolate* isolate = Isolate::Current();
2831  // Record objects that are joined into ObjectGroups.
2832  isolate->heap()->CallGlobalGCPrologueCallback();
2833  List<ObjectGroup*>* groups = isolate->global_handles()->object_groups();
2834  for (int i = 0; i < groups->length(); ++i) {
2835  ObjectGroup* group = groups->at(i);
2836  if (group->info_ == NULL) continue;
2837  List<HeapObject*>* list = GetListMaybeDisposeInfo(group->info_);
2838  for (size_t j = 0; j < group->length_; ++j) {
2839  HeapObject* obj = HeapObject::cast(*group->objects_[j]);
2840  list->Add(obj);
2841  in_groups_.Insert(obj);
2842  }
2843  group->info_ = NULL; // Acquire info object ownership.
2844  }
2845  isolate->global_handles()->RemoveObjectGroups();
2846  isolate->heap()->CallGlobalGCEpilogueCallback();
2847  // Record objects that are not in ObjectGroups, but have class ID.
2848  GlobalHandlesExtractor extractor(this);
2849  isolate->global_handles()->IterateAllRootsWithClassIds(&extractor);
2850  embedder_queried_ = true;
2851 }
2852 
2853 void NativeObjectsExplorer::FillImplicitReferences() {
2854  Isolate* isolate = Isolate::Current();
2855  List<ImplicitRefGroup*>* groups =
2856  isolate->global_handles()->implicit_ref_groups();
2857  for (int i = 0; i < groups->length(); ++i) {
2858  ImplicitRefGroup* group = groups->at(i);
2859  HeapObject* parent = *group->parent_;
2860  int parent_entry =
2861  filler_->FindOrAddEntry(parent, native_entries_allocator_)->index();
2862  ASSERT(parent_entry != HeapEntry::kNoEntry);
2863  Object*** children = group->children_;
2864  for (size_t j = 0; j < group->length_; ++j) {
2865  Object* child = *children[j];
2866  HeapEntry* child_entry =
2867  filler_->FindOrAddEntry(child, native_entries_allocator_);
2868  filler_->SetNamedReference(
2869  HeapGraphEdge::kInternal,
2870  parent_entry,
2871  "native",
2872  child_entry);
2873  }
2874  }
2875 }
2876 
2877 List<HeapObject*>* NativeObjectsExplorer::GetListMaybeDisposeInfo(
2878  v8::RetainedObjectInfo* info) {
2879  HashMap::Entry* entry =
2880  objects_by_info_.Lookup(info, InfoHash(info), true);
2881  if (entry->value != NULL) {
2882  info->Dispose();
2883  } else {
2884  entry->value = new List<HeapObject*>(4);
2885  }
2886  return reinterpret_cast<List<HeapObject*>* >(entry->value);
2887 }
2888 
2889 
2890 bool NativeObjectsExplorer::IterateAndExtractReferences(
2891  SnapshotFillerInterface* filler) {
2892  filler_ = filler;
2893  FillRetainedObjects();
2894  FillImplicitReferences();
2895  if (EstimateObjectsCount() > 0) {
2896  for (HashMap::Entry* p = objects_by_info_.Start();
2897  p != NULL;
2898  p = objects_by_info_.Next(p)) {
2899  v8::RetainedObjectInfo* info =
2900  reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
2901  SetNativeRootReference(info);
2902  List<HeapObject*>* objects =
2903  reinterpret_cast<List<HeapObject*>* >(p->value);
2904  for (int i = 0; i < objects->length(); ++i) {
2905  SetWrapperNativeReferences(objects->at(i), info);
2906  }
2907  }
2908  SetRootNativeRootsReference();
2909  }
2910  filler_ = NULL;
2911  return true;
2912 }
2913 
2914 
2915 class NativeGroupRetainedObjectInfo : public v8::RetainedObjectInfo {
2916  public:
2917  explicit NativeGroupRetainedObjectInfo(const char* label)
2918  : disposed_(false),
2919  hash_(reinterpret_cast<intptr_t>(label)),
2920  label_(label) {
2921  }
2922 
2923  virtual ~NativeGroupRetainedObjectInfo() {}
2924  virtual void Dispose() {
2925  CHECK(!disposed_);
2926  disposed_ = true;
2927  delete this;
2928  }
2929  virtual bool IsEquivalent(RetainedObjectInfo* other) {
2930  return hash_ == other->GetHash() && !strcmp(label_, other->GetLabel());
2931  }
2932  virtual intptr_t GetHash() { return hash_; }
2933  virtual const char* GetLabel() { return label_; }
2934 
2935  private:
2936  bool disposed_;
2937  intptr_t hash_;
2938  const char* label_;
2939 };
2940 
2941 
2942 NativeGroupRetainedObjectInfo* NativeObjectsExplorer::FindOrAddGroupInfo(
2943  const char* label) {
2944  const char* label_copy = collection_->names()->GetCopy(label);
2945  uint32_t hash = HashSequentialString(label_copy,
2946  static_cast<int>(strlen(label_copy)),
2947  HEAP->HashSeed());
2948  HashMap::Entry* entry = native_groups_.Lookup(const_cast<char*>(label_copy),
2949  hash, true);
2950  if (entry->value == NULL) {
2951  entry->value = new NativeGroupRetainedObjectInfo(label);
2952  }
2953  return static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
2954 }
2955 
2956 
2957 void NativeObjectsExplorer::SetNativeRootReference(
2958  v8::RetainedObjectInfo* info) {
2959  HeapEntry* child_entry =
2960  filler_->FindOrAddEntry(info, native_entries_allocator_);
2961  ASSERT(child_entry != NULL);
2962  NativeGroupRetainedObjectInfo* group_info =
2963  FindOrAddGroupInfo(info->GetGroupLabel());
2964  HeapEntry* group_entry =
2965  filler_->FindOrAddEntry(group_info, synthetic_entries_allocator_);
2966  filler_->SetNamedAutoIndexReference(
2967  HeapGraphEdge::kInternal,
2968  group_entry->index(),
2969  child_entry);
2970 }
2971 
2972 
2973 void NativeObjectsExplorer::SetWrapperNativeReferences(
2974  HeapObject* wrapper, v8::RetainedObjectInfo* info) {
2975  HeapEntry* wrapper_entry = filler_->FindEntry(wrapper);
2976  ASSERT(wrapper_entry != NULL);
2977  HeapEntry* info_entry =
2978  filler_->FindOrAddEntry(info, native_entries_allocator_);
2979  ASSERT(info_entry != NULL);
2980  filler_->SetNamedReference(HeapGraphEdge::kInternal,
2981  wrapper_entry->index(),
2982  "native",
2983  info_entry);
2984  filler_->SetIndexedAutoIndexReference(HeapGraphEdge::kElement,
2985  info_entry->index(),
2986  wrapper_entry);
2987 }
2988 
2989 
2990 void NativeObjectsExplorer::SetRootNativeRootsReference() {
2991  for (HashMap::Entry* entry = native_groups_.Start();
2992  entry;
2993  entry = native_groups_.Next(entry)) {
2994  NativeGroupRetainedObjectInfo* group_info =
2995  static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
2996  HeapEntry* group_entry =
2997  filler_->FindOrAddEntry(group_info, native_entries_allocator_);
2998  ASSERT(group_entry != NULL);
2999  filler_->SetIndexedAutoIndexReference(
3000  HeapGraphEdge::kElement,
3001  snapshot_->root()->index(),
3002  group_entry);
3003  }
3004 }
3005 
3006 
3007 void NativeObjectsExplorer::VisitSubtreeWrapper(Object** p, uint16_t class_id) {
3008  if (in_groups_.Contains(*p)) return;
3009  Isolate* isolate = Isolate::Current();
3010  v8::RetainedObjectInfo* info =
3011  isolate->heap_profiler()->ExecuteWrapperClassCallback(class_id, p);
3012  if (info == NULL) return;
3013  GetListMaybeDisposeInfo(info)->Add(HeapObject::cast(*p));
3014 }
3015 
3016 
3017 class SnapshotFiller : public SnapshotFillerInterface {
3018  public:
3019  explicit SnapshotFiller(HeapSnapshot* snapshot, HeapEntriesMap* entries)
3020  : snapshot_(snapshot),
3021  collection_(snapshot->collection()),
3022  entries_(entries) { }
3023  HeapEntry* AddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
3024  HeapEntry* entry = allocator->AllocateEntry(ptr);
3025  entries_->Pair(ptr, entry->index());
3026  return entry;
3027  }
3028  HeapEntry* FindEntry(HeapThing ptr) {
3029  int index = entries_->Map(ptr);
3030  return index != HeapEntry::kNoEntry ? &snapshot_->entries()[index] : NULL;
3031  }
3032  HeapEntry* FindOrAddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
3033  HeapEntry* entry = FindEntry(ptr);
3034  return entry != NULL ? entry : AddEntry(ptr, allocator);
3035  }
3036  void SetIndexedReference(HeapGraphEdge::Type type,
3037  int parent,
3038  int index,
3039  HeapEntry* child_entry) {
3040  HeapEntry* parent_entry = &snapshot_->entries()[parent];
3041  parent_entry->SetIndexedReference(type, index, child_entry);
3042  }
3043  void SetIndexedAutoIndexReference(HeapGraphEdge::Type type,
3044  int parent,
3045  HeapEntry* child_entry) {
3046  HeapEntry* parent_entry = &snapshot_->entries()[parent];
3047  int index = parent_entry->children_count() + 1;
3048  parent_entry->SetIndexedReference(type, index, child_entry);
3049  }
3050  void SetNamedReference(HeapGraphEdge::Type type,
3051  int parent,
3052  const char* reference_name,
3053  HeapEntry* child_entry) {
3054  HeapEntry* parent_entry = &snapshot_->entries()[parent];
3055  parent_entry->SetNamedReference(type, reference_name, child_entry);
3056  }
3057  void SetNamedAutoIndexReference(HeapGraphEdge::Type type,
3058  int parent,
3059  HeapEntry* child_entry) {
3060  HeapEntry* parent_entry = &snapshot_->entries()[parent];
3061  int index = parent_entry->children_count() + 1;
3062  parent_entry->SetNamedReference(
3063  type,
3064  collection_->names()->GetName(index),
3065  child_entry);
3066  }
3067 
3068  private:
3069  HeapSnapshot* snapshot_;
3070  HeapSnapshotsCollection* collection_;
3071  HeapEntriesMap* entries_;
3072 };
3073 
3074 
3075 HeapSnapshotGenerator::HeapSnapshotGenerator(HeapSnapshot* snapshot,
3076  v8::ActivityControl* control)
3077  : snapshot_(snapshot),
3078  control_(control),
3079  v8_heap_explorer_(snapshot_, this),
3080  dom_explorer_(snapshot_, this) {
3081 }
3082 
3083 
3084 bool HeapSnapshotGenerator::GenerateSnapshot() {
3085  v8_heap_explorer_.TagGlobalObjects();
3086 
3087  // TODO(1562) Profiler assumes that any object that is in the heap after
3088  // full GC is reachable from the root when computing dominators.
3089  // This is not true for weakly reachable objects.
3090  // As a temporary solution we call GC twice.
3091  Isolate::Current()->heap()->CollectAllGarbage(
3092  Heap::kMakeHeapIterableMask,
3093  "HeapSnapshotGenerator::GenerateSnapshot");
3094  Isolate::Current()->heap()->CollectAllGarbage(
3095  Heap::kMakeHeapIterableMask,
3096  "HeapSnapshotGenerator::GenerateSnapshot");
3097 
3098 #ifdef VERIFY_HEAP
3099  Heap* debug_heap = Isolate::Current()->heap();
3100  CHECK(!debug_heap->old_data_space()->was_swept_conservatively());
3101  CHECK(!debug_heap->old_pointer_space()->was_swept_conservatively());
3102  CHECK(!debug_heap->code_space()->was_swept_conservatively());
3103  CHECK(!debug_heap->cell_space()->was_swept_conservatively());
3104  CHECK(!debug_heap->map_space()->was_swept_conservatively());
3105 #endif
3106 
3107  // The following code uses heap iterators, so we want the heap to be
3108  // stable. It should follow TagGlobalObjects as that can allocate.
3109  AssertNoAllocation no_alloc;
3110 
3111 #ifdef VERIFY_HEAP
3112  debug_heap->Verify();
3113 #endif
3114 
3115  SetProgressTotal(1); // 1 pass.
3116 
3117 #ifdef VERIFY_HEAP
3118  debug_heap->Verify();
3119 #endif
3120 
3121  if (!FillReferences()) return false;
3122 
3123  snapshot_->FillChildren();
3124  snapshot_->RememberLastJSObjectId();
3125 
3126  progress_counter_ = progress_total_;
3127  if (!ProgressReport(true)) return false;
3128  return true;
3129 }
3130 
3131 
3132 void HeapSnapshotGenerator::ProgressStep() {
3133  ++progress_counter_;
3134 }
3135 
3136 
3137 bool HeapSnapshotGenerator::ProgressReport(bool force) {
3138  const int kProgressReportGranularity = 10000;
3139  if (control_ != NULL
3140  && (force || progress_counter_ % kProgressReportGranularity == 0)) {
3141  return
3142  control_->ReportProgressValue(progress_counter_, progress_total_) ==
3143  v8::ActivityControl::kContinue;
3144  }
3145  return true;
3146 }
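// A minimal sketch (not part of this file, class name illustrative) of the
// embedder-side v8::ActivityControl that ProgressReport() above polls;
// returning kAbort cancels snapshot generation:
//
//   class LoggingActivityControl : public v8::ActivityControl {
//    public:
//     virtual ControlOption ReportProgressValue(int done, int total) {
//       fprintf(stderr, "heap snapshot: %d/%d\n", done, total);
//       return kContinue;
//     }
//   };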
3147 
3148 
3149 void HeapSnapshotGenerator::SetProgressTotal(int iterations_count) {
3150  if (control_ == NULL) return;
3151  HeapIterator iterator(HeapIterator::kFilterUnreachable);
3152  progress_total_ = iterations_count * (
3153  v8_heap_explorer_.EstimateObjectsCount(&iterator) +
3154  dom_explorer_.EstimateObjectsCount());
3155  progress_counter_ = 0;
3156 }
3157 
3158 
3159 bool HeapSnapshotGenerator::FillReferences() {
3160  SnapshotFiller filler(snapshot_, &entries_);
3161  v8_heap_explorer_.AddRootEntries(&filler);
3162  return v8_heap_explorer_.IterateAndExtractReferences(&filler)
3163  && dom_explorer_.IterateAndExtractReferences(&filler);
3164 }
3165 
3166 
3167 template<int bytes> struct MaxDecimalDigitsIn;
3168 template<> struct MaxDecimalDigitsIn<4> {
3169  static const int kSigned = 11;
3170  static const int kUnsigned = 10;
3171 };
3172 template<> struct MaxDecimalDigitsIn<8> {
3173  static const int kSigned = 20;
3174  static const int kUnsigned = 20;
3175 };
3176 
3177 
3178 class OutputStreamWriter {
3179  public:
3180  explicit OutputStreamWriter(v8::OutputStream* stream)
3181  : stream_(stream),
3182  chunk_size_(stream->GetChunkSize()),
3183  chunk_(chunk_size_),
3184  chunk_pos_(0),
3185  aborted_(false) {
3186  ASSERT(chunk_size_ > 0);
3187  }
3188  bool aborted() { return aborted_; }
3189  void AddCharacter(char c) {
3190  ASSERT(c != '\0');
3191  ASSERT(chunk_pos_ < chunk_size_);
3192  chunk_[chunk_pos_++] = c;
3193  MaybeWriteChunk();
3194  }
3195  void AddString(const char* s) {
3196  AddSubstring(s, StrLength(s));
3197  }
3198  void AddSubstring(const char* s, int n) {
3199  if (n <= 0) return;
3200  ASSERT(static_cast<size_t>(n) <= strlen(s));
3201  const char* s_end = s + n;
3202  while (s < s_end) {
3203  int s_chunk_size = Min(
3204  chunk_size_ - chunk_pos_, static_cast<int>(s_end - s));
3205  ASSERT(s_chunk_size > 0);
3206  memcpy(chunk_.start() + chunk_pos_, s, s_chunk_size);
3207  s += s_chunk_size;
3208  chunk_pos_ += s_chunk_size;
3209  MaybeWriteChunk();
3210  }
3211  }
3212  void AddNumber(unsigned n) { AddNumberImpl<unsigned>(n, "%u"); }
3213  void Finalize() {
3214  if (aborted_) return;
3215  ASSERT(chunk_pos_ < chunk_size_);
3216  if (chunk_pos_ != 0) {
3217  WriteChunk();
3218  }
3219  stream_->EndOfStream();
3220  }
3221 
3222  private:
3223  template<typename T>
3224  void AddNumberImpl(T n, const char* format) {
3225  // Buffer for the longest value plus trailing \0
3226  static const int kMaxNumberSize =
3227  MaxDecimalDigitsIn<sizeof(T)>::kUnsigned + 1;
3228  if (chunk_size_ - chunk_pos_ >= kMaxNumberSize) {
3229  int result = OS::SNPrintF(
3230  chunk_.SubVector(chunk_pos_, chunk_size_), format, n);
3231  ASSERT(result != -1);
3232  chunk_pos_ += result;
3233  MaybeWriteChunk();
3234  } else {
3235  EmbeddedVector<char, kMaxNumberSize> buffer;
3236  int result = OS::SNPrintF(buffer, format, n);
3237  USE(result);
3238  ASSERT(result != -1);
3239  AddString(buffer.start());
3240  }
3241  }
3242  void MaybeWriteChunk() {
3243  ASSERT(chunk_pos_ <= chunk_size_);
3244  if (chunk_pos_ == chunk_size_) {
3245  WriteChunk();
3246  }
3247  }
3248  void WriteChunk() {
3249  if (aborted_) return;
3250  if (stream_->WriteAsciiChunk(chunk_.start(), chunk_pos_) ==
3251  v8::OutputStream::kAbort) aborted_ = true;
3252  chunk_pos_ = 0;
3253  }
3254 
3255  v8::OutputStream* stream_;
3256  int chunk_size_;
3257  ScopedVector<char> chunk_;
3258  int chunk_pos_;
3259  bool aborted_;
3260 };
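// A minimal sketch (not part of this file, class name illustrative) of the
// v8::OutputStream that the writer above drives; only GetChunkSize(),
// WriteAsciiChunk() and EndOfStream() are needed:
//
//   class FileOutputStream : public v8::OutputStream {
//    public:
//     explicit FileOutputStream(FILE* file) : file_(file) {}
//     virtual int GetChunkSize() { return 16 * 1024; }
//     virtual WriteResult WriteAsciiChunk(char* data, int size) {
//       return fwrite(data, 1, size, file_) == static_cast<size_t>(size)
//           ? kContinue : kAbort;
//     }
//     virtual void EndOfStream() { fflush(file_); }
//    private:
//     FILE* file_;
//   };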
3261 
3262 
3263 // type, name|index, to_node.
3264 const int HeapSnapshotJSONSerializer::kEdgeFieldsCount = 3;
3265 // type, name, id, self_size, children_index.
3266 const int HeapSnapshotJSONSerializer::kNodeFieldsCount = 5;
3267 
3268 void HeapSnapshotJSONSerializer::Serialize(v8::OutputStream* stream) {
3269  ASSERT(writer_ == NULL);
3270  writer_ = new OutputStreamWriter(stream);
3271 
3272  HeapSnapshot* original_snapshot = NULL;
3273  if (snapshot_->RawSnapshotSize() >=
3274  SnapshotSizeConstants<kPointerSize>::kMaxSerializableSnapshotRawSize) {
3275  // The snapshot is too big. Serialize a fake snapshot.
3276  original_snapshot = snapshot_;
3277  snapshot_ = CreateFakeSnapshot();
3278  }
3279 
3280  SerializeImpl();
3281 
3282  delete writer_;
3283  writer_ = NULL;
3284 
3285  if (original_snapshot != NULL) {
3286  delete snapshot_;
3287  snapshot_ = original_snapshot;
3288  }
3289 }
3290 
3291 
3292 HeapSnapshot* HeapSnapshotJSONSerializer::CreateFakeSnapshot() {
3293  HeapSnapshot* result = new HeapSnapshot(snapshot_->collection(),
3294  HeapSnapshot::kFull,
3295  snapshot_->title(),
3296  snapshot_->uid());
3297  result->AddRootEntry();
3298  const char* text = snapshot_->collection()->names()->GetFormatted(
3299  "The snapshot is too big. "
3300  "Maximum snapshot size is %" V8_PTR_PREFIX "u MB. "
3301  "Actual snapshot size is %" V8_PTR_PREFIX "u MB.",
3302  SnapshotSizeConstants<kPointerSize>::kMaxSerializableSnapshotRawSize / MB,
3303  (snapshot_->RawSnapshotSize() + MB - 1) / MB);
3304  HeapEntry* message = result->AddEntry(HeapEntry::kString, text, 0, 4);
3305  result->root()->SetIndexedReference(HeapGraphEdge::kElement, 1, message);
3306  result->FillChildren();
3307  return result;
3308 }
3309 
3310 
3311 void HeapSnapshotJSONSerializer::SerializeImpl() {
3312  ASSERT(0 == snapshot_->root()->index());
3313  writer_->AddCharacter('{');
3314  writer_->AddString("\"snapshot\":{");
3315  SerializeSnapshot();
3316  if (writer_->aborted()) return;
3317  writer_->AddString("},\n");
3318  writer_->AddString("\"nodes\":[");
3319  SerializeNodes();
3320  if (writer_->aborted()) return;
3321  writer_->AddString("],\n");
3322  writer_->AddString("\"edges\":[");
3323  SerializeEdges();
3324  if (writer_->aborted()) return;
3325  writer_->AddString("],\n");
3326  writer_->AddString("\"strings\":[");
3327  SerializeStrings();
3328  if (writer_->aborted()) return;
3329  writer_->AddCharacter(']');
3330  writer_->AddCharacter('}');
3331  writer_->Finalize();
3332 }
3333 
3334 
3335 int HeapSnapshotJSONSerializer::GetStringId(const char* s) {
3336  HashMap::Entry* cache_entry = strings_.Lookup(
3337  const_cast<char*>(s), ObjectHash(s), true);
3338  if (cache_entry->value == NULL) {
3339  cache_entry->value = reinterpret_cast<void*>(next_string_id_++);
3340  }
3341  return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
3342 }
3343 
3344 
3345 static int utoa(unsigned value, const Vector<char>& buffer, int buffer_pos) {
3346  int number_of_digits = 0;
3347  unsigned t = value;
3348  do {
3349  ++number_of_digits;
3350  } while (t /= 10);
3351 
3352  buffer_pos += number_of_digits;
3353  int result = buffer_pos;
3354  do {
3355  int last_digit = value % 10;
3356  buffer[--buffer_pos] = '0' + last_digit;
3357  value /= 10;
3358  } while (value);
3359  return result;
3360 }
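// Example: utoa(1234, buffer, 0) writes the characters "1234" into
// buffer[0..3] (no terminating '\0') and returns 4, the next free position.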
3361 
3362 
3363 void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge,
3364  bool first_edge) {
3365  // The buffer needs space for 3 unsigned ints, 3 commas, \n and \0
3366  static const int kBufferSize =
3367  MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned * 3 + 3 + 2; // NOLINT
3368  EmbeddedVector<char, kBufferSize> buffer;
3369  int edge_name_or_index = edge->type() == HeapGraphEdge::kElement
3370  || edge->type() == HeapGraphEdge::kHidden
3371  || edge->type() == HeapGraphEdge::kWeak
3372  ? edge->index() : GetStringId(edge->name());
3373  int buffer_pos = 0;
3374  if (!first_edge) {
3375  buffer[buffer_pos++] = ',';
3376  }
3377  buffer_pos = utoa(edge->type(), buffer, buffer_pos);
3378  buffer[buffer_pos++] = ',';
3379  buffer_pos = utoa(edge_name_or_index, buffer, buffer_pos);
3380  buffer[buffer_pos++] = ',';
3381  buffer_pos = utoa(entry_index(edge->to()), buffer, buffer_pos);
3382  buffer[buffer_pos++] = '\n';
3383  buffer[buffer_pos++] = '\0';
3384  writer_->AddString(buffer.start());
3385 }
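// Example: every edge is written as one "type,name_or_index,to_node" row
// (the field order noted above kEdgeFieldsCount); element, hidden and weak
// edges store a numeric index in the middle field, all other types store an
// id into the "strings" table, and to_node locates the target node's record
// in the "nodes" array via entry_index().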
3386 
3387 
3388 void HeapSnapshotJSONSerializer::SerializeEdges() {
3389  List<HeapGraphEdge*>& edges = snapshot_->children();
3390  for (int i = 0; i < edges.length(); ++i) {
3391  ASSERT(i == 0 ||
3392  edges[i - 1]->from()->index() <= edges[i]->from()->index());
3393  SerializeEdge(edges[i], i == 0);
3394  if (writer_->aborted()) return;
3395  }
3396 }
3397 
3398 
3399 void HeapSnapshotJSONSerializer::SerializeNode(HeapEntry* entry) {
3400  // The buffer needs space for 5 unsigned ints, 5 commas, \n and \0
3401  static const int kBufferSize =
3402  5 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned // NOLINT
3403  + 5 + 1 + 1;
3404  EmbeddedVector<char, kBufferSize> buffer;
3405  int buffer_pos = 0;
3406  if (entry_index(entry) != 0) {
3407  buffer[buffer_pos++] = ',';
3408  }
3409  buffer_pos = utoa(entry->type(), buffer, buffer_pos);
3410  buffer[buffer_pos++] = ',';
3411  buffer_pos = utoa(GetStringId(entry->name()), buffer, buffer_pos);
3412  buffer[buffer_pos++] = ',';
3413  buffer_pos = utoa(entry->id(), buffer, buffer_pos);
3414  buffer[buffer_pos++] = ',';
3415  buffer_pos = utoa(entry->self_size(), buffer, buffer_pos);
3416  buffer[buffer_pos++] = ',';
3417  buffer_pos = utoa(entry->children_count(), buffer, buffer_pos);
3418  buffer[buffer_pos++] = '\n';
3419  buffer[buffer_pos++] = '\0';
3420  writer_->AddString(buffer.start());
3421 }
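// Example: each entry becomes one "type,name,id,self_size,edge_count" row,
// in the field order listed in the "node_fields" meta written by
// SerializeSnapshot(); e.g. "3,7,225,40,2\n" would be a node whose name is
// strings[7], with snapshot object id 225, 40 bytes of self size and 2
// outgoing edges (the leading comma is added for every node after the first).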
3422 
3423 
3424 void HeapSnapshotJSONSerializer::SerializeNodes() {
3425  List<HeapEntry>& entries = snapshot_->entries();
3426  for (int i = 0; i < entries.length(); ++i) {
3427  SerializeNode(&entries[i]);
3428  if (writer_->aborted()) return;
3429  }
3430 }
3431 
3432 
3433 void HeapSnapshotJSONSerializer::SerializeSnapshot() {
3434  writer_->AddString("\"title\":\"");
3435  writer_->AddString(snapshot_->title());
3436  writer_->AddString("\"");
3437  writer_->AddString(",\"uid\":");
3438  writer_->AddNumber(snapshot_->uid());
3439  writer_->AddString(",\"meta\":");
3440  // The object describing node serialization layout.
3441  // We use a set of macros to improve readability.
3442 #define JSON_A(s) "[" s "]"
3443 #define JSON_O(s) "{" s "}"
3444 #define JSON_S(s) "\"" s "\""
3445  writer_->AddString(JSON_O(
3446  JSON_S("node_fields") ":" JSON_A(
3447  JSON_S("type") ","
3448  JSON_S("name") ","
3449  JSON_S("id") ","
3450  JSON_S("self_size") ","
3451  JSON_S("edge_count")) ","
3452  JSON_S("node_types") ":" JSON_A(
3453  JSON_A(
3454  JSON_S("hidden") ","
3455  JSON_S("array") ","
3456  JSON_S("string") ","
3457  JSON_S("object") ","
3458  JSON_S("code") ","
3459  JSON_S("closure") ","
3460  JSON_S("regexp") ","
3461  JSON_S("number") ","
3462  JSON_S("native") ","
3463  JSON_S("synthetic")) ","
3464  JSON_S("string") ","
3465  JSON_S("number") ","
3466  JSON_S("number") ","
3467  JSON_S("number") ","
3468  JSON_S("number") ","
3469  JSON_S("number")) ","
3470  JSON_S("edge_fields") ":" JSON_A(
3471  JSON_S("type") ","
3472  JSON_S("name_or_index") ","
3473  JSON_S("to_node")) ","
3474  JSON_S("edge_types") ":" JSON_A(
3475  JSON_A(
3476  JSON_S("context") ","
3477  JSON_S("element") ","
3478  JSON_S("property") ","
3479  JSON_S("internal") ","
3480  JSON_S("hidden") ","
3481  JSON_S("shortcut") ","
3482  JSON_S("weak")) ","
3483  JSON_S("string_or_number") ","
3484  JSON_S("node"))));
3485 #undef JSON_S
3486 #undef JSON_O
3487 #undef JSON_A
3488  writer_->AddString(",\"node_count\":");
3489  writer_->AddNumber(snapshot_->entries().length());
3490  writer_->AddString(",\"edge_count\":");
3491  writer_->AddNumber(snapshot_->edges().length());
3492 }
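// The overall document produced by SerializeImpl() therefore looks like
// (a sketch, abbreviated):
//
//   {"snapshot":{"title":"...","uid":1,"meta":{...},
//                "node_count":N,"edge_count":M},
//    "nodes":[...],
//    "edges":[...],
//    "strings":["<dummy>","..."]}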
3493 
3494 
3495 static void WriteUChar(OutputStreamWriter* w, unibrow::uchar u) {
3496  static const char hex_chars[] = "0123456789ABCDEF";
3497  w->AddString("\\u");
3498  w->AddCharacter(hex_chars[(u >> 12) & 0xf]);
3499  w->AddCharacter(hex_chars[(u >> 8) & 0xf]);
3500  w->AddCharacter(hex_chars[(u >> 4) & 0xf]);
3501  w->AddCharacter(hex_chars[u & 0xf]);
3502 }
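// Example: WriteUChar(writer, 0x2603) emits the six characters "\u2603".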
3503 
3504 void HeapSnapshotJSONSerializer::SerializeString(const unsigned char* s) {
3505  writer_->AddCharacter('\n');
3506  writer_->AddCharacter('\"');
3507  for ( ; *s != '\0'; ++s) {
3508  switch (*s) {
3509  case '\b':
3510  writer_->AddString("\\b");
3511  continue;
3512  case '\f':
3513  writer_->AddString("\\f");
3514  continue;
3515  case '\n':
3516  writer_->AddString("\\n");
3517  continue;
3518  case '\r':
3519  writer_->AddString("\\r");
3520  continue;
3521  case '\t':
3522  writer_->AddString("\\t");
3523  continue;
3524  case '\"':
3525  case '\\':
3526  writer_->AddCharacter('\\');
3527  writer_->AddCharacter(*s);
3528  continue;
3529  default:
3530  if (*s > 31 && *s < 128) {
3531  writer_->AddCharacter(*s);
3532  } else if (*s <= 31) {
3533  // Special character with no dedicated literal.
3534  WriteUChar(writer_, *s);
3535  } else {
3536  // Convert UTF-8 into \u UTF-16 literal.
3537  unsigned length = 1, cursor = 0;
3538  for ( ; length <= 4 && *(s + length) != '\0'; ++length) { }
3539  unibrow::uchar c = unibrow::Utf8::CalculateValue(s, length, &cursor);
3540  if (c != unibrow::Utf8::kBadChar) {
3541  WriteUChar(writer_, c);
3542  ASSERT(cursor != 0);
3543  s += cursor - 1;
3544  } else {
3545  writer_->AddCharacter('?');
3546  }
3547  }
3548  }
3549  }
3550  writer_->AddCharacter('\"');
3551 }
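// Example: the string a"☃ is emitted as a quoted token on its own line:
// "a\"\u2603" (printable ASCII passes through, quotes and backslashes get a
// backslash escape, and multi-byte UTF-8 sequences become \uXXXX literals).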
3552 
3553 
3554 void HeapSnapshotJSONSerializer::SerializeStrings() {
3555  List<HashMap::Entry*> sorted_strings;
3556  SortHashMap(&strings_, &sorted_strings);
3557  writer_->AddString("\"<dummy>\"");
3558  for (int i = 0; i < sorted_strings.length(); ++i) {
3559  writer_->AddCharacter(',');
3560  SerializeString(
3561  reinterpret_cast<const unsigned char*>(sorted_strings[i]->key));
3562  if (writer_->aborted()) return;
3563  }
3564 }
3565 
3566 
3567 template<typename T>
3568 inline static int SortUsingEntryValue(const T* x, const T* y) {
3569  uintptr_t x_uint = reinterpret_cast<uintptr_t>((*x)->value);
3570  uintptr_t y_uint = reinterpret_cast<uintptr_t>((*y)->value);
3571  if (x_uint > y_uint) {
3572  return 1;
3573  } else if (x_uint == y_uint) {
3574  return 0;
3575  } else {
3576  return -1;
3577  }
3578 }
3579 
3580 
3581 void HeapSnapshotJSONSerializer::SortHashMap(
3582  HashMap* map, List<HashMap::Entry*>* sorted_entries) {
3583  for (HashMap::Entry* p = map->Start(); p != NULL; p = map->Next(p))
3584  sorted_entries->Add(p);
3585  sorted_entries->Sort(SortUsingEntryValue);
3586 }
3587 
3588 } } // namespace v8::internal
Definition: objects-inl.h:971
static Smi * cast(Object *object)
virtual void SetIndexedAutoIndexReference(HeapGraphEdge::Type type, int parent_entry, HeapEntry *child_entry)=0
int operator()(HeapEntry *const *entry)
static const SnapshotObjectId kGcRootsObjectId
static SnapshotObjectId GenerateId(v8::RetainedObjectInfo *info)
void SetIndexedAutoIndexReference(HeapGraphEdge::Type type, int parent, HeapEntry *child_entry)
SmartArrayPointer< char > ToCString(AllowNullsFlag allow_nulls, RobustnessFlag robustness_flag, int offset, int length, int *length_output=0)
Definition: objects.cc:6233
void BeforeTraversingChild(ProfileNode *, ProfileNode *)
T ** location() const
Definition: handles.h:75
const char * GetName(String *name)
static const int kLiteralsOffset
Definition: objects.h:6188
#define EXTRACT_CONTEXT_FIELD(index, type, name)
static const int kSourceOffset
Definition: objects.h:5299
#define UNREACHABLE()
Definition: checks.h:50
SnapshotObjectId last_assigned_id() const
virtual void SetNamedReference(HeapGraphEdge::Type type, int parent_entry, const char *reference_name, HeapEntry *child_entry)=0
T * start() const
Definition: utils.h:390
void AfterChildTraversed(ProfileNode *parent, ProfileNode *child)
T & last() const
Definition: list.h:91
CodeEntry * FindEntry(Address addr)
Position(ProfileNode *node)
static JSGlobalProxy * cast(Object *obj)
NativeObjectsExplorer(HeapSnapshot *snapshot, SnapshottingProgressReportingInterface *progress)
List< HeapEntry > & entries()
static const int kGCMetadataOffset
Definition: objects.h:4537
const intptr_t kFailureTagMask
Definition: v8globals.h:64
void RecordTickSample(const TickSample &sample)
const char * GetTag(Object *obj)
static SlicedString * cast(Object *obj)
int GetSharedId(Address addr)
static const int kScopeInfoOffset
Definition: objects.h:5798
virtual int GetChunkSize()
Definition: v8.h:3961
static String * GetConstructorName(JSObject *object)
Handle< Object > Create(Object *value)
HeapEntry * gc_subroot(int index)
virtual const char * GetLabel()=0
ProfileNode * FindChild(CodeEntry *entry)
HeapSnapshotsCollection * collection()
JSObject * global_proxy()
Definition: contexts.cc:78
const int kPointerSize
Definition: globals.h:220
uint32_t occupancy() const
Definition: hashmap.h:83
static HeapObject *const kInternalRootObject
const int kHeapObjectTag
Definition: v8.h:4009
T Remove(int i)
Definition: list-inl.h:116
GlobalHandles * global_handles()
Definition: isolate.h:880
virtual WriteResult WriteHeapStatsChunk(HeapStatsUpdate *data, int count)
Definition: v8.h:3975
Entry * Lookup(void *key, uint32_t hash, bool insert, AllocationPolicy allocator=AllocationPolicy())
Definition: hashmap.h:131
void MoveCode(Address from, Address to)
static const char *const kProgramEntryName
static const int kNameOffset
Definition: objects.h:5795
intptr_t AtomicWord
Definition: atomicops.h:75
#define JSON_A(s)
double TicksToMillis(unsigned ticks) const
int length() const
Definition: utils.h:384
OldSpace * old_pointer_space()
Definition: heap.h:506
static const int kPropertiesOffset
Definition: objects.h:2171
static const SnapshotObjectId kFirstAvailableObjectId
List< HeapGraphEdge > & edges()
static double TimeCurrentMillis()
HeapSnapshotGenerator(HeapSnapshot *snapshot, v8::ActivityControl *control)
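The HeapSnapshotGenerator constructor above accepts a v8::ActivityControl, whose only hook is the pure virtual ReportProgressValue(done, total) also listed in this index. A minimal sketch of an embedder-side implementation follows; the use of the 3.x static HeapProfiler::TakeSnapshot API in the usage comment is an assumption to be checked against the headers in this tree.
// Sketch only: a v8::ActivityControl that logs heap snapshot progress.
// Returning kAbort from ReportProgressValue would make the generator stop early.
#include <cstdio>
#include "v8.h"

class ProgressControl : public v8::ActivityControl {
 public:
  virtual ControlOption ReportProgressValue(int done, int total) {
    std::printf("heap snapshot: %d/%d\n", done, total);
    return kContinue;  // keep generating; kAbort cancels the snapshot
  }
};

// Possible usage (hypothetical title, assuming the 3.x static API):
//   ProgressControl control;
//   v8::HeapProfiler::TakeSnapshot(v8::String::New("with-progress"),
//                                  v8::HeapSnapshot::kFull, &control);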
DEFINE_bool(use_verbose_printer, ...)  (full concatenated V8 flag help text omitted)
#define SYMBOL_LIST(V)
Definition: heap.h:163
void IterateAllRoots(ObjectVisitor *v)
SnapshotObjectId FindEntry(Address addr)
OldSpace * code_space()
Definition: heap.h:508
static const int kMakeHeapIterableMask
Definition: heap.h:1088
bool IsSameAs(CodeEntry *entry) const
void AddPathFromEnd(const Vector< CodeEntry * > &path)
#define V8_PTR_PREFIX
Definition: globals.h:181
virtual void SetIndexedReference(HeapGraphEdge::Type type, int parent_entry, int index, HeapEntry *child_entry)=0
static const int kLineEndsOffset
Definition: objects.h:5310
V8HeapExplorer(HeapSnapshot *snapshot, SnapshottingProgressReportingInterface *progress)
static const int kElementsOffset
Definition: objects.h:2172
static Vector< T > New(int length)
Definition: utils.h:370
void RemoveProfile(CpuProfile *profile)
HeapEntry * FindEntry(HeapThing ptr)
static const int kTypeFeedbackInfoOffset
Definition: objects.h:4535
V8 flag definition ("activate correct semantics for inheriting readonliness", default false)
Definition: flags.cc:141
virtual void VisitPointers(Object **start, Object **end)
void IterateAllRootsWithClassIds(ObjectVisitor *v)
static const int kRelocationInfoOffset
Definition: objects.h:4531
static const int kNonWeakFieldsEndOffset
Definition: objects.h:6189
CpuProfile * FilteredClone(int security_token_id)
Vector< const char > CStrVector(const char *data)
Definition: utils.h:526
CellSpace * cell_space()
Definition: heap.h:510
int StrLength(const char *string)
Definition: utils.h:234
static int OffsetOfElementAt(int index)
Definition: objects.h:2356
static JSArray * cast(Object *obj)
static void Print(const char *format,...)
#define T(name, string, precedence)
Definition: token.cc:48
static const char *const kGarbageCollectorEntryName
INLINE(bool has_current_child())
void AddPathToCurrentProfiles(const Vector< CodeEntry * > &path)
static const int kBackPointerStorageOffset
Definition: transitions.h:152
HeapEntry * GetEntryById(SnapshotObjectId id)
virtual HeapEntry * FindOrAddEntry(HeapThing ptr, HeapEntriesAllocator *allocator)=0
void AddRootEntries(SnapshotFillerInterface *filler)
List< ObjectGroup * > * object_groups()
static int SNPrintF(Vector< char > str, const char *format,...)
void CopyData(const CodeEntry &source)
void UpdateMeasurements(double current_time)
static const unsigned kWallTimeQueryIntervalMs
static const int kMapOffset
Definition: objects.h:1261
#define JSON_S(s)
void AddPath(const Vector< CodeEntry * > &path)
static const int kFunctionDataOffset
Definition: objects.h:5802
void AddCode(Address addr, CodeEntry *entry, unsigned size)
Handle< String > NewStringFromAscii(Vector< const char > str, PretenureFlag pretenure=NOT_TENURED)
Definition: factory.cc:199
INLINE(void next_child())
static const int kNormalTypeCacheOffset
Definition: objects.h:6781
void Serialize(v8::OutputStream *stream)
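Serialize(v8::OutputStream*) above writes the heap snapshot out through the v8::OutputStream interface whose members (GetChunkSize, WriteAsciiChunk, EndOfStream, WriteHeapStatsChunk) also appear in this index. Below is a minimal sketch of an embedder-side stream that collects the JSON into a string; the TakeSnapshot/Serialize calls in the usage comment assume the 3.x public profiler API and should be verified against v8.h and v8-profiler.h in this tree.
// Sketch only: a v8::OutputStream that appends serialized snapshot JSON
// to a std::string buffer.
#include <string>
#include "v8.h"
#include "v8-profiler.h"

class StringOutputStream : public v8::OutputStream {
 public:
  virtual void EndOfStream() {}                // nothing left to flush
  virtual int GetChunkSize() { return 4096; }  // chunk size hint for the serializer
  virtual WriteResult WriteAsciiChunk(char* data, int size) {
    json_.append(data, size);
    return kContinue;                          // kAbort would stop serialization
  }
  const std::string& json() const { return json_; }
 private:
  std::string json_;
};

// Possible usage (inside an entered isolate/context):
//   const v8::HeapSnapshot* snapshot =
//       v8::HeapProfiler::TakeSnapshot(v8::String::New("profile"));
//   StringOutputStream stream;
//   snapshot->Serialize(&stream, v8::HeapSnapshot::kJSON);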
void AddPathFromStart(const Vector< CodeEntry * > &path)
virtual WriteResult WriteAsciiChunk(char *data, int size)=0
void IterateRoots(ObjectVisitor *v, VisitMode mode)
Definition: heap.cc:5740
HeapEntry * AddEntry(HeapEntry::Type type, const char *name, SnapshotObjectId id, int size)
void Sort(int(*cmp)(const T *x, const T *y))
Definition: list-inl.h:198
bool Remove(const Key &key)
virtual void Dispose()=0
static const char *const kAnonymousFunctionName
uint32_t ComputeIntegerHash(uint32_t key, uint32_t seed)
Definition: utils.h:286
GcSubrootsEnumerator(SnapshotFillerInterface *filler, V8HeapExplorer *explorer)
#define STRUCT_LIST(V)
Definition: objects.h:448
bool FindGreatestLessThan(const Key &key, Locator *locator)
INLINE(ProfileNode *current_child())
FilteredCloneCallback(ProfileNode *dst_root, int security_token_id)
static const SnapshotObjectId kInternalRootObjectId
virtual void VisitEmbedderReference(Object **p, uint16_t class_id)
static JSGlobalPropertyCell * cast(Object *obj)
List< HeapEntry * > * GetSortedEntriesList()
uint32_t SnapshotObjectId
Definition: v8-profiler.h:68
virtual HeapEntry * FindEntry(HeapThing ptr)=0
String * hidden_symbol()
Definition: heap.h:1184
static const int kInferredNameOffset
Definition: objects.h:5806
static const int kThisPropertyAssignmentsOffset
Definition: objects.h:5809
void SnapshotGenerationFinished(HeapSnapshot *snapshot)
const char * GetCopy(const char *src)
virtual void Signal()=0
uint32_t HashSequentialString(const schar *chars, int length, uint32_t seed)
Definition: objects-inl.h:5013
void Pair(HeapThing thing, int entry)
T & first() const
Definition: list.h:92
HeapEntry * AddGcSubrootEntry(int tag)
void * Remove(void *key, uint32_t hash)
Definition: hashmap.h:162
TemplateHashMapImpl< FreeStoreAllocationPolicy > HashMap
Definition: hashmap.h:113
void AfterAllChildrenTraversed(ProfileNode *node)
SnapshotObjectId last_assigned_id() const
#define HEAP
Definition: isolate.h:1433
static const char *const kEmptyNamePrefix
MUST_USE_RESULT MaybeObject * GetProperty(String *key)
Definition: objects-inl.h:859
uint32_t capacity() const
Definition: hashmap.h:88
static const int kNameOffset
Definition: objects.h:5300
virtual intptr_t GetElementCount()
Definition: v8-profiler.h:531
InstanceType instance_type()
Definition: objects-inl.h:3009
static const uchar kBadChar
Definition: unicode.h:162
void USE(T)
Definition: globals.h:289
static const int kConstructorOffset
Definition: objects.h:5127
V8 flag definition ("Print usage message")  (full concatenated V8 flag help text omitted)
Definition: flags.cc:495
int SortedListBSearch(const List< T > &list, P cmp)
Definition: list-inl.h:223
static void StrNCpy(Vector< char > dest, const char *src, size_t n)
Handle< JSGlobalObject > & at(int i)
static FixedArray * cast(Object *obj)
HeapSnapshot * NewSnapshot(HeapSnapshot::Type type, const char *name, unsigned uid)
bool StartProfiling(const char *title, unsigned uid)
void FillReferences(V8HeapExplorer *explorer)
MapSpace * map_space()
Definition: heap.h:509
HeapSnapshot * GetSnapshot(unsigned uid)
static const int kBoundFunctionIndex
Definition: objects.h:6198
virtual void Wait()=0
const int kFailureTag
Definition: v8globals.h:62
void Add(const T &element, AllocationPolicy allocator=AllocationPolicy())
Definition: list-inl.h:38
static const int kScriptOffset
Definition: objects.h:5804
V8 flag definition (default NULL)  (full concatenated V8 flag help text omitted)
Definition: flags.cc:301
static const int kPrototypeOffset
Definition: objects.h:5126
void Synchronize(VisitorSynchronization::SyncTag tag)
static const int kSize
Definition: objects.h:6191
List< HeapGraphEdge * > & children()
SnapshotObjectId GetObjectId(Address object_addr, int object_size)
virtual void VisitPointers(Object **start, Object **end)
void Synchronize(VisitorSynchronization::SyncTag tag)
DEFINE_bool(code_comments, ...)  (full concatenated V8 flag help text omitted)
static const int kContextOffset
Definition: objects.h:5304
void MakeWeak(Object **location, void *parameter, WeakReferenceCallback callback)
static const int kNativeContextOffset
Definition: objects.h:6286
int EstimateObjectsCount(HeapIterator *iterator)
virtual void EndOfStream()=0
const char * GetVFormatted(const char *format, va_list args)
CpuProfile * StopProfiling(int security_token_id, const char *title, double actual_sampling_rate)
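The StartProfiling/StopProfiling entries above belong to the internal CpuProfilesCollection; embedders normally reach them through the static v8::CpuProfiler methods of the 3.x series. The sketch below shows that outer flow under that assumption; exact signatures should be checked against the v8-profiler.h shipped with this tree.
// Sketch only: collecting a CPU profile via the public 3.x CpuProfiler API.
#include "v8.h"
#include "v8-profiler.h"

const v8::CpuProfile* CollectProfile() {
  v8::HandleScope scope;
  v8::Handle<v8::String> title = v8::String::New("startup");

  v8::CpuProfiler::StartProfiling(title);   // begins tick sampling
  // ... run the JavaScript of interest here ...
  const v8::CpuProfile* profile = v8::CpuProfiler::StopProfiling(title);

  // The returned profile exposes the sampled call tree, e.g.:
  //   const v8::CpuProfileNode* root = profile->GetTopDownRoot();
  return profile;
}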
static GlobalObject * cast(Object *obj)
static const int kBoundThisIndex
Definition: objects.h:6199
static const int kConstructStubOffset
Definition: objects.h:5799
void DeleteArray(T *array)
Definition: allocation.h:91
T Min(T a, T b)
Definition: utils.h:229
static const int kSharedFunctionInfoOffset
Definition: objects.h:6185
static ConsString * cast(Object *obj)
void AfterAllChildrenTraversed(ProfileNode *parent)
static const int kNoLineNumberInfo
Definition: v8-profiler.h:114
static CodeCache * cast(Object *obj)
virtual intptr_t GetSizeInBytes()
Definition: v8-profiler.h:534
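Several v8::RetainedObjectInfo virtuals (GetLabel, GetGroupLabel, GetElementCount, GetSizeInBytes, IsEquivalent, Dispose) are referenced in this index; NativeObjectsExplorer consumes such objects to label native retainers in a snapshot. The following is a hedged sketch of an embedder-side implementation; the class name, label, and size bookkeeping are illustrative only.
// Sketch only: a RetainedObjectInfo describing a hypothetical native wrapper.
#include <cstring>
#include "v8.h"
#include "v8-profiler.h"

class WrapperInfo : public v8::RetainedObjectInfo {
 public:
  explicit WrapperInfo(size_t native_size) : native_size_(native_size) {}
  virtual void Dispose() { delete this; }   // profiler releases the info object
  virtual bool IsEquivalent(RetainedObjectInfo* other) {
    return std::strcmp(GetLabel(), other->GetLabel()) == 0;
  }
  virtual intptr_t GetHash() { return 0; }  // group all wrappers together
  virtual const char* GetLabel() { return "NativeWrapper"; }
  virtual intptr_t GetSizeInBytes() {
    return static_cast<intptr_t>(native_size_);  // reported native size
  }
 private:
  size_t native_size_;
};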
virtual HeapEntry * AllocateEntry(HeapThing ptr)
const char * GetName(String *name)
void MoveObject(Address from, Address to)
static const int kCodeCacheOffset
Definition: objects.h:5136
ProfileNode * FindOrAddChild(CodeEntry *entry)
CpuProfile(const char *title, unsigned uid)
static const int kBoundArgumentsStartIndex
Definition: objects.h:6200
HeapSnapshot(HeapSnapshotsCollection *collection, Type type, const char *title, unsigned uid)
virtual const char * GetGroupLabel()
Definition: v8-profiler.h:525
static JSObject * cast(Object *obj)
OldSpace * old_data_space()
Definition: heap.h:507
unsigned int uchar
Definition: unicode.h:40
Entry * Next(Entry *p) const
Definition: hashmap.h:243
static const char *const kTagNames[kNumberOfSyncTags]
Definition: objects.h:8863
v8::RetainedObjectInfo * info_
List< CpuProfile * > * Profiles(int security_token_id)
#define JSON_O(s)
String * constructor_name()
Definition: objects.cc:1487
static JSGlobalObject * cast(Object *obj)
const int MB
Definition: globals.h:208
static JSFunction * cast(Object *obj)