v8 3.11.10 (node 0.8.26)
V8 is Google's open source JavaScript engine
profile-generator.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #include "profile-generator-inl.h"
31 
32 #include "global-handles.h"
33 #include "heap-profiler.h"
34 #include "scopeinfo.h"
35 #include "unicode.h"
36 #include "zone-inl.h"
37 #include "debug.h"
38 
39 namespace v8 {
40 namespace internal {
41 
42 
43 TokenEnumerator::TokenEnumerator()
44  : token_locations_(4),
45  token_removed_(4) {
46 }
47 
48 
49 TokenEnumerator::~TokenEnumerator() {
50  Isolate* isolate = Isolate::Current();
51  for (int i = 0; i < token_locations_.length(); ++i) {
52  if (!token_removed_[i]) {
53  isolate->global_handles()->ClearWeakness(token_locations_[i]);
54  isolate->global_handles()->Destroy(token_locations_[i]);
55  }
56  }
57 }
58 
59 
60 int TokenEnumerator::GetTokenId(Object* token) {
61  Isolate* isolate = Isolate::Current();
62  if (token == NULL) return TokenEnumerator::kNoSecurityToken;
63  for (int i = 0; i < token_locations_.length(); ++i) {
64  if (*token_locations_[i] == token && !token_removed_[i]) return i;
65  }
66  Handle<Object> handle = isolate->global_handles()->Create(token);
67  // handle.location() points to a memory cell holding a pointer
68  // to a token object in V8's heap.
69  isolate->global_handles()->MakeWeak(handle.location(), this,
70  TokenRemovedCallback);
71  token_locations_.Add(handle.location());
72  token_removed_.Add(false);
73  return token_locations_.length() - 1;
74 }
75 
76 
77 void TokenEnumerator::TokenRemovedCallback(v8::Persistent<v8::Value> handle,
78  void* parameter) {
79  reinterpret_cast<TokenEnumerator*>(parameter)->TokenRemoved(
80  Utils::OpenHandle(*handle).location());
81  handle.Dispose();
82 }
83 
84 
85 void TokenEnumerator::TokenRemoved(Object** token_location) {
86  for (int i = 0; i < token_locations_.length(); ++i) {
87  if (token_locations_[i] == token_location && !token_removed_[i]) {
88  token_removed_[i] = true;
89  return;
90  }
91  }
92 }
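// Note: the TokenEnumerator above maps security token objects to small
// integer ids. Each token is pinned through a weak global handle; when a
// token object is collected, TokenRemovedCallback marks its slot in
// token_removed_, so that slot's id is never reported for another object,
// while ids of still-live tokens remain stable.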
93 
94 
95 StringsStorage::StringsStorage()
96  : names_(StringsMatch) {
97 }
98 
99 
100 StringsStorage::~StringsStorage() {
101  for (HashMap::Entry* p = names_.Start();
102  p != NULL;
103  p = names_.Next(p)) {
104  DeleteArray(reinterpret_cast<const char*>(p->value));
105  }
106 }
107 
108 
109 const char* StringsStorage::GetCopy(const char* src) {
110  int len = static_cast<int>(strlen(src));
111  Vector<char> dst = Vector<char>::New(len + 1);
112  OS::StrNCpy(dst, src, len);
113  dst[len] = '\0';
114  uint32_t hash =
115  HashSequentialString(dst.start(), len, HEAP->HashSeed());
116  return AddOrDisposeString(dst.start(), hash);
117 }
118 
119 
120 const char* StringsStorage::GetFormatted(const char* format, ...) {
121  va_list args;
122  va_start(args, format);
123  const char* result = GetVFormatted(format, args);
124  va_end(args);
125  return result;
126 }
127 
128 
129 const char* StringsStorage::AddOrDisposeString(char* str, uint32_t hash) {
130  HashMap::Entry* cache_entry = names_.Lookup(str, hash, true);
131  if (cache_entry->value == NULL) {
132  // New entry added.
133  cache_entry->value = str;
134  } else {
135  DeleteArray(str);
136  }
137  return reinterpret_cast<const char*>(cache_entry->value);
138 }
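// Note: StringsStorage interns strings. AddOrDisposeString keeps the first
// copy stored under a given hash in names_ and frees later duplicates, so
// callers may hold on to the returned const char* for the lifetime of the
// storage (see GetUsedMemorySize below, which walks the same table).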
139 
140 
141 const char* StringsStorage::GetVFormatted(const char* format, va_list args) {
142  Vector<char> str = Vector<char>::New(1024);
143  int len = OS::VSNPrintF(str, format, args);
144  if (len == -1) {
145  DeleteArray(str.start());
146  return format;
147  }
148  uint32_t hash = HashSequentialString(
149  str.start(), len, HEAP->HashSeed());
150  return AddOrDisposeString(str.start(), hash);
151 }
152 
153 
154 const char* StringsStorage::GetName(String* name) {
155  if (name->IsString()) {
156  int length = Min(kMaxNameSize, name->length());
157  SmartArrayPointer<char> data =
158  name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL, 0, length);
159  uint32_t hash =
160  HashSequentialString(*data, length, name->GetHeap()->HashSeed());
161  return AddOrDisposeString(data.Detach(), hash);
162  }
163  return "";
164 }
165 
166 
167 const char* StringsStorage::GetName(int index) {
168  return GetFormatted("%d", index);
169 }
170 
171 
172 size_t StringsStorage::GetUsedMemorySize() const {
173  size_t size = sizeof(*this);
174  size += sizeof(HashMap::Entry) * names_.capacity();
175  for (HashMap::Entry* p = names_.Start(); p != NULL; p = names_.Next(p)) {
176  size += strlen(reinterpret_cast<const char*>(p->value)) + 1;
177  }
178  return size;
179 }
180 
181 const char* const CodeEntry::kEmptyNamePrefix = "";
182 
183 
184 void CodeEntry::CopyData(const CodeEntry& source) {
185  tag_ = source.tag_;
186  name_prefix_ = source.name_prefix_;
187  name_ = source.name_;
188  resource_name_ = source.resource_name_;
189  line_number_ = source.line_number_;
190 }
191 
192 
193 uint32_t CodeEntry::GetCallUid() const {
194  uint32_t hash = ComputeIntegerHash(tag_, v8::internal::kZeroHashSeed);
195  if (shared_id_ != 0) {
196  hash ^= ComputeIntegerHash(static_cast<uint32_t>(shared_id_),
197  v8::internal::kZeroHashSeed);
198  } else {
199  hash ^= ComputeIntegerHash(
200  static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name_prefix_)),
201  v8::internal::kZeroHashSeed);
202  hash ^= ComputeIntegerHash(
203  static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name_)),
204  v8::internal::kZeroHashSeed);
205  hash ^= ComputeIntegerHash(
206  static_cast<uint32_t>(reinterpret_cast<uintptr_t>(resource_name_)),
207  v8::internal::kZeroHashSeed);
208  hash ^= ComputeIntegerHash(line_number_, v8::internal::kZeroHashSeed);
209  }
210  return hash;
211 }
212 
213 
214 bool CodeEntry::IsSameAs(CodeEntry* entry) const {
215  return this == entry
216  || (tag_ == entry->tag_
217  && shared_id_ == entry->shared_id_
218  && (shared_id_ != 0
219  || (name_prefix_ == entry->name_prefix_
220  && name_ == entry->name_
221  && resource_name_ == entry->resource_name_
222  && line_number_ == entry->line_number_)));
223 }
224 
225 
226 ProfileNode* ProfileNode::FindChild(CodeEntry* entry) {
227  HashMap::Entry* map_entry =
228  children_.Lookup(entry, CodeEntryHash(entry), false);
229  return map_entry != NULL ?
230  reinterpret_cast<ProfileNode*>(map_entry->value) : NULL;
231 }
232 
233 
234 ProfileNode* ProfileNode::FindOrAddChild(CodeEntry* entry) {
235  HashMap::Entry* map_entry =
236  children_.Lookup(entry, CodeEntryHash(entry), true);
237  if (map_entry->value == NULL) {
238  // New node added.
239  ProfileNode* new_node = new ProfileNode(tree_, entry);
240  map_entry->value = new_node;
241  children_list_.Add(new_node);
242  }
243  return reinterpret_cast<ProfileNode*>(map_entry->value);
244 }
245 
246 
247 double ProfileNode::GetSelfMillis() const {
248  return tree_->TicksToMillis(self_ticks_);
249 }
250 
251 
252 double ProfileNode::GetTotalMillis() const {
253  return tree_->TicksToMillis(total_ticks_);
254 }
255 
256 
257 void ProfileNode::Print(int indent) {
258  OS::Print("%5u %5u %*c %s%s [%d]",
259  total_ticks_, self_ticks_,
260  indent, ' ',
261  entry_->name_prefix(),
262  entry_->name(),
263  entry_->security_token_id());
264  if (entry_->resource_name()[0] != '\0')
265  OS::Print(" %s:%d", entry_->resource_name(), entry_->line_number());
266  OS::Print("\n");
267  for (HashMap::Entry* p = children_.Start();
268  p != NULL;
269  p = children_.Next(p)) {
270  reinterpret_cast<ProfileNode*>(p->value)->Print(indent + 2);
271  }
272 }
273 
274 
275 class DeleteNodesCallback {
276  public:
277  void BeforeTraversingChild(ProfileNode* parent, ProfileNode* child) { }
278 
279  void AfterAllChildrenTraversed(ProfileNode* node) {
280  delete node;
281  }
282 
283  void AfterChildTraversed(ProfileNode* parent, ProfileNode* child) { }
284 };
285 
286 
287 ProfileTree::ProfileTree()
288  : root_entry_(Logger::FUNCTION_TAG,
289  "",
290  "(root)",
291  "",
292  0,
293  TokenEnumerator::kNoSecurityToken),
294  root_(new ProfileNode(this, &root_entry_)) {
295 }
296 
297 
298 ProfileTree::~ProfileTree() {
299  DeleteNodesCallback cb;
300  TraverseDepthFirst(&cb);
301 }
302 
303 
304 void ProfileTree::AddPathFromEnd(const Vector<CodeEntry*>& path) {
305  ProfileNode* node = root_;
306  for (CodeEntry** entry = path.start() + path.length() - 1;
307  entry != path.start() - 1;
308  --entry) {
309  if (*entry != NULL) {
310  node = node->FindOrAddChild(*entry);
311  }
312  }
313  node->IncrementSelfTicks();
314 }
315 
316 
317 void ProfileTree::AddPathFromStart(const Vector<CodeEntry*>& path) {
318  ProfileNode* node = root_;
319  for (CodeEntry** entry = path.start();
320  entry != path.start() + path.length();
321  ++entry) {
322  if (*entry != NULL) {
323  node = node->FindOrAddChild(*entry);
324  }
325  }
326  node->IncrementSelfTicks();
327 }
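// Note: AddPathFromEnd and AddPathFromStart insert the same sampled stack in
// opposite orders. Assuming the path is stored innermost frame first (as
// RecordTickSample builds it), walking from the end gives caller-to-callee
// order for the top-down tree, while walking from the start gives
// callee-to-caller order for the bottom-up tree; CpuProfile::AddPath below
// uses one tree of each kind. Self ticks go to the last node reached.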
328 
329 
330 struct NodesPair {
331  NodesPair(ProfileNode* src, ProfileNode* dst)
332  : src(src), dst(dst) { }
333  ProfileNode* src;
334  ProfileNode* dst;
335 };
336 
337 
338 class FilteredCloneCallback {
339  public:
340  FilteredCloneCallback(ProfileNode* dst_root, int security_token_id)
341  : stack_(10),
342  security_token_id_(security_token_id) {
343  stack_.Add(NodesPair(NULL, dst_root));
344  }
345 
346  void BeforeTraversingChild(ProfileNode* parent, ProfileNode* child) {
347  if (IsTokenAcceptable(child->entry()->security_token_id(),
348  parent->entry()->security_token_id())) {
349  ProfileNode* clone = stack_.last().dst->FindOrAddChild(child->entry());
350  clone->IncreaseSelfTicks(child->self_ticks());
351  stack_.Add(NodesPair(child, clone));
352  } else {
353  // Attribute ticks to parent node.
354  stack_.last().dst->IncreaseSelfTicks(child->self_ticks());
355  }
356  }
357 
358  void AfterAllChildrenTraversed(ProfileNode* parent) { }
359 
360  void AfterChildTraversed(ProfileNode* parent, ProfileNode* child) {
361  if (stack_.last().src == child) {
362  stack_.RemoveLast();
363  }
364  }
365 
366  private:
367  bool IsTokenAcceptable(int token, int parent_token) {
368  if (token == TokenEnumerator::kNoSecurityToken
369  || token == security_token_id_) return true;
370  if (token == TokenEnumerator::kInheritsSecurityToken
371  && parent_token != TokenEnumerator::kInheritsSecurityToken) {
372  return parent_token == TokenEnumerator::kNoSecurityToken
373  || parent_token == security_token_id_;
374  }
375  return false;
376  }
377 
378  List<NodesPair> stack_;
379  int security_token_id_;
380 };
381 
382 void ProfileTree::FilteredClone(ProfileTree* src, int security_token_id) {
383  ms_to_ticks_scale_ = src->ms_to_ticks_scale_;
384  FilteredCloneCallback cb(root_, security_token_id);
385  src->TraverseDepthFirst(&cb);
386  CalculateTotalTicks();
387 }
388 
389 
390 void ProfileTree::SetTickRatePerMs(double ticks_per_ms) {
391  ms_to_ticks_scale_ = ticks_per_ms > 0 ? 1.0 / ticks_per_ms : 1.0;
392 }
393 
394 
395 class Position {
396  public:
397  explicit Position(ProfileNode* node)
398  : node(node), child_idx_(0) { }
399  INLINE(ProfileNode* current_child()) {
400  return node->children()->at(child_idx_);
401  }
402  INLINE(bool has_current_child()) {
403  return child_idx_ < node->children()->length();
404  }
405  INLINE(void next_child()) { ++child_idx_; }
406 
407  ProfileNode* node;
408  private:
409  int child_idx_;
410 };
411 
412 
413 // Non-recursive implementation of a depth-first post-order tree traversal.
414 template <typename Callback>
415 void ProfileTree::TraverseDepthFirst(Callback* callback) {
416  List<Position> stack(10);
417  stack.Add(Position(root_));
418  while (stack.length() > 0) {
419  Position& current = stack.last();
420  if (current.has_current_child()) {
421  callback->BeforeTraversingChild(current.node, current.current_child());
422  stack.Add(Position(current.current_child()));
423  } else {
424  callback->AfterAllChildrenTraversed(current.node);
425  if (stack.length() > 1) {
426  Position& parent = stack[stack.length() - 2];
427  callback->AfterChildTraversed(parent.node, current.node);
428  parent.next_child();
429  }
430  // Remove child from the stack.
431  stack.RemoveLast();
432  }
433  }
434 }
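// Note: TraverseDepthFirst uses an explicit Position stack rather than
// recursion, presumably so that very deep profile trees cannot overflow the
// native stack. The callback sees BeforeTraversingChild before each child is
// descended into, AfterAllChildrenTraversed once a node's children are all
// done, and AfterChildTraversed on the way back up; the delete, total-ticks
// and filtered-clone callbacks above rely on exactly this ordering.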
435 
436 
437 class CalculateTotalTicksCallback {
438  public:
439  void BeforeTraversingChild(ProfileNode* parent, ProfileNode* child) { }
440 
441  void AfterAllChildrenTraversed(ProfileNode* node) {
442  node->IncreaseTotalTicks(node->self_ticks());
443  }
444 
445  void AfterChildTraversed(ProfileNode* parent, ProfileNode* child) {
446  parent->IncreaseTotalTicks(child->total_ticks());
447  }
448 };
449 
450 
451 void ProfileTree::CalculateTotalTicks() {
452  CalculateTotalTicksCallback cb;
453  TraverseDepthFirst(&cb);
454 }
455 
456 
457 void ProfileTree::ShortPrint() {
458  OS::Print("root: %u %u %.2fms %.2fms\n",
459  root_->total_ticks(), root_->self_ticks(),
460  root_->GetTotalMillis(), root_->GetSelfMillis());
461 }
462 
463 
464 void CpuProfile::AddPath(const Vector<CodeEntry*>& path) {
465  top_down_.AddPathFromEnd(path);
466  bottom_up_.AddPathFromStart(path);
467 }
468 
469 
470 void CpuProfile::CalculateTotalTicks() {
471  top_down_.CalculateTotalTicks();
472  bottom_up_.CalculateTotalTicks();
473 }
474 
475 
476 void CpuProfile::SetActualSamplingRate(double actual_sampling_rate) {
477  top_down_.SetTickRatePerMs(actual_sampling_rate);
478  bottom_up_.SetTickRatePerMs(actual_sampling_rate);
479 }
480 
481 
482 CpuProfile* CpuProfile::FilteredClone(int security_token_id) {
483  ASSERT(security_token_id != TokenEnumerator::kNoSecurityToken);
484  CpuProfile* clone = new CpuProfile(title_, uid_);
485  clone->top_down_.FilteredClone(&top_down_, security_token_id);
486  clone->bottom_up_.FilteredClone(&bottom_up_, security_token_id);
487  return clone;
488 }
489 
490 
491 void CpuProfile::ShortPrint() {
492  OS::Print("top down ");
493  top_down_.ShortPrint();
494  OS::Print("bottom up ");
495  bottom_up_.ShortPrint();
496 }
497 
498 
499 void CpuProfile::Print() {
500  OS::Print("[Top down]:\n");
501  top_down_.Print();
502  OS::Print("[Bottom up]:\n");
503  bottom_up_.Print();
504 }
505 
506 
507 CodeEntry* const CodeMap::kSharedFunctionCodeEntry = NULL;
508 const CodeMap::CodeTreeConfig::Key CodeMap::CodeTreeConfig::kNoKey = NULL;
509 
510 
511 void CodeMap::AddCode(Address addr, CodeEntry* entry, unsigned size) {
512  DeleteAllCoveredCode(addr, addr + size);
513  CodeTree::Locator locator;
514  tree_.Insert(addr, &locator);
515  locator.set_value(CodeEntryInfo(entry, size));
516 }
517 
518 
519 void CodeMap::DeleteAllCoveredCode(Address start, Address end) {
520  List<Address> to_delete;
521  Address addr = end - 1;
522  while (addr >= start) {
523  CodeTree::Locator locator;
524  if (!tree_.FindGreatestLessThan(addr, &locator)) break;
525  Address start2 = locator.key(), end2 = start2 + locator.value().size;
526  if (start2 < end && start < end2) to_delete.Add(start2);
527  addr = start2 - 1;
528  }
529  for (int i = 0; i < to_delete.length(); ++i) tree_.Remove(to_delete[i]);
530 }
531 
532 
533 CodeEntry* CodeMap::FindEntry(Address addr) {
534  CodeTree::Locator locator;
535  if (tree_.FindGreatestLessThan(addr, &locator)) {
536  // locator.key() <= addr. Need to check that addr is within entry.
537  const CodeEntryInfo& entry = locator.value();
538  if (addr < (locator.key() + entry.size))
539  return entry.entry;
540  }
541  return NULL;
542 }
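// Note: CodeMap keeps generated code ranges in a tree keyed by start
// address. AddCode first deletes any ranges overlapping [addr, addr + size),
// and FindEntry looks up the greatest start address not above the queried
// address and then checks that the address still falls within that entry's
// size, so a lookup by an arbitrary pc inside a code object succeeds.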
543 
544 
545 int CodeMap::GetSharedId(Address addr) {
546  CodeTree::Locator locator;
547  // For shared function entries, the 'size' field is used to store their IDs.
548  if (tree_.Find(addr, &locator)) {
549  const CodeEntryInfo& entry = locator.value();
550  ASSERT(entry.entry == kSharedFunctionCodeEntry);
551  return entry.size;
552  } else {
553  tree_.Insert(addr, &locator);
554  int id = next_shared_id_++;
555  locator.set_value(CodeEntryInfo(kSharedFunctionCodeEntry, id));
556  return id;
557  }
558 }
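// Note: GetSharedId reuses the CodeEntryInfo 'size' field as an id for
// shared function info addresses. Such entries store kSharedFunctionCodeEntry
// (NULL) as their entry pointer, which is how the ASSERT above tells them
// apart from real code ranges.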
559 
560 
561 void CodeMap::MoveCode(Address from, Address to) {
562  if (from == to) return;
563  CodeTree::Locator locator;
564  if (!tree_.Find(from, &locator)) return;
565  CodeEntryInfo entry = locator.value();
566  tree_.Remove(from);
567  AddCode(to, entry.entry, entry.size);
568 }
569 
570 
571 void CodeMap::CodeTreePrinter::Call(
572  const Address& key, const CodeMap::CodeEntryInfo& value) {
573  OS::Print("%p %5d %s\n", key, value.size, value.entry->name());
574 }
575 
576 
577 void CodeMap::Print() {
578  CodeTreePrinter printer;
579  tree_.ForEach(&printer);
580 }
581 
582 
583 CpuProfilesCollection::CpuProfilesCollection()
584  : profiles_uids_(UidsMatch),
585  current_profiles_semaphore_(OS::CreateSemaphore(1)) {
586  // Create list of unabridged profiles.
587  profiles_by_token_.Add(new List<CpuProfile*>());
588 }
589 
590 
591 static void DeleteCodeEntry(CodeEntry** entry_ptr) {
592  delete *entry_ptr;
593 }
594 
595 static void DeleteCpuProfile(CpuProfile** profile_ptr) {
596  delete *profile_ptr;
597 }
598 
599 static void DeleteProfilesList(List<CpuProfile*>** list_ptr) {
600  if (*list_ptr != NULL) {
601  (*list_ptr)->Iterate(DeleteCpuProfile);
602  delete *list_ptr;
603  }
604 }
605 
606 CpuProfilesCollection::~CpuProfilesCollection() {
607  delete current_profiles_semaphore_;
608  current_profiles_.Iterate(DeleteCpuProfile);
609  detached_profiles_.Iterate(DeleteCpuProfile);
610  profiles_by_token_.Iterate(DeleteProfilesList);
611  code_entries_.Iterate(DeleteCodeEntry);
612 }
613 
614 
615 bool CpuProfilesCollection::StartProfiling(const char* title, unsigned uid) {
616  ASSERT(uid > 0);
617  current_profiles_semaphore_->Wait();
618  if (current_profiles_.length() >= kMaxSimultaneousProfiles) {
619  current_profiles_semaphore_->Signal();
620  return false;
621  }
622  for (int i = 0; i < current_profiles_.length(); ++i) {
623  if (strcmp(current_profiles_[i]->title(), title) == 0) {
624  // Ignore attempts to start profile with the same title.
625  current_profiles_semaphore_->Signal();
626  return false;
627  }
628  }
629  current_profiles_.Add(new CpuProfile(title, uid));
630  current_profiles_semaphore_->Signal();
631  return true;
632 }
633 
634 
635 bool CpuProfilesCollection::StartProfiling(String* title, unsigned uid) {
636  return StartProfiling(GetName(title), uid);
637 }
638 
639 
640 CpuProfile* CpuProfilesCollection::StopProfiling(int security_token_id,
641  const char* title,
642  double actual_sampling_rate) {
643  const int title_len = StrLength(title);
644  CpuProfile* profile = NULL;
645  current_profiles_semaphore_->Wait();
646  for (int i = current_profiles_.length() - 1; i >= 0; --i) {
647  if (title_len == 0 || strcmp(current_profiles_[i]->title(), title) == 0) {
648  profile = current_profiles_.Remove(i);
649  break;
650  }
651  }
652  current_profiles_semaphore_->Signal();
653 
654  if (profile != NULL) {
655  profile->CalculateTotalTicks();
656  profile->SetActualSamplingRate(actual_sampling_rate);
657  List<CpuProfile*>* unabridged_list =
658  profiles_by_token_[TokenToIndex(TokenEnumerator::kNoSecurityToken)];
659  unabridged_list->Add(profile);
660  HashMap::Entry* entry =
661  profiles_uids_.Lookup(reinterpret_cast<void*>(profile->uid()),
662  static_cast<uint32_t>(profile->uid()),
663  true);
664  ASSERT(entry->value == NULL);
665  entry->value = reinterpret_cast<void*>(unabridged_list->length() - 1);
666  return GetProfile(security_token_id, profile->uid());
667  }
668  return NULL;
669 }
670 
671 
672 CpuProfile* CpuProfilesCollection::GetProfile(int security_token_id,
673  unsigned uid) {
674  int index = GetProfileIndex(uid);
675  if (index < 0) return NULL;
676  List<CpuProfile*>* unabridged_list =
677  profiles_by_token_[TokenToIndex(TokenEnumerator::kNoSecurityToken)];
678  if (security_token_id == TokenEnumerator::kNoSecurityToken) {
679  return unabridged_list->at(index);
680  }
681  List<CpuProfile*>* list = GetProfilesList(security_token_id);
682  if (list->at(index) == NULL) {
683  (*list)[index] =
684  unabridged_list->at(index)->FilteredClone(security_token_id);
685  }
686  return list->at(index);
687 }
688 
689 
690 int CpuProfilesCollection::GetProfileIndex(unsigned uid) {
691  HashMap::Entry* entry = profiles_uids_.Lookup(reinterpret_cast<void*>(uid),
692  static_cast<uint32_t>(uid),
693  false);
694  return entry != NULL ?
695  static_cast<int>(reinterpret_cast<intptr_t>(entry->value)) : -1;
696 }
697 
698 
699 bool CpuProfilesCollection::IsLastProfile(const char* title) {
700  // Called from VM thread, and only it can mutate the list,
701  // so no locking is needed here.
702  if (current_profiles_.length() != 1) return false;
703  return StrLength(title) == 0
704  || strcmp(current_profiles_[0]->title(), title) == 0;
705 }
706 
707 
708 void CpuProfilesCollection::RemoveProfile(CpuProfile* profile) {
709  // Called from VM thread for a completed profile.
710  unsigned uid = profile->uid();
711  int index = GetProfileIndex(uid);
712  if (index < 0) {
713  detached_profiles_.RemoveElement(profile);
714  return;
715  }
716  profiles_uids_.Remove(reinterpret_cast<void*>(uid),
717  static_cast<uint32_t>(uid));
718  // Decrement all indexes above the deleted one.
719  for (HashMap::Entry* p = profiles_uids_.Start();
720  p != NULL;
721  p = profiles_uids_.Next(p)) {
722  intptr_t p_index = reinterpret_cast<intptr_t>(p->value);
723  if (p_index > index) {
724  p->value = reinterpret_cast<void*>(p_index - 1);
725  }
726  }
727  for (int i = 0; i < profiles_by_token_.length(); ++i) {
728  List<CpuProfile*>* list = profiles_by_token_[i];
729  if (list != NULL && index < list->length()) {
730  // Move all filtered clones into detached_profiles_,
731  // so we can know that they are still in use.
732  CpuProfile* cloned_profile = list->Remove(index);
733  if (cloned_profile != NULL && cloned_profile != profile) {
734  detached_profiles_.Add(cloned_profile);
735  }
736  }
737  }
738 }
739 
740 
741 int CpuProfilesCollection::TokenToIndex(int security_token_id) {
742  ASSERT(TokenEnumerator::kNoSecurityToken == -1);
743  return security_token_id + 1; // kNoSecurityToken -> 0, 0 -> 1, ...
744 }
745 
746 
747 List<CpuProfile*>* CpuProfilesCollection::GetProfilesList(
748  int security_token_id) {
749  const int index = TokenToIndex(security_token_id);
750  const int lists_to_add = index - profiles_by_token_.length() + 1;
751  if (lists_to_add > 0) profiles_by_token_.AddBlock(NULL, lists_to_add);
752  List<CpuProfile*>* unabridged_list =
753  profiles_by_token_[TokenToIndex(TokenEnumerator::kNoSecurityToken)];
754  const int current_count = unabridged_list->length();
755  if (profiles_by_token_[index] == NULL) {
756  profiles_by_token_[index] = new List<CpuProfile*>(current_count);
757  }
758  List<CpuProfile*>* list = profiles_by_token_[index];
759  const int profiles_to_add = current_count - list->length();
760  if (profiles_to_add > 0) list->AddBlock(NULL, profiles_to_add);
761  return list;
762 }
763 
764 
765 List<CpuProfile*>* CpuProfilesCollection::Profiles(int security_token_id) {
766  List<CpuProfile*>* unabridged_list =
767  profiles_by_token_[TokenToIndex(TokenEnumerator::kNoSecurityToken)];
768  if (security_token_id == TokenEnumerator::kNoSecurityToken) {
769  return unabridged_list;
770  }
771  List<CpuProfile*>* list = GetProfilesList(security_token_id);
772  const int current_count = unabridged_list->length();
773  for (int i = 0; i < current_count; ++i) {
774  if (list->at(i) == NULL) {
775  (*list)[i] = unabridged_list->at(i)->FilteredClone(security_token_id);
776  }
777  }
778  return list;
779 }
780 
781 
782 CodeEntry* CpuProfilesCollection::NewCodeEntry(Logger::LogEventsAndTags tag,
783  String* name,
784  String* resource_name,
785  int line_number) {
786  CodeEntry* entry = new CodeEntry(tag,
788  GetFunctionName(name),
789  GetName(resource_name),
790  line_number,
792  code_entries_.Add(entry);
793  return entry;
794 }
795 
796 
797 CodeEntry* CpuProfilesCollection::NewCodeEntry(Logger::LogEventsAndTags tag,
798  const char* name) {
799  CodeEntry* entry = new CodeEntry(tag,
801  GetFunctionName(name),
802  "",
805  code_entries_.Add(entry);
806  return entry;
807 }
808 
809 
810 CodeEntry* CpuProfilesCollection::NewCodeEntry(Logger::LogEventsAndTags tag,
811  const char* name_prefix,
812  String* name) {
813  CodeEntry* entry = new CodeEntry(tag,
814  name_prefix,
815  GetName(name),
816  "",
819  code_entries_.Add(entry);
820  return entry;
821 }
822 
823 
824 CodeEntry* CpuProfilesCollection::NewCodeEntry(Logger::LogEventsAndTags tag,
825  int args_count) {
826  CodeEntry* entry = new CodeEntry(tag,
827  "args_count: ",
828  GetName(args_count),
829  "",
832  code_entries_.Add(entry);
833  return entry;
834 }
835 
836 
837 void CpuProfilesCollection::AddPathToCurrentProfiles(
838  const Vector<CodeEntry*>& path) {
839  // As starting / stopping profiles is rare relative to this
840  // method, we don't bother minimizing the duration of lock holding,
841  // e.g. by copying the contents of the list to a local vector.
842  current_profiles_semaphore_->Wait();
843  for (int i = 0; i < current_profiles_.length(); ++i) {
844  current_profiles_[i]->AddPath(path);
845  }
846  current_profiles_semaphore_->Signal();
847 }
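// Note: current_profiles_semaphore_ is created with a count of 1 (see the
// constructor above) and is used as a plain mutex around current_profiles_:
// AddPathToCurrentProfiles takes it on every recorded tick, while
// StartProfiling and StopProfiling take it on the comparatively rare
// profile start/stop operations, as the comment above notes.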
848 
849 
850 void SampleRateCalculator::Tick() {
851  if (--wall_time_query_countdown_ == 0)
852  UpdateMeasurements(OS::TimeCurrentMillis());
853 }
854 
855 
856 void SampleRateCalculator::UpdateMeasurements(double current_time) {
857  if (measurements_count_++ != 0) {
858  const double measured_ticks_per_ms =
859  (kWallTimeQueryIntervalMs * ticks_per_ms_) /
860  (current_time - last_wall_time_);
861  // Update the average value.
862  ticks_per_ms_ +=
863  (measured_ticks_per_ms - ticks_per_ms_) / measurements_count_;
864  // Update the externally accessible result.
865  result_ = static_cast<AtomicWord>(ticks_per_ms_ * kResultScale);
866  }
867  last_wall_time_ = current_time;
868  wall_time_query_countdown_ =
869  static_cast<unsigned>(kWallTimeQueryIntervalMs * ticks_per_ms_);
870 }
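// Note: UpdateMeasurements keeps a running average of the observed tick
// rate; ticks_per_ms_ += (measured - ticks_per_ms_) / measurements_count_
// is the standard incremental-mean update. The averaged value is also
// published through result_ (an AtomicWord, scaled by kResultScale),
// presumably so other threads can read the current rate without locking.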
871 
872 
873 const char* const ProfileGenerator::kAnonymousFunctionName =
874  "(anonymous function)";
875 const char* const ProfileGenerator::kProgramEntryName =
876  "(program)";
877 const char* const ProfileGenerator::kGarbageCollectorEntryName =
878  "(garbage collector)";
879 
880 
881 ProfileGenerator::ProfileGenerator(CpuProfilesCollection* profiles)
882  : profiles_(profiles),
883  program_entry_(
884  profiles->NewCodeEntry(Logger::FUNCTION_TAG, kProgramEntryName)),
885  gc_entry_(
886  profiles->NewCodeEntry(Logger::BUILTIN_TAG,
887  kGarbageCollectorEntryName)) {
888 }
889 
890 
891 void ProfileGenerator::RecordTickSample(const TickSample& sample) {
892  // Allocate space for stack frames + pc + function + vm-state.
893  ScopedVector<CodeEntry*> entries(sample.frames_count + 3);
894  // As actual number of decoded code entries may vary, initialize
895  // entries vector with NULL values.
896  CodeEntry** entry = entries.start();
897  memset(entry, 0, entries.length() * sizeof(*entry));
898  if (sample.pc != NULL) {
899  *entry++ = code_map_.FindEntry(sample.pc);
900 
901  if (sample.has_external_callback) {
902  // Don't use PC when in external callback code, as it can point
903  // inside callback's code, and we will erroneously report
904  // that a callback calls itself.
905  *(entries.start()) = NULL;
906  *entry++ = code_map_.FindEntry(sample.external_callback);
907  } else if (sample.tos != NULL) {
908  // Find out if the top of stack was pointing inside a JS function,
909  // meaning that we have encountered a frameless invocation.
910  *entry = code_map_.FindEntry(sample.tos);
911  if (*entry != NULL && !(*entry)->is_js_function()) {
912  *entry = NULL;
913  }
914  entry++;
915  }
916 
917  for (const Address* stack_pos = sample.stack,
918  *stack_end = stack_pos + sample.frames_count;
919  stack_pos != stack_end;
920  ++stack_pos) {
921  *entry++ = code_map_.FindEntry(*stack_pos);
922  }
923  }
924 
925  if (FLAG_prof_browser_mode) {
926  bool no_symbolized_entries = true;
927  for (CodeEntry** e = entries.start(); e != entry; ++e) {
928  if (*e != NULL) {
929  no_symbolized_entries = false;
930  break;
931  }
932  }
933  // If no frames were symbolized, put the VM state entry in.
934  if (no_symbolized_entries) {
935  *entry++ = EntryForVMState(sample.state);
936  }
937  }
938 
939  profiles_->AddPathToCurrentProfiles(entries);
940 }
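// Note: a processed sample thus becomes a vector of CodeEntry pointers:
// the pc entry, then an optional external-callback or top-of-stack entry,
// then one entry per stack frame, and, in browser mode, a VM-state entry
// when nothing else could be symbolized; addresses that cannot be
// symbolized stay NULL. AddPathToCurrentProfiles feeds this vector into
// every profile that is currently being recorded.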
941 
942 
943 HeapGraphEdge::HeapGraphEdge(Type type, const char* name, int from, int to)
944  : type_(type),
945  from_index_(from),
946  to_index_(to),
947  name_(name) {
948  ASSERT(type == kContextVariable
949  || type == kProperty
950  || type == kInternal
951  || type == kShortcut);
952 }
953 
954 
955 HeapGraphEdge::HeapGraphEdge(Type type, int index, int from, int to)
956  : type_(type),
957  from_index_(from),
958  to_index_(to),
959  index_(index) {
960  ASSERT(type == kElement || type == kHidden || type == kWeak);
961 }
962 
963 
964 void HeapGraphEdge::ReplaceToIndexWithEntry(HeapSnapshot* snapshot) {
965  to_entry_ = &snapshot->entries()[to_index_];
966 }
967 
968 
969 const int HeapEntry::kNoEntry = -1;
970 
971 HeapEntry::HeapEntry(HeapSnapshot* snapshot,
972  Type type,
973  const char* name,
974  SnapshotObjectId id,
975  int self_size)
976  : type_(type),
977  children_count_(0),
978  children_index_(-1),
979  self_size_(self_size),
980  id_(id),
981  snapshot_(snapshot),
982  name_(name) { }
983 
984 
985 void HeapEntry::SetNamedReference(HeapGraphEdge::Type type,
986  const char* name,
987  HeapEntry* entry) {
988  HeapGraphEdge edge(type, name, this->index(), entry->index());
989  snapshot_->edges().Add(edge);
990  ++children_count_;
991 }
992 
993 
994 void HeapEntry::SetIndexedReference(HeapGraphEdge::Type type,
995  int index,
996  HeapEntry* entry) {
997  HeapGraphEdge edge(type, index, this->index(), entry->index());
998  snapshot_->edges().Add(edge);
999  ++children_count_;
1000 }
1001 
1002 
1003 Handle<HeapObject> HeapEntry::GetHeapObject() {
1004  return snapshot_->collection()->FindHeapObjectById(id());
1005 }
1006 
1007 
1008 void HeapEntry::Print(
1009  const char* prefix, const char* edge_name, int max_depth, int indent) {
1010  STATIC_CHECK(sizeof(unsigned) == sizeof(id()));
1011  OS::Print("%6d @%6u %*c %s%s: ",
1012  self_size(), id(), indent, ' ', prefix, edge_name);
1013  if (type() != kString) {
1014  OS::Print("%s %.40s\n", TypeAsString(), name_);
1015  } else {
1016  OS::Print("\"");
1017  const char* c = name_;
1018  while (*c && (c - name_) <= 40) {
1019  if (*c != '\n')
1020  OS::Print("%c", *c);
1021  else
1022  OS::Print("\\n");
1023  ++c;
1024  }
1025  OS::Print("\"\n");
1026  }
1027  if (--max_depth == 0) return;
1028  Vector<HeapGraphEdge*> ch = children();
1029  for (int i = 0; i < ch.length(); ++i) {
1030  HeapGraphEdge& edge = *ch[i];
1031  const char* edge_prefix = "";
1032  EmbeddedVector<char, 64> index;
1033  const char* edge_name = index.start();
1034  switch (edge.type()) {
1035  case HeapGraphEdge::kContextVariable:
1036  edge_prefix = "#";
1037  edge_name = edge.name();
1038  break;
1039  case HeapGraphEdge::kElement:
1040  OS::SNPrintF(index, "%d", edge.index());
1041  break;
1042  case HeapGraphEdge::kInternal:
1043  edge_prefix = "$";
1044  edge_name = edge.name();
1045  break;
1046  case HeapGraphEdge::kProperty:
1047  edge_name = edge.name();
1048  break;
1049  case HeapGraphEdge::kHidden:
1050  edge_prefix = "$";
1051  OS::SNPrintF(index, "%d", edge.index());
1052  break;
1053  case HeapGraphEdge::kShortcut:
1054  edge_prefix = "^";
1055  edge_name = edge.name();
1056  break;
1057  case HeapGraphEdge::kWeak:
1058  edge_prefix = "w";
1059  OS::SNPrintF(index, "%d", edge.index());
1060  break;
1061  default:
1062  OS::SNPrintF(index, "!!! unknown edge type: %d ", edge.type());
1063  }
1064  edge.to()->Print(edge_prefix, edge_name, max_depth, indent + 2);
1065  }
1066 }
1067 
1068 
1069 const char* HeapEntry::TypeAsString() {
1070  switch (type()) {
1071  case kHidden: return "/hidden/";
1072  case kObject: return "/object/";
1073  case kClosure: return "/closure/";
1074  case kString: return "/string/";
1075  case kCode: return "/code/";
1076  case kArray: return "/array/";
1077  case kRegExp: return "/regexp/";
1078  case kHeapNumber: return "/number/";
1079  case kNative: return "/native/";
1080  case kSynthetic: return "/synthetic/";
1081  default: return "???";
1082  }
1083 }
1084 
1085 
1086 // It is very important to keep objects that form a heap snapshot
1087 // as small as possible.
1088 namespace { // Avoid littering the global namespace.
1089 
1090 template <size_t ptr_size> struct SnapshotSizeConstants;
1091 
1092 template <> struct SnapshotSizeConstants<4> {
1093  static const int kExpectedHeapGraphEdgeSize = 12;
1094  static const int kExpectedHeapEntrySize = 24;
1095  static const int kExpectedHeapSnapshotsCollectionSize = 96;
1096  static const int kExpectedHeapSnapshotSize = 136;
1097  static const size_t kMaxSerializableSnapshotRawSize = 256 * MB;
1098 };
1099 
1100 template <> struct SnapshotSizeConstants<8> {
1101  static const int kExpectedHeapGraphEdgeSize = 24;
1102  static const int kExpectedHeapEntrySize = 32;
1103  static const int kExpectedHeapSnapshotsCollectionSize = 144;
1104  static const int kExpectedHeapSnapshotSize = 168;
1105  static const uint64_t kMaxSerializableSnapshotRawSize =
1106  static_cast<uint64_t>(6000) * MB;
1107 };
1108 
1109 } // namespace
1110 
1111 HeapSnapshot::HeapSnapshot(HeapSnapshotsCollection* collection,
1112  HeapSnapshot::Type type,
1113  const char* title,
1114  unsigned uid)
1115  : collection_(collection),
1116  type_(type),
1117  title_(title),
1118  uid_(uid),
1119  root_index_(HeapEntry::kNoEntry),
1120  gc_roots_index_(HeapEntry::kNoEntry),
1121  natives_root_index_(HeapEntry::kNoEntry),
1122  max_snapshot_js_object_id_(0) {
1123  STATIC_CHECK(
1124  sizeof(HeapGraphEdge) ==
1125  SnapshotSizeConstants<kPointerSize>::kExpectedHeapGraphEdgeSize);
1126  STATIC_CHECK(
1127  sizeof(HeapEntry) ==
1128  SnapshotSizeConstants<kPointerSize>::kExpectedHeapEntrySize);
1129  for (int i = 0; i < VisitorSynchronization::kNumberOfSyncTags; ++i) {
1130  gc_subroot_indexes_[i] = HeapEntry::kNoEntry;
1131  }
1132 }
1133 
1134 
1135 void HeapSnapshot::Delete() {
1136  collection_->RemoveSnapshot(this);
1137  delete this;
1138 }
1139 
1140 
1142  max_snapshot_js_object_id_ = collection_->last_assigned_id();
1143 }
1144 
1145 
1146 HeapEntry* HeapSnapshot::AddRootEntry() {
1147  ASSERT(root_index_ == HeapEntry::kNoEntry);
1148  ASSERT(entries_.is_empty()); // Root entry must be the first one.
1149  HeapEntry* entry = AddEntry(HeapEntry::kObject,
1150  "",
1151  HeapObjectsMap::kInternalRootObjectId,
1152  0);
1153  root_index_ = entry->index();
1154  ASSERT(root_index_ == 0);
1155  return entry;
1156 }
1157 
1158 
1159 HeapEntry* HeapSnapshot::AddGcRootsEntry() {
1160  ASSERT(gc_roots_index_ == HeapEntry::kNoEntry);
1161  HeapEntry* entry = AddEntry(HeapEntry::kObject,
1162  "(GC roots)",
1163  HeapObjectsMap::kGcRootsObjectId,
1164  0);
1165  gc_roots_index_ = entry->index();
1166  return entry;
1167 }
1168 
1169 
1170 HeapEntry* HeapSnapshot::AddGcSubrootEntry(int tag) {
1171  ASSERT(gc_subroot_indexes_[tag] == HeapEntry::kNoEntry);
1173  HeapEntry* entry = AddEntry(
1174  HeapEntry::kObject,
1177  0);
1178  gc_subroot_indexes_[tag] = entry->index();
1179  return entry;
1180 }
1181 
1182 
1183 HeapEntry* HeapSnapshot::AddEntry(HeapEntry::Type type,
1184  const char* name,
1185  SnapshotObjectId id,
1186  int size) {
1187  HeapEntry entry(this, type, name, id, size);
1188  entries_.Add(entry);
1189  return &entries_.last();
1190 }
1191 
1192 
1193 void HeapSnapshot::FillChildren() {
1194  ASSERT(children().is_empty());
1195  children().Allocate(edges().length());
1196  int children_index = 0;
1197  for (int i = 0; i < entries().length(); ++i) {
1198  HeapEntry* entry = &entries()[i];
1199  children_index = entry->set_children_index(children_index);
1200  }
1201  ASSERT(edges().length() == children_index);
1202  for (int i = 0; i < edges().length(); ++i) {
1203  HeapGraphEdge* edge = &edges()[i];
1204  edge->ReplaceToIndexWithEntry(this);
1205  edge->from()->add_child(edge);
1206  }
1207 }
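// Note: edges are accumulated in a single snapshot-wide list while the
// graph is being built; only after all edges are known does FillChildren
// assign each entry its children_index range and convert every edge's
// to_index_ into a direct HeapEntry pointer. Keeping per-entry data this
// small is what the SnapshotSizeConstants STATIC_CHECKs in this file are
// guarding.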
1208 
1209 
1210 class FindEntryById {
1211  public:
1212  explicit FindEntryById(SnapshotObjectId id) : id_(id) { }
1213  int operator()(HeapEntry* const* entry) {
1214  if ((*entry)->id() == id_) return 0;
1215  return (*entry)->id() < id_ ? -1 : 1;
1216  }
1217  private:
1218  SnapshotObjectId id_;
1219 };
1220 
1221 
1222 HeapEntry* HeapSnapshot::GetEntryById(SnapshotObjectId id) {
1223  List<HeapEntry*>* entries_by_id = GetSortedEntriesList();
1224  // Perform a binary search by id.
1225  int index = SortedListBSearch(*entries_by_id, FindEntryById(id));
1226  if (index == -1)
1227  return NULL;
1228  return entries_by_id->at(index);
1229 }
1230 
1231 
1232 template<class T>
1233 static int SortByIds(const T* entry1_ptr,
1234  const T* entry2_ptr) {
1235  if ((*entry1_ptr)->id() == (*entry2_ptr)->id()) return 0;
1236  return (*entry1_ptr)->id() < (*entry2_ptr)->id() ? -1 : 1;
1237 }
1238 
1239 
1240 List<HeapEntry*>* HeapSnapshot::GetSortedEntriesList() {
1241  if (sorted_entries_.is_empty()) {
1242  sorted_entries_.Allocate(entries_.length());
1243  for (int i = 0; i < entries_.length(); ++i) {
1244  sorted_entries_[i] = &entries_[i];
1245  }
1246  sorted_entries_.Sort(SortByIds);
1247  }
1248  return &sorted_entries_;
1249 }
1250 
1251 
1252 void HeapSnapshot::Print(int max_depth) {
1253  root()->Print("", "", max_depth, 0);
1254 }
1255 
1256 
1257 template<typename T, class P>
1258 static size_t GetMemoryUsedByList(const List<T, P>& list) {
1259  return list.length() * sizeof(T) + sizeof(list);
1260 }
1261 
1262 
1263 size_t HeapSnapshot::RawSnapshotSize() const {
1264  STATIC_CHECK(SnapshotSizeConstants<kPointerSize>::kExpectedHeapSnapshotSize ==
1265  sizeof(HeapSnapshot)); // NOLINT
1266  return
1267  sizeof(*this) +
1268  GetMemoryUsedByList(entries_) +
1269  GetMemoryUsedByList(edges_) +
1270  GetMemoryUsedByList(children_) +
1271  GetMemoryUsedByList(sorted_entries_);
1272 }
1273 
1274 
1275 // We split IDs on evens for embedder objects (see
1276 // HeapObjectsMap::GenerateId) and odds for native objects.
1285 
1286 HeapObjectsMap::HeapObjectsMap()
1287  : next_id_(kFirstAvailableObjectId),
1288  entries_map_(AddressesMatch) {
1289  // This dummy element solves a problem with entries_map_.
1290  // When we do a lookup in the HashMap we see no difference between two cases:
1291  // it has an entry with NULL as the value, or it has created
1292  // a new entry on the fly with NULL as the default value.
1293  // With such a dummy element we have a guarantee that all entries_map_ entries
1294  // will have a value field greater than 0.
1295  // This fact is used in the MoveObject method.
1296  entries_.Add(EntryInfo(0, NULL, 0));
1297 }
1298 
1299 
1300 void HeapObjectsMap::SnapshotGenerationFinished() {
1301  RemoveDeadEntries();
1302 }
1303 
1304 
1305 void HeapObjectsMap::MoveObject(Address from, Address to) {
1306  ASSERT(to != NULL);
1307  ASSERT(from != NULL);
1308  if (from == to) return;
1309  void* from_value = entries_map_.Remove(from, AddressHash(from));
1310  if (from_value == NULL) return;
1311  int from_entry_info_index =
1312  static_cast<int>(reinterpret_cast<intptr_t>(from_value));
1313  entries_.at(from_entry_info_index).addr = to;
1314  HashMap::Entry* to_entry = entries_map_.Lookup(to, AddressHash(to), true);
1315  if (to_entry->value != NULL) {
1316  int to_entry_info_index =
1317  static_cast<int>(reinterpret_cast<intptr_t>(to_entry->value));
1318  // Without this operation we would have two EntryInfos with the same
1319  // value in the addr field. That is bad, because later, in RemoveDeadEntries,
1320  // one of these entries would be removed together with the corresponding
1321  // entries_map_ entry.
1322  entries_.at(to_entry_info_index).addr = NULL;
1323  }
1324  to_entry->value = reinterpret_cast<void*>(from_entry_info_index);
1325 }
1326 
1327 
1328 SnapshotObjectId HeapObjectsMap::FindEntry(Address addr) {
1329  HashMap::Entry* entry = entries_map_.Lookup(addr, AddressHash(addr), false);
1330  if (entry == NULL) return 0;
1331  int entry_index = static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
1332  EntryInfo& entry_info = entries_.at(entry_index);
1333  ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
1334  return entry_info.id;
1335 }
1336 
1337 
1338 SnapshotObjectId HeapObjectsMap::FindOrAddEntry(Address addr,
1339  unsigned int size) {
1340  ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
1341  HashMap::Entry* entry = entries_map_.Lookup(addr, AddressHash(addr), true);
1342  if (entry->value != NULL) {
1343  int entry_index =
1344  static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
1345  EntryInfo& entry_info = entries_.at(entry_index);
1346  entry_info.accessed = true;
1347  entry_info.size = size;
1348  return entry_info.id;
1349  }
1350  entry->value = reinterpret_cast<void*>(entries_.length());
1351  SnapshotObjectId id = next_id_;
1352  next_id_ += kObjectIdStep;
1353  entries_.Add(EntryInfo(id, addr, size));
1354  ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
1355  return id;
1356 }
1357 
1358 
1359 void HeapObjectsMap::StopHeapObjectsTracking() {
1360  time_intervals_.Clear();
1361 }
1362 
1363 void HeapObjectsMap::UpdateHeapObjectsMap() {
1364  HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask,
1365  "HeapSnapshotsCollection::UpdateHeapObjectsMap");
1366  HeapIterator iterator;
1367  for (HeapObject* obj = iterator.next();
1368  obj != NULL;
1369  obj = iterator.next()) {
1370  FindOrAddEntry(obj->address(), obj->Size());
1371  }
1372  RemoveDeadEntries();
1373 }
1374 
1375 
1376 SnapshotObjectId HeapObjectsMap::PushHeapObjectsStats(OutputStream* stream) {
1377  UpdateHeapObjectsMap();
1378  time_intervals_.Add(TimeInterval(next_id_));
1379  int prefered_chunk_size = stream->GetChunkSize();
1380  List<v8::HeapStatsUpdate> stats_buffer;
1381  ASSERT(!entries_.is_empty());
1382  EntryInfo* entry_info = &entries_.first();
1383  EntryInfo* end_entry_info = &entries_.last() + 1;
1384  for (int time_interval_index = 0;
1385  time_interval_index < time_intervals_.length();
1386  ++time_interval_index) {
1387  TimeInterval& time_interval = time_intervals_[time_interval_index];
1388  SnapshotObjectId time_interval_id = time_interval.id;
1389  uint32_t entries_size = 0;
1390  EntryInfo* start_entry_info = entry_info;
1391  while (entry_info < end_entry_info && entry_info->id < time_interval_id) {
1392  entries_size += entry_info->size;
1393  ++entry_info;
1394  }
1395  uint32_t entries_count =
1396  static_cast<uint32_t>(entry_info - start_entry_info);
1397  if (time_interval.count != entries_count ||
1398  time_interval.size != entries_size) {
1399  stats_buffer.Add(v8::HeapStatsUpdate(
1400  time_interval_index,
1401  time_interval.count = entries_count,
1402  time_interval.size = entries_size));
1403  if (stats_buffer.length() >= prefered_chunk_size) {
1405  &stats_buffer.first(), stats_buffer.length());
1406  if (result == OutputStream::kAbort) return last_assigned_id();
1407  stats_buffer.Clear();
1408  }
1409  }
1410  }
1411  ASSERT(entry_info == end_entry_info);
1412  if (!stats_buffer.is_empty()) {
1414  &stats_buffer.first(), stats_buffer.length());
1415  if (result == OutputStream::kAbort) return last_assigned_id();
1416  }
1417  stream->EndOfStream();
1418  return last_assigned_id();
1419 }
1420 
1421 
1422 void HeapObjectsMap::RemoveDeadEntries() {
1423  ASSERT(entries_.length() > 0 &&
1424  entries_.at(0).id == 0 &&
1425  entries_.at(0).addr == NULL);
1426  int first_free_entry = 1;
1427  for (int i = 1; i < entries_.length(); ++i) {
1428  EntryInfo& entry_info = entries_.at(i);
1429  if (entry_info.accessed) {
1430  if (first_free_entry != i) {
1431  entries_.at(first_free_entry) = entry_info;
1432  }
1433  entries_.at(first_free_entry).accessed = false;
1434  HashMap::Entry* entry = entries_map_.Lookup(
1435  entry_info.addr, AddressHash(entry_info.addr), false);
1436  ASSERT(entry);
1437  entry->value = reinterpret_cast<void*>(first_free_entry);
1438  ++first_free_entry;
1439  } else {
1440  if (entry_info.addr) {
1441  entries_map_.Remove(entry_info.addr, AddressHash(entry_info.addr));
1442  }
1443  }
1444  }
1445  entries_.Rewind(first_free_entry);
1446  ASSERT(static_cast<uint32_t>(entries_.length()) - 1 ==
1447  entries_map_.occupancy());
1448 }
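// Note: RemoveDeadEntries compacts entries_ in place. Entries whose
// 'accessed' flag was not set during the latest heap pass are dropped and
// removed from entries_map_; surviving entries are shifted down and their
// entries_map_ values are rewritten to the new indices. Slot 0 stays
// reserved for the dummy element added in the constructor.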
1449 
1450 
1451 SnapshotObjectId HeapObjectsMap::GenerateId(v8::RetainedObjectInfo* info) {
1452  SnapshotObjectId id = static_cast<SnapshotObjectId>(info->GetHash());
1453  const char* label = info->GetLabel();
1454  id ^= HashSequentialString(label,
1455  static_cast<int>(strlen(label)),
1456  HEAP->HashSeed());
1457  intptr_t element_count = info->GetElementCount();
1458  if (element_count != -1)
1459  id ^= ComputeIntegerHash(static_cast<uint32_t>(element_count),
1460  v8::internal::kZeroHashSeed);
1461  return id << 1;
1462 }
1463 
1464 
1465 size_t HeapObjectsMap::GetUsedMemorySize() const {
1466  return
1467  sizeof(*this) +
1468  sizeof(HashMap::Entry) * entries_map_.capacity() +
1469  GetMemoryUsedByList(entries_) +
1470  GetMemoryUsedByList(time_intervals_);
1471 }
1472 
1473 
1474 HeapSnapshotsCollection::HeapSnapshotsCollection()
1475  : is_tracking_objects_(false),
1476  snapshots_uids_(HeapSnapshotsMatch),
1477  token_enumerator_(new TokenEnumerator()) {
1478 }
1479 
1480 
1481 static void DeleteHeapSnapshot(HeapSnapshot** snapshot_ptr) {
1482  delete *snapshot_ptr;
1483 }
1484 
1485 
1486 HeapSnapshotsCollection::~HeapSnapshotsCollection() {
1487  delete token_enumerator_;
1488  snapshots_.Iterate(DeleteHeapSnapshot);
1489 }
1490 
1491 
1492 HeapSnapshot* HeapSnapshotsCollection::NewSnapshot(HeapSnapshot::Type type,
1493  const char* name,
1494  unsigned uid) {
1495  is_tracking_objects_ = true; // Start watching for heap objects moves.
1496  return new HeapSnapshot(this, type, name, uid);
1497 }
1498 
1499 
1500 void HeapSnapshotsCollection::SnapshotGenerationFinished(
1501  HeapSnapshot* snapshot) {
1502  ids_.SnapshotGenerationFinished();
1503  if (snapshot != NULL) {
1504  snapshots_.Add(snapshot);
1505  HashMap::Entry* entry =
1506  snapshots_uids_.Lookup(reinterpret_cast<void*>(snapshot->uid()),
1507  static_cast<uint32_t>(snapshot->uid()),
1508  true);
1509  ASSERT(entry->value == NULL);
1510  entry->value = snapshot;
1511  }
1512 }
1513 
1514 
1515 HeapSnapshot* HeapSnapshotsCollection::GetSnapshot(unsigned uid) {
1516  HashMap::Entry* entry = snapshots_uids_.Lookup(reinterpret_cast<void*>(uid),
1517  static_cast<uint32_t>(uid),
1518  false);
1519  return entry != NULL ? reinterpret_cast<HeapSnapshot*>(entry->value) : NULL;
1520 }
1521 
1522 
1523 void HeapSnapshotsCollection::RemoveSnapshot(HeapSnapshot* snapshot) {
1524  snapshots_.RemoveElement(snapshot);
1525  unsigned uid = snapshot->uid();
1526  snapshots_uids_.Remove(reinterpret_cast<void*>(uid),
1527  static_cast<uint32_t>(uid));
1528 }
1529 
1530 
1531 Handle<HeapObject> HeapSnapshotsCollection::FindHeapObjectById(
1532  SnapshotObjectId id) {
1533  // First perform a full GC in order to avoid dead objects.
1534  HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask,
1535  "HeapSnapshotsCollection::FindHeapObjectById");
1536  AssertNoAllocation no_allocation;
1537  HeapObject* object = NULL;
1538  HeapIterator iterator(HeapIterator::kFilterUnreachable);
1539  // Make sure that object with the given id is still reachable.
1540  for (HeapObject* obj = iterator.next();
1541  obj != NULL;
1542  obj = iterator.next()) {
1543  if (ids_.FindEntry(obj->address()) == id) {
1544  ASSERT(object == NULL);
1545  object = obj;
1546  // Can't break -- kFilterUnreachable requires full heap traversal.
1547  }
1548  }
1549  return object != NULL ? Handle<HeapObject>(object) : Handle<HeapObject>();
1550 }
1551 
1552 
1553 size_t HeapSnapshotsCollection::GetUsedMemorySize() const {
1554  STATIC_CHECK(SnapshotSizeConstants<kPointerSize>::
1555  kExpectedHeapSnapshotsCollectionSize ==
1556  sizeof(HeapSnapshotsCollection)); // NOLINT
1557  size_t size = sizeof(*this);
1558  size += names_.GetUsedMemorySize();
1559  size += ids_.GetUsedMemorySize();
1560  size += sizeof(HashMap::Entry) * snapshots_uids_.capacity();
1561  size += GetMemoryUsedByList(snapshots_);
1562  for (int i = 0; i < snapshots_.length(); ++i) {
1563  size += snapshots_[i]->RawSnapshotSize();
1564  }
1565  return size;
1566 }
1567 
1568 
1569 HeapEntriesMap::HeapEntriesMap()
1570  : entries_(HeapThingsMatch) {
1571 }
1572 
1573 
1574 int HeapEntriesMap::Map(HeapThing thing) {
1575  HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), false);
1576  if (cache_entry == NULL) return HeapEntry::kNoEntry;
1577  return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
1578 }
1579 
1580 
1581 void HeapEntriesMap::Pair(HeapThing thing, int entry) {
1582  HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), true);
1583  ASSERT(cache_entry->value == NULL);
1584  cache_entry->value = reinterpret_cast<void*>(static_cast<intptr_t>(entry));
1585 }
1586 
1587 
1588 HeapObjectsSet::HeapObjectsSet()
1589  : entries_(HeapEntriesMap::HeapThingsMatch) {
1590 }
1591 
1592 
1593 void HeapObjectsSet::Clear() {
1594  entries_.Clear();
1595 }
1596 
1597 
1598 bool HeapObjectsSet::Contains(Object* obj) {
1599  if (!obj->IsHeapObject()) return false;
1600  HeapObject* object = HeapObject::cast(obj);
1601  return entries_.Lookup(object, HeapEntriesMap::Hash(object), false) != NULL;
1602 }
1603 
1604 
1605 void HeapObjectsSet::Insert(Object* obj) {
1606  if (!obj->IsHeapObject()) return;
1607  HeapObject* object = HeapObject::cast(obj);
1608  entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
1609 }
1610 
1611 
1612 const char* HeapObjectsSet::GetTag(Object* obj) {
1613  HeapObject* object = HeapObject::cast(obj);
1614  HashMap::Entry* cache_entry =
1615  entries_.Lookup(object, HeapEntriesMap::Hash(object), false);
1616  return cache_entry != NULL
1617  ? reinterpret_cast<const char*>(cache_entry->value)
1618  : NULL;
1619 }
1620 
1621 
1622 void HeapObjectsSet::SetTag(Object* obj, const char* tag) {
1623  if (!obj->IsHeapObject()) return;
1624  HeapObject* object = HeapObject::cast(obj);
1625  HashMap::Entry* cache_entry =
1626  entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
1627  cache_entry->value = const_cast<char*>(tag);
1628 }
1629 
1630 
1631 HeapObject* const V8HeapExplorer::kInternalRootObject =
1632  reinterpret_cast<HeapObject*>(
1633  static_cast<intptr_t>(HeapObjectsMap::kInternalRootObjectId));
1634 HeapObject* const V8HeapExplorer::kGcRootsObject =
1635  reinterpret_cast<HeapObject*>(
1636  static_cast<intptr_t>(HeapObjectsMap::kGcRootsObjectId));
1637 HeapObject* const V8HeapExplorer::kFirstGcSubrootObject =
1638  reinterpret_cast<HeapObject*>(
1639  static_cast<intptr_t>(HeapObjectsMap::kGcRootsFirstSubrootId));
1640 HeapObject* const V8HeapExplorer::kLastGcSubrootObject =
1641  reinterpret_cast<HeapObject*>(
1642  static_cast<intptr_t>(HeapObjectsMap::kFirstAvailableObjectId));
1643 
1644 
1645 V8HeapExplorer::V8HeapExplorer(
1646  HeapSnapshot* snapshot,
1647  SnapshottingProgressReportingInterface* progress)
1648  : heap_(Isolate::Current()->heap()),
1649  snapshot_(snapshot),
1650  collection_(snapshot_->collection()),
1651  progress_(progress),
1652  filler_(NULL) {
1653 }
1654 
1655 
1656 V8HeapExplorer::~V8HeapExplorer() {
1657 }
1658 
1659 
1660 HeapEntry* V8HeapExplorer::AllocateEntry(HeapThing ptr) {
1661  return AddEntry(reinterpret_cast<HeapObject*>(ptr));
1662 }
1663 
1664 
1665 HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object) {
1666  if (object == kInternalRootObject) {
1667  snapshot_->AddRootEntry();
1668  return snapshot_->root();
1669  } else if (object == kGcRootsObject) {
1670  HeapEntry* entry = snapshot_->AddGcRootsEntry();
1671  return entry;
1672  } else if (object >= kFirstGcSubrootObject && object < kLastGcSubrootObject) {
1673  HeapEntry* entry = snapshot_->AddGcSubrootEntry(GetGcSubrootOrder(object));
1674  return entry;
1675  } else if (object->IsJSFunction()) {
1676  JSFunction* func = JSFunction::cast(object);
1677  SharedFunctionInfo* shared = func->shared();
1678  const char* name = shared->bound() ? "native_bind" :
1679  collection_->names()->GetName(String::cast(shared->name()));
1680  return AddEntry(object, HeapEntry::kClosure, name);
1681  } else if (object->IsJSRegExp()) {
1682  JSRegExp* re = JSRegExp::cast(object);
1683  return AddEntry(object,
1684  HeapEntry::kRegExp,
1685  collection_->names()->GetName(re->Pattern()));
1686  } else if (object->IsJSObject()) {
1687  const char* name = collection_->names()->GetName(
1689  if (object->IsJSGlobalObject()) {
1690  const char* tag = objects_tags_.GetTag(object);
1691  if (tag != NULL) {
1692  name = collection_->names()->GetFormatted("%s / %s", name, tag);
1693  }
1694  }
1695  return AddEntry(object, HeapEntry::kObject, name);
1696  } else if (object->IsString()) {
1697  return AddEntry(object,
1698  HeapEntry::kString,
1699  collection_->names()->GetName(String::cast(object)));
1700  } else if (object->IsCode()) {
1701  return AddEntry(object, HeapEntry::kCode, "");
1702  } else if (object->IsSharedFunctionInfo()) {
1703  String* name = String::cast(SharedFunctionInfo::cast(object)->name());
1704  return AddEntry(object,
1705  HeapEntry::kCode,
1706  collection_->names()->GetName(name));
1707  } else if (object->IsScript()) {
1708  Object* name = Script::cast(object)->name();
1709  return AddEntry(object,
1710  HeapEntry::kCode,
1711  name->IsString()
1712  ? collection_->names()->GetName(String::cast(name))
1713  : "");
1714  } else if (object->IsGlobalContext()) {
1715  return AddEntry(object, HeapEntry::kHidden, "system / GlobalContext");
1716  } else if (object->IsContext()) {
1717  return AddEntry(object, HeapEntry::kHidden, "system / Context");
1718  } else if (object->IsFixedArray() ||
1719  object->IsFixedDoubleArray() ||
1720  object->IsByteArray() ||
1721  object->IsExternalArray()) {
1722  return AddEntry(object, HeapEntry::kArray, "");
1723  } else if (object->IsHeapNumber()) {
1724  return AddEntry(object, HeapEntry::kHeapNumber, "number");
1725  }
1726  return AddEntry(object, HeapEntry::kHidden, GetSystemEntryName(object));
1727 }
1728 
1729 
1730 HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object,
1731  HeapEntry::Type type,
1732  const char* name) {
1733  int object_size = object->Size();
1734  SnapshotObjectId object_id =
1735  collection_->GetObjectId(object->address(), object_size);
1736  return snapshot_->AddEntry(type, name, object_id, object_size);
1737 }
1738 
1739 
1740 class GcSubrootsEnumerator : public ObjectVisitor {
1741  public:
1742  GcSubrootsEnumerator(
1743  SnapshotFillerInterface* filler, V8HeapExplorer* explorer)
1744  : filler_(filler),
1745  explorer_(explorer),
1746  previous_object_count_(0),
1747  object_count_(0) {
1748  }
1749  void VisitPointers(Object** start, Object** end) {
1750  object_count_ += end - start;
1751  }
1752  void Synchronize(VisitorSynchronization::SyncTag tag) {
1753  // Skip empty subroots.
1754  if (previous_object_count_ != object_count_) {
1755  previous_object_count_ = object_count_;
1756  filler_->AddEntry(V8HeapExplorer::GetNthGcSubrootObject(tag), explorer_);
1757  }
1758  }
1759  private:
1760  SnapshotFillerInterface* filler_;
1761  V8HeapExplorer* explorer_;
1762  intptr_t previous_object_count_;
1763  intptr_t object_count_;
1764 };
1765 
1766 
1767 void V8HeapExplorer::AddRootEntries(SnapshotFillerInterface* filler) {
1768  filler->AddEntry(kInternalRootObject, this);
1769  filler->AddEntry(kGcRootsObject, this);
1770  GcSubrootsEnumerator enumerator(filler, this);
1771  heap_->IterateRoots(&enumerator, VISIT_ALL);
1772 }
1773 
1774 
1775 const char* V8HeapExplorer::GetSystemEntryName(HeapObject* object) {
1776  switch (object->map()->instance_type()) {
1777  case MAP_TYPE: return "system / Map";
1778  case JS_GLOBAL_PROPERTY_CELL_TYPE: return "system / JSGlobalPropertyCell";
1779  case FOREIGN_TYPE: return "system / Foreign";
1780  case ODDBALL_TYPE: return "system / Oddball";
1781 #define MAKE_STRUCT_CASE(NAME, Name, name) \
1782  case NAME##_TYPE: return "system / "#Name;
1783  STRUCT_LIST(MAKE_STRUCT_CASE)
1784 #undef MAKE_STRUCT_CASE
1785  default: return "system";
1786  }
1787 }
1788 
1789 
1790 int V8HeapExplorer::EstimateObjectsCount(HeapIterator* iterator) {
1791  int objects_count = 0;
1792  for (HeapObject* obj = iterator->next();
1793  obj != NULL;
1794  obj = iterator->next()) {
1795  objects_count++;
1796  }
1797  return objects_count;
1798 }
1799 
1800 
1801 class IndexedReferencesExtractor : public ObjectVisitor {
1802  public:
1803  IndexedReferencesExtractor(V8HeapExplorer* generator,
1804  HeapObject* parent_obj,
1805  int parent)
1806  : generator_(generator),
1807  parent_obj_(parent_obj),
1808  parent_(parent),
1809  next_index_(1) {
1810  }
1811  void VisitPointers(Object** start, Object** end) {
1812  for (Object** p = start; p < end; p++) {
1813  if (CheckVisitedAndUnmark(p)) continue;
1814  generator_->SetHiddenReference(parent_obj_, parent_, next_index_++, *p);
1815  }
1816  }
1817  static void MarkVisitedField(HeapObject* obj, int offset) {
1818  if (offset < 0) return;
1819  Address field = obj->address() + offset;
1820  ASSERT(!Memory::Object_at(field)->IsFailure());
1821  ASSERT(Memory::Object_at(field)->IsHeapObject());
1822  *field |= kFailureTag;
1823  }
1824 
1825  private:
1826  bool CheckVisitedAndUnmark(Object** field) {
1827  if ((*field)->IsFailure()) {
1828  intptr_t untagged = reinterpret_cast<intptr_t>(*field) & ~kFailureTagMask;
1829  *field = reinterpret_cast<Object*>(untagged | kHeapObjectTag);
1830  ASSERT((*field)->IsHeapObject());
1831  return true;
1832  }
1833  return false;
1834  }
1835  V8HeapExplorer* generator_;
1836  HeapObject* parent_obj_;
1837  int parent_;
1838  int next_index_;
1839 };
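// Note: IndexedReferencesExtractor relies on a tagging trick. Fields whose
// references were already extracted by name call MarkVisitedField, which
// sets kFailureTag on the raw field word (a value that, as the ASSERTs
// show, never occurs in a real object field), so VisitPointers can skip
// them via CheckVisitedAndUnmark and emit hidden (indexed) references only
// for the remaining pointers.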
1840 
1841 
1842 void V8HeapExplorer::ExtractReferences(HeapObject* obj) {
1843  HeapEntry* heap_entry = GetEntry(obj);
1844  if (heap_entry == NULL) return; // No interest in this object.
1845  int entry = heap_entry->index();
1846 
1847  bool extract_indexed_refs = true;
1848  if (obj->IsJSGlobalProxy()) {
1849  ExtractJSGlobalProxyReferences(JSGlobalProxy::cast(obj));
1850  } else if (obj->IsJSObject()) {
1851  ExtractJSObjectReferences(entry, JSObject::cast(obj));
1852  } else if (obj->IsString()) {
1853  ExtractStringReferences(entry, String::cast(obj));
1854  extract_indexed_refs = false;
1855  } else if (obj->IsContext()) {
1856  ExtractContextReferences(entry, Context::cast(obj));
1857  } else if (obj->IsMap()) {
1858  ExtractMapReferences(entry, Map::cast(obj));
1859  } else if (obj->IsSharedFunctionInfo()) {
1860  ExtractSharedFunctionInfoReferences(entry, SharedFunctionInfo::cast(obj));
1861  } else if (obj->IsScript()) {
1862  ExtractScriptReferences(entry, Script::cast(obj));
1863  } else if (obj->IsCodeCache()) {
1864  ExtractCodeCacheReferences(entry, CodeCache::cast(obj));
1865  } else if (obj->IsCode()) {
1866  ExtractCodeReferences(entry, Code::cast(obj));
1867  } else if (obj->IsJSGlobalPropertyCell()) {
1868  ExtractJSGlobalPropertyCellReferences(
1869  entry, JSGlobalPropertyCell::cast(obj));
1870  extract_indexed_refs = false;
1871  }
1872  if (extract_indexed_refs) {
1873  SetInternalReference(obj, entry, "map", obj->map(), HeapObject::kMapOffset);
1874  IndexedReferencesExtractor refs_extractor(this, obj, entry);
1875  obj->Iterate(&refs_extractor);
1876  }
1877 }
1878 
1879 
1880 void V8HeapExplorer::ExtractJSGlobalProxyReferences(JSGlobalProxy* proxy) {
1881  // We need to reference JS global objects from snapshot's root.
1882  // We use JSGlobalProxy because this is what embedder (e.g. browser)
1883  // uses for the global object.
1884  Object* object = proxy->map()->prototype();
1885  bool is_debug_object = false;
1886 #ifdef ENABLE_DEBUGGER_SUPPORT
1887  is_debug_object = object->IsGlobalObject() &&
1888  Isolate::Current()->debug()->IsDebugGlobal(GlobalObject::cast(object));
1889 #endif
1890  if (!is_debug_object) {
1891  SetUserGlobalReference(object);
1892  }
1893 }
1894 
1895 
1896 void V8HeapExplorer::ExtractJSObjectReferences(
1897  int entry, JSObject* js_obj) {
1898  HeapObject* obj = js_obj;
1899  ExtractClosureReferences(js_obj, entry);
1900  ExtractPropertyReferences(js_obj, entry);
1901  ExtractElementReferences(js_obj, entry);
1902  ExtractInternalReferences(js_obj, entry);
1903  SetPropertyReference(
1904  obj, entry, heap_->Proto_symbol(), js_obj->GetPrototype());
1905  if (obj->IsJSFunction()) {
1906  JSFunction* js_fun = JSFunction::cast(js_obj);
1907  Object* proto_or_map = js_fun->prototype_or_initial_map();
1908  if (!proto_or_map->IsTheHole()) {
1909  if (!proto_or_map->IsMap()) {
1910  SetPropertyReference(
1911  obj, entry,
1912  heap_->prototype_symbol(), proto_or_map,
1913  NULL,
1915  } else {
1916  SetPropertyReference(
1917  obj, entry,
1918  heap_->prototype_symbol(), js_fun->prototype());
1919  }
1920  }
1921  SharedFunctionInfo* shared_info = js_fun->shared();
1922  // JSFunction has either bindings or literals and never both.
1923  bool bound = shared_info->bound();
1924  TagObject(js_fun->literals_or_bindings(),
1925  bound ? "(function bindings)" : "(function literals)");
1926  SetInternalReference(js_fun, entry,
1927  bound ? "bindings" : "literals",
1928  js_fun->literals_or_bindings(),
1930  TagObject(shared_info, "(shared function info)");
1931  SetInternalReference(js_fun, entry,
1932  "shared", shared_info,
1934  TagObject(js_fun->unchecked_context(), "(context)");
1935  SetInternalReference(js_fun, entry,
1936  "context", js_fun->unchecked_context(),
1937  JSFunction::kContextOffset);
1938  for (int i = JSFunction::kNonWeakFieldsEndOffset;
1939  i < JSFunction::kSize;
1940  i += kPointerSize) {
1941  SetWeakReference(js_fun, entry, i, *HeapObject::RawField(js_fun, i), i);
1942  }
1943  } else if (obj->IsGlobalObject()) {
1944  GlobalObject* global_obj = GlobalObject::cast(obj);
1945  SetInternalReference(global_obj, entry,
1946  "builtins", global_obj->builtins(),
1948  SetInternalReference(global_obj, entry,
1949  "global_context", global_obj->global_context(),
1951  SetInternalReference(global_obj, entry,
1952  "global_receiver", global_obj->global_receiver(),
1954  }
1955  TagObject(js_obj->properties(), "(object properties)");
1956  SetInternalReference(obj, entry,
1957  "properties", js_obj->properties(),
1959  TagObject(js_obj->elements(), "(object elements)");
1960  SetInternalReference(obj, entry,
1961  "elements", js_obj->elements(),
1963 }
1964 
1965 
1966 void V8HeapExplorer::ExtractStringReferences(int entry, String* string) {
1967  if (string->IsConsString()) {
1968  ConsString* cs = ConsString::cast(string);
1969  SetInternalReference(cs, entry, "first", cs->first());
1970  SetInternalReference(cs, entry, "second", cs->second());
1971  } else if (string->IsSlicedString()) {
1972  SlicedString* ss = SlicedString::cast(string);
1973  SetInternalReference(ss, entry, "parent", ss->parent());
1974  }
1975 }
1976 
1977 
1978 void V8HeapExplorer::ExtractContextReferences(int entry, Context* context) {
1979 #define EXTRACT_CONTEXT_FIELD(index, type, name) \
1980  SetInternalReference(context, entry, #name, context->get(Context::index), \
1981  FixedArray::OffsetOfElementAt(Context::index));
1982  EXTRACT_CONTEXT_FIELD(CLOSURE_INDEX, JSFunction, closure);
1983  EXTRACT_CONTEXT_FIELD(PREVIOUS_INDEX, Context, previous);
1984  EXTRACT_CONTEXT_FIELD(EXTENSION_INDEX, Object, extension);
1985  EXTRACT_CONTEXT_FIELD(GLOBAL_INDEX, GlobalObject, global);
1986  if (context->IsGlobalContext()) {
1987  TagObject(context->jsfunction_result_caches(),
1988  "(context func. result caches)");
1989  TagObject(context->normalized_map_cache(), "(context norm. map cache)");
1990  TagObject(context->runtime_context(), "(runtime context)");
1991  TagObject(context->data(), "(context data)");
1993 #undef EXTRACT_CONTEXT_FIELD
1994  for (int i = Context::FIRST_WEAK_SLOT;
1996  ++i) {
1997  SetWeakReference(context, entry, i, context->get(i),
1999  }
2000  }
2001 }
2002 
2003 
2004 void V8HeapExplorer::ExtractMapReferences(int entry, Map* map) {
2005  SetInternalReference(map, entry,
2006  "prototype", map->prototype(), Map::kPrototypeOffset);
2007  SetInternalReference(map, entry,
2008  "constructor", map->constructor(),
2010  if (!map->instance_descriptors()->IsEmpty()) {
2011  TagObject(map->instance_descriptors(), "(map descriptors)");
2012  SetInternalReference(map, entry,
2013  "descriptors", map->instance_descriptors(),
2015  }
2016  if (map->unchecked_prototype_transitions()->IsFixedArray()) {
2017  TagObject(map->prototype_transitions(), "(prototype transitions)");
2018  SetInternalReference(map, entry,
2019  "prototype_transitions", map->prototype_transitions(),
2021  } else {
2022  SetInternalReference(map, entry,
2023  "back_pointer", map->GetBackPointer(),
2025  }
2026  SetInternalReference(map, entry,
2027  "code_cache", map->code_cache(),
2029 }
2030 
2031 
2032 void V8HeapExplorer::ExtractSharedFunctionInfoReferences(
2033  int entry, SharedFunctionInfo* shared) {
2034  HeapObject* obj = shared;
2035  SetInternalReference(obj, entry,
2036  "name", shared->name(),
2038  TagObject(shared->code(), "(code)");
2039  SetInternalReference(obj, entry,
2040  "code", shared->code(),
2042  TagObject(shared->scope_info(), "(function scope info)");
2043  SetInternalReference(obj, entry,
2044  "scope_info", shared->scope_info(),
2046  SetInternalReference(obj, entry,
2047  "instance_class_name", shared->instance_class_name(),
2049  SetInternalReference(obj, entry,
2050  "script", shared->script(),
2052  TagObject(shared->construct_stub(), "(code)");
2053  SetInternalReference(obj, entry,
2054  "construct_stub", shared->construct_stub(),
2056  SetInternalReference(obj, entry,
2057  "function_data", shared->function_data(),
2059  SetInternalReference(obj, entry,
2060  "debug_info", shared->debug_info(),
2062  SetInternalReference(obj, entry,
2063  "inferred_name", shared->inferred_name(),
2065  SetInternalReference(obj, entry,
2066  "this_property_assignments",
2067  shared->this_property_assignments(),
2069  SetWeakReference(obj, entry,
2070  1, shared->initial_map(),
2072 }
2073 
2074 
2075 void V8HeapExplorer::ExtractScriptReferences(int entry, Script* script) {
2076  HeapObject* obj = script;
2077  SetInternalReference(obj, entry,
2078  "source", script->source(),
2080  SetInternalReference(obj, entry,
2081  "name", script->name(),
2083  SetInternalReference(obj, entry,
2084  "data", script->data(),
2086  SetInternalReference(obj, entry,
2087  "context_data", script->context_data(),
2089  TagObject(script->line_ends(), "(script line ends)");
2090  SetInternalReference(obj, entry,
2091  "line_ends", script->line_ends(),
2093 }
2094 
2095 
2096 void V8HeapExplorer::ExtractCodeCacheReferences(
2097  int entry, CodeCache* code_cache) {
2098  TagObject(code_cache->default_cache(), "(default code cache)");
2099  SetInternalReference(code_cache, entry,
2100  "default_cache", code_cache->default_cache(),
2102  TagObject(code_cache->normal_type_cache(), "(code type cache)");
2103  SetInternalReference(code_cache, entry,
2104  "type_cache", code_cache->normal_type_cache(),
2106 }
2107 
2108 
2109 void V8HeapExplorer::ExtractCodeReferences(int entry, Code* code) {
2110  TagObject(code->relocation_info(), "(code relocation info)");
2111  SetInternalReference(code, entry,
2112  "relocation_info", code->relocation_info(),
2114  SetInternalReference(code, entry,
2115  "handler_table", code->handler_table(),
2117  TagObject(code->deoptimization_data(), "(code deopt data)");
2118  SetInternalReference(code, entry,
2119  "deoptimization_data", code->deoptimization_data(),
2121  SetInternalReference(code, entry,
2122  "type_feedback_info", code->type_feedback_info(),
2124  SetInternalReference(code, entry,
2125  "gc_metadata", code->gc_metadata(),
2127 }
2128 
2129 
2130 void V8HeapExplorer::ExtractJSGlobalPropertyCellReferences(
2131  int entry, JSGlobalPropertyCell* cell) {
2132  SetInternalReference(cell, entry, "value", cell->value());
2133 }
2134 
2135 
2136 void V8HeapExplorer::ExtractClosureReferences(JSObject* js_obj, int entry) {
2137  if (!js_obj->IsJSFunction()) return;
2138 
2139  JSFunction* func = JSFunction::cast(js_obj);
2140  if (func->shared()->bound()) {
2141  FixedArray* bindings = func->function_bindings();
2142  SetNativeBindReference(js_obj, entry, "bound_this",
2143  bindings->get(JSFunction::kBoundThisIndex));
2144  SetNativeBindReference(js_obj, entry, "bound_function",
2145  bindings->get(JSFunction::kBoundFunctionIndex));
2146  for (int i = JSFunction::kBoundArgumentsStartIndex;
2147  i < bindings->length(); i++) {
2148  const char* reference_name = collection_->names()->GetFormatted(
2149  "bound_argument_%d",
2150  i - JSFunction::kBoundArgumentsStartIndex);
2151  SetNativeBindReference(js_obj, entry, reference_name,
2152  bindings->get(i));
2153  }
2154  } else {
2155  Context* context = func->context()->declaration_context();
2156  ScopeInfo* scope_info = context->closure()->shared()->scope_info();
2157  // Add context allocated locals.
2158  int context_locals = scope_info->ContextLocalCount();
2159  for (int i = 0; i < context_locals; ++i) {
2160  String* local_name = scope_info->ContextLocalName(i);
2161  int idx = Context::MIN_CONTEXT_SLOTS + i;
2162  SetClosureReference(js_obj, entry, local_name, context->get(idx));
2163  }
2164 
2165  // Add function variable.
2166  if (scope_info->HasFunctionName()) {
2167  String* name = scope_info->FunctionName();
2168  VariableMode mode;
2169  int idx = scope_info->FunctionContextSlotIndex(name, &mode);
2170  if (idx >= 0) {
2171  SetClosureReference(js_obj, entry, name, context->get(idx));
2172  }
2173  }
2174  }
2175 }
2176 
2177 
2178 void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj, int entry) {
2179  if (js_obj->HasFastProperties()) {
2180  DescriptorArray* descs = js_obj->map()->instance_descriptors();
2181  for (int i = 0; i < descs->number_of_descriptors(); i++) {
2182  switch (descs->GetType(i)) {
2183  case FIELD: {
2184  int index = descs->GetFieldIndex(i);
2185  if (index < js_obj->map()->inobject_properties()) {
2186  SetPropertyReference(
2187  js_obj, entry,
2188  descs->GetKey(i), js_obj->InObjectPropertyAt(index),
2189  NULL,
2190  js_obj->GetInObjectPropertyOffset(index));
2191  } else {
2192  SetPropertyReference(
2193  js_obj, entry,
2194  descs->GetKey(i), js_obj->FastPropertyAt(index));
2195  }
2196  break;
2197  }
2198  case CONSTANT_FUNCTION:
2199  SetPropertyReference(
2200  js_obj, entry,
2201  descs->GetKey(i), descs->GetConstantFunction(i));
2202  break;
2203  case CALLBACKS: {
2204  Object* callback_obj = descs->GetValue(i);
2205  if (callback_obj->IsAccessorPair()) {
2206  AccessorPair* accessors = AccessorPair::cast(callback_obj);
2207  if (Object* getter = accessors->getter()) {
2208  SetPropertyReference(js_obj, entry, descs->GetKey(i),
2209  getter, "get-%s");
2210  }
2211  if (Object* setter = accessors->setter()) {
2212  SetPropertyReference(js_obj, entry, descs->GetKey(i),
2213  setter, "set-%s");
2214  }
2215  }
2216  break;
2217  }
2218  case NORMAL: // only in slow mode
2219  case HANDLER: // only in lookup results, not in descriptors
2220  case INTERCEPTOR: // only in lookup results, not in descriptors
2221  case MAP_TRANSITION: // we do not care about transitions here...
2222  case CONSTANT_TRANSITION:
2223  case NULL_DESCRIPTOR: // ... and not about "holes"
2224  break;
2225  }
2226  }
2227  } else {
2228  StringDictionary* dictionary = js_obj->property_dictionary();
2229  int length = dictionary->Capacity();
2230  for (int i = 0; i < length; ++i) {
2231  Object* k = dictionary->KeyAt(i);
2232  if (dictionary->IsKey(k)) {
2233  Object* target = dictionary->ValueAt(i);
2234  // We assume that global objects can only have slow properties.
2235  Object* value = target->IsJSGlobalPropertyCell()
2236  ? JSGlobalPropertyCell::cast(target)->value()
2237  : target;
2238  if (String::cast(k)->length() > 0) {
2239  SetPropertyReference(js_obj, entry, String::cast(k), value);
2240  } else {
2241  TagObject(value, "(hidden properties)");
2242  SetInternalReference(js_obj, entry, "hidden_properties", value);
2243  }
2244  }
2245  }
2246  }
2247 }
2248 
2249 
2250 void V8HeapExplorer::ExtractElementReferences(JSObject* js_obj, int entry) {
2251  if (js_obj->HasFastObjectElements()) {
2252  FixedArray* elements = FixedArray::cast(js_obj->elements());
2253  int length = js_obj->IsJSArray() ?
2254  Smi::cast(JSArray::cast(js_obj)->length())->value() :
2255  elements->length();
2256  for (int i = 0; i < length; ++i) {
2257  if (!elements->get(i)->IsTheHole()) {
2258  SetElementReference(js_obj, entry, i, elements->get(i));
2259  }
2260  }
2261  } else if (js_obj->HasDictionaryElements()) {
2262  SeededNumberDictionary* dictionary = js_obj->element_dictionary();
2263  int length = dictionary->Capacity();
2264  for (int i = 0; i < length; ++i) {
2265  Object* k = dictionary->KeyAt(i);
2266  if (dictionary->IsKey(k)) {
2267  ASSERT(k->IsNumber());
2268  uint32_t index = static_cast<uint32_t>(k->Number());
2269  SetElementReference(js_obj, entry, index, dictionary->ValueAt(i));
2270  }
2271  }
2272  }
2273 }
2274 
2275 
2276 void V8HeapExplorer::ExtractInternalReferences(JSObject* js_obj, int entry) {
2277  int length = js_obj->GetInternalFieldCount();
2278  for (int i = 0; i < length; ++i) {
2279  Object* o = js_obj->GetInternalField(i);
2280  SetInternalReference(
2281  js_obj, entry, i, o, js_obj->GetInternalFieldOffset(i));
2282  }
2283 }
2284 
2285 
2286 String* V8HeapExplorer::GetConstructorName(JSObject* object) {
2287  Heap* heap = object->GetHeap();
2288  if (object->IsJSFunction()) return heap->closure_symbol();
2289  String* constructor_name = object->constructor_name();
2290  if (constructor_name == heap->Object_symbol()) {
2291  // Look up an immediate "constructor" property; if it is a function,
2292  // return its name. This is for instances of binding objects, which
2293  // have prototype constructor type "Object".
2294  Object* constructor_prop = NULL;
2295  LookupResult result(heap->isolate());
2296  object->LocalLookupRealNamedProperty(heap->constructor_symbol(), &result);
2297  if (result.IsProperty()) {
2298  constructor_prop = result.GetLazyValue();
2299  }
2300  if (constructor_prop->IsJSFunction()) {
2301  Object* maybe_name = JSFunction::cast(constructor_prop)->shared()->name();
2302  if (maybe_name->IsString()) {
2303  String* name = String::cast(maybe_name);
2304  if (name->length() > 0) return name;
2305  }
2306  }
2307  }
2308  return object->constructor_name();
2309 }
2310 
2311 
2312 HeapEntry* V8HeapExplorer::GetEntry(Object* obj) {
2313  if (!obj->IsHeapObject()) return NULL;
2314  return filler_->FindOrAddEntry(obj, this);
2315 }
2316 
2317 
2318 class RootsReferencesExtractor : public ObjectVisitor {
2319  private:
2320  struct IndexTag {
2321  IndexTag(int index, VisitorSynchronization::SyncTag tag)
2322  : index(index), tag(tag) { }
2323  int index;
2324  VisitorSynchronization::SyncTag tag;
2325  };
2326 
2327  public:
2328  RootsReferencesExtractor()
2329  : collecting_all_references_(false),
2330  previous_reference_count_(0) {
2331  }
2332 
2333  void VisitPointers(Object** start, Object** end) {
2334  if (collecting_all_references_) {
2335  for (Object** p = start; p < end; p++) all_references_.Add(*p);
2336  } else {
2337  for (Object** p = start; p < end; p++) strong_references_.Add(*p);
2338  }
2339  }
2340 
2341  void SetCollectingAllReferences() { collecting_all_references_ = true; }
2342 
2343  void FillReferences(V8HeapExplorer* explorer) {
2344  ASSERT(strong_references_.length() <= all_references_.length());
2345  for (int i = 0; i < reference_tags_.length(); ++i) {
2346  explorer->SetGcRootsReference(reference_tags_[i].tag);
2347  }
2348  int strong_index = 0, all_index = 0, tags_index = 0;
2349  while (all_index < all_references_.length()) {
2350  if (strong_index < strong_references_.length() &&
2351  strong_references_[strong_index] == all_references_[all_index]) {
2352  explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
2353  false,
2354  all_references_[all_index++]);
2355  ++strong_index;
2356  } else {
2357  explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
2358  true,
2359  all_references_[all_index++]);
2360  }
2361  if (reference_tags_[tags_index].index == all_index) ++tags_index;
2362  }
2363  }
2364 
2365  void Synchronize(VisitorSynchronization::SyncTag tag) {
2366  if (collecting_all_references_ &&
2367  previous_reference_count_ != all_references_.length()) {
2368  previous_reference_count_ = all_references_.length();
2369  reference_tags_.Add(IndexTag(previous_reference_count_, tag));
2370  }
2371  }
2372 
2373  private:
2374  bool collecting_all_references_;
2375  List<Object*> strong_references_;
2376  List<Object*> all_references_;
2377  int previous_reference_count_;
2378  List<IndexTag> reference_tags_;
2379 };
2380 
2381 
2382 bool V8HeapExplorer::IterateAndExtractReferences(
2383  SnapshotFillerInterface* filler) {
2384  HeapIterator iterator(HeapIterator::kFilterUnreachable);
2385 
2386  filler_ = filler;
2387  bool interrupted = false;
2388 
2389  // Heap iteration with filtering must be finished in any case.
2390  for (HeapObject* obj = iterator.next();
2391  obj != NULL;
2392  obj = iterator.next(), progress_->ProgressStep()) {
2393  if (!interrupted) {
2394  ExtractReferences(obj);
2395  if (!progress_->ProgressReport(false)) interrupted = true;
2396  }
2397  }
2398  if (interrupted) {
2399  filler_ = NULL;
2400  return false;
2401  }
2402 
2403  SetRootGcRootsReference();
2404  RootsReferencesExtractor extractor;
2405  heap_->IterateRoots(&extractor, VISIT_ONLY_STRONG);
2406  extractor.SetCollectingAllReferences();
2407  heap_->IterateRoots(&extractor, VISIT_ALL);
2408  extractor.FillReferences(this);
2409  filler_ = NULL;
2410  return progress_->ProgressReport(true);
2411 }
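
The two IterateRoots passes above drive RootsReferencesExtractor: the first pass records only strong roots, the second records all roots, and FillReferences then walks both lists in lockstep so that a pointer seen only in the second pass is reported as a weak GC subroot reference. A minimal standalone sketch of that diffing idea (plain C++ for illustration only; none of these names are V8 API):

#include <cstdio>
#include <vector>

// Illustration: 'strong' is assumed to be a subsequence of 'all', which is
// what visiting the same roots with VISIT_ONLY_STRONG and then VISIT_ALL
// produces.
static void ReportSubroots(const std::vector<int>& strong,
                           const std::vector<int>& all) {
  size_t strong_index = 0;
  for (size_t all_index = 0; all_index < all.size(); ++all_index) {
    bool is_weak = true;
    if (strong_index < strong.size() &&
        strong[strong_index] == all[all_index]) {
      is_weak = false;
      ++strong_index;
    }
    std::printf("root %d is %s\n", all[all_index], is_weak ? "weak" : "strong");
  }
}

int main() {
  std::vector<int> strong;
  std::vector<int> all;
  strong.push_back(1); strong.push_back(3); strong.push_back(4);
  all.push_back(1); all.push_back(2); all.push_back(3);
  all.push_back(4); all.push_back(5);
  ReportSubroots(strong, all);  // 2 and 5 are reported as weak.
  return 0;
}
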
2412 
2413 
2414 bool V8HeapExplorer::IsEssentialObject(Object* object) {
2415  // We have to use raw_unchecked_* versions because checked versions
2416  // would fail during iteration over object properties.
2417  return object->IsHeapObject()
2418  && !object->IsOddball()
2419  && object != heap_->raw_unchecked_empty_byte_array()
2420  && object != heap_->raw_unchecked_empty_fixed_array()
2421  && object != heap_->raw_unchecked_empty_descriptor_array()
2422  && object != heap_->raw_unchecked_fixed_array_map()
2423  && object != heap_->raw_unchecked_global_property_cell_map()
2424  && object != heap_->raw_unchecked_shared_function_info_map()
2425  && object != heap_->raw_unchecked_free_space_map()
2426  && object != heap_->raw_unchecked_one_pointer_filler_map()
2427  && object != heap_->raw_unchecked_two_pointer_filler_map();
2428 }
2429 
2430 
2431 void V8HeapExplorer::SetClosureReference(HeapObject* parent_obj,
2432  int parent_entry,
2433  String* reference_name,
2434  Object* child_obj) {
2435  HeapEntry* child_entry = GetEntry(child_obj);
2436  if (child_entry != NULL) {
2437  filler_->SetNamedReference(HeapGraphEdge::kContextVariable,
2438  parent_entry,
2439  collection_->names()->GetName(reference_name),
2440  child_entry);
2441  }
2442 }
2443 
2444 
2445 void V8HeapExplorer::SetNativeBindReference(HeapObject* parent_obj,
2446  int parent_entry,
2447  const char* reference_name,
2448  Object* child_obj) {
2449  HeapEntry* child_entry = GetEntry(child_obj);
2450  if (child_entry != NULL) {
2451  filler_->SetNamedReference(HeapGraphEdge::kShortcut,
2452  parent_entry,
2453  reference_name,
2454  child_entry);
2455  }
2456 }
2457 
2458 
2459 void V8HeapExplorer::SetElementReference(HeapObject* parent_obj,
2460  int parent_entry,
2461  int index,
2462  Object* child_obj) {
2463  HeapEntry* child_entry = GetEntry(child_obj);
2464  if (child_entry != NULL) {
2465  filler_->SetIndexedReference(HeapGraphEdge::kElement,
2466  parent_entry,
2467  index,
2468  child_entry);
2469  }
2470 }
2471 
2472 
2473 void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
2474  int parent_entry,
2475  const char* reference_name,
2476  Object* child_obj,
2477  int field_offset) {
2478  HeapEntry* child_entry = GetEntry(child_obj);
2479  if (child_entry == NULL) return;
2480  if (IsEssentialObject(child_obj)) {
2481  filler_->SetNamedReference(HeapGraphEdge::kInternal,
2482  parent_entry,
2483  reference_name,
2484  child_entry);
2485  }
2486  IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
2487 }
2488 
2489 
2490 void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
2491  int parent_entry,
2492  int index,
2493  Object* child_obj,
2494  int field_offset) {
2495  HeapEntry* child_entry = GetEntry(child_obj);
2496  if (child_entry == NULL) return;
2497  if (IsEssentialObject(child_obj)) {
2498  filler_->SetNamedReference(HeapGraphEdge::kInternal,
2499  parent_entry,
2500  collection_->names()->GetName(index),
2501  child_entry);
2502  }
2503  IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
2504 }
2505 
2506 
2507 void V8HeapExplorer::SetHiddenReference(HeapObject* parent_obj,
2508  int parent_entry,
2509  int index,
2510  Object* child_obj) {
2511  HeapEntry* child_entry = GetEntry(child_obj);
2512  if (child_entry != NULL && IsEssentialObject(child_obj)) {
2513  filler_->SetIndexedReference(HeapGraphEdge::kHidden,
2514  parent_entry,
2515  index,
2516  child_entry);
2517  }
2518 }
2519 
2520 
2521 void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj,
2522  int parent_entry,
2523  int index,
2524  Object* child_obj,
2525  int field_offset) {
2526  HeapEntry* child_entry = GetEntry(child_obj);
2527  if (child_entry != NULL) {
2528  filler_->SetIndexedReference(HeapGraphEdge::kWeak,
2529  parent_entry,
2530  index,
2531  child_entry);
2532  IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
2533  }
2534 }
2535 
2536 
2537 void V8HeapExplorer::SetPropertyReference(HeapObject* parent_obj,
2538  int parent_entry,
2539  String* reference_name,
2540  Object* child_obj,
2541  const char* name_format_string,
2542  int field_offset) {
2543  HeapEntry* child_entry = GetEntry(child_obj);
2544  if (child_entry != NULL) {
2545  HeapGraphEdge::Type type = reference_name->length() > 0 ?
2546  HeapGraphEdge::kProperty : HeapGraphEdge::kInternal;
2547  const char* name = name_format_string != NULL ?
2548  collection_->names()->GetFormatted(
2549  name_format_string,
2550  *reference_name->ToCString(DISALLOW_NULLS,
2551  ROBUST_STRING_TRAVERSAL)) :
2552  collection_->names()->GetName(reference_name);
2553 
2554  filler_->SetNamedReference(type,
2555  parent_entry,
2556  name,
2557  child_entry);
2558  IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
2559  }
2560 }
2561 
2562 
2563 void V8HeapExplorer::SetPropertyShortcutReference(HeapObject* parent_obj,
2564  int parent_entry,
2565  String* reference_name,
2566  Object* child_obj) {
2567  HeapEntry* child_entry = GetEntry(child_obj);
2568  if (child_entry != NULL) {
2569  filler_->SetNamedReference(HeapGraphEdge::kShortcut,
2570  parent_entry,
2571  collection_->names()->GetName(reference_name),
2572  child_entry);
2573  }
2574 }
2575 
2576 
2577 void V8HeapExplorer::SetRootGcRootsReference() {
2578  filler_->SetIndexedAutoIndexReference(
2579  HeapGraphEdge::kElement,
2580  snapshot_->root()->index(),
2581  snapshot_->gc_roots());
2582 }
2583 
2584 
2585 void V8HeapExplorer::SetUserGlobalReference(Object* child_obj) {
2586  HeapEntry* child_entry = GetEntry(child_obj);
2587  ASSERT(child_entry != NULL);
2588  filler_->SetNamedAutoIndexReference(
2589  HeapGraphEdge::kShortcut,
2590  snapshot_->root()->index(),
2591  child_entry);
2592 }
2593 
2594 
2595 void V8HeapExplorer::SetGcRootsReference(VisitorSynchronization::SyncTag tag) {
2596  filler_->SetIndexedAutoIndexReference(
2597  HeapGraphEdge::kElement,
2598  snapshot_->gc_roots()->index(),
2599  snapshot_->gc_subroot(tag));
2600 }
2601 
2602 
2603 void V8HeapExplorer::SetGcSubrootReference(
2604  VisitorSynchronization::SyncTag tag, bool is_weak, Object* child_obj) {
2605  HeapEntry* child_entry = GetEntry(child_obj);
2606  if (child_entry != NULL) {
2607  const char* name = GetStrongGcSubrootName(child_obj);
2608  if (name != NULL) {
2609  filler_->SetNamedReference(
2610  HeapGraphEdge::kInternal,
2611  snapshot_->gc_subroot(tag)->index(),
2612  name,
2613  child_entry);
2614  } else {
2615  filler_->SetIndexedAutoIndexReference(
2616  is_weak ? HeapGraphEdge::kWeak : HeapGraphEdge::kElement,
2617  snapshot_->gc_subroot(tag)->index(),
2618  child_entry);
2619  }
2620  }
2621 }
2622 
2623 
2624 const char* V8HeapExplorer::GetStrongGcSubrootName(Object* object) {
2625  if (strong_gc_subroot_names_.is_empty()) {
2626 #define NAME_ENTRY(name) strong_gc_subroot_names_.SetTag(heap_->name(), #name);
2627 #define ROOT_NAME(type, name, camel_name) NAME_ENTRY(name)
2628  ROOT_LIST(ROOT_NAME)
2629 #undef ROOT_NAME
2630 #define STRUCT_MAP_NAME(NAME, Name, name) NAME_ENTRY(name##_map)
2631  STRUCT_LIST(STRUCT_MAP_NAME)
2632 #undef STRUCT_MAP_NAME
2633 #define SYMBOL_NAME(name, str) NAME_ENTRY(name)
2634  SYMBOL_LIST(SYMBOL_NAME)
2635 #undef SYMBOL_NAME
2636 #undef NAME_ENTRY
2637  CHECK(!strong_gc_subroot_names_.is_empty());
2638  }
2639  return strong_gc_subroot_names_.GetTag(object);
2640 }
2641 
2642 
2643 void V8HeapExplorer::TagObject(Object* obj, const char* tag) {
2644  if (IsEssentialObject(obj)) {
2645  HeapEntry* entry = GetEntry(obj);
2646  if (entry->name()[0] == '\0') {
2647  entry->set_name(tag);
2648  }
2649  }
2650 }
2651 
2652 
2653 class GlobalObjectsEnumerator : public ObjectVisitor {
2654  public:
2655  virtual void VisitPointers(Object** start, Object** end) {
2656  for (Object** p = start; p < end; p++) {
2657  if ((*p)->IsGlobalContext()) {
2658  Context* context = Context::cast(*p);
2659  JSObject* proxy = context->global_proxy();
2660  if (proxy->IsJSGlobalProxy()) {
2661  Object* global = proxy->map()->prototype();
2662  if (global->IsJSGlobalObject()) {
2663  objects_.Add(Handle<JSGlobalObject>(JSGlobalObject::cast(global)));
2664  }
2665  }
2666  }
2667  }
2668  }
2669  int count() { return objects_.length(); }
2670  Handle<JSGlobalObject>& at(int i) { return objects_[i]; }
2671 
2672  private:
2673  List<Handle<JSGlobalObject> > objects_;
2674 };
2675 
2676 
2677 // Modifies heap. Must not be run during heap traversal.
2678 void V8HeapExplorer::TagGlobalObjects() {
2679  HandleScope scope;
2680  Isolate* isolate = Isolate::Current();
2681  GlobalObjectsEnumerator enumerator;
2682  isolate->global_handles()->IterateAllRoots(&enumerator);
2683  Handle<String> document_string =
2684  isolate->factory()->NewStringFromAscii(CStrVector("document"));
2685  Handle<String> url_string =
2686  isolate->factory()->NewStringFromAscii(CStrVector("URL"));
2687  const char** urls = NewArray<const char*>(enumerator.count());
2688  for (int i = 0, l = enumerator.count(); i < l; ++i) {
2689  urls[i] = NULL;
2690  HandleScope scope;
2691  Handle<JSGlobalObject> global_obj = enumerator.at(i);
2692  Object* obj_document;
2693  if (global_obj->GetProperty(*document_string)->ToObject(&obj_document) &&
2694  obj_document->IsJSObject()) {
2695  JSObject* document = JSObject::cast(obj_document);
2696  Object* obj_url;
2697  if (document->GetProperty(*url_string)->ToObject(&obj_url) &&
2698  obj_url->IsString()) {
2699  urls[i] = collection_->names()->GetName(String::cast(obj_url));
2700  }
2701  }
2702  }
2703 
2704  AssertNoAllocation no_allocation;
2705  for (int i = 0, l = enumerator.count(); i < l; ++i) {
2706  objects_tags_.SetTag(*enumerator.at(i), urls[i]);
2707  }
2708 
2709  DeleteArray(urls);
2710 }
2711 
2712 
2713 class GlobalHandlesExtractor : public ObjectVisitor {
2714  public:
2715  explicit GlobalHandlesExtractor(NativeObjectsExplorer* explorer)
2716  : explorer_(explorer) {}
2717  virtual ~GlobalHandlesExtractor() {}
2718  virtual void VisitPointers(Object** start, Object** end) {
2719  UNREACHABLE();
2720  }
2721  virtual void VisitEmbedderReference(Object** p, uint16_t class_id) {
2722  explorer_->VisitSubtreeWrapper(p, class_id);
2723  }
2724  private:
2725  NativeObjectsExplorer* explorer_;
2726 };
2727 
2728 
2729 class BasicHeapEntriesAllocator : public HeapEntriesAllocator {
2730  public:
2731  BasicHeapEntriesAllocator(
2732  HeapSnapshot* snapshot,
2733  HeapEntry::Type entries_type)
2734  : snapshot_(snapshot),
2735  collection_(snapshot_->collection()),
2736  entries_type_(entries_type) {
2737  }
2738  virtual HeapEntry* AllocateEntry(HeapThing ptr);
2739  private:
2740  HeapSnapshot* snapshot_;
2741  HeapSnapshotsCollection* collection_;
2742  HeapEntry::Type entries_type_;
2743 };
2744 
2745 
2746 HeapEntry* BasicHeapEntriesAllocator::AllocateEntry(HeapThing ptr) {
2747  v8::RetainedObjectInfo* info = reinterpret_cast<v8::RetainedObjectInfo*>(ptr);
2748  intptr_t elements = info->GetElementCount();
2749  intptr_t size = info->GetSizeInBytes();
2750  const char* name = elements != -1
2751  ? collection_->names()->GetFormatted(
2752  "%s / %" V8_PTR_PREFIX "d entries", info->GetLabel(), elements)
2753  : collection_->names()->GetCopy(info->GetLabel());
2754  return snapshot_->AddEntry(
2755  entries_type_,
2756  name,
2758  size != -1 ? static_cast<int>(size) : 0);
2759 }
2760 
2761 
2762 NativeObjectsExplorer::NativeObjectsExplorer(
2763  HeapSnapshot* snapshot, SnapshottingProgressReportingInterface* progress)
2764  : snapshot_(snapshot),
2765  collection_(snapshot_->collection()),
2766  progress_(progress),
2767  embedder_queried_(false),
2768  objects_by_info_(RetainedInfosMatch),
2769  native_groups_(StringsMatch),
2770  filler_(NULL) {
2771  synthetic_entries_allocator_ =
2772  new BasicHeapEntriesAllocator(snapshot, HeapEntry::kSynthetic);
2773  native_entries_allocator_ =
2774  new BasicHeapEntriesAllocator(snapshot, HeapEntry::kNative);
2775 }
2776 
2777 
2778 NativeObjectsExplorer::~NativeObjectsExplorer() {
2779  for (HashMap::Entry* p = objects_by_info_.Start();
2780  p != NULL;
2781  p = objects_by_info_.Next(p)) {
2782  v8::RetainedObjectInfo* info =
2783  reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
2784  info->Dispose();
2785  List<HeapObject*>* objects =
2786  reinterpret_cast<List<HeapObject*>* >(p->value);
2787  delete objects;
2788  }
2789  for (HashMap::Entry* p = native_groups_.Start();
2790  p != NULL;
2791  p = native_groups_.Next(p)) {
2792  v8::RetainedObjectInfo* info =
2793  reinterpret_cast<v8::RetainedObjectInfo*>(p->value);
2794  info->Dispose();
2795  }
2796  delete synthetic_entries_allocator_;
2797  delete native_entries_allocator_;
2798 }
2799 
2800 
2801 int NativeObjectsExplorer::EstimateObjectsCount() {
2802  FillRetainedObjects();
2803  return objects_by_info_.occupancy();
2804 }
2805 
2806 
2807 void NativeObjectsExplorer::FillRetainedObjects() {
2808  if (embedder_queried_) return;
2809  Isolate* isolate = Isolate::Current();
2810  // Record objects that are joined into ObjectGroups.
2811  isolate->heap()->CallGlobalGCPrologueCallback();
2812  List<ObjectGroup*>* groups = isolate->global_handles()->object_groups();
2813  for (int i = 0; i < groups->length(); ++i) {
2814  ObjectGroup* group = groups->at(i);
2815  if (group->info_ == NULL) continue;
2816  List<HeapObject*>* list = GetListMaybeDisposeInfo(group->info_);
2817  for (size_t j = 0; j < group->length_; ++j) {
2818  HeapObject* obj = HeapObject::cast(*group->objects_[j]);
2819  list->Add(obj);
2820  in_groups_.Insert(obj);
2821  }
2822  group->info_ = NULL; // Acquire info object ownership.
2823  }
2824  isolate->global_handles()->RemoveObjectGroups();
2825  isolate->heap()->CallGlobalGCEpilogueCallback();
2826  // Record objects that are not in ObjectGroups, but have class ID.
2827  GlobalHandlesExtractor extractor(this);
2828  isolate->global_handles()->IterateAllRootsWithClassIds(&extractor);
2829  embedder_queried_ = true;
2830 }
2831 
2832 void NativeObjectsExplorer::FillImplicitReferences() {
2833  Isolate* isolate = Isolate::Current();
2834  List<ImplicitRefGroup*>* groups =
2835  isolate->global_handles()->implicit_ref_groups();
2836  for (int i = 0; i < groups->length(); ++i) {
2837  ImplicitRefGroup* group = groups->at(i);
2838  HeapObject* parent = *group->parent_;
2839  int parent_entry =
2840  filler_->FindOrAddEntry(parent, native_entries_allocator_)->index();
2841  ASSERT(parent_entry != HeapEntry::kNoEntry);
2842  Object*** children = group->children_;
2843  for (size_t j = 0; j < group->length_; ++j) {
2844  Object* child = *children[j];
2845  HeapEntry* child_entry =
2846  filler_->FindOrAddEntry(child, native_entries_allocator_);
2847  filler_->SetNamedReference(
2848  HeapGraphEdge::kInternal,
2849  parent_entry,
2850  "native",
2851  child_entry);
2852  }
2853  }
2854 }
2855 
2856 List<HeapObject*>* NativeObjectsExplorer::GetListMaybeDisposeInfo(
2857  v8::RetainedObjectInfo* info) {
2858  HashMap::Entry* entry =
2859  objects_by_info_.Lookup(info, InfoHash(info), true);
2860  if (entry->value != NULL) {
2861  info->Dispose();
2862  } else {
2863  entry->value = new List<HeapObject*>(4);
2864  }
2865  return reinterpret_cast<List<HeapObject*>* >(entry->value);
2866 }
2867 
2868 
2869 bool NativeObjectsExplorer::IterateAndExtractReferences(
2870  SnapshotFillerInterface* filler) {
2871  filler_ = filler;
2872  FillRetainedObjects();
2873  FillImplicitReferences();
2874  if (EstimateObjectsCount() > 0) {
2875  for (HashMap::Entry* p = objects_by_info_.Start();
2876  p != NULL;
2877  p = objects_by_info_.Next(p)) {
2878  v8::RetainedObjectInfo* info =
2879  reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
2880  SetNativeRootReference(info);
2881  List<HeapObject*>* objects =
2882  reinterpret_cast<List<HeapObject*>* >(p->value);
2883  for (int i = 0; i < objects->length(); ++i) {
2884  SetWrapperNativeReferences(objects->at(i), info);
2885  }
2886  }
2887  SetRootNativeRootsReference();
2888  }
2889  filler_ = NULL;
2890  return true;
2891 }
2892 
2893 
2894 class NativeGroupRetainedObjectInfo : public v8::RetainedObjectInfo {
2895  public:
2896  explicit NativeGroupRetainedObjectInfo(const char* label)
2897  : disposed_(false),
2898  hash_(reinterpret_cast<intptr_t>(label)),
2899  label_(label) {
2900  }
2901 
2902  virtual ~NativeGroupRetainedObjectInfo() {}
2903  virtual void Dispose() {
2904  CHECK(!disposed_);
2905  disposed_ = true;
2906  delete this;
2907  }
2908  virtual bool IsEquivalent(RetainedObjectInfo* other) {
2909  return hash_ == other->GetHash() && !strcmp(label_, other->GetLabel());
2910  }
2911  virtual intptr_t GetHash() { return hash_; }
2912  virtual const char* GetLabel() { return label_; }
2913 
2914  private:
2915  bool disposed_;
2916  intptr_t hash_;
2917  const char* label_;
2918 };
2919 
2920 
2921 NativeGroupRetainedObjectInfo* NativeObjectsExplorer::FindOrAddGroupInfo(
2922  const char* label) {
2923  const char* label_copy = collection_->names()->GetCopy(label);
2924  uint32_t hash = HashSequentialString(label_copy,
2925  static_cast<int>(strlen(label_copy)),
2926  HEAP->HashSeed());
2927  HashMap::Entry* entry = native_groups_.Lookup(const_cast<char*>(label_copy),
2928  hash, true);
2929  if (entry->value == NULL) {
2930  entry->value = new NativeGroupRetainedObjectInfo(label);
2931  }
2932  return static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
2933 }
2934 
2935 
2936 void NativeObjectsExplorer::SetNativeRootReference(
2937  v8::RetainedObjectInfo* info) {
2938  HeapEntry* child_entry =
2939  filler_->FindOrAddEntry(info, native_entries_allocator_);
2940  ASSERT(child_entry != NULL);
2941  NativeGroupRetainedObjectInfo* group_info =
2942  FindOrAddGroupInfo(info->GetGroupLabel());
2943  HeapEntry* group_entry =
2944  filler_->FindOrAddEntry(group_info, synthetic_entries_allocator_);
2945  filler_->SetNamedAutoIndexReference(
2946  HeapGraphEdge::kInternal,
2947  group_entry->index(),
2948  child_entry);
2949 }
2950 
2951 
2952 void NativeObjectsExplorer::SetWrapperNativeReferences(
2953  HeapObject* wrapper, v8::RetainedObjectInfo* info) {
2954  HeapEntry* wrapper_entry = filler_->FindEntry(wrapper);
2955  ASSERT(wrapper_entry != NULL);
2956  HeapEntry* info_entry =
2957  filler_->FindOrAddEntry(info, native_entries_allocator_);
2958  ASSERT(info_entry != NULL);
2959  filler_->SetNamedReference(HeapGraphEdge::kInternal,
2960  wrapper_entry->index(),
2961  "native",
2962  info_entry);
2963  filler_->SetIndexedAutoIndexReference(HeapGraphEdge::kElement,
2964  info_entry->index(),
2965  wrapper_entry);
2966 }
2967 
2968 
2969 void NativeObjectsExplorer::SetRootNativeRootsReference() {
2970  for (HashMap::Entry* entry = native_groups_.Start();
2971  entry;
2972  entry = native_groups_.Next(entry)) {
2973  NativeGroupRetainedObjectInfo* group_info =
2974  static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
2975  HeapEntry* group_entry =
2976  filler_->FindOrAddEntry(group_info, native_entries_allocator_);
2977  ASSERT(group_entry != NULL);
2978  filler_->SetIndexedAutoIndexReference(
2979  HeapGraphEdge::kElement,
2980  snapshot_->root()->index(),
2981  group_entry);
2982  }
2983 }
2984 
2985 
2986 void NativeObjectsExplorer::VisitSubtreeWrapper(Object** p, uint16_t class_id) {
2987  if (in_groups_.Contains(*p)) return;
2988  Isolate* isolate = Isolate::Current();
2989  v8::RetainedObjectInfo* info =
2990  isolate->heap_profiler()->ExecuteWrapperClassCallback(class_id, p);
2991  if (info == NULL) return;
2992  GetListMaybeDisposeInfo(info)->Add(HeapObject::cast(*p));
2993 }
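
For reference, this is roughly how an embedder feeds NativeObjectsExplorer: it tags persistent handles with a wrapper class id and registers a callback that hands back a v8::RetainedObjectInfo for them, which is what VisitSubtreeWrapper above queries. A sketch assuming the public v8-profiler API of this release (HeapProfiler::DefineWrapperClass, Persistent::SetWrapperClassId); the class id 42 and EmbedderNodeInfo are made-up illustrations:

#include <v8.h>
#include <v8-profiler.h>

// Hypothetical embedder-side retained-object description.
class EmbedderNodeInfo : public v8::RetainedObjectInfo {
 public:
  virtual void Dispose() { delete this; }  // Profiler owns and disposes it.
  virtual bool IsEquivalent(RetainedObjectInfo* other) {
    return GetHash() == other->GetHash();
  }
  virtual intptr_t GetHash() { return 1; }
  virtual const char* GetLabel() { return "EmbedderNode"; }
};

// Called by the heap profiler for every handle carrying the class id.
static v8::RetainedObjectInfo* WrapperInfo(uint16_t /* class_id */,
                                           v8::Handle<v8::Value> /* wrapper */) {
  return new EmbedderNodeInfo();
}

void RegisterWrapper(v8::Persistent<v8::Object> handle) {
  v8::HeapProfiler::DefineWrapperClass(42, WrapperInfo);
  handle.SetWrapperClassId(42);  // Routes the handle into the native explorer.
}
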
2994 
2995 
2996 class SnapshotFiller : public SnapshotFillerInterface {
2997  public:
2998  explicit SnapshotFiller(HeapSnapshot* snapshot, HeapEntriesMap* entries)
2999  : snapshot_(snapshot),
3000  collection_(snapshot->collection()),
3001  entries_(entries) { }
3002  HeapEntry* AddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
3003  HeapEntry* entry = allocator->AllocateEntry(ptr);
3004  entries_->Pair(ptr, entry->index());
3005  return entry;
3006  }
3007  HeapEntry* FindEntry(HeapThing ptr) {
3008  int index = entries_->Map(ptr);
3009  return index != HeapEntry::kNoEntry ? &snapshot_->entries()[index] : NULL;
3010  }
3011  HeapEntry* FindOrAddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
3012  HeapEntry* entry = FindEntry(ptr);
3013  return entry != NULL ? entry : AddEntry(ptr, allocator);
3014  }
3015  void SetIndexedReference(HeapGraphEdge::Type type,
3016  int parent,
3017  int index,
3018  HeapEntry* child_entry) {
3019  HeapEntry* parent_entry = &snapshot_->entries()[parent];
3020  parent_entry->SetIndexedReference(type, index, child_entry);
3021  }
3022  void SetIndexedAutoIndexReference(HeapGraphEdge::Type type,
3023  int parent,
3024  HeapEntry* child_entry) {
3025  HeapEntry* parent_entry = &snapshot_->entries()[parent];
3026  int index = parent_entry->children_count() + 1;
3027  parent_entry->SetIndexedReference(type, index, child_entry);
3028  }
3029  void SetNamedReference(HeapGraphEdge::Type type,
3030  int parent,
3031  const char* reference_name,
3032  HeapEntry* child_entry) {
3033  HeapEntry* parent_entry = &snapshot_->entries()[parent];
3034  parent_entry->SetNamedReference(type, reference_name, child_entry);
3035  }
3036  void SetNamedAutoIndexReference(HeapGraphEdge::Type type,
3037  int parent,
3038  HeapEntry* child_entry) {
3039  HeapEntry* parent_entry = &snapshot_->entries()[parent];
3040  int index = parent_entry->children_count() + 1;
3041  parent_entry->SetNamedReference(
3042  type,
3043  collection_->names()->GetName(index),
3044  child_entry);
3045  }
3046 
3047  private:
3048  HeapSnapshot* snapshot_;
3049  HeapSnapshotsCollection* collection_;
3050  HeapEntriesMap* entries_;
3051 };
3052 
3053 
3054 HeapSnapshotGenerator::HeapSnapshotGenerator(HeapSnapshot* snapshot,
3055  v8::ActivityControl* control)
3056  : snapshot_(snapshot),
3057  control_(control),
3058  v8_heap_explorer_(snapshot_, this),
3059  dom_explorer_(snapshot_, this) {
3060 }
3061 
3062 
3063 bool HeapSnapshotGenerator::GenerateSnapshot() {
3064  v8_heap_explorer_.TagGlobalObjects();
3065 
3066  // TODO(1562) Profiler assumes that any object that is in the heap after
3067  // full GC is reachable from the root when computing dominators.
3068  // This is not true for weakly reachable objects.
3069  // As a temporary solution we call GC twice.
3070  Isolate::Current()->heap()->CollectAllGarbage(
3071  Heap::kMakeHeapIterableMask,
3072  "HeapSnapshotGenerator::GenerateSnapshot");
3073  Isolate::Current()->heap()->CollectAllGarbage(
3074  Heap::kMakeHeapIterableMask,
3075  "HeapSnapshotGenerator::GenerateSnapshot");
3076 
3077 #ifdef DEBUG
3078  Heap* debug_heap = Isolate::Current()->heap();
3079  ASSERT(!debug_heap->old_data_space()->was_swept_conservatively());
3080  ASSERT(!debug_heap->old_pointer_space()->was_swept_conservatively());
3081  ASSERT(!debug_heap->code_space()->was_swept_conservatively());
3082  ASSERT(!debug_heap->cell_space()->was_swept_conservatively());
3083  ASSERT(!debug_heap->map_space()->was_swept_conservatively());
3084 #endif
3085 
3086  // The following code uses heap iterators, so we want the heap to be
3087  // stable. It should follow TagGlobalObjects as that can allocate.
3088  AssertNoAllocation no_alloc;
3089 
3090 #ifdef DEBUG
3091  debug_heap->Verify();
3092 #endif
3093 
3094  SetProgressTotal(1); // 1 pass.
3095 
3096 #ifdef DEBUG
3097  debug_heap->Verify();
3098 #endif
3099 
3100  if (!FillReferences()) return false;
3101 
3102  snapshot_->FillChildren();
3103  snapshot_->RememberLastJSObjectId();
3104 
3105  progress_counter_ = progress_total_;
3106  if (!ProgressReport(true)) return false;
3107  return true;
3108 }
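
Embedders normally reach GenerateSnapshot() through the public profiler API, passing an optional v8::ActivityControl whose ReportProgressValue() is what ProgressReport() below consults. A sketch, assuming the v8::HeapProfiler::TakeSnapshot entry point of this API version:

#include <cstdio>
#include <v8.h>
#include <v8-profiler.h>

// Receives progress callbacks from HeapSnapshotGenerator::ProgressReport();
// returning kAbort would stop snapshot generation.
class ProgressLogger : public v8::ActivityControl {
 public:
  virtual ControlOption ReportProgressValue(int done, int total) {
    std::printf("heap snapshot: %d/%d\n", done, total);
    return kContinue;
  }
};

const v8::HeapSnapshot* TakeDemoSnapshot() {
  v8::HandleScope scope;
  ProgressLogger control;
  return v8::HeapProfiler::TakeSnapshot(v8::String::New("demo"),
                                        v8::HeapSnapshot::kFull,
                                        &control);
}
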
3109 
3110 
3111 void HeapSnapshotGenerator::ProgressStep() {
3112  ++progress_counter_;
3113 }
3114 
3115 
3116 bool HeapSnapshotGenerator::ProgressReport(bool force) {
3117  const int kProgressReportGranularity = 10000;
3118  if (control_ != NULL
3119  && (force || progress_counter_ % kProgressReportGranularity == 0)) {
3120  return
3121  control_->ReportProgressValue(progress_counter_, progress_total_) ==
3122  v8::ActivityControl::kContinue;
3123  }
3124  return true;
3125 }
3126 
3127 
3128 void HeapSnapshotGenerator::SetProgressTotal(int iterations_count) {
3129  if (control_ == NULL) return;
3130  HeapIterator iterator(HeapIterator::kFilterUnreachable);
3131  progress_total_ = iterations_count * (
3132  v8_heap_explorer_.EstimateObjectsCount(&iterator) +
3133  dom_explorer_.EstimateObjectsCount());
3134  progress_counter_ = 0;
3135 }
3136 
3137 
3138 bool HeapSnapshotGenerator::FillReferences() {
3139  SnapshotFiller filler(snapshot_, &entries_);
3140  v8_heap_explorer_.AddRootEntries(&filler);
3141  return v8_heap_explorer_.IterateAndExtractReferences(&filler)
3142  && dom_explorer_.IterateAndExtractReferences(&filler);
3143 }
3144 
3145 
3146 template<int bytes> struct MaxDecimalDigitsIn;
3147 template<> struct MaxDecimalDigitsIn<4> {
3148  static const int kSigned = 11;
3149  static const int kUnsigned = 10;
3150 };
3151 template<> struct MaxDecimalDigitsIn<8> {
3152  static const int kSigned = 20;
3153  static const int kUnsigned = 20;
3154 };
3155 
3156 
3157 class OutputStreamWriter {
3158  public:
3159  explicit OutputStreamWriter(v8::OutputStream* stream)
3160  : stream_(stream),
3161  chunk_size_(stream->GetChunkSize()),
3162  chunk_(chunk_size_),
3163  chunk_pos_(0),
3164  aborted_(false) {
3165  ASSERT(chunk_size_ > 0);
3166  }
3167  bool aborted() { return aborted_; }
3168  void AddCharacter(char c) {
3169  ASSERT(c != '\0');
3170  ASSERT(chunk_pos_ < chunk_size_);
3171  chunk_[chunk_pos_++] = c;
3172  MaybeWriteChunk();
3173  }
3174  void AddString(const char* s) {
3175  AddSubstring(s, StrLength(s));
3176  }
3177  void AddSubstring(const char* s, int n) {
3178  if (n <= 0) return;
3179  ASSERT(static_cast<size_t>(n) <= strlen(s));
3180  const char* s_end = s + n;
3181  while (s < s_end) {
3182  int s_chunk_size = Min(
3183  chunk_size_ - chunk_pos_, static_cast<int>(s_end - s));
3184  ASSERT(s_chunk_size > 0);
3185  memcpy(chunk_.start() + chunk_pos_, s, s_chunk_size);
3186  s += s_chunk_size;
3187  chunk_pos_ += s_chunk_size;
3188  MaybeWriteChunk();
3189  }
3190  }
3191  void AddNumber(unsigned n) { AddNumberImpl<unsigned>(n, "%u"); }
3192  void Finalize() {
3193  if (aborted_) return;
3194  ASSERT(chunk_pos_ < chunk_size_);
3195  if (chunk_pos_ != 0) {
3196  WriteChunk();
3197  }
3198  stream_->EndOfStream();
3199  }
3200 
3201  private:
3202  template<typename T>
3203  void AddNumberImpl(T n, const char* format) {
3204  // Buffer for the longest value plus trailing \0
3205  static const int kMaxNumberSize =
3206  MaxDecimalDigitsIn<sizeof(T)>::kSigned + 1; // NOLINT
3207  if (chunk_size_ - chunk_pos_ >= kMaxNumberSize) {
3208  int result = OS::SNPrintF(
3209  chunk_.SubVector(chunk_pos_, chunk_size_), format, n);
3210  ASSERT(result != -1);
3211  chunk_pos_ += result;
3212  MaybeWriteChunk();
3213  } else {
3214  EmbeddedVector<char, kMaxNumberSize> buffer;
3215  int result = OS::SNPrintF(buffer, format, n);
3216  USE(result);
3217  ASSERT(result != -1);
3218  AddString(buffer.start());
3219  }
3220  }
3221  void MaybeWriteChunk() {
3222  ASSERT(chunk_pos_ <= chunk_size_);
3223  if (chunk_pos_ == chunk_size_) {
3224  WriteChunk();
3225  }
3226  }
3227  void WriteChunk() {
3228  if (aborted_) return;
3229  if (stream_->WriteAsciiChunk(chunk_.start(), chunk_pos_) ==
3230  v8::OutputStream::kAbort) aborted_ = true;
3231  chunk_pos_ = 0;
3232  }
3233 
3234  v8::OutputStream* stream_;
3235  int chunk_size_;
3236  ScopedVector<char> chunk_;
3237  int chunk_pos_;
3238  bool aborted_;
3239 };
3240 
3241 
3242 // type, name|index, to_node.
3243 const int HeapSnapshotJSONSerializer::kEdgeFieldsCount = 3;
3244 // type, name, id, self_size, children_index.
3245 const int HeapSnapshotJSONSerializer::kNodeFieldsCount = 5;
3246 
3247 void HeapSnapshotJSONSerializer::Serialize(v8::OutputStream* stream) {
3248  ASSERT(writer_ == NULL);
3249  writer_ = new OutputStreamWriter(stream);
3250 
3251  HeapSnapshot* original_snapshot = NULL;
3252  if (snapshot_->RawSnapshotSize() >=
3253  SnapshotSizeConstants<kPointerSize>::kMaxSerializableSnapshotRawSize) {
3254  // The snapshot is too big. Serialize a fake snapshot.
3255  original_snapshot = snapshot_;
3256  snapshot_ = CreateFakeSnapshot();
3257  }
3258 
3259  SerializeImpl();
3260 
3261  delete writer_;
3262  writer_ = NULL;
3263 
3264  if (original_snapshot != NULL) {
3265  delete snapshot_;
3266  snapshot_ = original_snapshot;
3267  }
3268 }
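
The serializer only ever talks to a v8::OutputStream, so a caller that wants the JSON in memory can implement one that appends each chunk to a string. A sketch, assuming the public HeapSnapshot::Serialize(stream, kJSON) entry point that forwards to the serializer above; StringOutputStream is an illustrative name:

#include <string>
#include <v8.h>
#include <v8-profiler.h>

// Collects the ASCII chunks produced by OutputStreamWriter into a string.
class StringOutputStream : public v8::OutputStream {
 public:
  virtual void EndOfStream() {}
  virtual WriteResult WriteAsciiChunk(char* data, int size) {
    json_.append(data, size);
    return kContinue;  // kAbort would make the writer stop (aborted_ = true).
  }
  const std::string& json() const { return json_; }
 private:
  std::string json_;
};

void SerializeToString(const v8::HeapSnapshot* snapshot, std::string* out) {
  StringOutputStream stream;
  snapshot->Serialize(&stream, v8::HeapSnapshot::kJSON);
  *out = stream.json();
  // The result has the shape produced by SerializeImpl():
  // {"snapshot":{...},"nodes":[...],"edges":[...],"strings":[...]}
}
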
3269 
3270 
3271 HeapSnapshot* HeapSnapshotJSONSerializer::CreateFakeSnapshot() {
3272  HeapSnapshot* result = new HeapSnapshot(snapshot_->collection(),
3273  HeapSnapshot::kFull,
3274  snapshot_->title(),
3275  snapshot_->uid());
3276  result->AddRootEntry();
3277  const char* text = snapshot_->collection()->names()->GetFormatted(
3278  "The snapshot is too big. "
3279  "Maximum snapshot size is %" V8_PTR_PREFIX "u MB. "
3280  "Actual snapshot size is %" V8_PTR_PREFIX "u MB.",
3281  SnapshotSizeConstants<kPointerSize>::kMaxSerializableSnapshotRawSize / MB,
3282  (snapshot_->RawSnapshotSize() + MB - 1) / MB);
3283  HeapEntry* message = result->AddEntry(HeapEntry::kString, text, 0, 4);
3284  result->root()->SetIndexedReference(HeapGraphEdge::kElement, 1, message);
3285  result->FillChildren();
3286  return result;
3287 }
3288 
3289 
3290 void HeapSnapshotJSONSerializer::SerializeImpl() {
3291  ASSERT(0 == snapshot_->root()->index());
3292  writer_->AddCharacter('{');
3293  writer_->AddString("\"snapshot\":{");
3294  SerializeSnapshot();
3295  if (writer_->aborted()) return;
3296  writer_->AddString("},\n");
3297  writer_->AddString("\"nodes\":[");
3298  SerializeNodes();
3299  if (writer_->aborted()) return;
3300  writer_->AddString("],\n");
3301  writer_->AddString("\"edges\":[");
3302  SerializeEdges();
3303  if (writer_->aborted()) return;
3304  writer_->AddString("],\n");
3305  writer_->AddString("\"strings\":[");
3306  SerializeStrings();
3307  if (writer_->aborted()) return;
3308  writer_->AddCharacter(']');
3309  writer_->AddCharacter('}');
3310  writer_->Finalize();
3311 }
3312 
3313 
3314 int HeapSnapshotJSONSerializer::GetStringId(const char* s) {
3315  HashMap::Entry* cache_entry = strings_.Lookup(
3316  const_cast<char*>(s), ObjectHash(s), true);
3317  if (cache_entry->value == NULL) {
3318  cache_entry->value = reinterpret_cast<void*>(next_string_id_++);
3319  }
3320  return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
3321 }
3322 
3323 
3324 static int utoa(unsigned value, const Vector<char>& buffer, int buffer_pos) {
3325  int number_of_digits = 0;
3326  unsigned t = value;
3327  do {
3328  ++number_of_digits;
3329  } while (t /= 10);
3330 
3331  buffer_pos += number_of_digits;
3332  int result = buffer_pos;
3333  do {
3334  int last_digit = value % 10;
3335  buffer[--buffer_pos] = '0' + last_digit;
3336  value /= 10;
3337  } while (value);
3338  return result;
3339 }
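
utoa() above appends the decimal digits in place and returns the position just past the last digit; it never writes a terminator, which is why SerializeEdge() and SerializeNode() add the ',' and '\0' themselves. A standalone restatement for illustration (plain char buffer instead of Vector<char>):

#include <cassert>

// Same behavior as utoa(): write digits at buffer_pos, return new position.
static int utoa_demo(unsigned value, char* buffer, int buffer_pos) {
  int digits = 0;
  unsigned t = value;
  do { ++digits; } while (t /= 10);
  int result = buffer_pos + digits;
  int pos = result;
  do {
    buffer[--pos] = static_cast<char>('0' + value % 10);
    value /= 10;
  } while (value);
  return result;
}

int main() {
  char buffer[16];
  int pos = utoa_demo(305, buffer, 0);  // buffer now holds '3','0','5'
  buffer[pos++] = ',';                  // separators added by the callers
  pos = utoa_demo(0, buffer, pos);      // zero still emits one digit
  buffer[pos] = '\0';
  assert(pos == 5);                     // "305,0"
  return 0;
}
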
3340 
3341 
3342 void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge,
3343  bool first_edge) {
3344  // The buffer needs space for 3 unsigned ints, 3 commas and \0
3345  static const int kBufferSize =
3346  MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned * 3 + 3 + 1; // NOLINT
3347  EmbeddedVector<char, kBufferSize> buffer;
3348  int edge_name_or_index = edge->type() == HeapGraphEdge::kElement
3349  || edge->type() == HeapGraphEdge::kHidden
3350  || edge->type() == HeapGraphEdge::kWeak
3351  ? edge->index() : GetStringId(edge->name());
3352  int buffer_pos = 0;
3353  if (!first_edge) {
3354  buffer[buffer_pos++] = ',';
3355  }
3356  buffer_pos = utoa(edge->type(), buffer, buffer_pos);
3357  buffer[buffer_pos++] = ',';
3358  buffer_pos = utoa(edge_name_or_index, buffer, buffer_pos);
3359  buffer[buffer_pos++] = ',';
3360  buffer_pos = utoa(entry_index(edge->to()), buffer, buffer_pos);
3361  buffer[buffer_pos++] = '\0';
3362  writer_->AddString(buffer.start());
3363 }
3364 
3365 
3366 void HeapSnapshotJSONSerializer::SerializeEdges() {
3367  List<HeapGraphEdge*>& edges = snapshot_->children();
3368  for (int i = 0; i < edges.length(); ++i) {
3369  ASSERT(i == 0 ||
3370  edges[i - 1]->from()->index() <= edges[i]->from()->index());
3371  SerializeEdge(edges[i], i == 0);
3372  if (writer_->aborted()) return;
3373  }
3374 }
3375 
3376 
3377 void HeapSnapshotJSONSerializer::SerializeNode(HeapEntry* entry) {
3378  // The buffer needs space for 5 unsigned ints, 5 commas, \n and \0
3379  static const int kBufferSize =
3380  5 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned // NOLINT
3381  + 5 + 1 + 1;
3382  EmbeddedVector<char, kBufferSize> buffer;
3383  int buffer_pos = 0;
3384  if (entry_index(entry) != 0) {
3385  buffer[buffer_pos++] = ',';
3386  }
3387  buffer_pos = utoa(entry->type(), buffer, buffer_pos);
3388  buffer[buffer_pos++] = ',';
3389  buffer_pos = utoa(GetStringId(entry->name()), buffer, buffer_pos);
3390  buffer[buffer_pos++] = ',';
3391  buffer_pos = utoa(entry->id(), buffer, buffer_pos);
3392  buffer[buffer_pos++] = ',';
3393  buffer_pos = utoa(entry->self_size(), buffer, buffer_pos);
3394  buffer[buffer_pos++] = ',';
3395  buffer_pos = utoa(entry->children_count(), buffer, buffer_pos);
3396  buffer[buffer_pos++] = '\n';
3397  buffer[buffer_pos++] = '\0';
3398  writer_->AddString(buffer.start());
3399 }
3400 
3401 
3402 void HeapSnapshotJSONSerializer::SerializeNodes() {
3403  List<HeapEntry>& entries = snapshot_->entries();
3404  for (int i = 0; i < entries.length(); ++i) {
3405  SerializeNode(&entries[i]);
3406  if (writer_->aborted()) return;
3407  }
3408 }
3409 
3410 
3411 void HeapSnapshotJSONSerializer::SerializeSnapshot() {
3412  writer_->AddString("\"title\":\"");
3413  writer_->AddString(snapshot_->title());
3414  writer_->AddString("\"");
3415  writer_->AddString(",\"uid\":");
3416  writer_->AddNumber(snapshot_->uid());
3417  writer_->AddString(",\"meta\":");
3418  // The object describing node serialization layout.
3419  // We use a set of macros to improve readability.
3420 #define JSON_A(s) "[" s "]"
3421 #define JSON_O(s) "{" s "}"
3422 #define JSON_S(s) "\"" s "\""
3423  writer_->AddString(JSON_O(
3424  JSON_S("node_fields") ":" JSON_A(
3425  JSON_S("type") ","
3426  JSON_S("name") ","
3427  JSON_S("id") ","
3428  JSON_S("self_size") ","
3429  JSON_S("edge_count")) ","
3430  JSON_S("node_types") ":" JSON_A(
3431  JSON_A(
3432  JSON_S("hidden") ","
3433  JSON_S("array") ","
3434  JSON_S("string") ","
3435  JSON_S("object") ","
3436  JSON_S("code") ","
3437  JSON_S("closure") ","
3438  JSON_S("regexp") ","
3439  JSON_S("number") ","
3440  JSON_S("native") ","
3441  JSON_S("synthetic")) ","
3442  JSON_S("string") ","
3443  JSON_S("number") ","
3444  JSON_S("number") ","
3445  JSON_S("number") ","
3446  JSON_S("number") ","
3447  JSON_S("number")) ","
3448  JSON_S("edge_fields") ":" JSON_A(
3449  JSON_S("type") ","
3450  JSON_S("name_or_index") ","
3451  JSON_S("to_node")) ","
3452  JSON_S("edge_types") ":" JSON_A(
3453  JSON_A(
3454  JSON_S("context") ","
3455  JSON_S("element") ","
3456  JSON_S("property") ","
3457  JSON_S("internal") ","
3458  JSON_S("hidden") ","
3459  JSON_S("shortcut") ","
3460  JSON_S("weak")) ","
3461  JSON_S("string_or_number") ","
3462  JSON_S("node"))));
3463 #undef JSON_S
3464 #undef JSON_O
3465 #undef JSON_A
3466  writer_->AddString(",\"node_count\":");
3467  writer_->AddNumber(snapshot_->entries().length());
3468  writer_->AddString(",\"edge_count\":");
3469  writer_->AddNumber(snapshot_->edges().length());
3470 }
3471 
3472 
3473 static void WriteUChar(OutputStreamWriter* w, unibrow::uchar u) {
3474  static const char hex_chars[] = "0123456789ABCDEF";
3475  w->AddString("\\u");
3476  w->AddCharacter(hex_chars[(u >> 12) & 0xf]);
3477  w->AddCharacter(hex_chars[(u >> 8) & 0xf]);
3478  w->AddCharacter(hex_chars[(u >> 4) & 0xf]);
3479  w->AddCharacter(hex_chars[u & 0xf]);
3480 }
3481 
3482 void HeapSnapshotJSONSerializer::SerializeString(const unsigned char* s) {
3483  writer_->AddCharacter('\n');
3484  writer_->AddCharacter('\"');
3485  for ( ; *s != '\0'; ++s) {
3486  switch (*s) {
3487  case '\b':
3488  writer_->AddString("\\b");
3489  continue;
3490  case '\f':
3491  writer_->AddString("\\f");
3492  continue;
3493  case '\n':
3494  writer_->AddString("\\n");
3495  continue;
3496  case '\r':
3497  writer_->AddString("\\r");
3498  continue;
3499  case '\t':
3500  writer_->AddString("\\t");
3501  continue;
3502  case '\"':
3503  case '\\':
3504  writer_->AddCharacter('\\');
3505  writer_->AddCharacter(*s);
3506  continue;
3507  default:
3508  if (*s > 31 && *s < 128) {
3509  writer_->AddCharacter(*s);
3510  } else if (*s <= 31) {
3511  // Special character with no dedicated literal.
3512  WriteUChar(writer_, *s);
3513  } else {
3514  // Convert UTF-8 into \u UTF-16 literal.
3515  unsigned length = 1, cursor = 0;
3516  for ( ; length <= 4 && *(s + length) != '\0'; ++length) { }
3517  unibrow::uchar c = unibrow::Utf8::CalculateValue(s, length, &cursor);
3518  if (c != unibrow::Utf8::kBadChar) {
3519  WriteUChar(writer_, c);
3520  ASSERT(cursor != 0);
3521  s += cursor - 1;
3522  } else {
3523  writer_->AddCharacter('?');
3524  }
3525  }
3526  }
3527  }
3528  writer_->AddCharacter('\"');
3529 }
3530 
3531 
3532 void HeapSnapshotJSONSerializer::SerializeStrings() {
3533  List<HashMap::Entry*> sorted_strings;
3534  SortHashMap(&strings_, &sorted_strings);
3535  writer_->AddString("\"<dummy>\"");
3536  for (int i = 0; i < sorted_strings.length(); ++i) {
3537  writer_->AddCharacter(',');
3538  SerializeString(
3539  reinterpret_cast<const unsigned char*>(sorted_strings[i]->key));
3540  if (writer_->aborted()) return;
3541  }
3542 }
3543 
3544 
3545 template<typename T>
3546 inline static int SortUsingEntryValue(const T* x, const T* y) {
3547  uintptr_t x_uint = reinterpret_cast<uintptr_t>((*x)->value);
3548  uintptr_t y_uint = reinterpret_cast<uintptr_t>((*y)->value);
3549  if (x_uint > y_uint) {
3550  return 1;
3551  } else if (x_uint == y_uint) {
3552  return 0;
3553  } else {
3554  return -1;
3555  }
3556 }
3557 
3558 
3559 void HeapSnapshotJSONSerializer::SortHashMap(
3560  HashMap* map, List<HashMap::Entry*>* sorted_entries) {
3561  for (HashMap::Entry* p = map->Start(); p != NULL; p = map->Next(p))
3562  sorted_entries->Add(p);
3563  sorted_entries->Sort(SortUsingEntryValue);
3564 }
3565 
3566 } } // namespace v8::internal
void VisitPointers(Object **start, Object **end)
byte * Address
Definition: globals.h:172
void AddSubstring(const char *s, int n)
#define ROOT_NAME(type, name, camel_name)
void SetIndexedReference(HeapGraphEdge::Type type, int parent, int index, HeapEntry *child_entry)
STATIC_CHECK((kStringRepresentationMask|kStringEncodingMask)==Internals::kFullStringRepresentationMask)
virtual HeapEntry * AllocateEntry(HeapThing ptr)=0
static const int kDefaultCacheOffset
Definition: objects.h:6588
void Destroy(Object **location)
OutputStreamWriter(v8::OutputStream *stream)
SnapshotFiller(HeapSnapshot *snapshot, HeapEntriesMap *entries)
bool Find(const Key &key, Locator *locator)
static const int kCodeOffset
Definition: objects.h:5606
static Object *& Object_at(Address addr)
Definition: v8memory.h:75
static const SnapshotObjectId kGcRootsFirstSubrootId
void CallGlobalGCEpilogueCallback()
Definition: heap.h:1554
SnapshotObjectId FindOrAddEntry(Address addr, unsigned int size)
static const int kPrototypeOrInitialMapOffset
Definition: objects.h:5982
void RemoveSnapshot(HeapSnapshot *snapshot)
static const int kInheritsSecurityToken
static void MarkVisitedField(HeapObject *obj, int offset)
#define STRUCT_MAP_NAME(NAME, Name, name)
bool Insert(const Key &key, Locator *locator)
static const int kBuiltinsOffset
Definition: objects.h:6083
void BeforeTraversingChild(ProfileNode *parent, ProfileNode *child)
void Dispose()
Definition: v8.h:4065
Handle< HeapObject > FindHeapObjectById(SnapshotObjectId id)
#define SYMBOL_NAME(name, str)
virtual HeapEntry * AllocateEntry(HeapThing ptr)
void SetTickRatePerMs(double ticks_per_ms)
virtual intptr_t GetHash()=0
static int VSNPrintF(Vector< char > str, const char *format, va_list args)
uint32_t GetCallUid() const
bool was_swept_conservatively()
Definition: spaces.h:1574
static String * cast(Object *obj)
uint32_t HashSeed()
Definition: heap.h:1574
Isolate * isolate()
Definition: heap-inl.h:494
FindEntryById(SnapshotObjectId id)
GlobalHandlesExtractor(NativeObjectsExplorer *explorer)
void AfterChildTraversed(ProfileNode *, ProfileNode *child)
void BeforeTraversingChild(ProfileNode *, ProfileNode *)
ProfileGenerator(CpuProfilesCollection *profiles)
void VisitPointers(Object **start, Object **end)
value format" "after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false, "print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false, "print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false, "report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true, "garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true, "flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true, "use incremental marking") DEFINE_bool(incremental_marking_steps, true, "do incremental marking steps") DEFINE_bool(trace_incremental_marking, false, "trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true, "Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false, "Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true, "use inline caching") DEFINE_bool(native_code_counters, false, "generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false, "Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true, "Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false, "Never perform compaction on full GC-testing only") DEFINE_bool(compact_code_space, true, "Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true, "Flush inline caches prior to mark compact collection and" "flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0, "Default seed for initializing random generator" "(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true, "allows verbose printing") DEFINE_bool(allow_natives_syntax, false, "allow natives syntax") DEFINE_bool(trace_sim, false, "Trace simulator execution") DEFINE_bool(check_icache, false, "Check icache flushes in ARM and MIPS simulator") DEFINE_int(stop_sim_at, 0, "Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8, "Stack alingment in bytes in simulator(4 or 8, 8 is default)") DEFINE_bool(trace_exception, false, "print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false, "preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true, "randomize hashes to avoid predictable hash collisions" "(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0, "Fixed seed to use to hash property keys(0 means random)" "(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false, "activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true, "generate optimized regexp code") DEFINE_bool(testing_bool_flag, true, "testing_bool_flag") DEFINE_int(testing_int_flag, 13, "testing_int_flag") DEFINE_float(testing_float_flag, 2.5, "float-flag") DEFINE_string(testing_string_flag, "Hello, world!", "string-flag") DEFINE_int(testing_prng_seed, 42, "Seed used for threading test randomness") DEFINE_string(testing_serialization_file, "/tmp/serdes", "file in which to serialize heap") DEFINE_bool(help, false, "Print usage message, including flags, on console") DEFINE_bool(dump_counters, false, "Dump counters on exit") DEFINE_string(map_counters, "", "Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT, "Pass all remaining arguments to the script.Alias for\"--\".") DEFINE_bool(debug_compile_events, 
true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information 
(implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#43"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2#define FLAG_MODE_DEFINE_DEFAULTS#1"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flag-definitions.h"1#define FLAG_FULL(ftype, ctype, nam, def, cmt)#define FLAG_READONLY(ftype, ctype, nam, def, cmt)#define DEFINE_implication(whenflag, thenflag)#define DEFINE_bool(nam, def, cmt)#define DEFINE_int(nam, def, cmt)#define DEFINE_float(nam, def, cmt)#define DEFINE_string(nam, def, cmt)#define DEFINE_args(nam, def, cmt)#define FLAG DEFINE_bool(use_strict, false,"enforce strict mode") DEFINE_bool(es5_readonly, false,"activate correct semantics for inheriting readonliness") DEFINE_bool(es52_globals, false,"activate new semantics for global var declarations") DEFINE_bool(harmony_typeof, false,"enable harmony semantics for typeof") DEFINE_bool(harmony_scoping, false,"enable harmony block scoping") DEFINE_bool(harmony_modules, false,"enable harmony modules (implies block scoping)") DEFINE_bool(harmony_proxies, false,"enable harmony proxies") DEFINE_bool(harmony_collections, false,"enable harmony collections (sets, maps, and weak maps)") DEFINE_bool(harmony, false,"enable all harmony features (except typeof)") DEFINE_implication(harmony, harmony_scoping) DEFINE_implication(harmony, harmony_modules) DEFINE_implication(harmony, harmony_proxies) DEFINE_implication(harmony, harmony_collections) DEFINE_implication(harmony_modules, harmony_scoping) DEFINE_bool(packed_arrays, false,"optimizes arrays that have no holes") DEFINE_bool(smi_only_arrays, true,"tracks arrays with only smi values") DEFINE_bool(clever_optimizations, true,"Optimize object size, Array shift, DOM strings and string +") DEFINE_bool(unbox_double_arrays, true,"automatically unbox arrays of doubles") DEFINE_bool(string_slices, true,"use string slices") DEFINE_bool(crankshaft, true,"use crankshaft") DEFINE_string(hydrogen_filter,"","optimization filter") DEFINE_bool(use_range, true,"use hydrogen range analysis") DEFINE_bool(eliminate_dead_phis, true,"eliminate dead phis") DEFINE_bool(use_gvn, true,"use hydrogen global value numbering") DEFINE_bool(use_canonicalizing, true,"use hydrogen instruction canonicalizing") DEFINE_bool(use_inlining, true,"use function inlining") 
DEFINE_int(max_inlined_source_size, 600,"maximum source size in bytes considered for a single inlining") DEFINE_int(max_inlined_nodes, 196,"maximum number of AST nodes considered for a single inlining") DEFINE_int(max_inlined_nodes_cumulative, 196,"maximum cumulative number of AST nodes considered for inlining") DEFINE_bool(loop_invariant_code_motion, true,"loop invariant code motion") DEFINE_bool(collect_megamorphic_maps_from_stub_cache, true,"crankshaft harvests type feedback from stub cache") DEFINE_bool(hydrogen_stats, false,"print statistics for hydrogen") DEFINE_bool(trace_hydrogen, false,"trace generated hydrogen to file") DEFINE_string(trace_phase,"Z","trace generated IR for specified phases") DEFINE_bool(trace_inlining, false,"trace inlining decisions") DEFINE_bool(trace_alloc, false,"trace register allocator") DEFINE_bool(trace_all_uses, false,"trace all use positions") DEFINE_bool(trace_range, false,"trace range analysis") DEFINE_bool(trace_gvn, false,"trace global value numbering") DEFINE_bool(trace_representation, false,"trace representation types") DEFINE_bool(stress_pointer_maps, false,"pointer map for every instruction") DEFINE_bool(stress_environments, false,"environment for every instruction") DEFINE_int(deopt_every_n_times, 0,"deoptimize every n times a deopt point is passed") DEFINE_bool(trap_on_deopt, false,"put a break point before deoptimizing") DEFINE_bool(deoptimize_uncommon_cases, true,"deoptimize uncommon cases") DEFINE_bool(polymorphic_inlining, true,"polymorphic inlining") DEFINE_bool(use_osr, true,"use on-stack replacement") DEFINE_bool(array_bounds_checks_elimination, false,"perform array bounds checks elimination") DEFINE_bool(array_index_dehoisting, false,"perform array index dehoisting") DEFINE_bool(trace_osr, false,"trace on-stack replacement") DEFINE_int(stress_runs, 0,"number of stress runs") DEFINE_bool(optimize_closures, true,"optimize closures") DEFINE_bool(inline_construct, true,"inline constructor calls") DEFINE_bool(inline_arguments, true,"inline functions with arguments object") DEFINE_int(loop_weight, 1,"loop weight for representation inference") DEFINE_bool(optimize_for_in, true,"optimize functions containing for-in loops") DEFINE_bool(experimental_profiler, true,"enable all profiler experiments") DEFINE_bool(watch_ic_patching, false,"profiler considers IC stability") DEFINE_int(frame_count, 1,"number of stack frames inspected by the profiler") DEFINE_bool(self_optimization, false,"primitive functions trigger their own optimization") DEFINE_bool(direct_self_opt, false,"call recompile stub directly when self-optimizing") DEFINE_bool(retry_self_opt, false,"re-try self-optimization if it failed") DEFINE_bool(count_based_interrupts, false,"trigger profiler ticks based on counting instead of timing") DEFINE_bool(interrupt_at_exit, false,"insert an interrupt check at function exit") DEFINE_bool(weighted_back_edges, false,"weight back edges by jump distance for interrupt triggering") DEFINE_int(interrupt_budget, 5900,"execution budget before interrupt is triggered") DEFINE_int(type_info_threshold, 15,"percentage of ICs that must have type info to allow optimization") DEFINE_int(self_opt_count, 130,"call count before self-optimization") DEFINE_implication(experimental_profiler, watch_ic_patching) DEFINE_implication(experimental_profiler, self_optimization) DEFINE_implication(experimental_profiler, retry_self_opt) DEFINE_implication(experimental_profiler, count_based_interrupts) DEFINE_implication(experimental_profiler, interrupt_at_exit) 
DEFINE_implication(experimental_profiler, weighted_back_edges) DEFINE_bool(trace_opt_verbose, false,"extra verbose compilation tracing") DEFINE_implication(trace_opt_verbose, trace_opt) DEFINE_bool(debug_code, false,"generate extra code (assertions) for debugging") DEFINE_bool(code_comments, false,"emit comments in code disassembly") DEFINE_bool(enable_sse2, true,"enable use of SSE2 instructions if available") DEFINE_bool(enable_sse3, true,"enable use of SSE3 instructions if available") DEFINE_bool(enable_sse4_1, true,"enable use of SSE4.1 instructions if available") DEFINE_bool(enable_cmov, true,"enable use of CMOV instruction if available") DEFINE_bool(enable_rdtsc, true,"enable use of RDTSC instruction if available") DEFINE_bool(enable_sahf, true,"enable use of SAHF instruction if available (X64 only)") DEFINE_bool(enable_vfp3, true,"enable use of VFP3 instructions if available - this implies ""enabling ARMv7 instructions (ARM only)") DEFINE_bool(enable_armv7, true,"enable use of ARMv7 instructions if available (ARM only)") DEFINE_bool(enable_fpu, true,"enable use of MIPS FPU instructions if available (MIPS only)") DEFINE_string(expose_natives_as, NULL,"expose natives in global object") DEFINE_string(expose_debug_as, NULL,"expose debug in global object") DEFINE_bool(expose_gc, false,"expose gc extension") DEFINE_bool(expose_externalize_string, false,"expose externalize string extension") DEFINE_int(stack_trace_limit, 10,"number of stack frames to capture") DEFINE_bool(builtins_in_stack_traces, false,"show built-in functions in stack traces") DEFINE_bool(disable_native_files, false,"disable builtin natives files") DEFINE_bool(inline_new, true,"use fast inline allocation") DEFINE_bool(stack_trace_on_abort, true,"print a stack trace if an assertion failure occurs") DEFINE_bool(trace, false,"trace function calls") DEFINE_bool(mask_constants_with_cookie, true,"use random jit cookie to mask large constants") DEFINE_bool(lazy, true,"use lazy compilation") DEFINE_bool(trace_opt, false,"trace lazy optimization") DEFINE_bool(trace_opt_stats, false,"trace lazy optimization statistics") DEFINE_bool(opt, true,"use adaptive optimizations") DEFINE_bool(always_opt, false,"always try to optimize functions") DEFINE_bool(prepare_always_opt, false,"prepare for turning on always opt") DEFINE_bool(trace_deopt, false,"trace deoptimization") DEFINE_int(min_preparse_length, 1024,"minimum length for automatic enable preparsing") DEFINE_bool(always_full_compiler, false,"try to use the dedicated run-once backend for all code") DEFINE_bool(trace_bailout, false,"print reasons for falling back to using the classic V8 backend") DEFINE_bool(compilation_cache, true,"enable compilation cache") DEFINE_bool(cache_prototype_transitions, true,"cache prototype transitions") DEFINE_bool(trace_debug_json, false,"trace debugging JSON request/response") DEFINE_bool(debugger_auto_break, true,"automatically set the debug break flag when debugger commands are ""in the queue") DEFINE_bool(enable_liveedit, true,"enable liveedit experimental feature") DEFINE_bool(break_on_abort, true,"always cause a debug break before aborting") DEFINE_int(stack_size, kPointerSize *123,"default size of stack region v8 is allowed to use (in kBytes)") DEFINE_int(max_stack_trace_source_length, 300,"maximum length of function source code printed in a stack trace.") DEFINE_bool(always_inline_smi_code, false,"always inline smi code in non-opt code") DEFINE_int(max_new_space_size, 0,"max size of the new generation (in kBytes)") DEFINE_int(max_old_space_size, 
0,"max size of the old generation (in Mbytes)") DEFINE_int(max_executable_size, 0,"max size of executable memory (in Mbytes)") DEFINE_bool(gc_global, false,"always perform global GCs") DEFINE_int(gc_interval,-1,"garbage collect after <n> allocations") DEFINE_bool(trace_gc, false,"print one trace line following each garbage collection") DEFINE_bool(trace_gc_nvp, false,"print one detailed trace line in name=value format ""after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false,"print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false,"print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false,"report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true,"garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true,"flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true,"use incremental marking") DEFINE_bool(incremental_marking_steps, true,"do incremental marking steps") DEFINE_bool(trace_incremental_marking, false,"trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true,"Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false,"Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true,"use inline caching") DEFINE_bool(native_code_counters, false,"generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false,"Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true,"Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false,"Never perform compaction on full GC - testing only") DEFINE_bool(compact_code_space, true,"Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true,"Flush inline caches prior to mark compact collection and ""flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0,"Default seed for initializing random generator ""(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true,"allows verbose printing") DEFINE_bool(allow_natives_syntax, false,"allow natives syntax") DEFINE_bool(trace_sim, false,"Trace simulator execution") DEFINE_bool(check_icache, false,"Check icache flushes in ARM and MIPS simulator") DEFINE_int(stop_sim_at, 0,"Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8,"Stack alingment in bytes in simulator (4 or 8, 8 is default)") DEFINE_bool(trace_exception, false,"print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false,"preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true,"randomize hashes to avoid predictable hash collisions ""(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0,"Fixed seed to use to hash property keys (0 means random)""(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false,"activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true,"generate optimized regexp code") DEFINE_bool(testing_bool_flag, true,"testing_bool_flag") DEFINE_int(testing_int_flag, 13,"testing_int_flag") DEFINE_float(testing_float_flag, 2.5,"float-flag") DEFINE_string(testing_string_flag,"Hello, world!","string-flag") DEFINE_int(testing_prng_seed, 42,"Seed used for threading test randomness") DEFINE_string(testing_serialization_file,"/tmp/serdes","file 
in which to serialize heap") DEFINE_bool(help, false,"Print usage message, including flags, on console") DEFINE_bool(dump_counters, false,"Dump counters on exit") DEFINE_string(map_counters,"","Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT,"Pass all remaining arguments to the script. Alias for \"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") 
DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#47"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2 namespace{struct Flag{enum FlagType{TYPE_BOOL, TYPE_INT, TYPE_FLOAT, TYPE_STRING, TYPE_ARGS} name
Definition: flags.cc:1349
static SnapshotObjectId GetNthGcSubrootId(int delta)
static HeapObject * cast(Object *obj)
CpuProfile * GetProfile(int security_token_id, unsigned uid)
static const int kGlobalReceiverOffset
Definition: objects.h:6085
static const int kDeoptimizationDataOffset
Definition: objects.h:4497
static AccessorPair * cast(Object *obj)
NodesPair(ProfileNode *src, ProfileNode *dst)
IndexedReferencesExtractor(V8HeapExplorer *generator, HeapObject *parent_obj, int parent)
static Map * cast(Object *obj)
BasicHeapEntriesAllocator(HeapSnapshot *snapshot, HeapEntry::Type entries_type)
bool IterateAndExtractReferences(SnapshotFillerInterface *filler)
CodeEntry * NewCodeEntry(Logger::LogEventsAndTags tag, String *name, String *resource_name, int line_number)
void ForEach(Callback *callback)
void VisitPointers(Object **start, Object **end)
T & at(int i) const
Definition: list.h:85
Vector< T > SubVector(int from, int to)
Definition: utils.h:375
static const int kInstanceDescriptorsOrBitField3Offset
Definition: objects.h:4964
void FilteredClone(ProfileTree *src, int security_token_id)
TickSample * sample
virtual bool IsEquivalent(RetainedObjectInfo *other)
void SetNamedAutoIndexReference(HeapGraphEdge::Type type, int parent, HeapEntry *child_entry)
FlagType type_
Definition: flags.cc:1351
virtual void SetNamedAutoIndexReference(HeapGraphEdge::Type type, int parent_entry, HeapEntry *child_entry)=0
static const int kHandlerTableOffset
Definition: objects.h:4496
#define ASSERT(condition)
Definition: checks.h:270
void ClearWeakness(Object **location)
v8::Handle< v8::Value > Print(const v8::Arguments &args)
const char * GetFormatted(const char *format,...)
SnapshotObjectId PushHeapObjectsStats(OutputStream *stream)
static Script * cast(Object *obj)
unsigned short uint16_t
Definition: unicode.cc:46
static const int kDebugInfoOffset
Definition: objects.h:5614
static JSRegExp * cast(Object *obj)
static const int kDataOffset
Definition: objects.h:5144
#define STRONG_ROOT_LIST(V)
Definition: heap.h:49
static Context * cast(Object *context)
Definition: contexts.h:207
#define MAKE_STRUCT_CASE(NAME, Name, name)
static const int kInitialMapOffset
Definition: objects.h:5616
static SharedFunctionInfo * cast(Object *obj)
virtual HeapEntry * AddEntry(HeapThing ptr, HeapEntriesAllocator *allocator)=0
void SetTag(Object *obj, const char *tag)
#define CHECK(condition)
Definition: checks.h:56
static uchar CalculateValue(const byte *str, unsigned length, unsigned *cursor)
Definition: unicode.cc:210
Address stack[kMaxFramesCount]
Definition: platform.h:701
static const int kInstanceClassNameOffset
Definition: objects.h:5609
static const int kPrototypeTransitionsOrBackPointerOffset
Definition: objects.h:4968
void SetActualSamplingRate(double actual_sampling_rate)
bool IterateAndExtractReferences(SnapshotFillerInterface *filler)
Factory * factory()
Definition: isolate.h:977
static const int kGlobalContextOffset
Definition: objects.h:6084
virtual ControlOption ReportProgressValue(int done, int total)=0
void CallGlobalGCPrologueCallback()
Definition: heap.h:1550
static const int kContextOffset
Definition: objects.h:5986
static Code * cast(Object *obj)
HeapEntry * FindOrAddEntry(HeapThing ptr, HeapEntriesAllocator *allocator)
HeapEntry * AddEntry(HeapThing ptr, HeapEntriesAllocator *allocator)
void AfterChildTraversed(ProfileNode *, ProfileNode *)
void SetNamedReference(HeapGraphEdge::Type type, int parent, const char *reference_name, HeapEntry *child_entry)
static Object ** RawField(HeapObject *obj, int offset)
Definition: objects-inl.h:963
static Smi * cast(Object *object)
virtual void SetIndexedAutoIndexReference(HeapGraphEdge::Type type, int parent_entry, HeapEntry *child_entry)=0
int operator()(HeapEntry *const *entry)
static const SnapshotObjectId kGcRootsObjectId
static SnapshotObjectId GenerateId(v8::RetainedObjectInfo *info)
void SetIndexedAutoIndexReference(HeapGraphEdge::Type type, int parent, HeapEntry *child_entry)
SmartArrayPointer< char > ToCString(AllowNullsFlag allow_nulls, RobustnessFlag robustness_flag, int offset, int length, int *length_output=0)
Definition: objects.cc:6161
void BeforeTraversingChild(ProfileNode *, ProfileNode *)
T ** location() const
Definition: handles.h:75
const char * GetName(String *name)
static const int kLiteralsOffset
Definition: objects.h:5987
#define EXTRACT_CONTEXT_FIELD(index, type, name)
static const int kSourceOffset
Definition: objects.h:5140
#define UNREACHABLE()
Definition: checks.h:50
SnapshotObjectId last_assigned_id() const
virtual void SetNamedReference(HeapGraphEdge::Type type, int parent_entry, const char *reference_name, HeapEntry *child_entry)=0
T * start() const
Definition: utils.h:389
void AfterChildTraversed(ProfileNode *parent, ProfileNode *child)
T & last() const
Definition: list.h:86
CodeEntry * FindEntry(Address addr)
Position(ProfileNode *node)
static JSGlobalProxy * cast(Object *obj)
NativeObjectsExplorer(HeapSnapshot *snapshot, SnapshottingProgressReportingInterface *progress)
List< HeapEntry > & entries()
static const int kGCMetadataOffset
Definition: objects.h:4501
const intptr_t kFailureTagMask
Definition: v8globals.h:73
void RecordTickSample(const TickSample &sample)
const char * GetTag(Object *obj)
static SlicedString * cast(Object *obj)
int GetSharedId(Address addr)
static const int kScopeInfoOffset
Definition: objects.h:5607
virtual int GetChunkSize()
Definition: v8.h:3800
static String * GetConstructorName(JSObject *object)
Handle< Object > Create(Object *value)
HeapEntry * gc_subroot(int index)
virtual const char * GetLabel()=0
ProfileNode * FindChild(CodeEntry *entry)
HeapSnapshotsCollection * collection()
JSObject * global_proxy()
Definition: contexts.cc:78
const int kPointerSize
Definition: globals.h:234
uint32_t occupancy() const
Definition: hashmap.h:82
static HeapObject *const kInternalRootObject
const int kHeapObjectTag
Definition: v8.h:3848
T Remove(int i)
Definition: list-inl.h:116
GlobalHandles * global_handles()
Definition: isolate.h:865
virtual WriteResult WriteHeapStatsChunk(HeapStatsUpdate *data, int count)
Definition: v8.h:3814
Entry * Lookup(void *key, uint32_t hash, bool insert, AllocationPolicy allocator=AllocationPolicy())
Definition: hashmap.h:130
void MoveCode(Address from, Address to)
static const char *const kProgramEntryName
static const int kNameOffset
Definition: objects.h:5605
intptr_t AtomicWord
Definition: atomicops.h:72
#define JSON_A(s)
double TicksToMillis(unsigned ticks) const
int length() const
Definition: utils.h:383
OldSpace * old_pointer_space()
Definition: heap.h:500
static const int kPropertiesOffset
Definition: objects.h:2113
static const SnapshotObjectId kFirstAvailableObjectId
List< HeapGraphEdge > & edges()
static double TimeCurrentMillis()
HeapSnapshotGenerator(HeapSnapshot *snapshot, v8::ActivityControl *control)
#define SYMBOL_LIST(V)
Definition: heap.h:159
void IterateAllRoots(ObjectVisitor *v)
SnapshotObjectId FindEntry(Address addr)
OldSpace * code_space()
Definition: heap.h:502
static const int kMakeHeapIterableMask
Definition: heap.h:1056
bool IsSameAs(CodeEntry *entry) const
void AddPathFromEnd(const Vector< CodeEntry * > &path)
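The AddPath/AddPathFromEnd/FindOrAddChild entries describe a profile tree keyed by CodeEntry pointer. The following is a minimal standalone sketch of that shape in plain C++, not V8's internal classes: Entry and Node are stand-ins, nodes are intentionally never freed, and the leaf-first ordering of the path is an assumption suggested by the "FromEnd" naming.

#include <cstddef>
#include <map>
#include <string>
#include <vector>

// Stand-in for a CodeEntry: just a function name here.
struct Entry { std::string name; };

// One node per distinct call path; children are keyed by Entry*.
struct Node {
  explicit Node(Entry* e) : entry(e), self_ticks(0) {}
  Node* FindOrAddChild(Entry* e) {
    std::map<Entry*, Node*>::iterator it = children.find(e);
    if (it != children.end()) return it->second;
    Node* child = new Node(e);  // leaked on purpose in this sketch
    children[e] = child;
    return child;
  }
  Entry* entry;
  unsigned self_ticks;
  std::map<Entry*, Node*> children;
};

struct Tree {
  Tree() : root(new Node(NULL)) {}
  // Assumes the path arrives leaf-first (innermost frame at index 0),
  // so it is walked from the end to attach frames outermost-first.
  void AddPathFromEnd(const std::vector<Entry*>& path) {
    Node* node = root;
    for (size_t i = path.size(); i-- > 0; ) {
      if (path[i] != NULL) node = node->FindOrAddChild(path[i]);
    }
    ++node->self_ticks;  // attribute the tick to the innermost frame
  }
  Node* root;
};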
#define V8_PTR_PREFIX
Definition: globals.h:196
virtual void SetIndexedReference(HeapGraphEdge::Type type, int parent_entry, int index, HeapEntry *child_entry)=0
static const int kLineEndsOffset
Definition: objects.h:5151
V8HeapExplorer(HeapSnapshot *snapshot, SnapshottingProgressReportingInterface *progress)
static const int kElementsOffset
Definition: objects.h:2114
static Vector< T > New(int length)
Definition: utils.h:369
void RemoveProfile(CpuProfile *profile)
HeapEntry * FindEntry(HeapThing ptr)
static const int kTypeFeedbackInfoOffset
Definition: objects.h:4499
virtual void VisitPointers(Object **start, Object **end)
void IterateAllRootsWithClassIds(ObjectVisitor *v)
static const int kRelocationInfoOffset
Definition: objects.h:4495
static const int kNonWeakFieldsEndOffset
Definition: objects.h:5988
CpuProfile * FilteredClone(int security_token_id)
Vector< const char > CStrVector(const char *data)
Definition: utils.h:525
CellSpace * cell_space()
Definition: heap.h:504
int StrLength(const char *string)
Definition: utils.h:234
static int OffsetOfElementAt(int index)
Definition: objects.h:2291
static JSArray * cast(Object *obj)
static void Print(const char *format,...)
#define T(name, string, precedence)
Definition: token.cc:48
static const char *const kGarbageCollectorEntryName
INLINE(bool has_current_child())
void AddPathToCurrentProfiles(const Vector< CodeEntry * > &path)
HeapEntry * GetEntryById(SnapshotObjectId id)
virtual HeapEntry * FindOrAddEntry(HeapThing ptr, HeapEntriesAllocator *allocator)=0
void AddRootEntries(SnapshotFillerInterface *filler)
List< ObjectGroup * > * object_groups()
static int SNPrintF(Vector< char > str, const char *format,...)
void CopyData(const CodeEntry &source)
void UpdateMeasurements(double current_time)
static const unsigned kWallTimeQueryIntervalMs
static const int kMapOffset
Definition: objects.h:1219
#define JSON_S(s)
void AddPath(const Vector< CodeEntry * > &path)
static const int kFunctionDataOffset
Definition: objects.h:5611
void AddCode(Address addr, CodeEntry *entry, unsigned size)
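AddCode, MoveCode and FindEntry describe an address-range map from the start address of generated code to its entry. A standalone sketch of that lookup, using std::map in place of V8's internal structure (Address is modelled as uintptr_t and the entry payload is an opaque pointer; both are stand-ins):

#include <stdint.h>
#include <map>

typedef uintptr_t Address;

struct CodeInfo { const void* entry; unsigned size; };

class CodeRangeMap {
 public:
  void AddCode(Address addr, const void* entry, unsigned size) {
    CodeInfo info = { entry, size };
    map_[addr] = info;
  }
  // Re-key the entry when generated code is relocated.
  void MoveCode(Address from, Address to) {
    std::map<Address, CodeInfo>::iterator it = map_.find(from);
    if (it == map_.end()) return;
    CodeInfo info = it->second;
    map_.erase(it);
    map_[to] = info;
  }
  // Returns the entry whose [start, start + size) range covers addr, or NULL.
  const void* FindEntry(Address addr) const {
    std::map<Address, CodeInfo>::const_iterator it = map_.upper_bound(addr);
    if (it == map_.begin()) return NULL;
    --it;  // greatest start address <= addr
    return (addr < it->first + it->second.size) ? it->second.entry : NULL;
  }
 private:
  std::map<Address, CodeInfo> map_;
};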
Handle< String > NewStringFromAscii(Vector< const char > str, PretenureFlag pretenure=NOT_TENURED)
Definition: factory.cc:199
INLINE(void next_child())
static const int kNormalTypeCacheOffset
Definition: objects.h:6589
void Serialize(v8::OutputStream *stream)
void AddPathFromStart(const Vector< CodeEntry * > &path)
virtual WriteResult WriteAsciiChunk(char *data, int size)=0
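WriteAsciiChunk, EndOfStream and GetChunkSize are members of the public v8::OutputStream interface that snapshot serialization writes through. A minimal file-backed implementation might look like the sketch below; it assumes the v8.h shipped with this release, and how the stream is then passed to the profiler API is left out.

#include <stdio.h>
#include <v8.h>

// Appends serialized snapshot data to a stdio FILE*. Sketch only:
// error handling is limited to aborting serialization on a short write.
class FileOutputStream : public v8::OutputStream {
 public:
  explicit FileOutputStream(FILE* out) : out_(out) {}
  virtual void EndOfStream() { fflush(out_); }
  virtual int GetChunkSize() { return 64 * 1024; }  // preferred chunk size
  virtual WriteResult WriteAsciiChunk(char* data, int size) {
    size_t written = fwrite(data, 1, static_cast<size_t>(size), out_);
    return written == static_cast<size_t>(size) ? kContinue : kAbort;
  }
 private:
  FILE* out_;
};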
void IterateRoots(ObjectVisitor *v, VisitMode mode)
Definition: heap.cc:5630
HeapEntry * AddEntry(HeapEntry::Type type, const char *name, SnapshotObjectId id, int size)
void Sort(int(*cmp)(const T *x, const T *y))
Definition: list-inl.h:198
bool Remove(const Key &key)
virtual void Dispose()=0
static const char *const kAnonymousFunctionName
uint32_t ComputeIntegerHash(uint32_t key, uint32_t seed)
Definition: utils.h:285
GcSubrootsEnumerator(SnapshotFillerInterface *filler, V8HeapExplorer *explorer)
#define STRUCT_LIST(V)
Definition: objects.h:429
bool FindGreatestLessThan(const Key &key, Locator *locator)
INLINE(ProfileNode *current_child())
FilteredCloneCallback(ProfileNode *dst_root, int security_token_id)
static const SnapshotObjectId kInternalRootObjectId
virtual void VisitEmbedderReference(Object **p, uint16_t class_id)
static JSGlobalPropertyCell * cast(Object *obj)
List< HeapEntry * > * GetSortedEntriesList()
uint32_t SnapshotObjectId
Definition: v8-profiler.h:67
virtual HeapEntry * FindEntry(HeapThing ptr)=0
static const int kInferredNameOffset
Definition: objects.h:5615
static const int kThisPropertyAssignmentsOffset
Definition: objects.h:5618
void SnapshotGenerationFinished(HeapSnapshot *snapshot)
const char * GetCopy(const char *src)
virtual void Signal()=0
uint32_t HashSequentialString(const schar *chars, int length, uint32_t seed)
Definition: objects-inl.h:4733
void Pair(HeapThing thing, int entry)
T & first() const
Definition: list.h:87
HeapEntry * AddGcSubrootEntry(int tag)
void * Remove(void *key, uint32_t hash)
Definition: hashmap.h:160
TemplateHashMapImpl< FreeStoreAllocationPolicy > HashMap
Definition: hashmap.h:112
void AfterAllChildrenTraversed(ProfileNode *node)
SnapshotObjectId last_assigned_id() const
#define HEAP
Definition: isolate.h:1408
static const char *const kEmptyNamePrefix
MUST_USE_RESULT MaybeObject * GetProperty(String *key)
Definition: objects-inl.h:851
uint32_t capacity() const
Definition: hashmap.h:87
static const int kNameOffset
Definition: objects.h:5141
virtual intptr_t GetElementCount()
Definition: v8-profiler.h:550
InstanceType instance_type()
Definition: objects-inl.h:2864
static const uchar kBadChar
Definition: unicode.h:162
void USE(T)
Definition: globals.h:303
static const int kConstructorOffset
Definition: objects.h:4954
int SortedListBSearch(const List< T > &list, P cmp)
Definition: list-inl.h:223
static void StrNCpy(Vector< char > dest, const char *src, size_t n)
Handle< JSGlobalObject > & at(int i)
static FixedArray * cast(Object *obj)
HeapSnapshot * NewSnapshot(HeapSnapshot::Type type, const char *name, unsigned uid)
bool StartProfiling(const char *title, unsigned uid)
void FillReferences(V8HeapExplorer *explorer)
MapSpace * map_space()
Definition: heap.h:503
HeapSnapshot * GetSnapshot(unsigned uid)
static const int kBoundFunctionIndex
Definition: objects.h:5997
virtual void Wait()=0
const int kFailureTag
Definition: v8globals.h:71
void Add(const T &element, AllocationPolicy allocator=AllocationPolicy())
Definition: list-inl.h:38
static const int kScriptOffset
Definition: objects.h:5613
static const int kPrototypeOffset
Definition: objects.h:4953
void Synchronize(VisitorSynchronization::SyncTag tag)
static const int kSize
Definition: objects.h:5990
List< HeapGraphEdge * > & children()
SnapshotObjectId GetObjectId(Address object_addr, int object_size)
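GetObjectId, MoveObject and FindEntry suggest a map that hands out stable snapshot object ids for heap addresses and re-keys an entry when the GC moves the object, so the id survives the move. A standalone model follows; uintptr_t stands in for Address, unsigned for SnapshotObjectId, and the object-size parameter of the real signature is dropped for brevity.

#include <stdint.h>
#include <map>

typedef uintptr_t Address;
typedef unsigned SnapshotObjectId;

class ObjectIdMap {
 public:
  ObjectIdMap() : next_id_(1) {}
  // Returns the existing id for the address, or assigns a fresh one.
  SnapshotObjectId GetObjectId(Address addr) {
    std::map<Address, SnapshotObjectId>::iterator it = ids_.find(addr);
    if (it != ids_.end()) return it->second;
    SnapshotObjectId id = next_id_++;
    ids_[addr] = id;
    return id;
  }
  // Keeps the id stable when the GC relocates the object.
  void MoveObject(Address from, Address to) {
    std::map<Address, SnapshotObjectId>::iterator it = ids_.find(from);
    if (it == ids_.end()) return;
    SnapshotObjectId id = it->second;
    ids_.erase(it);
    ids_[to] = id;
  }
  // Returns 0 when the address has never been assigned an id.
  SnapshotObjectId FindEntry(Address addr) const {
    std::map<Address, SnapshotObjectId>::const_iterator it = ids_.find(addr);
    return it == ids_.end() ? 0 : it->second;
  }
 private:
  std::map<Address, SnapshotObjectId> ids_;
  SnapshotObjectId next_id_;
};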
virtual void VisitPointers(Object **start, Object **end)
const char * name_
Definition: flags.cc:1352
void Synchronize(VisitorSynchronization::SyncTag tag)
static const int kContextOffset
Definition: objects.h:5145
void MakeWeak(Object **location, void *parameter, WeakReferenceCallback callback)
int EstimateObjectsCount(HeapIterator *iterator)
virtual void EndOfStream()=0
const char * GetVFormatted(const char *format, va_list args)
CpuProfile * StopProfiling(int security_token_id, const char *title, double actual_sampling_rate)
static GlobalObject * cast(Object *obj)
static const int kBoundThisIndex
Definition: objects.h:5998
static const int kConstructStubOffset
Definition: objects.h:5608
void DeleteArray(T *array)
Definition: allocation.h:91
T Min(T a, T b)
Definition: utils.h:229
static const int kSharedFunctionInfoOffset
Definition: objects.h:5984
static ConsString * cast(Object *obj)
void AfterAllChildrenTraversed(ProfileNode *parent)
static const int kNoLineNumberInfo
Definition: v8-profiler.h:113
static CodeCache * cast(Object *obj)
virtual intptr_t GetSizeInBytes()
Definition: v8-profiler.h:553
virtual HeapEntry * AllocateEntry(HeapThing ptr)
const char * GetName(String *name)
void MoveObject(Address from, Address to)
static const int kCodeCacheOffset
Definition: objects.h:4966
ProfileNode * FindOrAddChild(CodeEntry *entry)
CpuProfile(const char *title, unsigned uid)
static const int kBoundArgumentsStartIndex
Definition: objects.h:5999
HeapSnapshot(HeapSnapshotsCollection *collection, Type type, const char *title, unsigned uid)
virtual const char * GetGroupLabel()
Definition: v8-profiler.h:544
static JSObject * cast(Object *obj)
FlagType type() const
Definition: flags.cc:1358
OldSpace * old_data_space()
Definition: heap.h:501
unsigned int uchar
Definition: unicode.h:40
Entry * Next(Entry *p) const
Definition: hashmap.h:241
static const char *const kTagNames[kNumberOfSyncTags]
Definition: objects.h:8665
v8::RetainedObjectInfo * info_
static v8::internal::Handle< v8::internal::TemplateInfo > OpenHandle(const Template *that)
List< CpuProfile * > * Profiles(int security_token_id)
#define GLOBAL_CONTEXT_FIELDS(V)
Definition: contexts.h:99
#define JSON_O(s)
static JSGlobalObject * cast(Object *obj)
const int MB
Definition: globals.h:222
static JSFunction * cast(Object *obj)