V8 3.14.5 (as bundled with Node.js 0.10.28)
V8 is Google's open source JavaScript engine
 All Data Structures Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
heap-profiler.cc
Go to the documentation of this file.
1 // Copyright 2009-2010 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #include "heap-profiler.h"
31 #include "profile-generator.h"
32 
33 namespace v8 {
34 namespace internal {
35 
36 HeapProfiler::HeapProfiler()
37  : snapshots_(new HeapSnapshotsCollection()),
38  next_snapshot_uid_(1) {
39 }
40 
41 
42 HeapProfiler::~HeapProfiler() {
43  delete snapshots_;
44 }
45 
46 
47 void HeapProfiler::ResetSnapshots() {
48  delete snapshots_;
49  snapshots_ = new HeapSnapshotsCollection();
50 }
51 
52 
54  Isolate* isolate = Isolate::Current();
55  if (isolate->heap_profiler() == NULL) {
56  isolate->set_heap_profiler(new HeapProfiler());
57  }
58 }
59 
60 
62  Isolate* isolate = Isolate::Current();
63  delete isolate->heap_profiler();
64  isolate->set_heap_profiler(NULL);
65 }
66 
67 
69  int type,
70  v8::ActivityControl* control) {
71  ASSERT(Isolate::Current()->heap_profiler() != NULL);
72  return Isolate::Current()->heap_profiler()->TakeSnapshotImpl(name,
73  type,
74  control);
75 }
76 
77 
79  int type,
80  v8::ActivityControl* control) {
81  ASSERT(Isolate::Current()->heap_profiler() != NULL);
82  return Isolate::Current()->heap_profiler()->TakeSnapshotImpl(name,
83  type,
84  control);
85 }
86 
87 
89  ASSERT(Isolate::Current()->heap_profiler() != NULL);
90  Isolate::Current()->heap_profiler()->StartHeapObjectsTrackingImpl();
91 }
92 
93 
95  ASSERT(Isolate::Current()->heap_profiler() != NULL);
96  Isolate::Current()->heap_profiler()->StopHeapObjectsTrackingImpl();
97 }
98 
99 
101  ASSERT(Isolate::Current()->heap_profiler() != NULL);
102  return Isolate::Current()->heap_profiler()->PushHeapObjectsStatsImpl(stream);
103 }
104 
105 
107  uint16_t class_id, v8::HeapProfiler::WrapperInfoCallback callback) {
109  if (wrapper_callbacks_.length() <= class_id) {
110  wrapper_callbacks_.AddBlock(
111  NULL, class_id - wrapper_callbacks_.length() + 1);
112  }
113  wrapper_callbacks_[class_id] = callback;
114 }
115 
116 
118  uint16_t class_id, Object** wrapper) {
119  if (wrapper_callbacks_.length() <= class_id) return NULL;
120  return wrapper_callbacks_[class_id](
121  class_id, Utils::ToLocal(Handle<Object>(wrapper)));
122 }
123 
124 
125 HeapSnapshot* HeapProfiler::TakeSnapshotImpl(const char* name,
126  int type,
127  v8::ActivityControl* control) {
128  HeapSnapshot::Type s_type = static_cast<HeapSnapshot::Type>(type);
129  HeapSnapshot* result =
130  snapshots_->NewSnapshot(s_type, name, next_snapshot_uid_++);
131  bool generation_completed = true;
132  switch (s_type) {
133  case HeapSnapshot::kFull: {
134  HeapSnapshotGenerator generator(result, control);
135  generation_completed = generator.GenerateSnapshot();
136  break;
137  }
138  default:
139  UNREACHABLE();
140  }
141  if (!generation_completed) {
142  delete result;
143  result = NULL;
144  }
145  snapshots_->SnapshotGenerationFinished(result);
146  return result;
147 }
148 
149 
150 HeapSnapshot* HeapProfiler::TakeSnapshotImpl(String* name,
151  int type,
152  v8::ActivityControl* control) {
153  return TakeSnapshotImpl(snapshots_->names()->GetName(name), type, control);
154 }
155 
156 void HeapProfiler::StartHeapObjectsTrackingImpl() {
157  snapshots_->StartHeapObjectsTracking();
158 }
159 
160 
161 SnapshotObjectId HeapProfiler::PushHeapObjectsStatsImpl(OutputStream* stream) {
162  return snapshots_->PushHeapObjectsStats(stream);
163 }
164 
165 
166 void HeapProfiler::StopHeapObjectsTrackingImpl() {
167  snapshots_->StopHeapObjectsTracking();
168 }
169 
170 
172  HeapProfiler* profiler = Isolate::Current()->heap_profiler();
173  ASSERT(profiler != NULL);
174  size_t size = profiler->snapshots_->GetUsedMemorySize();
175  return size;
176 }
177 
178 
180  HeapProfiler* profiler = Isolate::Current()->heap_profiler();
181  ASSERT(profiler != NULL);
182  return profiler->snapshots_->snapshots()->length();
183 }
184 
185 
187  HeapProfiler* profiler = Isolate::Current()->heap_profiler();
188  ASSERT(profiler != NULL);
189  return profiler->snapshots_->snapshots()->at(index);
190 }
191 
192 
194  HeapProfiler* profiler = Isolate::Current()->heap_profiler();
195  ASSERT(profiler != NULL);
196  return profiler->snapshots_->GetSnapshot(uid);
197 }
198 
199 
201  if (!obj->IsHeapObject())
203  HeapProfiler* profiler = Isolate::Current()->heap_profiler();
204  ASSERT(profiler != NULL);
205  return profiler->snapshots_->FindObjectId(HeapObject::cast(*obj)->address());
206 }
207 
208 
210  HeapProfiler* profiler = Isolate::Current()->heap_profiler();
211  ASSERT(profiler != NULL);
212  profiler->ResetSnapshots();
213 }
214 
215 
217  snapshots_->ObjectMoveEvent(from, to);
218 }
219 
220 
221 } } // namespace v8::internal
byte * Address
Definition: globals.h:157
static HeapSnapshot * GetSnapshot(int index)
static SnapshotObjectId PushHeapObjectsStats(OutputStream *stream)
static HeapObject * cast(Object *obj)
List< HeapSnapshot * > * snapshots()
#define ASSERT(condition)
Definition: checks.h:270
unsigned short uint16_t
Definition: unicode.cc:46
const char * GetName(String *name)
#define UNREACHABLE()
Definition: checks.h:50
static size_t GetMemorySizeUsedByProfiler()
static const uint16_t kPersistentHandleNoClassId
Definition: v8-profiler.h:462
SnapshotObjectId FindObjectId(Address object_addr)
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 enable use of VFP2 instructions if available enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of MIPS FPU instructions if expose natives in global object expose gc extension number of stack frames to capture disable builtin 
natives files print a stack trace if an assertion failure occurs use random jit cookie to mask large constants trace lazy optimization use adaptive optimizations prepare for turning on always opt minimum length for automatic enable preparsing maximum number of optimization attempts before giving up cache prototype transitions automatically set the debug break flag when debugger commands are in the queue always cause a debug break before aborting maximum length of function source code printed in a stack trace max size of the new max size of the old max size of executable always perform global GCs print one trace line following each garbage collection do not print trace line after scavenger collection print more details following each garbage collection print amount of external allocated memory after each time it is adjusted flush code that we expect not to use again before full gc do incremental marking steps track object counts and memory usage use caching Perform compaction on every full GC Never perform compaction on full GC testing only Compact code space on full incremental collections Default seed for initializing random generator(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer
static void DeleteAllSnapshots()
static SnapshotObjectId GetSnapshotObjectId(Handle< Object > obj)
static Local< Context > ToLocal(v8::internal::Handle< v8::internal::Context > obj)
SnapshotObjectId PushHeapObjectsStats(OutputStream *stream)
static void StopHeapObjectsTracking()
uint32_t SnapshotObjectId
Definition: v8-profiler.h:68
void ObjectMoveEvent(Address from, Address to)
void SnapshotGenerationFinished(HeapSnapshot *snapshot)
HeapSnapshot * NewSnapshot(HeapSnapshot::Type type, const char *name, unsigned uid)
HeapSnapshot * GetSnapshot(unsigned uid)
static HeapSnapshot * FindSnapshot(unsigned uid)
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 enable use of VFP2 instructions if available enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of MIPS FPU instructions if NULL
Definition: flags.cc:301
RetainedObjectInfo *(* WrapperInfoCallback)(uint16_t class_id, Handle< Value > wrapper)
Definition: v8-profiler.h:385
static void StartHeapObjectsTracking()
v8::RetainedObjectInfo * ExecuteWrapperClassCallback(uint16_t class_id, Object **wrapper)
static HeapSnapshot * TakeSnapshot(const char *name, int type, v8::ActivityControl *control)
void DefineWrapperClass(uint16_t class_id, v8::HeapProfiler::WrapperInfoCallback callback)
void ObjectMoveEvent(Address from, Address to)
static const SnapshotObjectId kUnknownObjectId
Definition: v8-profiler.h:407