#ifndef V8_PROFILE_GENERATOR_H_
#define V8_PROFILE_GENERATOR_H_

#include "../include/v8-profiler.h"
void TokenRemoved(Object** token_location);
const char* GetCopy(const char* src);

static const int kMaxNameSize = 1024;

INLINE(static bool StringsMatch(void* key1, void* key2)) {
  return strcmp(reinterpret_cast<char*>(key1),
                reinterpret_cast<char*>(key2)) == 0;
}

const char* AddOrDisposeString(char* str, uint32_t hash);
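The GetCopy / AddOrDisposeString / StringsMatch trio reads like a string-interning table keyed by C strings with strcmp equality. A minimal standalone sketch of that pattern, assuming only what the declarations above show (StringPool and its members are illustrative names, not the V8 StringsStorage implementation):

#include <cstring>
#include <unordered_map>

class StringPool {
 public:
  // Returns a canonical heap copy of |src|; identical strings share one copy.
  const char* GetCopy(const char* src) {
    auto it = table_.find(src);
    if (it != table_.end()) return it->second;
    size_t len = std::strlen(src);
    char* copy = new char[len + 1];
    std::memcpy(copy, src, len + 1);
    table_.emplace(copy, copy);  // key aliases the owned copy
    return copy;
  }
  ~StringPool() {
    for (auto& kv : table_) delete[] kv.second;
  }

 private:
  struct CStrEqual {  // same test as StringsMatch above
    bool operator()(const char* a, const char* b) const {
      return std::strcmp(a, b) == 0;
    }
  };
  struct CStrHash {
    size_t operator()(const char* s) const {
      size_t h = 5381;
      while (*s) h = h * 33 + static_cast<unsigned char>(*s++);
      return h;
    }
  };
  std::unordered_map<const char*, char*, CStrHash, CStrEqual> table_;
};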
INLINE(CodeEntry(Logger::LogEventsAndTags tag,
                 const char* name_prefix,
                 const char* name,
                 const char* resource_name,
                 int line_number,
                 int security_token_id));
INLINE(bool is_js_function() const) { return is_js_function_tag(tag_); }
INLINE(const char* name_prefix() const) { return name_prefix_; }
INLINE(bool has_name_prefix() const) { return name_prefix_[0] != '\0'; }
INLINE(const char* name() const) { return name_; }
INLINE(const char* resource_name() const) { return resource_name_; }
INLINE(int line_number() const) { return line_number_; }
INLINE(int shared_id() const) { return shared_id_; }
INLINE(void set_shared_id(int shared_id)) { shared_id_ = shared_id; }
INLINE(int security_token_id() const) { return security_token_id_; }

const char* name_prefix_;
const char* resource_name_;
int security_token_id_;
INLINE(void IncrementSelfTicks()) { ++self_ticks_; }
INLINE(void IncreaseSelfTicks(unsigned amount)) { self_ticks_ += amount; }
INLINE(void IncreaseTotalTicks(unsigned amount)) { total_ticks_ += amount; }

INLINE(unsigned self_ticks() const) { return self_ticks_; }
INLINE(unsigned total_ticks() const) { return total_ticks_; }

void Print(int indent);

INLINE(static bool CodeEntriesMatch(void* entry1, void* entry2)) {
  return reinterpret_cast<CodeEntry*>(entry1)->IsSameAs(
      reinterpret_cast<CodeEntry*>(entry2));
}

INLINE(static uint32_t CodeEntryHash(CodeEntry* entry)) {
  return entry->GetCallUid();
}

unsigned total_ticks_;
unsigned self_ticks_;
List<ProfileNode*> children_list_;

DISALLOW_COPY_AND_ASSIGN(ProfileNode);
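IncrementSelfTicks / IncreaseTotalTicks, together with AddPath, FindOrAddChild and CalculateTotalTicks listed later, suggest the usual call-tree bookkeeping: a sampled stack adds a self tick to its innermost frame, and totals are rolled up over the tree afterwards. A standalone sketch of that accounting, with simplified stand-in types rather than the ProfileNode/ProfileTree API:

#include <map>
#include <string>
#include <vector>

// Simplified call-tree node mirroring the self/total counters declared above.
struct Node {
  unsigned self_ticks = 0;
  unsigned total_ticks = 0;
  std::map<std::string, Node> children;
};

// Record one sample: |path| lists frames from outermost to innermost.
void AddPath(Node* root, const std::vector<std::string>& path) {
  Node* node = root;
  for (const std::string& name : path) {
    node = &node->children[name];  // FindOrAddChild analogue
  }
  node->self_ticks++;              // only the innermost frame gets a self tick
}

// CalculateTotalTicks analogue: a node's total is its self ticks plus the
// totals of all of its children.
unsigned CalculateTotalTicks(Node* node) {
  node->total_ticks = node->self_ticks;
  for (auto& child : node->children) {
    node->total_ticks += CalculateTotalTicks(&child.second);
  }
  return node->total_ticks;
}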
double TicksToMillis(unsigned ticks) const {
  return ticks * ms_to_ticks_scale_;
}

template <typename Callback>
void TraverseDepthFirst(Callback* callback);

double ms_to_ticks_scale_;
CpuProfile(const char* title, unsigned uid)
    : title_(title), uid_(uid) { }

INLINE(const char* title() const) { return title_; }
INLINE(unsigned uid() const) { return uid_; }
struct CodeEntryInfo {
  CodeEntryInfo(CodeEntry* an_entry, unsigned a_size)
      : entry(an_entry), size(a_size) { }
  CodeEntry* entry;
  unsigned size;
};

struct CodeTreeConfig {
  typedef Address Key;
  typedef CodeEntryInfo Value;
  static const Key kNoKey;
  static const Value NoValue() { return CodeEntryInfo(NULL, 0); }
  static int Compare(const Key& a, const Key& b) {
    return a < b ? -1 : (a > b ? 1 : 0);
  }
};
typedef SplayTree<CodeTreeConfig> CodeTree;
class CodeTreePrinter {
 public:
  void Call(const Address& key, const CodeEntryInfo& value);
};

static CodeEntry* const kSharedFunctionCodeEntry;

DISALLOW_COPY_AND_ASSIGN(CodeMap);
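CodeMap keys a splay tree by code start address and stores a CodeEntryInfo {entry, size}, so FindEntry(addr) presumably returns the entry whose address range covers addr. A standalone sketch of that range lookup using std::map in place of SplayTree<CodeTreeConfig> (SimpleCodeMap and CodeInfo are illustrative stand-ins, not the V8 classes):

#include <cstdint>
#include <map>

struct CodeInfo { const char* name; unsigned size; };

class SimpleCodeMap {
 public:
  void AddCode(uintptr_t addr, const char* name, unsigned size) {
    map_[addr] = CodeInfo{name, size};
  }
  void MoveCode(uintptr_t from, uintptr_t to) {
    auto it = map_.find(from);
    if (it == map_.end()) return;
    CodeInfo info = it->second;
    map_.erase(it);
    map_[to] = info;
  }
  // Returns the entry whose [start, start + size) range covers |addr|.
  const char* FindEntry(uintptr_t addr) const {
    auto it = map_.upper_bound(addr);        // first start > addr
    if (it == map_.begin()) return nullptr;
    --it;                                    // candidate start <= addr
    if (addr < it->first + it->second.size) return it->second.name;
    return nullptr;
  }

 private:
  std::map<uintptr_t, CodeInfo> map_;
};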
CpuProfile* StopProfiling(int security_token_id,
                          const char* title,
                          double actual_sampling_rate);

const char* GetName(String* name) {
  return function_and_resource_names_.GetName(name);
}
const char* GetName(int args_count) {
  return function_and_resource_names_.GetName(args_count);
}

CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag,
                        const char* name_prefix,
                        String* name);

const char* GetFunctionName(String* name) {
  return function_and_resource_names_.GetFunctionName(name);
}
const char* GetFunctionName(const char* name) {
  return function_and_resource_names_.GetFunctionName(name);
}

int GetProfileIndex(unsigned uid);
List<CpuProfile*>* GetProfilesList(int security_token_id);
int TokenToIndex(int security_token_id);

INLINE(static bool UidsMatch(void* key1, void* key2)) {
  return key1 == key2;
}
StringsStorage function_and_resource_names_;
List<CodeEntry*> code_entries_;
List<List<CpuProfile*>* > profiles_by_token_;
List<CpuProfile*> detached_profiles_;
List<CpuProfile*> current_profiles_;
Semaphore* current_profiles_semaphore_;
SampleRateCalculator()
    : result_(Logger::kSamplingIntervalMs * kResultScale),
      ticks_per_ms_(Logger::kSamplingIntervalMs),
      measurements_count_(0),
      wall_time_query_countdown_(1) {
}

double ticks_per_ms() {
  return result_ / static_cast<double>(kResultScale);
}
static const int kResultScale = 100000;

double ticks_per_ms_;
unsigned measurements_count_;
unsigned wall_time_query_countdown_;
double last_wall_time_;
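kResultScale and the result_ / ticks_per_ms_ pair suggest that the calculator keeps a ticks-per-millisecond estimate scaled by 100000, presumably so it can be stored in an integer word, and refines it as wall-time measurements arrive (see UpdateMeasurements and kWallTimeQueryIntervalMs later). A sketch of one plausible incremental-mean update under that reading; RateEstimate is illustrative, not the real class:

#include <cstdint>

struct RateEstimate {
  static constexpr unsigned kResultScale = 100000;
  int64_t result = 0;          // ticks-per-ms scaled by kResultScale
  double ticks_per_ms = 1.0;
  unsigned measurements = 0;

  // |measured| is one new ticks-per-ms observation taken against wall time.
  void Update(double measured) {
    ++measurements;
    // Incremental mean: avg += (x - avg) / n.
    ticks_per_ms += (measured - ticks_per_ms) / measurements;
    result = static_cast<int64_t>(ticks_per_ms * kResultScale);
  }
  double Current() const {
    return result / static_cast<double>(kResultScale);
  }
};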
INLINE(CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag,
                               String* name,
                               String* resource_name,
                               int line_number)) {
  return profiles_->NewCodeEntry(tag, name, resource_name, line_number);
}

INLINE(CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag,
                               const char* name_prefix,
                               String* name)) {
  return profiles_->NewCodeEntry(tag, name_prefix, name);
}
HeapGraphEdge(Type type, const char* name, int from, int to);

ASSERT(type_ == kElement || type_ == kHidden || type_ == kWeak);

ASSERT(type_ == kContextVariable
       || type_ == kProperty
       || type_ == kInternal
       || type_ == kShortcut);

INLINE(HeapEntry* from() const);
HeapEntry* to() const { return to_entry_; }

int from_index_ : 29;
const char* name() { return name_; }

INLINE(int index() const);
INLINE(int set_children_index(int index));

void add_child(HeapGraphEdge* edge) {
  children_arr()[children_count_++] = edge;
}
void SetIndexedReference(
    HeapGraphEdge::Type type, int index, HeapEntry* entry);
void SetNamedReference(
    HeapGraphEdge::Type type, const char* name, HeapEntry* entry);

void Print(
    const char* prefix, const char* edge_name, int max_depth, int indent);
const char* TypeAsString();

int children_count_: 28;
class HeapSnapshotsCollection;

const char* title() { return title_; }
unsigned uid() { return uid_; }
HeapEntry* root() { return &entries_[root_index_]; }
HeapEntry* gc_roots() { return &entries_[gc_roots_index_]; }
HeapEntry* gc_subroot(int index) {
  return &entries_[gc_subroot_indexes_[index]];
}
SnapshotObjectId max_snapshot_js_object_id() const {
  return max_snapshot_js_object_id_;
}

void Print(int max_depth);

int natives_root_index_;
EntryInfo(SnapshotObjectId id, Address addr, unsigned int size)
    : id(id), addr(addr), size(size), accessed(true) { }
EntryInfo(SnapshotObjectId id, Address addr, unsigned int size, bool accessed)
    : id(id), addr(addr), size(size), accessed(accessed) { }

struct TimeInterval {

void UpdateHeapObjectsMap();
void RemoveDeadEntries();

static bool AddressesMatch(void* key1, void* key2) {
  return key1 == key2;
}

static uint32_t AddressHash(Address addr) {
  return ComputeIntegerHash(
      static_cast<uint32_t>(reinterpret_cast<uintptr_t>(addr)),
      v8::internal::kZeroHashSeed);
}

List<EntryInfo> entries_;
List<TimeInterval> time_intervals_;
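EntryInfo ties a SnapshotObjectId to a live address, and FindOrAddEntry / MoveObject (listed later) imply the map keeps ids stable while the GC moves objects. A standalone sketch of that address-to-id bookkeeping (ObjectIdMap is illustrative; the step value of 2 is an assumption, the header only names kObjectIdStep without its value):

#include <cstdint>
#include <unordered_map>

class ObjectIdMap {
 public:
  // Returns the existing id for |addr|, or assigns a fresh one.
  uint32_t FindOrAddEntry(uintptr_t addr) {
    auto it = ids_.find(addr);
    if (it != ids_.end()) return it->second;
    uint32_t id = next_id_;
    next_id_ += kIdStep;
    ids_[addr] = id;
    return id;
  }
  // Keep the id stable when the GC moves the object.
  void MoveObject(uintptr_t from, uintptr_t to) {
    auto it = ids_.find(from);
    if (it == ids_.end()) return;
    uint32_t id = it->second;
    ids_.erase(it);
    ids_[to] = id;
  }

 private:
  static constexpr uint32_t kIdStep = 2;  // assumed; not shown in the header
  uint32_t next_id_ = kIdStep;
  std::unordered_map<uintptr_t, uint32_t> ids_;
};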
INLINE(static bool HeapSnapshotsMatch(void* key1, void* key2)) {
  return key1 == key2;
}

bool is_tracking_objects_;
List<HeapSnapshot*> snapshots_;
StringsStorage names_;
TokenEnumerator* token_enumerator_;
static uint32_t Hash(HeapThing thing) {
  return ComputeIntegerHash(
      static_cast<uint32_t>(reinterpret_cast<uintptr_t>(thing)),
      v8::internal::kZeroHashSeed);
}
virtual void SetIndexedReference(HeapGraphEdge::Type type,
                                 int parent_entry,
                                 int index,
                                 HeapEntry* child_entry) = 0;
virtual void SetIndexedAutoIndexReference(HeapGraphEdge::Type type,
                                          int parent_entry,
                                          HeapEntry* child_entry) = 0;
virtual void SetNamedReference(HeapGraphEdge::Type type,
                               int parent_entry,
                               const char* reference_name,
                               HeapEntry* child_entry) = 0;
virtual void SetNamedAutoIndexReference(HeapGraphEdge::Type type,
                                        int parent_entry,
                                        HeapEntry* child_entry) = 0;
HeapEntry* AddEntry(HeapObject* object,
                    HeapEntry::Type type,
                    const char* name);
const char* GetSystemEntryName(HeapObject* object);

void ExtractJSObjectReferences(int entry, JSObject* js_obj);
void ExtractStringReferences(int entry, String* obj);
void ExtractContextReferences(int entry, Context* context);
void ExtractMapReferences(int entry, Map* map);
void ExtractSharedFunctionInfoReferences(int entry,
                                         SharedFunctionInfo* shared);
void ExtractScriptReferences(int entry, Script* script);
void ExtractCodeCacheReferences(int entry, CodeCache* code_cache);
void ExtractCodeReferences(int entry, Code* code);
void ExtractJSGlobalPropertyCellReferences(int entry,
                                           JSGlobalPropertyCell* cell);
void ExtractClosureReferences(JSObject* js_obj, int entry);
void ExtractPropertyReferences(JSObject* js_obj, int entry);
void ExtractElementReferences(JSObject* js_obj, int entry);
void ExtractInternalReferences(JSObject* js_obj, int entry);
bool IsEssentialObject(Object* object);
void SetClosureReference(HeapObject* parent_obj,
                         int parent,
                         String* reference_name,
                         Object* child);
void SetNativeBindReference(HeapObject* parent_obj,
                            int parent,
                            const char* reference_name,
                            Object* child);
void SetElementReference(HeapObject* parent_obj,
                         int parent,
                         int index,
                         Object* child);
void SetInternalReference(HeapObject* parent_obj,
                          int parent,
                          const char* reference_name,
                          Object* child,
                          int field_offset = -1);
void SetInternalReference(HeapObject* parent_obj,
                          int parent,
                          int index,
                          Object* child,
                          int field_offset = -1);
void SetHiddenReference(HeapObject* parent_obj,
                        int parent,
                        int index,
                        Object* child);
void SetPropertyReference(HeapObject* parent_obj,
                          int parent,
                          String* reference_name,
                          Object* child,
                          const char* name_format_string = NULL,
                          int field_offset = -1);
void SetUserGlobalReference(Object* user_global);
void SetRootGcRootsReference();
void SetGcSubrootReference(
    VisitorSynchronization::SyncTag tag, bool is_weak, Object* child);
const char* GetStrongGcSubrootName(Object* object);
void TagObject(Object* obj, const char* tag);

HeapEntry* GetEntry(Object* obj);

static inline HeapObject* GetNthGcSubrootObject(int delta);
static inline int GetGcSubrootOrder(HeapObject* subroot);

static HeapObject* const kFirstGcSubrootObject;
static HeapObject* const kLastGcSubrootObject;
void FillRetainedObjects();
void FillImplicitReferences();
void SetRootNativeRootsReference();
void SetWrapperNativeReferences(HeapObject* wrapper,
                                v8::RetainedObjectInfo* info);
static uint32_t InfoHash(v8::RetainedObjectInfo* info) {
  return ComputeIntegerHash(static_cast<uint32_t>(info->GetHash()),
                            v8::internal::kZeroHashSeed);
}
static bool RetainedInfosMatch(void* key1, void* key2) {
  return key1 == key2 ||
      (reinterpret_cast<v8::RetainedObjectInfo*>(key1))->IsEquivalent(
          reinterpret_cast<v8::RetainedObjectInfo*>(key2));
}
INLINE(static bool StringsMatch(void* key1, void* key2)) {
  return strcmp(reinterpret_cast<char*>(key1),
                reinterpret_cast<char*>(key2)) == 0;
}

bool embedder_queried_;

static HeapThing const kNativesRootObject;
bool FillReferences();
void ProgressStep();
bool ProgressReport(bool force = false);
void SetProgressTotal(int iterations_count);

int progress_counter_;
int progress_total_;
HeapSnapshotJSONSerializer(HeapSnapshot* snapshot)
    : snapshot_(snapshot),
      strings_(ObjectsMatch),

INLINE(static bool ObjectsMatch(void* key1, void* key2)) {
  return key1 == key2;
}

INLINE(static uint32_t ObjectHash(const void* key)) {
  return ComputeIntegerHash(
      static_cast<uint32_t>(reinterpret_cast<uintptr_t>(key)),
      v8::internal::kZeroHashSeed);
}
HeapSnapshot* CreateFakeSnapshot();
int GetStringId(const char* s);
int entry_index(HeapEntry* e) { return e->index() * kNodeFieldsCount; }
void SerializeEdge(HeapGraphEdge* edge, bool first_edge);
void SerializeEdges();
void SerializeImpl();
void SerializeNode(HeapEntry* entry);
void SerializeNodes();
void SerializeSnapshot();
void SerializeString(const unsigned char* s);
void SerializeStrings();
void SortHashMap(HashMap* map, List<HashMap::Entry*>* sorted_entries);

static const int kEdgeFieldsCount;
static const int kNodeFieldsCount;

HeapSnapshot* snapshot_;
int next_string_id_;
OutputStreamWriter* writer_;
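entry_index() multiplying a node's index by kNodeFieldsCount is the telltale of a flat-array JSON encoding: each node is emitted as a fixed number of consecutive fields, and edges address their target by that field offset rather than by ordinal. A standalone sketch of that encoding under an invented three-field node layout (the real field set and the values of kNodeFieldsCount/kEdgeFieldsCount are not shown here):

#include <cstdio>
#include <vector>

// Invented layout for illustration: {name_id, self_size, edge_count} per node.
static const int kNodeFields = 3;

struct FlatNode {
  int name_id;
  int self_size;
  std::vector<int> edges_to;  // target node ordinals
};

void SerializeGraph(const std::vector<FlatNode>& nodes) {
  std::printf("\"nodes\":[");
  for (size_t i = 0; i < nodes.size(); ++i) {
    std::printf("%s%d,%d,%zu", i ? "," : "", nodes[i].name_id,
                nodes[i].self_size, nodes[i].edges_to.size());
  }
  std::printf("],\"edges\":[");
  bool first = true;
  for (const FlatNode& node : nodes) {
    for (int to : node.edges_to) {
      // As in entry_index() above: the target is encoded as to * kNodeFields,
      // i.e. the position of the target node's first field in the flat array.
      std::printf("%s%d", first ? "" : ",", to * kNodeFields);
      first = false;
    }
  }
  std::printf("]\n");
}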
#endif  // V8_PROFILE_GENERATOR_H_
virtual bool ProgressReport(bool force)=0
virtual HeapEntry * AllocateEntry(HeapThing ptr)=0
size_t GetUsedMemorySize() const
virtual ~V8HeapExplorer()
bool IsLastProfile(const char *title)
size_t GetUsedMemorySize() const
static const SnapshotObjectId kGcRootsFirstSubrootId
SnapshotObjectId FindOrAddEntry(Address addr, unsigned int size)
void RemoveSnapshot(HeapSnapshot *snapshot)
static const int kInheritsSecurityToken
Handle< HeapObject > FindHeapObjectById(SnapshotObjectId id)
virtual HeapEntry * AllocateEntry(HeapThing ptr)
void SetTickRatePerMs(double ticks_per_ms)
void StopHeapObjectsTracking()
virtual intptr_t GetHash()=0
uint32_t GetCallUid() const
static const int kMaxSimultaneousProfiles
INLINE(CodeEntry *NewCodeEntry(Logger::LogEventsAndTags tag, const char *name))
ProfileNode * root() const
INLINE(CodeEntry *NewCodeEntry(Logger::LogEventsAndTags tag, const char *name_prefix, String *name))
HeapEntry * natives_root()
static const int kNoEntry
ProfileGenerator(CpuProfilesCollection *profiles)
INLINE(double actual_sampling_rate())
static SnapshotObjectId GetNthGcSubrootId(int delta)
CpuProfile * GetProfile(int security_token_id, unsigned uid)
void Print(int max_depth)
static const SnapshotObjectId kNativesRootObjectId
INLINE(const char *title() const)
INLINE(void IncreaseTotalTicks(unsigned amount))
bool IterateAndExtractReferences(SnapshotFillerInterface *filler)
CodeEntry * NewCodeEntry(Logger::LogEventsAndTags tag, String *name, String *resource_name, int line_number)
INLINE(unsigned total_ticks() const)
INLINE(bool is_js_function() const)
void FilteredClone(ProfileTree *src, int security_token_id)
int children_count() const
double GetSelfMillis() const
List< HeapSnapshot * > * snapshots()
void SnapshotGenerationFinished()
TokenEnumerator * token_enumerator()
const char * name() const
virtual void SetNamedAutoIndexReference(HeapGraphEdge::Type type, int parent_entry, HeapEntry *child_entry)=0
size_t RawSnapshotSize() const
#define ASSERT(condition)
v8::Handle< v8::Value > Print(const v8::Arguments &args)
const char * GetFormatted(const char *format,...)
SnapshotObjectId PushHeapObjectsStats(OutputStream *stream)
static const int kNoSecurityToken
bool HasDetachedProfiles()
virtual HeapEntry * AddEntry(HeapThing ptr, HeapEntriesAllocator *allocator)=0
INLINE(CodeEntry(Logger::LogEventsAndTags tag, const char *name_prefix, const char *name, const char *resource_name, int line_number, int security_token_id))
void set_name(const char *name)
void SetTag(Object *obj, const char *tag)
INLINE(const char *name() const)
void SetActualSamplingRate(double actual_sampling_rate)
bool IterateAndExtractReferences(SnapshotFillerInterface *filler)
const char * GetName(int args_count)
INLINE(void set_shared_id(int shared_id))
HeapEntry * AddNativesRootEntry()
virtual ~SnapshottingProgressReportingInterface()
int EstimateObjectsCount()
virtual void SetIndexedAutoIndexReference(HeapGraphEdge::Type type, int parent_entry, HeapEntry *child_entry)=0
INLINE(CodeMap *code_map())
INLINE(unsigned self_ticks() const)
static const SnapshotObjectId kGcRootsObjectId
static SnapshotObjectId GenerateId(v8::RetainedObjectInfo *info)
INLINE(CodeEntry *entry() const)
HeapEntry * AddGcRootsEntry()
void CalculateTotalTicks()
INLINE(CodeEntry *NewCodeEntry(Logger::LogEventsAndTags tag, String *name, String *resource_name, int line_number))
const char * GetName(String *name)
SnapshotObjectId last_assigned_id() const
INLINE(CodeEntry *NewCodeEntry(int security_token_id))
virtual void SetNamedReference(HeapGraphEdge::Type type, int parent_entry, const char *reference_name, HeapEntry *child_entry)=0
CodeEntry * FindEntry(Address addr)
NativeObjectsExplorer(HeapSnapshot *snapshot, SnapshottingProgressReportingInterface *progress)
List< HeapEntry > & entries()
void RecordTickSample(const TickSample &sample)
const char * GetTag(Object *obj)
int GetSharedId(Address addr)
friend class HeapSnapshotTester
static const int kObjectIdStep
SnapshotObjectId FindObjectId(Address object_addr)
static String * GetConstructorName(JSObject *object)
HeapEntry * gc_subroot(int index)
ProfileNode * FindChild(CodeEntry *entry)
HeapSnapshotsCollection * collection()
bool Contains(Object *object)
uint32_t occupancy() const
int Compare(const T &a, const T &b)
static HeapObject *const kInternalRootObject
virtual void ProgressStep()=0
INLINE(CodeEntry *NewCodeEntry(Logger::LogEventsAndTags tag, int args_count))
INLINE(int shared_id() const)
INLINE(const ProfileTree *bottom_up() const)
HeapEntry * AddRootEntry()
void MoveCode(Address from, Address to)
static const char *const kProgramEntryName
#define DISALLOW_COPY_AND_ASSIGN(TypeName)
double TicksToMillis(unsigned ticks) const
static const SnapshotObjectId kFirstAvailableObjectId
List< HeapGraphEdge > & edges()
HeapSnapshotGenerator(HeapSnapshot *snapshot, v8::ActivityControl *control)
INLINE(unsigned uid() const)
double GetTotalMillis() const
SnapshotObjectId FindEntry(Address addr)
bool IsSameAs(CodeEntry *entry) const
void AddPathFromEnd(const Vector< CodeEntry * > &path)
virtual void SetIndexedReference(HeapGraphEdge::Type type, int parent_entry, int index, HeapEntry *child_entry)=0
V8HeapExplorer(HeapSnapshot *snapshot, SnapshottingProgressReportingInterface *progress)
INLINE(const char *name_prefix() const)
void RemoveProfile(CpuProfile *profile)
CpuProfile * FilteredClone(int security_token_id)
virtual ~HeapEntriesAllocator()
INLINE(int line_number() const)
SnapshotObjectId PushHeapObjectsStats(OutputStream *stream)
friend class HeapSnapshotJSONSerializerEnumerator
static const char *const kGarbageCollectorEntryName
void AddPathToCurrentProfiles(const Vector< CodeEntry * > &path)
HeapEntry * GetEntryById(SnapshotObjectId id)
virtual HeapEntry * FindOrAddEntry(HeapThing ptr, HeapEntriesAllocator *allocator)=0
void AddRootEntries(SnapshotFillerInterface *filler)
void CopyData(const CodeEntry &source)
void UpdateMeasurements(double current_time)
static const unsigned kWallTimeQueryIntervalMs
void AddPath(const Vector< CodeEntry * > &path)
void AddCode(Address addr, CodeEntry *entry, unsigned size)
void Serialize(v8::OutputStream *stream)
void AddPathFromStart(const Vector< CodeEntry * > &path)
void StopHeapObjectsTracking()
INLINE(int security_token_id() const)
HeapEntry * AddEntry(HeapEntry::Type type, const char *name, SnapshotObjectId id, int size)
int GetTokenId(Object *token)
static const char *const kAnonymousFunctionName
uint32_t ComputeIntegerHash(uint32_t key, uint32_t seed)
static const SnapshotObjectId kInternalRootObjectId
INLINE(bool has_name_prefix() const)
virtual ~NativeObjectsExplorer()
HeapSnapshotsCollection()
INLINE(static HeapObject *EnsureDoubleAligned(Heap *heap, HeapObject *object, int size))
List< HeapEntry * > * GetSortedEntriesList()
uint32_t SnapshotObjectId
virtual HeapEntry * FindEntry(HeapThing ptr)=0
void SnapshotGenerationFinished(HeapSnapshot *snapshot)
const char * GetCopy(const char *src)
virtual ~SnapshotFillerInterface()
void Pair(HeapThing thing, int entry)
HeapEntry * AddGcSubrootEntry(int tag)
TemplateHashMapImpl< FreeStoreAllocationPolicy > HashMap
SnapshotObjectId last_assigned_id() const
INLINE(const char *resource_name() const)
static const char *const kEmptyNamePrefix
void StartHeapObjectsTracking()
void AddRootEntries(SnapshotFillerInterface *filler)
bool is_tracking_objects()
size_t GetUsedMemorySize() const
const char * GetFunctionName(String *name)
HeapSnapshot * NewSnapshot(HeapSnapshot::Type type, const char *name, unsigned uid)
bool StartProfiling(const char *title, unsigned uid)
HeapSnapshot * GetSnapshot(unsigned uid)
~HeapSnapshotsCollection()
void RememberLastJSObjectId()
List< HeapGraphEdge * > & children()
SnapshotObjectId GetObjectId(Address object_addr, int object_size)
INLINE(const List< ProfileNode * > *children() const)
INLINE(ProfileNode(ProfileTree *tree, CodeEntry *entry))
int EstimateObjectsCount(HeapIterator *iterator)
const char * GetVFormatted(const char *format, va_list args)
CpuProfile * StopProfiling(int security_token_id, const char *title, double actual_sampling_rate)
HeapSnapshot * snapshot()
friend class HeapSnapshotJSONSerializerIterator
INLINE(void IncrementSelfTicks())
SnapshotObjectId max_snapshot_js_object_id() const
const char * GetName(String *name)
void MoveObject(Address from, Address to)
ProfileNode * FindOrAddChild(CodeEntry *entry)
CpuProfile(const char *title, unsigned uid)
void add_child(HeapGraphEdge *edge)
HeapSnapshot(HeapSnapshotsCollection *collection, Type type, const char *title, unsigned uid)
INLINE(void IncreaseSelfTicks(unsigned amount))
INLINE(const ProfileTree *top_down() const)
Vector< HeapGraphEdge * > children()
void CalculateTotalTicks()
HeapSnapshotJSONSerializer(HeapSnapshot *snapshot)
void ObjectMoveEvent(Address from, Address to)
List< CpuProfile * > * Profiles(int security_token_id)
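Taken together, StartProfiling / AddPathToCurrentProfiles / IsLastProfile / StopProfiling describe a collection in which several profiles can be recording at once, every sample is fanned out to all of them, and stopping by title detaches one finished profile. A standalone sketch of that lifecycle, with simplified stand-in types rather than the CpuProfilesCollection API:

#include <string>
#include <vector>

struct MiniProfile {
  std::string title;
  unsigned uid;
  unsigned samples;
};

class MiniProfilesCollection {
 public:
  bool StartProfiling(const std::string& title, unsigned uid) {
    current_.push_back({title, uid, 0});
    return true;
  }
  // AddPathToCurrentProfiles analogue: every in-flight profile records it.
  void AddSampleToCurrentProfiles() {
    for (MiniProfile& p : current_) ++p.samples;
  }
  // True if |title| names the only profile still being recorded.
  bool IsLastProfile(const std::string& title) const {
    return current_.size() == 1 && current_[0].title == title;
  }
  MiniProfile StopProfiling(const std::string& title) {
    for (size_t i = 0; i < current_.size(); ++i) {
      if (current_[i].title == title) {
        MiniProfile done = current_[i];
        current_.erase(current_.begin() + i);
        finished_.push_back(done);
        return done;
      }
    }
    return MiniProfile{"", 0, 0};
  }

 private:
  std::vector<MiniProfile> current_;
  std::vector<MiniProfile> finished_;
};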