28 #ifndef V8_SERIALIZE_H_
29 #define V8_SERIALIZE_H_
74 int size()
const {
return refs_.length(); }
78 uint32_t
code(
int i) {
return refs_[i].code; }
80 const char*
name(
int i) {
return refs_[i].name; }
86 PopulateTable(isolate);
89 struct ExternalReferenceEntry {
95 void PopulateTable(Isolate* isolate);
106 List<ExternalReferenceEntry> refs_;
121 static uint32_t Hash(
Address key) {
122 return static_cast<uint32_t
>(
reinterpret_cast<uintptr_t
>(key) >> 2);
125 int IndexOf(
Address key)
const;
// HashMap match function: two keys compare equal exactly when the
// pointer values themselves are identical (address identity).
static bool Match(void* key1, void* key2) {
  return key1 == key2;
}
129 void Put(
Address key,
int index);
141 if (key == 0)
return NULL;
148 Address* Lookup(uint32_t key)
const {
152 return &encodings_[type][id];
155 void Put(uint32_t key,
Address value) {
156 *Lookup(key) = value;
166 : data_(array), length_(length), position_(0) { }
168 bool HasMore() {
return position_ < length_; }
171 ASSERT(position_ < length_);
172 return data_[position_++];
176 #if defined(V8_HOST_CAN_READ_UNALIGNED) && __BYTE_ORDER == __LITTLE_ENDIAN
178 ASSERT(position_ +
sizeof(answer) <= length_ + 0u);
179 answer = *
reinterpret_cast<const int32_t*
>(data_ + position_);
181 int32_t answer = data_[position_];
182 answer |= data_[position_ + 1] << 8;
183 answer |= data_[position_ + 2] << 16;
184 answer |= data_[position_ + 3] << 24;
191 inline void CopyRaw(
byte* to,
int number_of_bytes);
278 return 0x72 + repeats;
282 return byte_code - 0x72;
288 return byte_code & 0x1f;
303 int bytes = answer & 3;
305 uint32_t mask = 0xffffffffu;
306 mask >>= 32 - (bytes << 3);
314 OS::MemCopy(to, data_ + position_, number_of_bytes);
315 position_ += number_of_bytes;
334 ASSERT(space_number >= 0);
336 reservations_[space_number] = reservation;
340 virtual void VisitPointers(
Object** start,
Object** end);
342 virtual void VisitRuntimeEntry(RelocInfo* rinfo) {
348 void RelinkAllocationSite(AllocationSite* site);
357 void ReadObject(
int space_number,
Object** write_back);
363 Address address = high_water_[space_index];
364 high_water_[space_index] = address +
size;
366 if (profiler->is_tracking_allocations()) {
367 profiler->AllocationEvent(address, size);
374 HeapObject* GetAddressFromEnd(
int space) {
375 int offset = source_->
GetInt();
380 void FlushICacheForNewCodeObjects();
385 SnapshotByteSource* source_;
391 static const intptr_t kUninitializedReservation = -1;
393 ExternalReferenceDecoder* external_reference_decoder_;
402 virtual void Put(
int byte,
const char* description) = 0;
404 Put(byte, description);
406 void PutInt(uintptr_t integer,
const char* description);
417 serialization_map_(new
HashMap(&SerializationMatchFun)) { }
420 delete serialization_map_;
424 return serialization_map_->
Lookup(Key(obj), Hash(obj),
false) !=
NULL;
429 return static_cast<int>(
reinterpret_cast<intptr_t
>(
430 serialization_map_->
Lookup(Key(obj), Hash(obj),
false)->value));
435 HashMap::Entry* entry =
436 serialization_map_->
Lookup(Key(obj), Hash(obj),
true);
437 entry->value =
Value(to);
441 static bool SerializationMatchFun(
void* key1,
void* key2) {
445 static uint32_t Hash(HeapObject*
obj) {
446 return static_cast<int32_t>(
reinterpret_cast<intptr_t
>(obj->address()));
449 static void* Key(HeapObject* obj) {
450 return reinterpret_cast<void*
>(obj->address());
453 static void* Value(
int v) {
454 return reinterpret_cast<void*
>(v);
463 class CodeAddressMap;
511 : serializer_(serializer),
514 reference_representation_(how_to_code + where_to_point),
515 bytes_processed_so_far_(0),
516 code_object_(o->IsCode()),
517 code_has_been_output_(
false) { }
537 enum ReturnSkip { kCanReturnSkipInsteadOfSkipping, kIgnoringReturn };
542 int OutputRawData(
Address up_to, ReturnSkip return_skip = kIgnoringReturn);
547 int reference_representation_;
548 int bytes_processed_so_far_;
550 bool code_has_been_output_;
605 startup_serializer_(startup_snapshot_serializer) {
624 return o->IsName() || o->IsSharedFunctionInfo() ||
625 o->IsHeapNumber() || o->IsCode() ||
628 startup_serializer_->
isolate()->
heap()->fixed_cow_array_map();
645 isolate->set_serialize_partial_snapshot_cache_length(0);
664 virtual bool ShouldBeInThePartialSnapshotCache(
HeapObject* o) {
672 #endif // V8_SERIALIZE_H_
virtual void SerializeObject(Object *o, HowToCode how_to_code, WhereToPoint where_to_point, int skip)=0
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter NULL
static const int kInvalidRootIndex
void VisitCodeTarget(RelocInfo *target)
int CurrentAllocationAddress(int space)
ExternalReferenceEncoder(Isolate *isolate)
SerializationAddressMapper address_mapper_
void CopyRaw(byte *to, int number_of_bytes)
ObjectSerializer(Serializer *serializer, Object *o, SnapshotByteSink *sink, HowToCode how_to_code, WhereToPoint where_to_point)
const int kReferenceTypeShift
void AddMapping(HeapObject *obj, int to)
virtual void Serialize(Object **o)
static bool too_late_to_enable_now_
bool IsMapped(HeapObject *obj)
static int RootArrayConstantFromByteCode(int byte_code)
virtual ~SnapshotByteSink()
const int kDeoptTableSerializeEntryCount
intptr_t root_index_wave_front()
virtual void SerializeStrongReferences()
void SerializeWeakReferences()
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes number of stack frames inspected by the profiler percentage of ICs that must have type info to allow optimization extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long expose natives in global object expose freeBuffer extension expose gc extension under the specified name expose externalize string extension number of stack frames to capture disable builtin natives files print name of functions for which code is generated use random jit cookie to mask large constants trace lazy optimization use adaptive optimizations always try to OSR functions trace optimize function deoptimization minimum length for automatic enable preparsing maximum number of optimization attempts before giving up cache prototype transitions trace debugging JSON request response trace out of bounds accesses to external arrays trace_js_array_abuse automatically set the debug break flag when debugger commands are in the queue abort by crashing maximum length of function source code printed in a stack trace max size of the new max size of the old max size of executable always perform global GCs print one trace line following each garbage collection do not print trace line after scavenger collection print statistics of the maximum memory committed for the heap in only print modified registers Don t break for ASM_UNIMPLEMENTED_BREAK macros print stack trace when an illegal exception is thrown randomize hashes to avoid predictable hash 
Fixed seed to use to hash property Print the time it takes to deserialize the snapshot testing_bool_flag testing_int_flag string flag tmp file in which to serialize heap Print the time it takes to lazily compile hydrogen code stubs concurrent_recompilation concurrent_sweeping Print usage including on console Map counters to a file Enable debugger compile events enable GDBJIT enable GDBJIT interface for all code objects dump only objects containing this substring stress the GC compactor to flush out pretty print source code print source AST function name where to insert a breakpoint print scopes for builtins trace contexts operations print stuff during garbage collection report code statistics after GC report handles after GC trace cache state transitions print interface inference details prints when objects are turned into dictionaries report heap spill statistics along with trace isolate state changes trace regexp bytecode execution Minimal Log all events to the log file Log API events to the log file Log heap samples on garbage collection for the hp2ps tool log positions Log suspect operations Used with turns on browser compatible mode for profiling v8 Specify the name of the log file Enable low level linux profiler Enable perf linux profiler(experimental annotate support).") DEFINE_string(gc_fake_mmap
int SpaceAreaSize(int space)
Serializer(Isolate *isolate, SnapshotByteSink *sink)
void VisitRuntimeEntry(RelocInfo *reloc)
#define ASSERT(condition)
int EncodeExternalReference(Address addr)
static const int kSpaceMask
void VisitPointers(Object **start, Object **end)
static const int kAnyOldSpace
void VisitExternalReference(Address *p)
static int CodeForRepeats(int repeats)
const int kReferenceIdMask
void PutInt(uintptr_t integer, const char *description)
Address Decode(uint32_t key) const
intptr_t root_index_wave_front_
void PutRoot(int index, HeapObject *object, HowToCode how, WhereToPoint where, int skip)
void VisitCodeEntry(Address entry_address)
void set_reservation(int space_number, int reservation)
void VisitPointers(Object **start, Object **end)
bool ShouldBeSkipped(Object **current)
void VisitExternalTwoByteString(v8::String::ExternalStringResource **resource)
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object size
int fullness_[LAST_SPACE+1]
static const int kConstantRepeat
static const int kMaxRepeats
static void MemCopy(void *dest, const void *src, size_t size)
int32_t GetUnalignedInt()
Deserializer(SnapshotByteSource *source)
void InitializeAllocators()
static const int kRawData
virtual int PartialSnapshotCacheIndex(HeapObject *o)
~ExternalReferenceTable()
static const int kRootArrayConstants
static int SpaceOfObject(HeapObject *object)
SerializationAddressMapper * address_mapper()
static void TooLateToEnableNow()
virtual void SerializeObject(Object *o, HowToCode how_to_code, WhereToPoint where_to_point, int skip)
~SerializationAddressMapper()
ExternalReferenceEncoder * external_reference_encoder_
void Deserialize(Isolate *isolate)
void VisitCell(RelocInfo *rinfo)
Entry * Lookup(void *key, uint32_t hash, bool insert, AllocationPolicy allocator=AllocationPolicy())
#define DISALLOW_COPY_AND_ASSIGN(TypeName)
int Allocate(int space, int size)
static const int kNumberOfSpaces
static const int kRootArrayNumberOfConstantEncodings
uint32_t Encode(Address key) const
static bool serialization_enabled_
static void Iterate(Isolate *isolate, ObjectVisitor *visitor)
void DeserializePartial(Isolate *isolate, Object **root)
static int RepeatsForCode(int byte_code)
SerializationAddressMapper()
void SerializeReferenceToPreviousObject(int space, int address, HowToCode how_to_code, WhereToPoint where_to_point, int skip)
static void Enable(Isolate *isolate)
~ExternalReferenceDecoder()
const int kDebugRegisterBits
HeapProfiler * heap_profiler() const
const int kObjectAlignmentBits
TemplateHashMapImpl< FreeStoreAllocationPolicy > HashMap
void VisitExternalAsciiString(v8::String::ExternalAsciiStringResource **resource)
Isolate * isolate() const
virtual void SerializeObject(Object *o, HowToCode how_to_code, WhereToPoint where_to_point, int skip)
StartupSerializer(Isolate *isolate, SnapshotByteSink *sink)
void set_root_index_wave_front(intptr_t value)
static HeapObject * FromAddress(Address address)
static const int kSynchronize
PerThreadAssertScopeDebugOnly< HEAP_ALLOCATION_ASSERT, false > DisallowHeapAllocation
const int kReferenceIdBits
static const int kNativesStringResource
int RootIndex(HeapObject *heap_object, HowToCode from)
virtual bool ShouldBeInThePartialSnapshotCache(HeapObject *o)=0
virtual void Put(int byte, const char *description)=0
static ExternalReferenceTable * instance(Isolate *isolate)
virtual bool ShouldBeInThePartialSnapshotCache(HeapObject *o)
PartialSerializer(Isolate *isolate, Serializer *startup_snapshot_serializer, SnapshotByteSink *sink)
int MappedTo(HeapObject *obj)
SnapshotByteSource(const byte *array, int length)
virtual void PutSection(int byte, const char *description)
ExternalReferenceDecoder(Isolate *isolate)
void VisitEmbeddedPointer(RelocInfo *target)
const char * NameOfAddress(Address key) const