45 using namespace v8::internal;
// Fixed-size open-addressing table backing the counter-lookup callback used
// by the serialization tests: parallel arrays of counter values and the
// names identifying them.  As zero-initialized statics, every value starts
// at 0 and every name slot starts NULL ("empty").
static const unsigned kCounters = 256;
static int local_counters[kCounters];
static const char* local_counter_names[kCounters];
52 static unsigned CounterHash(
const char* s) {
63 static int* counter_function(
const char* name) {
64 unsigned hash = CounterHash(name) % kCounters;
65 unsigned original_hash = hash;
68 if (local_counter_names[hash] == name) {
69 return &local_counters[hash];
71 if (local_counter_names[hash] == 0) {
72 local_counter_names[hash] = name;
73 return &local_counters[hash];
75 if (strcmp(local_counter_names[hash], name) == 0) {
76 return &local_counters[hash];
78 hash = (hash + 1) % kCounters;
79 ASSERT(hash != original_hash);
86 return ExternalReference(
id, i::Isolate::Current()).address();
92 return encoder.
Encode(AddressOf(
id));
96 static int make_code(
TypeCode type,
int id) {
102 Isolate* isolate = i::Isolate::Current();
108 Encode(encoder, Builtins::kArrayCode));
110 Encode(encoder, Runtime::kAbort));
112 Encode(encoder,
IC_Utility(IC::kLoadCallbackProperty)));
113 ExternalReference keyed_load_function_prototype =
114 ExternalReference(isolate->
counters()->keyed_load_function_prototype());
116 encoder.
Encode(keyed_load_function_prototype.address()));
117 ExternalReference stack_limit_address =
118 ExternalReference::address_of_stack_limit(isolate);
120 encoder.
Encode(stack_limit_address.address()));
121 ExternalReference real_stack_limit_address =
122 ExternalReference::address_of_real_stack_limit(isolate);
124 encoder.
Encode(real_stack_limit_address.address()));
125 #ifdef ENABLE_DEBUGGER_SUPPORT
127 encoder.
Encode(ExternalReference::debug_break(isolate).address()));
128 #endif // ENABLE_DEBUGGER_SUPPORT
131 ExternalReference::new_space_start(isolate).address()));
134 ExternalReference::roots_array_start(isolate).address()));
139 Isolate* isolate = i::Isolate::Current();
144 CHECK_EQ(AddressOf(Builtins::kArrayCode),
146 CHECK_EQ(AddressOf(Runtime::kAbort),
151 ExternalReference keyed_load_function =
152 ExternalReference(isolate->
counters()->keyed_load_function_prototype());
153 CHECK_EQ(keyed_load_function.address(),
156 Counters::k_keyed_load_function_prototype)));
157 CHECK_EQ(ExternalReference::address_of_stack_limit(isolate).address(),
159 CHECK_EQ(ExternalReference::address_of_real_stack_limit(isolate).address(),
161 #ifdef ENABLE_DEBUGGER_SUPPORT
162 CHECK_EQ(ExternalReference::debug_break(isolate).address(),
164 #endif // ENABLE_DEBUGGER_SUPPORT
165 CHECK_EQ(ExternalReference::new_space_start(isolate).address(),
174 file_name_ = snapshot_file;
176 PrintF(
"Unable to write to snapshot file \"%s\"\n", snapshot_file);
185 virtual void Put(
int byte,
const char* description) {
195 int pointer_space_used,
199 int cell_space_used);
203 const char* file_name_;
209 int pointer_space_used,
213 int cell_space_used) {
214 int file_name_length =
StrLength(file_name_) + 10;
219 fprintf(fp,
"new %d\n", new_space_used);
220 fprintf(fp,
"pointer %d\n", pointer_space_used);
221 fprintf(fp,
"data %d\n", data_space_used);
222 fprintf(fp,
"code %d\n", code_space_used);
223 fprintf(fp,
"map %d\n", map_space_used);
224 fprintf(fp,
"cell %d\n", cell_space_used);
229 static bool WriteToFile(
const char* snapshot_file) {
246 static void Serialize() {
253 WriteToFile(FLAG_testing_serialization_file);
281 static void Deserialize() {
286 static void SanityCheck() {
291 CHECK(Isolate::Current()->global_object()->IsJSObject());
292 CHECK(Isolate::Current()->native_context()->IsContext());
293 CHECK(
HEAP->symbol_table()->IsSymbolTable());
294 CHECK(!
FACTORY->LookupAsciiSymbol(
"Empty")->IsFailure());
335 const char* c_source =
"\"1234\".length";
352 const char* c_source =
"\"1234\".length";
371 Isolate::Current()->bootstrapper()->NativesSourceLookup(i);
382 raw_foo = *(v8::Utils::OpenHandle(*foo));
385 int file_name_length =
StrLength(FLAG_testing_serialization_file) + 10;
387 OS::SNPrintF(startup_name,
"%s.startup", FLAG_testing_serialization_file);
394 startup_serializer.SerializeStrongReferences();
396 FileByteSink partial_sink(FLAG_testing_serialization_file);
399 startup_serializer.SerializeWeakReferences();
409 startup_sink.WriteSpaceUsed(
410 startup_serializer.CurrentAllocationAddress(
NEW_SPACE),
413 startup_serializer.CurrentAllocationAddress(
CODE_SPACE),
414 startup_serializer.CurrentAllocationAddress(
MAP_SPACE),
415 startup_serializer.CurrentAllocationAddress(
CELL_SPACE));
421 static void ReserveSpaceForSnapshot(
Deserializer* deserializer,
422 const char* file_name) {
423 int file_name_length =
StrLength(file_name) + 10;
428 int new_size, pointer_size, data_size, code_size, map_size, cell_size;
432 #define fscanf fscanf_s
434 CHECK_EQ(1, fscanf(fp,
"new %d\n", &new_size));
435 CHECK_EQ(1, fscanf(fp,
"pointer %d\n", &pointer_size));
436 CHECK_EQ(1, fscanf(fp,
"data %d\n", &data_size));
437 CHECK_EQ(1, fscanf(fp,
"code %d\n", &code_size));
438 CHECK_EQ(1, fscanf(fp,
"map %d\n", &map_size));
439 CHECK_EQ(1, fscanf(fp,
"cell %d\n", &cell_size));
455 int file_name_length =
StrLength(FLAG_testing_serialization_file) + 10;
457 OS::SNPrintF(startup_name,
"%s.startup", FLAG_testing_serialization_file);
462 const char* file_name = FLAG_testing_serialization_file;
464 int snapshot_size = 0;
471 ReserveSpaceForSnapshot(&deserializer, file_name);
473 CHECK(root->IsString());
483 ReserveSpaceForSnapshot(&deserializer, file_name);
485 CHECK(root2->IsString());
486 CHECK(*root_handle == root2);
503 Isolate::Current()->bootstrapper()->NativesSourceLookup(i);
510 int file_name_length =
StrLength(FLAG_testing_serialization_file) + 10;
512 OS::SNPrintF(startup_name,
"%s.startup", FLAG_testing_serialization_file);
516 Object* raw_context = *(v8::Utils::OpenHandle(*env));
522 startup_serializer.SerializeStrongReferences();
524 FileByteSink partial_sink(FLAG_testing_serialization_file);
527 startup_serializer.SerializeWeakReferences();
537 startup_sink.WriteSpaceUsed(
538 startup_serializer.CurrentAllocationAddress(
NEW_SPACE),
541 startup_serializer.CurrentAllocationAddress(
CODE_SPACE),
542 startup_serializer.CurrentAllocationAddress(
MAP_SPACE),
543 startup_serializer.CurrentAllocationAddress(
CELL_SPACE));
551 int file_name_length =
StrLength(FLAG_testing_serialization_file) + 10;
553 OS::SNPrintF(startup_name,
"%s.startup", FLAG_testing_serialization_file);
558 const char* file_name = FLAG_testing_serialization_file;
560 int snapshot_size = 0;
567 ReserveSpaceForSnapshot(&deserializer, file_name);
569 CHECK(root->IsContext());
579 ReserveSpaceForSnapshot(&deserializer, file_name);
581 CHECK(root2->IsContext());
582 CHECK(*root_handle != root2);
593 bool ArtificialFailure =
false;
594 CHECK(ArtificialFailure);
599 bool ArtificialFailure2 =
false;
600 CHECK(ArtificialFailure2);
int CurrentAllocationAddress(int space)
static Local< Script > Compile(Handle< String > source, ScriptOrigin *origin=NULL, ScriptData *pre_data=NULL, Handle< String > script_data=Handle< String >())
#define CHECK_EQ(expected, value)
const int kReferenceTypeShift
void PrintF(const char *format,...)
virtual void Serialize(Object **o)
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 enable use of VFP2 instructions if available enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of MIPS FPU instructions if expose natives in global object expose gc extension number of stack frames to capture disable builtin 
natives files print a stack trace if an assertion failure occurs use random jit cookie to mask large constants trace lazy optimization use adaptive optimizations prepare for turning on always opt minimum length for automatic enable preparsing maximum number of optimization attempts before giving up cache prototype transitions automatically set the debug break flag when debugger commands are in the queue always cause a debug break before aborting maximum length of function source code printed in a stack trace max size of the new max size of the old max size of executable always perform global GCs print one trace line following each garbage collection do not print trace line after scavenger collection print more details following each garbage collection print amount of external allocated memory after each time it is adjusted flush code that we expect not to use again before full gc do incremental marking steps track object counts and memory usage use caching Perform compaction on every full GC Never perform compaction on full GC testing only Compact code space on full incremental collections Default seed for initializing random allows verbose printing trace parsing and preparsing Check icache flushes in ARM and MIPS simulator Stack alingment in bytes in print stack trace when throwing exceptions randomize hashes to avoid predictable hash Fixed seed to use to hash property activate a timer that switches between V8 threads testing_bool_flag float flag Seed used for threading test randomness A filename with extra code to be included in the snapshot(mksnapshot only)") DEFINE_bool(help
StatsTable * stats_table()
static V8EXPORT Local< String > New(const char *data, int length=-1)
void WriteSpaceUsed(int new_space_used, int pointer_space_used, int data_space_used, int code_space_used, int map_space_used, int cell_space_used)
#define ASSERT(condition)
void DeserializePartial(Object **root)
byte * ReadBytes(const char *filename, int *size, bool verbose)
Address Decode(uint32_t key) const
void SetCounterFunction(CounterLookupCallback f)
void set_reservation(int space_number, int reservation)
static int GetBuiltinsCount()
static const int kNoGCFlags
static FILE * FOpen(const char *path, const char *mode)
uint32_t Encode(Address key) const
static Vector< T > New(int length)
int StrLength(const char *string)
#define T(name, string, precedence)
static int SNPrintF(Vector< char > str, const char *format,...)
static bool HaveASnapshotToStartFrom()
virtual void Put(int byte, const char *description)
DEPENDENT_TEST(Deserialize, Serialize)
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 enable use of VFP2 instructions if available enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of MIPS FPU instructions if NULL
static Persistent< Context > New(ExtensionConfiguration *extensions=NULL, Handle< ObjectTemplate > global_template=Handle< ObjectTemplate >(), Handle< Value > global_object=Handle< Value >())
FileByteSink(const char *snapshot_file)
static bool Initialize(const char *snapshot_file=NULL)