// Process-wide state flags (static data members of class V8).
// NOTE(review): the leading numerals (52-57) are line-number artifacts
// from extraction, not part of the original source.
// Presumably tracks whether the VM is running; no assignment to it is
// visible in this chunk — confirm against the full file.
52 bool V8::is_running_ =
false;
// Set by Initialize() once setup has run; checked by TearDown().
53 bool V8::has_been_set_up_ =
false;
// Set by TearDown(); guards against double disposal.
54 bool V8::has_been_disposed_ =
false;
// Set by SetFatalError(); presumably what IsDead() reports.
55 bool V8::has_fatal_error_ =
false;
// Whether the optimizing (Crankshaft) compiler may be used; refined in
// InitializeOncePerProcessImpl() from FLAG_crankshaft.
56 bool V8::use_crankshaft_ =
true;
// Lazily allocated list of embedder callbacks fired after each V8 call
// completes; freed and reset in TearDown().
57 List<CallCompletedCallback>* V8::call_completed_callbacks_ =
NULL;
// --- Fragments of V8::Initialize(Deserializer* des) ---
// NOTE(review): many intermediate source lines are missing from this
// chunk; the statements below are non-contiguous pieces of the body.
// Run process-global setup exactly once before touching any isolate.
67 InitializeOncePerProcess();
// Tail of an expression whose opening line is not visible here —
// presumably an ASSERT involving the current isolate.
81 i::Isolate::Current());
// Refuse to initialize after a fatal error has been recorded.
83 if (
IsDead())
return false;
85 Isolate* isolate = Isolate::Current();
// Record that setup ran and clear error/disposal state before the
// isolate initialization below, which may itself fail.
89 has_been_set_up_ =
true;
90 has_fatal_error_ =
false;
91 has_been_disposed_ =
false;
// Success iff the isolate initializes ('des' is presumably a startup
// snapshot deserializer, or NULL — confirm against v8.h).
93 return isolate->
Init(des);
// Fragment of V8::SetFatalError(): marks the VM as dead so later
// Initialize() calls bail out (see the IsDead() check above).
99 has_fatal_error_ =
true;
// --- Fragments of V8::TearDown() ---
104 Isolate* isolate = Isolate::Current();
// No-op unless Initialize() ran, and never tear down twice.
107 if (!has_been_set_up_ || has_been_disposed_)
return;
120 has_been_disposed_ =
true;
// Release the callback list; AddCallCompletedCallback() re-creates it
// lazily on next use.
122 delete call_completed_callbacks_;
123 call_completed_callbacks_ =
NULL;
// Seeds both 32-bit words of the PRNG state used by random_base().
// Per word, in priority order: the --random-seed flag (reproducible
// streams for testing), then the embedder-installed entropy source;
// the fallback path is on lines not visible in this chunk.
129 static void seed_random(uint32_t* state) {
130 for (
int i = 0; i < 2; ++i) {
131 if (FLAG_random_seed != 0) {
132 state[i] = FLAG_random_seed;
133 }
// Only consult the entropy callback if the embedder installed one.
else if (entropy_source !=
NULL) {
// Serialize calls to the entropy callback — presumably it need not be
// reentrant; 'entropy_mutex' is a lazily-created mutex.
135 ScopedLock lock(entropy_mutex.
Pointer());
// NOTE(review): 'val' is declared on a line missing from this chunk.
136 entropy_source(reinterpret_cast<unsigned char*>(&val),
sizeof(uint32_t));
// Advances the 2x32-bit generator state and returns a pseudo-random
// 32-bit value. NOTE(review): the closing brace of this function is on
// a line outside this chunk.
146 static uint32_t random_base(uint32_t* state) {
// Lazily seed on first use; state[0] == 0 is treated as "unseeded".
149 if (state[0] == 0) seed_random(state);
// Mix each word with a multiply-with-carry style step: multiply the
// low 16 bits and add the high 16 bits as carry.
152 state[0] = 18273 * (state[0] & 0xFFFF) + (state[0] >> 16);
153 state[1] = 36969 * (state[1] & 0xFFFF) + (state[1] >> 16);
// Combine the two words into the returned value.
155 return (state[0] << 14) + (state[1] & 0x3FFFF);
// Fragment of V8::SetEntropySource(): installs the embedder-supplied
// entropy callback consulted by seed_random() above.
160 entropy_source = source;
// Fragment of V8::SetReturnAddressLocationResolver(): forwards the
// embedder's resolver to the internal stack-frame machinery.
166 StackFrame::SetReturnAddressLocationResolver(resolver);
// --- Fragments of V8::Random(Context* context) ---
// Random streams are per native context.
172 ASSERT(context->IsNativeContext());
// 'seed' is presumably fetched from the context on a missing line;
// its raw data buffer holds the two-word generator state.
174 return random_base(reinterpret_cast<uint32_t*>(seed->GetDataStartAddress()));
// --- Fragments of V8::RandomPrivate(Isolate* isolate) ---
// Internal random stream: per-isolate seed, separate from the
// per-context stream used by V8::Random().
182 ASSERT(isolate == Isolate::Current());
183 return random_base(isolate->private_random_seed());
// --- Fragments of V8::IdleNotification(int hint) ---
// When idle notifications are disabled by flag, return true —
// presumably meaning "no further idle work needed".
190 if (!FLAG_use_idle_notification)
return true;
// Delegate to the heap of the current isolate; 'hint' presumably
// scales the amount of idle-time work — confirm in heap.cc.
193 return HEAP->IdleNotification(hint);
// --- Fragments of V8::AddCallCompletedCallback ---
// Lazily create the list on first registration (the allocation itself
// is on a line missing from this chunk).
198 if (call_completed_callbacks_ ==
NULL) {
// De-duplicate: a callback already registered is not added twice.
201 for (
int i = 0; i < call_completed_callbacks_->length(); i++) {
202 if (callback == call_completed_callbacks_->
at(i))
return;
204 call_completed_callbacks_->
Add(callback);
// --- Fragments of V8::RemoveCallCompletedCallback ---
// Nothing to remove if no callback was ever registered.
209 if (call_completed_callbacks_ ==
NULL)
return;
// Linear scan for the callback; removes the first match (whether the
// loop then exits is on lines not visible here).
210 for (
int i = 0; i < call_completed_callbacks_->length(); i++) {
211 if (callback == call_completed_callbacks_->
at(i)) {
212 call_completed_callbacks_->
Remove(i);
// --- Fragments of V8::FireCallCompletedCallback ---
// Fast path: no callbacks registered.
219 if (call_completed_callbacks_ ==
NULL)
return;
// Invoke every registered callback, in registration order.
225 for (
int i = 0; i < call_completed_callbacks_->length(); i++) {
226 call_completed_callbacks_->
at(i)();
// --- Fragments of V8::FillHeapNumberWithRandom ---
// Draw random bits from this context's stream, widened to 64 bits;
// how they are consumed is on lines not visible in this chunk.
242 uint64_t random_bits =
Random(context);
// 2^20 — presumably used to map random bits onto a double in [0, 1);
// confirm against the missing arithmetic below.
246 static const double binary_million = 1048576.0;
// One-time process-global setup. Fragments only — the conditions
// guarding several statements below are on lines missing from this
// chunk (original lines 259-265 etc.).
255 void V8::InitializeOncePerProcessImpl() {
// Start from the command-line flag...
258 use_crankshaft_ = FLAG_crankshaft;
// ...then force-disable it in two cases whose guards are not visible
// here (presumably serializer use / missing CPU support — confirm).
261 use_crankshaft_ =
false;
266 use_crankshaft_ =
false;
// Stress-compaction testing implies more aggressive GC flag settings.
275 if (FLAG_stress_compaction) {
276 FLAG_force_marking_deque_overflows =
true;
277 FLAG_gc_global =
true;
284 ExternalReference::SetUp();
// Thread-safe entry point: runs InitializeOncePerProcessImpl() exactly
// once per process via the CallOnce/init_once latch.
287 void V8::InitializeOncePerProcess() {
288 CallOnce(&init_once, &InitializeOncePerProcessImpl);
static bool Initialize(Deserializer *des)
void(* CallCompletedCallback)()
static void TearDownCaches()
HandleScopeImplementer * handle_scope_implementer()
V8_DECLARE_ONCE(initialize_gc_once)
static void SetFatalError()
static void FireCallCompletedCallback(Isolate *isolate)
void CallOnce(OnceType *once, NoArgFunction init_func)
bool Init(Deserializer *des)
static Object * FillHeapNumberWithRandom(Object *heap_number, Context *context)
#define ASSERT(condition)
static PerIsolateThreadData * CurrentPerIsolateThreadData()
static void EnforceFlagImplications()
static void SetUpCaches()
bool IsDefaultIsolate() const
static bool IdleNotification(int hint)
static void SetEntropySource(EntropySource source)
static ThreadId Current()
static void EnterDefaultIsolate()
void IncrementCallDepth()
uintptr_t(* ReturnAddressLocationResolver)(uintptr_t return_addr_location)
static void InitializeOncePerProcess()
static void RemoveCallCompletedCallback(CallCompletedCallback callback)
static void UnregisterAll()
static bool SupportsCrankshaft()
static void GlobalSetUp()
LazyDynamicInstance< Mutex, CreateMutexTrait, ThreadSafeInitOnceTrait >::type LazyMutex
static HeapNumber * cast(Object *obj)
void set_value(double value)
void SetUpJSCallerSavedCodeData()
static uint32_t RandomPrivate(Isolate *isolate)
static void AddCallCompletedCallback(CallCompletedCallback callback)
static void SetReturnAddressLocationResolver(ReturnAddressLocationResolver resolver)
bool(* EntropySource)(unsigned char *buffer, size_t length)
void Add(const T &element, AllocationPolicy allocator=AllocationPolicy())
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 enable use of VFP2 instructions if available enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of MIPS FPU instructions if NULL
void DecrementCallDepth()
static uint32_t Random(Context *context)