bool Locker::active_ = false;


// Once a Locker is constructed, the current thread holds the lock for the
// given isolate.
Locker::Locker(v8::Isolate* isolate)
    : has_lock_(false),
      top_level_(true),
      isolate_(reinterpret_cast<i::Isolate*>(isolate)) {
  if (isolate_ == NULL) {
    isolate_ = i::Isolate::GetDefaultIsolateForLocking();
  }
  // ...
  internal::ExecutionAccess access(isolate_);
  // ...
}


bool Locker::IsLocked(v8::Isolate* isolate) {
  i::Isolate* internal_isolate = reinterpret_cast<i::Isolate*>(isolate);
  if (internal_isolate == NULL) {
    internal_isolate = i::Isolate::GetDefaultIsolateForLocking();
  }
  return internal_isolate->thread_manager()->IsLockedByCurrentThread();
}


Unlocker::Unlocker(v8::Isolate* isolate)
    : isolate_(reinterpret_cast<i::Isolate*>(isolate)) {
  if (isolate_ == NULL) {
    isolate_ = i::Isolate::GetDefaultIsolateForLocking();
  }
  isolate_->thread_manager()->ArchiveThread();
  isolate_->thread_manager()->Unlock();
}
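

// --- Usage sketch (editorial addition, not part of v8threads.cc) -----------
// How an embedder pairs Locker and Unlocker around a blocking call; the
// DoBlockingWork() helper is hypothetical.
static void DoBlockingWork();  // hypothetical; defined by the embedder

static void RunWithTemporaryUnlock(v8::Isolate* isolate) {
  v8::Locker locker(isolate);        // take the isolate's lock (RAII)
  {
    v8::Unlocker unlocker(isolate);  // archives this thread, drops the lock
    DoBlockingWork();                // other threads may enter V8 here
  }                                  // ~Unlocker relocks and restores state
}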


bool ThreadManager::RestoreThread() {
  ASSERT(IsLockedByCurrentThread());
  // A thread that was only lazily archived just gets its storage back.
  if (lazily_archived_thread_.Equals(ThreadId::Current())) {
    // ...
    lazily_archived_thread_state_ = NULL;
    // ...
    return true;
  }
  // Block the preemption thread while state is archived or restored.
  ExecutionAccess access(isolate_);
  // Another thread left in the lazily-archived state must be archived now.
  if (lazily_archived_thread_.IsValid()) {
    EagerlyArchiveThread();
  }
  // ...
  char* from = state->data();
  // ...
  from = Relocatable::RestoreState(isolate_, from);
#ifdef ENABLE_DEBUGGER_SUPPORT
  from = isolate_->debug()->RestoreDebug(from);
#endif
  // ...
}


static int ArchiveSpacePerThread() {
  return HandleScopeImplementer::ArchiveSpacePerThread() +
         Isolate::ArchiveSpacePerThread() +
#ifdef ENABLE_DEBUGGER_SUPPORT
         Debug::ArchiveSpacePerThread() +
#endif
         StackGuard::ArchiveSpacePerThread() +
         RegExpStack::ArchiveSpacePerThread() +
         Bootstrapper::ArchiveSpacePerThread() +
         Relocatable::ArchiveSpacePerThread();
}
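

// --- Illustration (editorial addition, assumption) --------------------------
// Each component archives into a fixed-size slice of one contiguous char
// buffer, and the sum above sizes that buffer.  A toy component following the
// same SpacePerThread/Archive/Restore contract:
#include <cstring>  // for the sketch only

struct ToyComponent {
  int value;
  static int SpacePerThread() { return static_cast<int>(sizeof(int)); }
  char* Archive(char* to) {    // write state, return the advanced cursor
    std::memcpy(to, &value, sizeof(int));
    return to + sizeof(int);
  }
  char* Restore(char* from) {  // read state, return the advanced cursor
    std::memcpy(&value, from, sizeof(int));
    return from + sizeof(int);
  }
};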


ThreadState::ThreadState(ThreadManager* thread_manager)
    : id_(ThreadId::Invalid()),
      terminate_on_restore_(false),
      // ...
      thread_manager_(thread_manager) {
}


ThreadState::~ThreadState() {
  DeleteArray<char>(data_);
}


void ThreadState::AllocateSpace() {
  data_ = NewArray<char>(ArchiveSpacePerThread());
}


void ThreadState::Unlink() {
  next_->previous_ = previous_;
  previous_->next_ = next_;
}


void ThreadState::LinkInto(List list) {
  ThreadState* flying_anchor =
      list == FREE_LIST ? thread_manager_->free_anchor_
                        : thread_manager_->in_use_anchor_;
  next_ = flying_anchor->next_;
  previous_ = flying_anchor;
  flying_anchor->next_ = this;
  next_->previous_ = this;
}
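

// --- Illustration (editorial addition, assumption) --------------------------
// free_anchor_ and in_use_anchor_ are sentinel nodes of circular doubly-linked
// lists, so Unlink() and LinkInto() never special-case an empty list.  A toy
// node with the same invariant (a lone node forms a one-element cycle):
struct ToyNode {
  ToyNode* next_ = this;
  ToyNode* previous_ = this;
  void InsertAfter(ToyNode* anchor) {
    next_ = anchor->next_;
    previous_ = anchor;
    anchor->next_->previous_ = this;
    anchor->next_ = this;
  }
  void Unlink() {
    next_->previous_ = previous_;
    previous_->next_ = next_;
  }
};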


ThreadState* ThreadManager::GetFreeThreadState() {
  ThreadState* gotten = free_anchor_->next_;
  if (gotten == free_anchor_) {
    ThreadState* new_thread_state = new ThreadState(this);
    new_thread_state->AllocateSpace();
    return new_thread_state;
  }
  return gotten;
}


// Gets the first state in the list of archived threads.
ThreadState* ThreadManager::FirstThreadStateInUse() {
  return in_use_anchor_->Next();
}


ThreadState* ThreadState::Next() {
  if (next_ == thread_manager_->in_use_anchor_) return NULL;
  return next_;
}


ThreadManager::ThreadManager()
    : mutex_(OS::CreateMutex()),
      mutex_owner_(ThreadId::Invalid()),
      lazily_archived_thread_(ThreadId::Invalid()),
      lazily_archived_thread_state_(NULL),
      free_anchor_(NULL),
      in_use_anchor_(NULL) {
  free_anchor_ = new ThreadState(this);
  in_use_anchor_ = new ThreadState(this);
}


ThreadManager::~ThreadManager() {
  delete mutex_;
  DeleteThreadStateList(free_anchor_);
  DeleteThreadStateList(in_use_anchor_);
}


void ThreadManager::DeleteThreadStateList(ThreadState* anchor) {
  // The list starts and ends with the anchor.
  for (ThreadState* current = anchor->next_; current != anchor;) {
    ThreadState* next = current->next_;
    delete current;
    current = next;
  }
  delete anchor;
}


void ThreadManager::ArchiveThread() {
  // ...
  Isolate::PerIsolateThreadData* per_thread =
      isolate_->FindOrAllocatePerThreadDataForThisThread();
  // ...
  lazily_archived_thread_state_ = state;
  // ...
}


void ThreadManager::EagerlyArchiveThread() {
  ASSERT(IsLockedByCurrentThread());
  ThreadState* state = lazily_archived_thread_state_;
  state->LinkInto(ThreadState::IN_USE_LIST);
  char* to = state->data();
  // Data containing GC roots is archived first; see Iterate(ObjectVisitor*).
  to = isolate_->handle_scope_implementer()->ArchiveThread(to);
  to = isolate_->ArchiveThread(to);
  to = Relocatable::ArchiveState(isolate_, to);
#ifdef ENABLE_DEBUGGER_SUPPORT
  to = isolate_->debug()->ArchiveDebug(to);
#endif
  // ... stack guard, regexp stack and bootstrapper state follow ...
  lazily_archived_thread_ = ThreadId::Invalid();
  lazily_archived_thread_state_ = NULL;
}
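
// Note (editorial): the protocol above is deliberately lazy.  ArchiveThread()
// only records the current thread in lazily_archived_thread_ and reserves a
// free ThreadState; no state is copied.  The copy in EagerlyArchiveThread()
// runs only when a different thread takes the lock first (see RestoreThread()
// above), so a single thread's lock/unlock round trip never pays for a full
// archive.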


void ThreadManager::FreeThreadResources() {
  // ...
#ifdef ENABLE_DEBUGGER_SUPPORT
  isolate_->debug()->FreeThreadResources();
#endif
  // ...
}


void ThreadManager::Iterate(ObjectVisitor* v) {
  for (ThreadState* state = FirstThreadStateInUse();
       state != NULL;
       state = state->Next()) {
    char* data = state->data();
    data = HandleScopeImplementer::Iterate(v, data);
    data = isolate_->Iterate(v, data);
    data = Relocatable::Iterate(v, data);
  }
}


void ThreadManager::IterateArchivedThreads(ThreadVisitor* v) {
  for (ThreadState* state = FirstThreadStateInUse();
       state != NULL;
       state = state->Next()) {
    char* data = state->data();
    // ...
  }
}


void ThreadManager::TerminateExecution(ThreadId thread_id) {
  for (ThreadState* state = FirstThreadStateInUse();
       state != NULL;
       state = state->Next()) {
    if (thread_id.Equals(state->id())) {
      state->set_terminate_on_restore(true);
    }
  }
}


ContextSwitcher::ContextSwitcher(Isolate* isolate, int every_n_ms)
    : Thread("v8:CtxtSwitcher"),
      keep_going_(true),
      sleep_ms_(every_n_ms),
      isolate_(isolate) {
}


// Set the preemption interval; starts the ContextSwitcher thread if needed.
void ContextSwitcher::StartPreemption(int every_n_ms) {
  Isolate* isolate = Isolate::Current();
  // ...
}


// Disable preemption; from here on, threads must schedule cooperatively.
void ContextSwitcher::StopPreemption() {
  Isolate* isolate = Isolate::Current();
  // ...
}


// Main loop of the ContextSwitcher thread: preempt the running V8 thread at
// regular intervals.
void ContextSwitcher::Run() {
  while (keep_going_) {
    Sleep(sleep_ms_);
    isolate()->stack_guard()->Preempt();
  }
}
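

// --- Illustration (editorial addition, assumption) --------------------------
// The same periodic-preemption shape with std::thread: sleep, then poke the
// VM thread, until asked to stop.  RequestInterrupt() stands in for the
// stack-guard preemption request and is hypothetical.
#include <atomic>
#include <chrono>
#include <thread>

class ToySwitcher {
 public:
  explicit ToySwitcher(int every_n_ms)
      : keep_going_(true),
        thread_([this, every_n_ms] {
          while (keep_going_.load()) {
            std::this_thread::sleep_for(
                std::chrono::milliseconds(every_n_ms));
            RequestInterrupt();  // hypothetical stand-in for Preempt()
          }
        }) {}
  ~ToySwitcher() {
    keep_going_.store(false);  // the loop exits after its current sleep
    thread_.join();
  }

 private:
  void RequestInterrupt() {}  // stub for the sketch
  std::atomic<bool> keep_going_;
  std::thread thread_;
};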