ObjectGroup::~ObjectGroup() {
  if (info_ != NULL) info_->Dispose();
}
// GlobalHandles::Node (excerpts).
// ~Node() (debug builds only): zap fields for eager trapping.
in_new_space_list_ = false;
parameter_or_next_free_.next_free = NULL;
// Node::Initialize(int index, Node** first_free): remember the node's index
// within its block and chain it onto the free list.
index_ = static_cast<uint8_t>(index);
ASSERT(static_cast<int>(index_) == index);
in_new_space_list_ = false;
parameter_or_next_free_.next_free = *first_free;
// Node::Acquire(Object* object, GlobalHandles* global_handles):
parameter_or_next_free_.parameter = NULL;
IncreaseBlockUses(global_handles);
// Node::Release(GlobalHandles* global_handles): update the weak-handle
// counters and return the node to the global free list.
global_handles->number_of_weak_handles_--;
if (object_->IsJSGlobalObject()) {
  global_handles->number_of_global_object_weak_handles_--;
}
parameter_or_next_free_.next_free = global_handles->first_free_;
global_handles->first_free_ = this;
DecreaseBlockUses(global_handles);
// Node::set_wrapper_class_id(uint16_t class_id):
class_id_ = class_id;
// Accessors for the parameter/next-free union.
void set_parameter(void* parameter) { parameter_or_next_free_.parameter = parameter; }
void* parameter() const { return parameter_or_next_free_.parameter; }
Node* next_free() { return parameter_or_next_free_.next_free; }
void set_next_free(Node* value) { parameter_or_next_free_.next_free = value; }
// Node::MakeWeak(): a handle turning weak bumps the weak-handle counters.
global_handles->number_of_weak_handles_++;
if (object_->IsJSGlobalObject()) {
  global_handles->number_of_global_object_weak_handles_++;
}
// Node::ClearWeakness(): clearing weakness reverses the accounting.
global_handles->number_of_weak_handles_--;
if (object_->IsJSGlobalObject()) {
  global_handles->number_of_global_object_weak_handles_--;
}
// Node::PostGarbageCollectionProcessing(): check that a finalized external
// string is not handed to the weak callback, then leave V8 for the call.
ASSERT(!object_->IsExternalAsciiString() ||
       ExternalAsciiString::cast(object_)->resource() != NULL);
ASSERT(!object_->IsExternalTwoByteString() ||
       ExternalTwoByteString::cast(object_)->resource() != NULL);
VMState state(isolate, EXTERNAL);
inline void IncreaseBlockUses(GlobalHandles* global_handles);
inline void DecreaseBlockUses(GlobalHandles* global_handles);
// Node data members (excerpt).
bool independent_ : 1;
bool in_new_space_list_ : 1;
// In the FREE state this union holds the free-list link instead of the
// callback parameter.
union {
  void* parameter;
  Node* next_free;
} parameter_or_next_free_;
DISALLOW_COPY_AND_ASSIGN(Node);
// GlobalHandles::NodeBlock (excerpts).
explicit NodeBlock(NodeBlock* next)
    : next_(next), used_nodes_(0), next_used_(NULL), prev_used_(NULL) {}

void PutNodesOnFreeList(Node** first_free) {
  for (int i = kSize - 1; i >= 0; --i) {
    nodes_[i].Initialize(i, first_free);
  }
}

Node* node_at(int index) {
  return &nodes_[index];
}
// The block links itself into the global list of used blocks when its first
// node is acquired...
void IncreaseUses(GlobalHandles* global_handles) {
  if (used_nodes_++ == 0) {
    NodeBlock* old_first = global_handles->first_used_block_;
    global_handles->first_used_block_ = this;
    next_used_ = old_first;
    prev_used_ = NULL;
    if (old_first == NULL) return;
    old_first->prev_used_ = this;
  }
}
// ...and unlinks itself when its last node is released.
void DecreaseUses(GlobalHandles* global_handles) {
  if (--used_nodes_ == 0) {
    if (next_used_ != NULL) next_used_->prev_used_ = prev_used_;
    if (prev_used_ != NULL) prev_used_->next_used_ = next_used_;
    if (this == global_handles->first_used_block_) {
      global_handles->first_used_block_ = next_used_;
    }
  }
}
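// --- Illustration (not part of the original file) ---------------------------
// A minimal, self-contained sketch of the technique IncreaseUses/DecreaseUses
// use above: blocks keep an intrusive doubly-linked "used" list, linking in
// when their first slot is taken and out when their last slot is freed.
// All names here (Registry, DemoBlock) are hypothetical.
#include <cassert>

struct DemoBlock;
struct Registry { DemoBlock* first_used = nullptr; };

struct DemoBlock {
  int used_slots = 0;
  DemoBlock* next_used = nullptr;
  DemoBlock* prev_used = nullptr;

  void IncreaseUses(Registry* r) {
    if (used_slots++ == 0) {            // first slot taken: link in at the head
      DemoBlock* old_first = r->first_used;
      r->first_used = this;
      next_used = old_first;
      prev_used = nullptr;
      if (old_first != nullptr) old_first->prev_used = this;
    }
  }

  void DecreaseUses(Registry* r) {
    assert(used_slots > 0);
    if (--used_slots == 0) {            // last slot freed: unlink
      if (next_used != nullptr) next_used->prev_used = prev_used;
      if (prev_used != nullptr) prev_used->next_used = next_used;
      if (this == r->first_used) r->first_used = next_used;
    }
  }
};
// ----------------------------------------------------------------------------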
// Node::FindBlock(): recover the containing NodeBlock from the node's own
// address and its index within the block, so nodes need no back-pointer.
intptr_t ptr = reinterpret_cast<intptr_t>(this);
ptr = ptr - index_ * sizeof(Node);
NodeBlock* block = reinterpret_cast<NodeBlock*>(ptr);
ASSERT(block->node_at(index_) == this);
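// --- Illustration (not part of the original file) ---------------------------
// A self-contained sketch of the FindBlock() trick: an element that knows its
// own index can recompute the address of its containing block, provided the
// element array is the first member of the block. Names (Slot, Block) are
// hypothetical.
#include <cassert>

struct Slot { unsigned char index; };

struct Block {
  Slot slots[256];  // must stay the first member for the arithmetic to hold

  Slot* at(int i) { return &slots[i]; }

  static Block* FromSlot(Slot* s) {
    char* ptr = reinterpret_cast<char*>(s) - s->index * sizeof(Slot);
    Block* block = reinterpret_cast<Block*>(ptr);
    assert(block->at(s->index) == s);
    return block;
  }
};

// Usage: Block b; b.slots[7].index = 7;
//        assert(Block::FromSlot(&b.slots[7]) == &b);
// ----------------------------------------------------------------------------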
void GlobalHandles::Node::IncreaseBlockUses(GlobalHandles* global_handles) {
  FindBlock()->IncreaseUses(global_handles);
}

void GlobalHandles::Node::DecreaseBlockUses(GlobalHandles* global_handles) {
  FindBlock()->DecreaseUses(global_handles);
}
// GlobalHandles::NodeIterator (excerpts): walks the nodes of every used block.
NodeIterator(GlobalHandles* global_handles)
    : block_(global_handles->first_used_block_), index_(0) {}
Node* node() const { return block_->node_at(index_); }
void Advance() {
  if (++index_ < NodeBlock::kSize) return;
  index_ = 0;
  block_ = block_->next_used();
}
GlobalHandles::GlobalHandles(Isolate* isolate)
    : isolate_(isolate),
      number_of_weak_handles_(0),
      number_of_global_object_weak_handles_(0),
      number_of_global_handles_(0),
      first_block_(NULL),
      first_used_block_(NULL),
      first_free_(NULL),
      post_gc_processing_count_(0) {}
// ~GlobalHandles() (excerpt): walk and delete the node blocks.
while (block != NULL) {
  NodeBlock* tmp = block->next();
  delete block;
  block = tmp;
}
// GlobalHandles::Create(Object* value): hand out a handle, growing the block
// list whenever the free list is empty.
Handle<Object> GlobalHandles::Create(Object* value) {
  isolate_->counters()->global_handles()->Increment();
  number_of_global_handles_++;
  if (first_free_ == NULL) {
    first_block_ = new NodeBlock(first_block_);
    first_block_->PutNodesOnFreeList(&first_free_);
  }
  // Take the first node from the free list.
  Node* result = first_free_;
  first_free_ = result->next_free();
  result->Acquire(value, this);
  if (isolate_->heap()->InNewSpace(value) && !result->is_in_new_space_list()) {
    new_space_nodes_.Add(result);
    result->set_in_new_space_list(true);
  }
  return result->handle();
}
// GlobalHandles::Destroy(Object** location): release the node backing the
// handle.
void GlobalHandles::Destroy(Object** location) {
  isolate_->counters()->global_handles()->Decrement();
  number_of_global_handles_--;
  if (location == NULL) return;
  Node::FromLocation(location)->Release(this);
}
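// --- Illustration (not part of the original file) ---------------------------
// A minimal sketch of the Create()/Destroy() allocation scheme above: entries
// live in fixed-size blocks, free entries are chained through an intrusive
// free list, and a new block is allocated only when that list runs dry.
// All names here (Pool, PoolBlock, Entry) are hypothetical; blocks are never
// freed in this sketch.
struct Entry { Entry* next_free = nullptr; };

struct PoolBlock {
  static const int kBlockSize = 256;
  Entry entries[kBlockSize];
  PoolBlock* next;
  explicit PoolBlock(PoolBlock* n) : next(n) {}
};

class Pool {
 public:
  Entry* Create() {
    if (first_free_ == nullptr) {          // grow: prepend a fresh block and
      blocks_ = new PoolBlock(blocks_);    // push all of its entries onto the
      for (int i = PoolBlock::kBlockSize - 1; i >= 0; --i) {  // free list
        blocks_->entries[i].next_free = first_free_;
        first_free_ = &blocks_->entries[i];
      }
    }
    Entry* result = first_free_;           // pop the head of the free list
    first_free_ = result->next_free;
    return result;
  }

  void Destroy(Entry* e) {                 // push the entry back onto the list
    e->next_free = first_free_;
    first_free_ = e;
  }

 private:
  PoolBlock* blocks_ = nullptr;
  Entry* first_free_ = nullptr;
};
// ----------------------------------------------------------------------------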
// IterateWeakRoots(ObjectVisitor* v):
if (it.node()->IsWeakRetainer()) v->VisitPointer(it.node()->location());
// IterateWeakRoots(WeakReferenceGuest f, WeakReferenceCallback callback):
if (it.node()->IsWeak() && it.node()->callback() == callback) {
  f(it.node()->object(), it.node()->parameter());
}
// IdentifyWeakHandles(WeakSlotCallback f): mark matching weak handles pending.
if (it.node()->IsWeak() && f(it.node()->location())) {
  it.node()->MarkPending();
}
// Each of the new-space traversal functions
// (IterateNewSpaceStrongAndDependentRoots,
//  IdentifyNewSpaceWeakIndependentHandles,
//  IterateNewSpaceWeakIndependentRoots) opens with the same loop over the
// nodes known to point into the new space:
for (int i = 0; i < new_space_nodes_.length(); ++i) {
  Node* node = new_space_nodes_[i];
  // ... per-function filtering of `node` elided in this excerpt ...
}
// GlobalHandles::PostGarbageCollectionProcessing(GarbageCollector collector):
// run weak callbacks after a GC. A callback may itself trigger another GC and
// re-enter this function, so post_gc_processing_count_ is used to detect that
// and bail out of the now-stale loop.
const int initial_post_gc_processing_count = ++post_gc_processing_count_;
bool next_gc_likely_to_collect_more = false;

// Scavenger branch: only the handles known to point into the new space are
// visited.
for (int i = 0; i < new_space_nodes_.length(); ++i) {
  Node* node = new_space_nodes_[i];
  // ...
  if (initial_post_gc_processing_count != post_gc_processing_count_) {
    // A weak callback started another round of processing; the current node
    // may already be gone, so bail out.
    return next_gc_likely_to_collect_more;
  }
  // ...
  next_gc_likely_to_collect_more = true;
}

// Full-collection branch: every node is visited.
for (NodeIterator it(this); !it.done(); it.Advance()) {
  if (it.node()->PostGarbageCollectionProcessing(isolate_, this)) {
    if (initial_post_gc_processing_count != post_gc_processing_count_) {
      return next_gc_likely_to_collect_more;
    }
  }
  if (!it.node()->IsRetainer()) {
    next_gc_likely_to_collect_more = true;
  }
}

// Common tail: compact new_space_nodes_ in place, keeping only nodes that
// still retain a new-space object.
int last = 0;
for (int i = 0; i < new_space_nodes_.length(); ++i) {
  Node* node = new_space_nodes_[i];
  // ...
  new_space_nodes_[last++] = node;
}
new_space_nodes_.Rewind(last);
return next_gc_likely_to_collect_more;
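// --- Illustration (not part of the original file) ---------------------------
// A stripped-down sketch of the bail-out scheme above: a monotonically
// increasing round counter detects that a callback re-entered the processing
// loop, in which case the outer invocation stops using its stale iteration
// state. Names (CallbackRunner, RunAll) are hypothetical.
#include <functional>
#include <vector>

class CallbackRunner {
 public:
  void Add(std::function<void()> cb) { callbacks_.push_back(cb); }

  // Returns false if a nested round of processing interrupted this one.
  bool RunAll() {
    const int initial_round = ++round_;
    for (size_t i = 0; i < callbacks_.size(); ++i) {
      std::function<void()> cb = callbacks_[i];
      cb();                              // may call RunAll() re-entrantly
      if (initial_round != round_) {
        return false;                    // a nested round ran; bail out
      }
    }
    return true;
  }

 private:
  std::vector<std::function<void()>> callbacks_;
  int round_ = 0;
};
// ----------------------------------------------------------------------------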
// IterateStrongRoots(ObjectVisitor* v):
if (it.node()->IsStrongRetainer()) {
  v->VisitPointer(it.node()->location());
}
// IterateAllRoots(ObjectVisitor* v):
if (it.node()->IsRetainer()) {
  v->VisitPointer(it.node()->location());
}
// IterateAllRootsWithClassIds(ObjectVisitor* v):
if (it.node()->has_wrapper_class_id() && it.node()->IsRetainer()) {
  v->VisitEmbedderReference(it.node()->location(),
                            it.node()->wrapper_class_id());
}
// GlobalHandles::RecordStats(HeapStats* stats) (excerpt):
} else if (it.node()->state() == Node::FREE) {
  *stats->free_global_handle_count += 1;
}
// Debug-only printing helpers.
void GlobalHandles::PrintStats() {
  int total = 0;
  int weak = 0;
  int pending = 0;
  int near_death = 0;
  int destroyed = 0;

  for (NodeIterator it(this); !it.done(); it.Advance()) {
    total++;
    if (it.node()->state() == Node::WEAK) weak++;
    if (it.node()->state() == Node::PENDING) pending++;
    if (it.node()->state() == Node::NEAR_DEATH) near_death++;
    if (it.node()->state() == Node::FREE) destroyed++;
  }

  PrintF("Global Handle Statistics:\n");
  PrintF("  # weak       = %d\n", weak);
  PrintF("  # pending    = %d\n", pending);
  PrintF("  # near_death = %d\n", near_death);
  PrintF("  # free       = %d\n", destroyed);
  PrintF("  # total      = %d\n", total);
}

void GlobalHandles::Print() {
  PrintF("Global handles:\n");
  for (NodeIterator it(this); !it.done(); it.Advance()) {
    PrintF("  handle %p to %p%s\n",
           reinterpret_cast<void*>(it.node()->location()),
           reinterpret_cast<void*>(it.node()->object()),
           it.node()->IsWeak() ? " (weak)" : "");
  }
}
// AddObjectGroup() and AddImplicitReferences() (excerpts): both walk the
// supplied handle array, and an empty group is dropped early.
for (size_t i = 0; i < length; ++i) {
  // ... per-handle checks elided in this excerpt ...
}
for (size_t i = 0; i < length; ++i) {
  // ... per-handle checks elided in this excerpt ...
}
if (length == 0) return;
void GlobalHandles::RemoveObjectGroups() {
  for (int i = 0; i < object_groups_.length(); i++) {
    object_groups_.at(i)->Dispose();
  }
  object_groups_.Clear();
}
void GlobalHandles::RemoveImplicitRefGroups() {
  for (int i = 0; i < implicit_ref_groups_.length(); i++) {
    implicit_ref_groups_.at(i)->Dispose();
  }
  implicit_ref_groups_.Clear();
}