class GlobalHandles::Node {
 public:
  // State transition diagram:
  // FREE -> NORMAL <-> WEAK -> PENDING -> NEAR_DEATH -> { NORMAL, WEAK, FREE }
  enum State {
    FREE = 0,
    NORMAL,      // Normal global handle.
    WEAK,        // Flagged as weak but not yet finalized.
    PENDING,     // Has been recognized as only reachable by weak handles.
    NEAR_DEATH   // Callback has informed the handle is near death.
  };

  Node() {
    // ... (layout and state STATIC_ASSERTs elided)
    STATIC_ASSERT(static_cast<int>(IsPartiallyDependent::kShift) ==
                  Internals::kNodeIsPartiallyDependentShift);
  }
#ifdef ENABLE_HANDLE_ZAPPING
  ~Node() {
    // Zap the fields for eager trapping of stale accesses.
    // ...
    parameter_or_next_free_.next_free = NULL;
    weak_callback_ = NULL;
  }
#endif

  void Initialize(int index, Node** first_free) {
    index_ = static_cast<uint8_t>(index);
    ASSERT(static_cast<int>(index_) == index);
    set_state(FREE);
    // Thread this node onto the free list.
    parameter_or_next_free_.next_free = *first_free;
    *first_free = this;
  }

  void Acquire(Object* object) {
    // ...
    parameter_or_next_free_.parameter = NULL;
    weak_callback_ = NULL;
    IncreaseBlockUses();
  }

  void Release() {
    // ...
    weak_callback_ = NULL;
    DecreaseBlockUses();
  }
  // State and flag accessors.
  State state() const { return NodeState::decode(flags_); }
  void set_state(State state) { flags_ = NodeState::update(flags_, state); }

  bool is_independent() { return IsIndependent::decode(flags_); }
  void set_independent(bool v) { flags_ = IsIndependent::update(flags_, v); }

  bool is_partially_dependent() { return IsPartiallyDependent::decode(flags_); }
  void set_partially_dependent(bool v) {
    flags_ = IsPartiallyDependent::update(flags_, v);
  }

  bool is_in_new_space_list() { return IsInNewSpaceList::decode(flags_); }
  void set_in_new_space_list(bool v) {
    flags_ = IsInNewSpaceList::update(flags_, v);
  }
  void set_parameter(void* parameter) {
    parameter_or_next_free_.parameter = parameter;
  }
  void* parameter() const { return parameter_or_next_free_.parameter; }

  // Accessors for the next free node in the free list; valid only while
  // the node is in the FREE state, when the same union word is reused.
  Node* next_free() { return parameter_or_next_free_.next_free; }
  void set_next_free(Node* value) { parameter_or_next_free_.next_free = value; }

  void MakeWeak(void* parameter, WeakCallback weak_callback) {
    set_state(WEAK);
    set_parameter(parameter);
    weak_callback_ = weak_callback;
  }
  bool PostGarbageCollectionProcessing(Isolate* isolate) {
    if (state() != PENDING) return false;
    if (weak_callback_ == NULL) {
      Release();
      return false;
    }
    void* par = parameter();
    set_state(NEAR_DEATH);
    set_parameter(NULL);

    Object** object = location();
    {
      // Check that we are not passing a finalized external string to
      // the callback.
      ASSERT(!object_->IsExternalAsciiString() ||
             ExternalAsciiString::cast(object_)->resource() != NULL);
      ASSERT(!object_->IsExternalTwoByteString() ||
             ExternalTwoByteString::cast(object_)->resource() != NULL);
      // ... (leave V8 and set up a handle scope before calling out)
      Handle<Object> handle(*object, isolate);
      v8::WeakCallbackData<v8::Value, void> data(
          reinterpret_cast<v8::Isolate*>(isolate),
          v8::Utils::ToLocal(handle),
          par);
      weak_callback_(data);
    }
    // The callback is expected to either clean up or revive the handle.
    return true;
  }
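  // --------------------------------------------------------------------
  // Illustrative sketch, not part of the original file: a stripped-down,
  // self-contained model of the weak-handle state machine driven by
  // PostGarbageCollectionProcessing() above. MiniNode and its members are
  // hypothetical names; the real code also zaps slots, guards external
  // strings, and re-checks liveness after the callback.
  struct MiniNode {
    enum MiniState { MINI_FREE, MINI_NORMAL, MINI_WEAK, MINI_PENDING,
                     MINI_NEAR_DEATH };
    MiniState mini_state;
    void* mini_parameter;
    void (*mini_weak_callback)(void* parameter);

    // GC found the target only weakly reachable and marked the node pending.
    bool MiniPostGcProcessing() {
      if (mini_state != MINI_PENDING) return false;
      if (mini_weak_callback == NULL) {
        mini_state = MINI_FREE;            // No callback: just free the node.
        return false;
      }
      void* p = mini_parameter;
      mini_state = MINI_NEAR_DEATH;        // Entered before calling out.
      mini_parameter = NULL;
      mini_weak_callback(p);               // May revive or release the handle.
      return true;
    }
  };
  // --------------------------------------------------------------------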
 private:
  inline NodeBlock* FindBlock();
  inline void IncreaseBlockUses();
  inline void DecreaseBlockUses();

  // Storage for the object pointer; placed first to avoid offset computation.
  Object* object_;

  // Wrapper class ID.
  uint16_t class_id_;

  // Index of this node in the containing handle block.
  uint8_t index_;

  // Packs the State plus three flags into one byte.
  class NodeState : public BitField<State, 0, 4> {};
  class IsIndependent : public BitField<bool, 4, 1> {};
  class IsPartiallyDependent : public BitField<bool, 5, 1> {};
  class IsInNewSpaceList : public BitField<bool, 6, 1> {};

  uint8_t flags_;

  // Handle-specific weak callback.
  WeakCallback weak_callback_;

  // Provided data for the callback; in the FREE state this union word
  // holds the free-list link instead.
  union {
    void* parameter;
    Node* next_free;
  } parameter_or_next_free_;

  DISALLOW_COPY_AND_ASSIGN(Node);
};
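// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the original file: NodeState,
// IsIndependent, IsPartiallyDependent and IsInNewSpaceList above are typed
// views onto bit ranges of the single byte flags_. A minimal standalone model
// of that encode/decode/update pattern (MiniBitField is a hypothetical name):
template <class T, int kFieldShift, int kFieldSize>
struct MiniBitField {
  static const uint32_t kFieldMask = ((1u << kFieldSize) - 1) << kFieldShift;
  // Shift a value into its bit range.
  static uint32_t encode(T value) {
    return static_cast<uint32_t>(value) << kFieldShift;
  }
  // Extract the value stored in this field's bit range.
  static T decode(uint32_t bits) {
    return static_cast<T>((bits & kFieldMask) >> kFieldShift);
  }
  // Replace only this field's bits, leaving the rest of the word intact.
  static uint32_t update(uint32_t bits, T value) {
    return (bits & ~kFieldMask) | encode(value);
  }
};
// Usage mirroring set_in_new_space_list():
//   flags = MiniBitField<bool, 6, 1>::update(flags, true);
// ---------------------------------------------------------------------------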
class GlobalHandles::NodeBlock {
 public:
  static const int kSize = 256;

  explicit NodeBlock(GlobalHandles* global_handles, NodeBlock* next)
      : next_(next),
        used_nodes_(0),
        next_used_(NULL),
        prev_used_(NULL),
        global_handles_(global_handles) {}

  void PutNodesOnFreeList(Node** first_free) {
    // Thread every node in the block onto the free list, last node first,
    // so that nodes_[0] ends up at the head.
    for (int i = kSize - 1; i >= 0; --i) {
      nodes_[i].Initialize(i, first_free);
    }
  }

  Node* node_at(int index) {
    ASSERT(0 <= index && index < kSize);
    return &nodes_[index];
  }
  void IncreaseUses() {
    ASSERT(used_nodes_ < kSize);
    if (used_nodes_++ == 0) {
      // First node in use: link this block at the head of the used list.
      NodeBlock* old_first = global_handles_->first_used_block_;
      global_handles_->first_used_block_ = this;
      next_used_ = old_first;
      prev_used_ = NULL;
      if (old_first == NULL) return;
      old_first->prev_used_ = this;
    }
  }
  void DecreaseUses() {
    ASSERT(used_nodes_ > 0);
    if (--used_nodes_ == 0) {
      // Last node released: unlink this block from the used list.
      if (next_used_ != NULL) next_used_->prev_used_ = prev_used_;
      if (prev_used_ != NULL) prev_used_->next_used_ = next_used_;
      if (this == global_handles_->first_used_block_) {
        global_handles_->first_used_block_ = next_used_;
      }
    }
  }

  GlobalHandles* global_handles() { return global_handles_; }
  NodeBlock* next_used() const { return next_used_; }

  // ... (remaining accessors and the data members: nodes_[kSize], next_,
  // used_nodes_, next_used_, prev_used_, global_handles_)
};
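// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the original file: IncreaseUses() and
// DecreaseUses() above maintain a doubly-linked list containing only blocks
// with at least one node in use, so iteration can skip empty blocks. A
// self-contained model of the 0->1 link and 1->0 unlink transitions
// (MiniBlock and the function names are hypothetical):
struct MiniBlock {
  int used_nodes;
  MiniBlock* next_used;
  MiniBlock* prev_used;
};

static void MiniOnFirstUse(MiniBlock* b, MiniBlock** first_used) {
  if (b->used_nodes++ == 0) {                  // 0 -> 1: link at the head.
    b->next_used = *first_used;
    b->prev_used = NULL;
    if (*first_used != NULL) (*first_used)->prev_used = b;
    *first_used = b;
  }
}

static void MiniOnLastRelease(MiniBlock* b, MiniBlock** first_used) {
  if (--b->used_nodes == 0) {                  // 1 -> 0: unlink.
    if (b->next_used != NULL) b->next_used->prev_used = b->prev_used;
    if (b->prev_used != NULL) b->prev_used->next_used = b->next_used;
    if (b == *first_used) *first_used = b->next_used;
  }
}
// ---------------------------------------------------------------------------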
GlobalHandles::NodeBlock* GlobalHandles::Node::FindBlock() {
  // nodes_[] is the first field of NodeBlock, so stepping back index_ nodes
  // from this node lands on the start of the enclosing block.
  intptr_t ptr = reinterpret_cast<intptr_t>(this);
  ptr = ptr - index_ * sizeof(Node);
  NodeBlock* block = reinterpret_cast<NodeBlock*>(ptr);
  ASSERT(block->node_at(index_) == this);
  return block;
}
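// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the original file: FindBlock() recovers
// the enclosing block from a node pointer alone, container_of-style, because
// each node stores its own array index and the node array sits at offset 0 of
// the block. Self-contained model with hypothetical names:
struct MiniItem { unsigned char index; };
struct MiniItemBlock {
  MiniItem items[8];   // Must be the first member for the arithmetic to hold.
};

static MiniItemBlock* MiniFindBlock(MiniItem* item) {
  intptr_t ptr = reinterpret_cast<intptr_t>(item);
  ptr -= item->index * static_cast<intptr_t>(sizeof(MiniItem));  // To items[0].
  return reinterpret_cast<MiniItemBlock*>(ptr);
}
// ---------------------------------------------------------------------------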
void GlobalHandles::Node::IncreaseBlockUses() {
  NodeBlock* node_block = FindBlock();
  node_block->IncreaseUses();
  GlobalHandles* global_handles = node_block->global_handles();
  global_handles->isolate()->counters()->global_handles()->Increment();
  global_handles->number_of_global_handles_++;
}
void GlobalHandles::Node::DecreaseBlockUses() {
  NodeBlock* node_block = FindBlock();
  GlobalHandles* global_handles = node_block->global_handles();
  // Return this node to the head of the free list.
  parameter_or_next_free_.next_free = global_handles->first_free_;
  global_handles->first_free_ = this;
  node_block->DecreaseUses();
  global_handles->isolate()->counters()->global_handles()->Decrement();
  global_handles->number_of_global_handles_--;
}
class GlobalHandles::NodeIterator {
 public:
  explicit NodeIterator(GlobalHandles* global_handles)
      : block_(global_handles->first_used_block_),
        index_(0) {}

  bool done() const { return block_ == NULL; }

  Node* node() const {
    ASSERT(!done());
    return block_->node_at(index_);
  }

  void Advance() {
    // Step to the next node, moving on to the next used block once this
    // block is exhausted.
    if (++index_ < NodeBlock::kSize) return;
    index_ = 0;
    block_ = block_->next_used();
  }

 private:
  NodeBlock* block_;
  int index_;

  DISALLOW_COPY_AND_ASSIGN(NodeIterator);
};
GlobalHandles::GlobalHandles(Isolate* isolate)
    : isolate_(isolate),
      number_of_global_handles_(0),
      first_block_(NULL),
      first_used_block_(NULL),
      first_free_(NULL),
      post_gc_processing_count_(0),
      object_group_connections_(kObjectGroupConnectionsCapacity) {}
GlobalHandles::~GlobalHandles() {
  NodeBlock* block = first_block_;
  while (block != NULL) {
    // Walk the all-blocks list, deleting each block.
    NodeBlock* tmp = block->next();
    delete block;
    block = tmp;
  }
  first_block_ = NULL;
}
Handle<Object> GlobalHandles::Create(Object* value) {
  if (first_free_ == NULL) {
    // Free list is empty: prepend a fresh block and thread its nodes on.
    first_block_ = new NodeBlock(this, first_block_);
    first_block_->PutNodesOnFreeList(&first_free_);
  }
  ASSERT(first_free_ != NULL);
  // Take the first node in the free list.
  Node* result = first_free_;
  first_free_ = result->next_free();
  result->Acquire(value);
  if (isolate_->heap()->InNewSpace(value) &&
      !result->is_in_new_space_list()) {
    new_space_nodes_.Add(result);
    result->set_in_new_space_list(true);
  }
  return result->handle();
}
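// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the original file: Create() above is a
// pop-from-free-list allocator that grows by prepending a new block whose
// slots are pre-threaded onto the list (as PutNodesOnFreeList() does).
// Self-contained model; Slot, SlotBlock and Allocate are hypothetical names:
struct Slot { Slot* next_free; };

struct SlotBlock {
  static const int kSlots = 256;
  Slot slots[kSlots];
  SlotBlock* next;
  SlotBlock(SlotBlock* n, Slot** first_free) : next(n) {
    // Thread every slot onto the free list, so slots[0] is the head.
    for (int i = kSlots - 1; i >= 0; --i) {
      slots[i].next_free = *first_free;
      *first_free = &slots[i];
    }
  }
};

static Slot* Allocate(SlotBlock** first_block, Slot** first_free) {
  if (*first_free == NULL) {
    *first_block = new SlotBlock(*first_block, first_free);  // Grow on demand.
  }
  Slot* result = *first_free;       // Pop the head of the free list.
  *first_free = result->next_free;
  return result;
}
// ---------------------------------------------------------------------------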
void GlobalHandles::IterateWeakRoots(ObjectVisitor* v) {
  for (NodeIterator it(this); !it.done(); it.Advance()) {
    if (it.node()->IsWeakRetainer()) v->VisitPointer(it.node()->location());
  }
}

void GlobalHandles::IdentifyWeakHandles(WeakSlotCallback f) {
  for (NodeIterator it(this); !it.done(); it.Advance()) {
    if (it.node()->IsWeak() && f(it.node()->location())) {
      it.node()->MarkPending();
    }
  }
}
void GlobalHandles::IterateNewSpaceStrongAndDependentRoots(ObjectVisitor* v) {
  for (int i = 0; i < new_space_nodes_.length(); ++i) {
    Node* node = new_space_nodes_[i];
    // ... (visit strong retainers and dependent weak retainers)
  }
}

void GlobalHandles::IdentifyNewSpaceWeakIndependentHandles(
    WeakSlotCallbackWithHeap f) {
  for (int i = 0; i < new_space_nodes_.length(); ++i) {
    Node* node = new_space_nodes_[i];
    // ... (mark unreachable independent weak handles as pending)
  }
}

void GlobalHandles::IterateNewSpaceWeakIndependentRoots(ObjectVisitor* v) {
  for (int i = 0; i < new_space_nodes_.length(); ++i) {
    Node* node = new_space_nodes_[i];
    // ... (visit weak independent retainers)
  }
}
bool GlobalHandles::IterateObjectGroups(ObjectVisitor* v,
                                        WeakSlotCallbackWithHeap can_skip) {
  ComputeObjectGroupsAndImplicitReferences();
  int last = 0;
  bool any_group_was_visited = false;
  for (int i = 0; i < object_groups_.length(); i++) {
    ObjectGroup* entry = object_groups_.at(i);
    ASSERT(entry != NULL);

    Object*** objects = entry->objects;
    bool group_should_be_visited = false;
    for (size_t j = 0; j < entry->length; j++) {
      Object* object = *objects[j];
      if (object->IsHeapObject()) {
        if (!can_skip(isolate_->heap(), &object)) {
          group_should_be_visited = true;
          break;
        }
      }
    }

    if (!group_should_be_visited) {
      // Keep the group for a later iteration.
      object_groups_[last++] = entry;
      continue;
    }

    // An object in the group requires visiting, so iterate over all
    // objects in the group.
    for (size_t j = 0; j < entry->length; ++j) {
      Object* object = *objects[j];
      if (object->IsHeapObject()) {
        v->VisitPointer(&object);
        any_group_was_visited = true;
      }
    }

    // Once the entire group has been iterated over, null out its slot so
    // it won't be processed again.
    // ...
    object_groups_.at(i) = NULL;
  }
  object_groups_.Rewind(last);
  return any_group_was_visited;
}
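// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the original file: the loop above filters
// object_groups_ in place - entries to keep are compacted down to index
// `last`, and Rewind(last) then drops the tail. The same idiom over a plain
// std::vector, with hypothetical names:
#include <vector>

template <class T, class Pred>
void FilterInPlace(std::vector<T*>* items, Pred should_remove) {
  size_t last = 0;
  for (size_t i = 0; i < items->size(); ++i) {
    T* entry = (*items)[i];
    if (!should_remove(entry)) {
      (*items)[last++] = entry;   // Keep: compact towards the front.
    } else {
      delete entry;               // Remove: dispose of the entry.
    }
  }
  items->resize(last);            // Analogue of List::Rewind(last).
}
// ---------------------------------------------------------------------------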
bool GlobalHandles::PostGarbageCollectionProcessing(
    GarbageCollector collector, GCTracer* tracer) {
  // Process weak global handle callbacks. This must be done after the GC is
  // completely done, because the callbacks may invoke arbitrary API functions.
  const int initial_post_gc_processing_count = ++post_gc_processing_count_;
  bool next_gc_likely_to_collect_more = false;
  if (collector == SCAVENGER) {
    for (int i = 0; i < new_space_nodes_.length(); ++i) {
      Node* node = new_space_nodes_[i];
      // ... (skip nodes that are not retainers, and nodes that are neither
      // independent nor partially dependent)
      node->clear_partially_dependent();
      if (node->PostGarbageCollectionProcessing(isolate_)) {
        if (initial_post_gc_processing_count != post_gc_processing_count_) {
          // A weak callback triggered another GC and another round of
          // processing; our iteration state is stale, so give up.
          return next_gc_likely_to_collect_more;
        }
      }
      if (!node->IsRetainer()) {
        next_gc_likely_to_collect_more = true;
      }
    }
  } else {
    for (NodeIterator it(this); !it.done(); it.Advance()) {
      if (!it.node()->IsRetainer()) {
        // Free nodes have no weak callbacks; skip them.
        continue;
      }
      it.node()->clear_partially_dependent();
      if (it.node()->PostGarbageCollectionProcessing(isolate_)) {
        if (initial_post_gc_processing_count != post_gc_processing_count_) {
          // See the comment above.
          return next_gc_likely_to_collect_more;
        }
      }
      if (!it.node()->IsRetainer()) {
        next_gc_likely_to_collect_more = true;
      }
    }
  }
  // Update the list of new-space nodes.
  int last = 0;
  for (int i = 0; i < new_space_nodes_.length(); ++i) {
    Node* node = new_space_nodes_[i];
    // ...
    if (node->IsRetainer()) {
      if (isolate_->heap()->InNewSpace(node->object())) {
        new_space_nodes_[last++] = node;
        tracer->increment_nodes_copied_in_new_space();
      } else {
        node->set_in_new_space_list(false);
        tracer->increment_nodes_promoted();
      }
    } else {
      node->set_in_new_space_list(false);
      tracer->increment_nodes_died_in_new_space();
    }
  }
  new_space_nodes_.Rewind(last);
  return next_gc_likely_to_collect_more;
}
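// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the original file: weak callbacks may
// allocate, trigger a nested GC, and re-enter the processing above,
// invalidating its iteration state. Bumping post_gc_processing_count_ on
// entry and re-checking it after every callback detects that. A
// self-contained model of the generation-counter guard (names hypothetical):
struct MiniGcProcessor {
  int post_gc_count;
  int reentries_left;

  MiniGcProcessor() : post_gc_count(0), reentries_left(3) {}

  // Hypothetical callback that may recursively re-enter Process().
  void InvokeCallback() {
    if (reentries_left > 0) { --reentries_left; Process(); }
  }

  bool Process() {
    const int initial_count = ++post_gc_count;   // Start a new generation.
    for (int i = 0; i < 8; ++i) {
      InvokeCallback();
      if (initial_count != post_gc_count) {
        // A nested Process() ran and mutated shared state: bail out and
        // let the innermost invocation's results stand.
        return false;
      }
    }
    return true;
  }
};
// ---------------------------------------------------------------------------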
void GlobalHandles::IterateStrongRoots(ObjectVisitor* v) {
  for (NodeIterator it(this); !it.done(); it.Advance()) {
    if (it.node()->IsStrongRetainer()) {
      v->VisitPointer(it.node()->location());
    }
  }
}

void GlobalHandles::IterateAllRoots(ObjectVisitor* v) {
  for (NodeIterator it(this); !it.done(); it.Advance()) {
    if (it.node()->IsRetainer()) {
      v->VisitPointer(it.node()->location());
    }
  }
}

void GlobalHandles::IterateAllRootsWithClassIds(ObjectVisitor* v) {
  for (NodeIterator it(this); !it.done(); it.Advance()) {
    if (it.node()->IsRetainer() && it.node()->has_wrapper_class_id()) {
      v->VisitEmbedderReference(it.node()->location(),
                                it.node()->wrapper_class_id());
    }
  }
}

void GlobalHandles::IterateAllRootsInNewSpaceWithClassIds(ObjectVisitor* v) {
  for (int i = 0; i < new_space_nodes_.length(); ++i) {
    Node* node = new_space_nodes_[i];
    if (node->IsRetainer() && node->has_wrapper_class_id()) {
      v->VisitEmbedderReference(node->location(),
                                node->wrapper_class_id());
    }
  }
}

int GlobalHandles::NumberOfWeakHandles() {
  int count = 0;
  for (NodeIterator it(this); !it.done(); it.Advance()) {
    if (it.node()->IsWeakRetainer()) {
      count++;
    }
  }
  return count;
}

int GlobalHandles::NumberOfGlobalObjectWeakHandles() {
  int count = 0;
  for (NodeIterator it(this); !it.done(); it.Advance()) {
    if (it.node()->IsWeakRetainer() &&
        it.node()->object()->IsJSGlobalObject()) {
      count++;
    }
  }
  return count;
}
void GlobalHandles::RecordStats(HeapStats* stats) {
  *stats->global_handle_count = 0;
  *stats->weak_global_handle_count = 0;
  *stats->pending_global_handle_count = 0;
  *stats->near_death_global_handle_count = 0;
  *stats->free_global_handle_count = 0;
  for (NodeIterator it(this); !it.done(); it.Advance()) {
    *stats->global_handle_count += 1;
    if (it.node()->state() == Node::WEAK) {
      *stats->weak_global_handle_count += 1;
    } else if (it.node()->state() == Node::PENDING) {
      *stats->pending_global_handle_count += 1;
    } else if (it.node()->state() == Node::NEAR_DEATH) {
      *stats->near_death_global_handle_count += 1;
    } else if (it.node()->state() == Node::FREE) {
      *stats->free_global_handle_count += 1;
    }
  }
}

#ifdef DEBUG

void GlobalHandles::PrintStats() {
  int total = 0;
  int weak = 0;
  int pending = 0;
  int near_death = 0;
  int destroyed = 0;

  for (NodeIterator it(this); !it.done(); it.Advance()) {
    total++;
    if (it.node()->state() == Node::WEAK) weak++;
    if (it.node()->state() == Node::PENDING) pending++;
    if (it.node()->state() == Node::NEAR_DEATH) near_death++;
    if (it.node()->state() == Node::FREE) destroyed++;
  }

  PrintF("Global Handle Statistics:\n");
  // ...
  PrintF("  # weak       = %d\n", weak);
  PrintF("  # pending    = %d\n", pending);
  PrintF("  # near_death = %d\n", near_death);
  PrintF("  # free       = %d\n", destroyed);
  PrintF("  # total      = %d\n", total);
}
void GlobalHandles::Print() {
  PrintF("Global handles:\n");
  for (NodeIterator it(this); !it.done(); it.Advance()) {
    PrintF("  handle %p to %p%s\n",
           reinterpret_cast<void*>(it.node()->location()),
           reinterpret_cast<void*>(it.node()->object()),
           it.node()->IsWeak() ? " (weak)" : "");
  }
}

#endif
void GlobalHandles::AddObjectGroup(Object*** handles,
                                   size_t length,
                                   v8::RetainedObjectInfo* info) {
#ifdef DEBUG
  for (size_t i = 0; i < length; ++i) {
    ASSERT(!Node::FromLocation(handles[i])->is_independent());
  }
#endif
  if (length == 0) {
    if (info != NULL) info->Dispose();
    return;
  }
  ObjectGroup* group = new ObjectGroup(length);
  for (size_t i = 0; i < length; ++i)
    group->objects[i] = handles[i];
  group->info = info;
  object_groups_.Add(group);
}
void GlobalHandles::AddImplicitReferences(HeapObject** parent,
                                          Object*** children,
                                          size_t length) {
#ifdef DEBUG
  for (size_t i = 0; i < length; ++i) {
    ASSERT(!Node::FromLocation(children[i])->is_independent());
  }
#endif
  if (length == 0) return;
  ImplicitRefGroup* group = new ImplicitRefGroup(parent, length);
  for (size_t i = 0; i < length; ++i)
    group->children[i] = children[i];
  implicit_ref_groups_.Add(group);
}
void GlobalHandles::SetReference(HeapObject** parent, Object** child) {
  ImplicitRefGroup* group = new ImplicitRefGroup(parent, 1);
  group->children[0] = child;
  implicit_ref_groups_.Add(group);
}
void GlobalHandles::RemoveObjectGroups() {
  for (int i = 0; i < object_groups_.length(); i++)
    delete object_groups_.at(i);
  object_groups_.Clear();
  for (int i = 0; i < retainer_infos_.length(); ++i)
    retainer_infos_[i].info->Dispose();
  retainer_infos_.Clear();
  object_group_connections_.Clear();
  object_group_connections_.Initialize(kObjectGroupConnectionsCapacity);
}
void GlobalHandles::RemoveImplicitRefGroups() {
  for (int i = 0; i < implicit_ref_groups_.length(); i++) {
    delete implicit_ref_groups_.at(i);
  }
  implicit_ref_groups_.Clear();
  implicit_ref_connections_.Clear();
}
void GlobalHandles::ComputeObjectGroupsAndImplicitReferences() {
  if (object_group_connections_.length() == 0) {
    // No connections: just dispose retainer infos and clear the lists.
    for (int i = 0; i < retainer_infos_.length(); ++i)
      retainer_infos_[i].info->Dispose();
    retainer_infos_.Clear();
    implicit_ref_connections_.Clear();
    return;
  }

  object_group_connections_.Sort();
  retainer_infos_.Sort();
  implicit_ref_connections_.Sort();

  int info_index = 0;  // For iterating retainer_infos_.
  UniqueId current_group_id(0);
  int current_group_start = 0;

  int current_implicit_refs_start = 0;
  int current_implicit_refs_end = 0;
  for (int i = 0; i <= object_group_connections_.length(); ++i) {
    if (i == 0)
      current_group_id = object_group_connections_[i].id;
    if (i == object_group_connections_.length() ||
        current_group_id != object_group_connections_[i].id) {
      // Group detected: objects in indices [current_group_start, i).

      // Find out which implicit references are related to this group.
      while (current_implicit_refs_start < implicit_ref_connections_.length() &&
             implicit_ref_connections_[current_implicit_refs_start].id <
                 current_group_id)
        ++current_implicit_refs_start;
      current_implicit_refs_end = current_implicit_refs_start;
      while (current_implicit_refs_end < implicit_ref_connections_.length() &&
             implicit_ref_connections_[current_implicit_refs_end].id ==
                 current_group_id)
        ++current_implicit_refs_end;

      if (current_implicit_refs_end > current_implicit_refs_start) {
        // Find a representative object for the implicit references.
        HeapObject** representative = NULL;
        for (int j = current_group_start; j < i; ++j) {
          Object** object = object_group_connections_[j].object;
          if ((*object)->IsHeapObject()) {
            representative = reinterpret_cast<HeapObject**>(object);
            break;
          }
        }
        if (representative) {
          ImplicitRefGroup* group = new ImplicitRefGroup(
              representative,
              current_implicit_refs_end - current_implicit_refs_start);
          for (int j = current_implicit_refs_start;
               j < current_implicit_refs_end;
               ++j) {
            group->children[j - current_implicit_refs_start] =
                implicit_ref_connections_[j].object;
          }
          implicit_ref_groups_.Add(group);
        }
        current_implicit_refs_start = current_implicit_refs_end;
      }

      // Find a RetainedObjectInfo for the group.
      RetainedObjectInfo* info = NULL;
      while (info_index < retainer_infos_.length() &&
             retainer_infos_[info_index].id < current_group_id) {
        retainer_infos_[info_index].info->Dispose();
        ++info_index;
      }
      if (info_index < retainer_infos_.length() &&
          retainer_infos_[info_index].id == current_group_id) {
        // This object group has an associated ObjectGroupRetainerInfo.
        info = retainer_infos_[info_index].info;
        ++info_index;
      }

      // Ignore groups which only contain one object.
      if (i > current_group_start + 1) {
        ObjectGroup* group = new ObjectGroup(i - current_group_start);
        for (int j = current_group_start; j < i; ++j) {
          group->objects[j - current_group_start] =
              object_group_connections_[j].object;
        }
        group->info = info;
        object_groups_.Add(group);
      } else if (info) {
        info->Dispose();
      }

      if (i < object_group_connections_.length()) {
        current_group_id = object_group_connections_[i].id;
        current_group_start = i;
      }
    }
  }
  object_group_connections_.Clear();
  object_group_connections_.Initialize(kObjectGroupConnectionsCapacity);
  retainer_infos_.Clear();
  implicit_ref_connections_.Clear();
}
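// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the original file: after the three Sort()
// calls, the function above makes a single pass over the connection list and
// treats each run of equal ids as one group; the loop deliberately runs to
// length() inclusive so the final run is flushed. The bare run-detection
// skeleton over a std::vector, with hypothetical names:
#include <vector>

struct MiniConnection { int id; };

template <class EmitFn>
void ForEachRun(const std::vector<MiniConnection>& sorted, EmitFn emit) {
  if (sorted.empty()) return;
  int current_id = sorted[0].id;
  size_t run_start = 0;
  for (size_t i = 0; i <= sorted.size(); ++i) {
    if (i == sorted.size() || sorted[i].id != current_id) {
      emit(run_start, i);          // Entries [run_start, i) share current_id.
      if (i < sorted.size()) {
        current_id = sorted[i].id; // Start the next run.
        run_start = i;
      }
    }
  }
}
// ---------------------------------------------------------------------------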
EternalHandles::EternalHandles() : size_(0) {
  for (unsigned i = 0; i < ARRAY_SIZE(singleton_handles_); i++) {
    singleton_handles_[i] = kInvalidIndex;
  }
}
EternalHandles::~EternalHandles() {
  for (int i = 0; i < blocks_.length(); i++) delete[] blocks_[i];
}
void EternalHandles::IterateAllRoots(ObjectVisitor* visitor) {
  int limit = size_;
  for (int i = 0; i < blocks_.length(); i++) {
    ASSERT(limit > 0);
    Object** block = blocks_[i];
    // Only the first `limit` slots of the final block are in use.
    visitor->VisitPointers(block, block + Min(limit, kSize));
    limit -= kSize;
  }
}
void EternalHandles::IterateNewSpaceRoots(ObjectVisitor* visitor) {
  for (int i = 0; i < new_space_indices_.length(); i++) {
    visitor->VisitPointer(GetLocation(new_space_indices_[i]));
  }
}
void EternalHandles::PostGarbageCollectionProcessing(Heap* heap) {
  // Compact the list of new-space indices, dropping entries whose targets
  // were promoted out of new space.
  int last = 0;
  for (int i = 0; i < new_space_indices_.length(); i++) {
    int index = new_space_indices_[i];
    if (heap->InNewSpace(*GetLocation(index))) {
      new_space_indices_[last++] = index;
    }
  }
  new_space_indices_.Rewind(last);
}
void EternalHandles::Create(Isolate* isolate, Object* object, int* index) {
  ASSERT_EQ(kInvalidIndex, *index);
  if (object == NULL) return;
  ASSERT_NE(isolate->heap()->the_hole_value(), object);
  int block = size_ >> kShift;
  int offset = size_ & kMask;
  // Need a new block?
  if (offset == 0) {
    Object** next_block = new Object*[kSize];
    Object* the_hole = isolate->heap()->the_hole_value();
    MemsetPointer(next_block, the_hole, kSize);
    blocks_.Add(next_block);
  }
  ASSERT_EQ(isolate->heap()->the_hole_value(), blocks_[block][offset]);
  blocks_[block][offset] = object;
  if (isolate->heap()->InNewSpace(object)) {
    new_space_indices_.Add(size_);
  }
  *index = size_++;
}
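// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the original file: Create() above
// addresses storage by a flat index split via kShift/kMask into a block
// number and an offset, so capacity grows one fixed-size block at a time
// while every handed-out index stays stable. Self-contained model with
// hypothetical names (a shift of 8 gives 256 slots per block):
#include <vector>

struct MiniBlockedStore {
  static const int kBlockShift = 8;
  static const int kBlockSize = 1 << kBlockShift;
  static const int kBlockMask = kBlockSize - 1;
  std::vector<void**> blocks;
  int size;

  MiniBlockedStore() : size(0) {}

  int Append(void* value) {
    int block = size >> kBlockShift;   // Which block the next slot lives in.
    int offset = size & kBlockMask;    // Slot within that block.
    if (offset == 0) blocks.push_back(new void*[kBlockSize]());  // Grow.
    blocks[block][offset] = value;
    return size++;                     // The slot's stable flat index.
  }

  // Analogue of GetLocation(index): a stable pointer to the slot.
  void** Location(int index) {
    return &blocks[index >> kBlockShift][index & kBlockMask];
  }
};
// ---------------------------------------------------------------------------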