v8  3.25.30(node0.11.13)
V8 is Google's open source JavaScript engine
 All Data Structures Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
global-handles.cc
Go to the documentation of this file.
1 // Copyright 2009 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #include "api.h"
31 #include "global-handles.h"
32 
33 #include "vm-state-inl.h"
34 
35 namespace v8 {
36 namespace internal {
37 
38 
40  if (info != NULL) info->Dispose();
41  delete[] objects;
42 }
43 
44 
46  delete[] children;
47 }
48 
49 
51  public:
52  // State transition diagram:
53  // FREE -> NORMAL <-> WEAK -> PENDING -> NEAR_DEATH -> { NORMAL, WEAK, FREE }
54  enum State {
55  FREE = 0,
56  NORMAL, // Normal global handle.
57  WEAK, // Flagged as weak but not yet finalized.
58  PENDING, // Has been recognized as only reachable by weak handles.
59  NEAR_DEATH // Callback has informed the handle is near death.
60  };
61 
62  // Maps handle location (slot) to the containing node.
64  ASSERT(OFFSET_OF(Node, object_) == 0);
65  return reinterpret_cast<Node*>(location);
66  }
67 
68  Node() {
71  STATIC_ASSERT(static_cast<int>(NodeState::kMask) ==
76  STATIC_ASSERT(static_cast<int>(IsIndependent::kShift) ==
78  STATIC_ASSERT(static_cast<int>(IsPartiallyDependent::kShift) ==
80  }
81 
82 #ifdef ENABLE_HANDLE_ZAPPING
83  ~Node() {
84  // TODO(1428): if it's a weak handle we should have invoked its callback.
85  // Zap the values for eager trapping.
86  object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue);
88  index_ = 0;
89  set_independent(false);
91  set_in_new_space_list(false);
92  parameter_or_next_free_.next_free = NULL;
93  weak_callback_ = NULL;
94  }
95 #endif
96 
  // Resets this node to the FREE state and pushes it onto the free list
  // rooted at |*first_free|.  |index| records the node's position inside its
  // containing NodeBlock so that FindBlock() can recover the block later.
  void Initialize(int index, Node** first_free) {
    index_ = static_cast<uint8_t>(index);
    ASSERT(static_cast<int>(index_) == index);  // Index must fit in a byte.
    set_state(FREE);
    set_in_new_space_list(false);
    // Link this node in at the head of the free list.
    parameter_or_next_free_.next_free = *first_free;
    *first_free = this;
  }
105 
106  void Acquire(Object* object) {
107  ASSERT(state() == FREE);
108  object_ = object;
110  set_independent(false);
112  set_state(NORMAL);
113  parameter_or_next_free_.parameter = NULL;
114  weak_callback_ = NULL;
115  IncreaseBlockUses();
116  }
117 
118  void Release() {
119  ASSERT(state() != FREE);
120  set_state(FREE);
121  // Zap the values for eager trapping.
122  object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue);
124  set_independent(false);
126  weak_callback_ = NULL;
127  DecreaseBlockUses();
128  }
129 
130  // Object slot accessors.
131  Object* object() const { return object_; }
132  Object** location() { return &object_; }
134 
135  // Wrapper class ID accessors.
136  bool has_wrapper_class_id() const {
138  }
139 
140  uint16_t wrapper_class_id() const { return class_id_; }
141 
142  // State and flag accessors.
143 
144  State state() const {
145  return NodeState::decode(flags_);
146  }
148  flags_ = NodeState::update(flags_, state);
149  }
150 
151  bool is_independent() {
152  return IsIndependent::decode(flags_);
153  }
154  void set_independent(bool v) {
155  flags_ = IsIndependent::update(flags_, v);
156  }
157 
159  return IsPartiallyDependent::decode(flags_);
160  }
161  void set_partially_dependent(bool v) {
162  flags_ = IsPartiallyDependent::update(flags_, v);
163  }
164 
166  return IsInNewSpaceList::decode(flags_);
167  }
168  void set_in_new_space_list(bool v) {
169  flags_ = IsInNewSpaceList::update(flags_, v);
170  }
171 
172  bool IsNearDeath() const {
173  // Check for PENDING to ensure correct answer when processing callbacks.
174  return state() == PENDING || state() == NEAR_DEATH;
175  }
176 
177  bool IsWeak() const { return state() == WEAK; }
178 
179  bool IsRetainer() const { return state() != FREE; }
180 
181  bool IsStrongRetainer() const { return state() == NORMAL; }
182 
183  bool IsWeakRetainer() const {
184  return state() == WEAK || state() == PENDING || state() == NEAR_DEATH;
185  }
186 
187  void MarkPending() {
188  ASSERT(state() == WEAK);
190  }
191 
192  // Independent flag accessors.
194  ASSERT(state() != FREE);
195  set_independent(true);
196  }
197 
199  ASSERT(state() != FREE);
200  if (GetGlobalHandles()->isolate()->heap()->InNewSpace(object_)) {
202  }
203  }
205 
206  // Callback accessor.
207  // TODO(svenpanne) Re-enable or nuke later.
208  // WeakReferenceCallback callback() { return callback_; }
209 
210  // Callback parameter accessors.
211  void set_parameter(void* parameter) {
212  ASSERT(state() != FREE);
213  parameter_or_next_free_.parameter = parameter;
214  }
215  void* parameter() const {
216  ASSERT(state() != FREE);
217  return parameter_or_next_free_.parameter;
218  }
219 
220  // Accessors for next free node in the free list.
222  ASSERT(state() == FREE);
223  return parameter_or_next_free_.next_free;
224  }
225  void set_next_free(Node* value) {
226  ASSERT(state() == FREE);
227  parameter_or_next_free_.next_free = value;
228  }
229 
  // Turns the handle weak: stores |weak_callback| (required, non-NULL) and
  // its |parameter| so they can be invoked during post-GC processing once
  // the object is found to be only weakly reachable (see the WEAK -> PENDING
  // transition in the state diagram above).
  void MakeWeak(void* parameter, WeakCallback weak_callback) {
    ASSERT(weak_callback != NULL);
    ASSERT(state() != FREE);
    set_state(WEAK);
    set_parameter(parameter);
    weak_callback_ = weak_callback;
  }
237 
238  void* ClearWeakness() {
239  ASSERT(state() != FREE);
240  void* p = parameter();
241  set_state(NORMAL);
243  return p;
244  }
245 
247  if (state() != Node::PENDING) return false;
248  if (weak_callback_ == NULL) {
249  Release();
250  return false;
251  }
252  void* par = parameter();
255 
256  Object** object = location();
257  {
258  // Check that we are not passing a finalized external string to
259  // the callback.
260  ASSERT(!object_->IsExternalAsciiString() ||
261  ExternalAsciiString::cast(object_)->resource() != NULL);
262  ASSERT(!object_->IsExternalTwoByteString() ||
263  ExternalTwoByteString::cast(object_)->resource() != NULL);
264  // Leaving V8.
265  VMState<EXTERNAL> state(isolate);
266  HandleScope handle_scope(isolate);
267  Handle<Object> handle(*object, isolate);
269  reinterpret_cast<v8::Isolate*>(isolate),
270  v8::Utils::ToLocal(handle),
271  par);
272  weak_callback_(data);
273  }
274  // Absence of explicit cleanup or revival of weak handle
275  // in most of the cases would lead to memory leak.
276  CHECK(state() != NEAR_DEATH);
277  return true;
278  }
279 
281 
282  private:
283  inline NodeBlock* FindBlock();
284  inline void IncreaseBlockUses();
285  inline void DecreaseBlockUses();
286 
287  // Storage for object pointer.
288  // Placed first to avoid offset computation.
289  Object* object_;
290 
291  // Next word stores class_id, index, state, and independent.
292  // Note: the most aligned fields should go first.
293 
294  // Wrapper class ID.
295  uint16_t class_id_;
296 
297  // Index in the containing handle block.
298  uint8_t index_;
299 
300  // This stores three flags (independent, partially_dependent and
301  // in_new_space_list) and a State.
302  class NodeState: public BitField<State, 0, 4> {};
303  class IsIndependent: public BitField<bool, 4, 1> {};
304  class IsPartiallyDependent: public BitField<bool, 5, 1> {};
305  class IsInNewSpaceList: public BitField<bool, 6, 1> {};
306 
307  uint8_t flags_;
308 
309  // Handle specific callback - might be a weak reference in disguise.
310  WeakCallback weak_callback_;
311 
312  // Provided data for callback. In FREE state, this is used for
313  // the free list link.
314  union {
315  void* parameter;
317  } parameter_or_next_free_;
318 
319  DISALLOW_COPY_AND_ASSIGN(Node);
320 };
321 
322 
324  public:
325  static const int kSize = 256;
326 
328  : next_(next),
329  used_nodes_(0),
330  next_used_(NULL),
331  prev_used_(NULL),
332  global_handles_(global_handles) {}
333 
  // Threads every node of this block onto the free list at |*first_free|.
  // Iterates backwards so that, after the loop, nodes come off the free list
  // in increasing index order.
  void PutNodesOnFreeList(Node** first_free) {
    for (int i = kSize - 1; i >= 0; --i) {
      nodes_[i].Initialize(i, first_free);
    }
  }
339 
340  Node* node_at(int index) {
341  ASSERT(0 <= index && index < kSize);
342  return &nodes_[index];
343  }
344 
  // Records one more used node.  On the 0 -> 1 transition the block is
  // spliced onto the front of the global_handles_ doubly-linked list of
  // blocks with used nodes (first_used_block_).
  void IncreaseUses() {
    ASSERT(used_nodes_ < kSize);
    if (used_nodes_++ == 0) {
      NodeBlock* old_first = global_handles_->first_used_block_;
      global_handles_->first_used_block_ = this;
      next_used_ = old_first;
      prev_used_ = NULL;
      // An empty list has no old head whose back-pointer needs fixing.
      if (old_first == NULL) return;
      old_first->prev_used_ = this;
    }
  }
356 
  // Records one less used node.  When the block becomes empty it is unlinked
  // from the doubly-linked list of blocks with used nodes, updating the list
  // head if this block was at the front.
  void DecreaseUses() {
    ASSERT(used_nodes_ > 0);
    if (--used_nodes_ == 0) {
      if (next_used_ != NULL) next_used_->prev_used_ = prev_used_;
      if (prev_used_ != NULL) prev_used_->next_used_ = next_used_;
      if (this == global_handles_->first_used_block_) {
        global_handles_->first_used_block_ = next_used_;
      }
    }
  }
367 
368  GlobalHandles* global_handles() { return global_handles_; }
369 
370  // Next block in the list of all blocks.
371  NodeBlock* next() const { return next_; }
372 
373  // Next/previous block in the list of blocks with used nodes.
374  NodeBlock* next_used() const { return next_used_; }
375  NodeBlock* prev_used() const { return prev_used_; }
376 
377  private:
378  Node nodes_[kSize];
379  NodeBlock* const next_;
380  int used_nodes_;
381  NodeBlock* next_used_;
382  NodeBlock* prev_used_;
383  GlobalHandles* global_handles_;
384 };
385 
386 
388  return FindBlock()->global_handles();
389 }
390 
391 
392 GlobalHandles::NodeBlock* GlobalHandles::Node::FindBlock() {
393  intptr_t ptr = reinterpret_cast<intptr_t>(this);
394  ptr = ptr - index_ * sizeof(Node);
395  NodeBlock* block = reinterpret_cast<NodeBlock*>(ptr);
396  ASSERT(block->node_at(index_) == this);
397  return block;
398 }
399 
400 
401 void GlobalHandles::Node::IncreaseBlockUses() {
402  NodeBlock* node_block = FindBlock();
403  node_block->IncreaseUses();
404  GlobalHandles* global_handles = node_block->global_handles();
405  global_handles->isolate()->counters()->global_handles()->Increment();
406  global_handles->number_of_global_handles_++;
407 }
408 
409 
void GlobalHandles::Node::DecreaseBlockUses() {
  NodeBlock* node_block = FindBlock();
  GlobalHandles* global_handles = node_block->global_handles();
  // Return this node to the head of the global free list.  This reuses the
  // parameter union, which is legal because the node is no longer in use.
  parameter_or_next_free_.next_free = global_handles->first_free_;
  global_handles->first_free_ = this;
  // Then update the block's use count and the isolate-wide statistics.
  node_block->DecreaseUses();
  global_handles->isolate()->counters()->global_handles()->Decrement();
  global_handles->number_of_global_handles_--;
}
419 
420 
422  public:
423  explicit NodeIterator(GlobalHandles* global_handles)
424  : block_(global_handles->first_used_block_),
425  index_(0) {}
426 
427  bool done() const { return block_ == NULL; }
428 
429  Node* node() const {
430  ASSERT(!done());
431  return block_->node_at(index_);
432  }
433 
434  void Advance() {
435  ASSERT(!done());
436  if (++index_ < NodeBlock::kSize) return;
437  index_ = 0;
438  block_ = block_->next_used();
439  }
440 
441  private:
442  NodeBlock* block_;
443  int index_;
444 
446 };
447 
448 
// All bookkeeping starts out empty: node blocks are allocated lazily by
// Create() the first time the free list runs dry.
GlobalHandles::GlobalHandles(Isolate* isolate)
    : isolate_(isolate),
      number_of_global_handles_(0),
      first_block_(NULL),
      first_used_block_(NULL),
      first_free_(NULL),
      post_gc_processing_count_(0),
      object_group_connections_(kObjectGroupConnectionsCapacity) {}
457 
458 
460  NodeBlock* block = first_block_;
461  while (block != NULL) {
462  NodeBlock* tmp = block->next();
463  delete block;
464  block = tmp;
465  }
466  first_block_ = NULL;
467 }
468 
469 
471  if (first_free_ == NULL) {
472  first_block_ = new NodeBlock(this, first_block_);
473  first_block_->PutNodesOnFreeList(&first_free_);
474  }
475  ASSERT(first_free_ != NULL);
476  // Take the first node in the free list.
477  Node* result = first_free_;
478  first_free_ = result->next_free();
479  result->Acquire(value);
480  if (isolate_->heap()->InNewSpace(value) &&
481  !result->is_in_new_space_list()) {
482  new_space_nodes_.Add(result);
483  result->set_in_new_space_list(true);
484  }
485  return result->handle();
486 }
487 
488 
490  ASSERT(location != NULL);
491  return Node::FromLocation(location)->GetGlobalHandles()->Create(*location);
492 }
493 
494 
495 void GlobalHandles::Destroy(Object** location) {
496  if (location != NULL) Node::FromLocation(location)->Release();
497 }
498 
499 
501  void* parameter,
502  WeakCallback weak_callback) {
503  Node::FromLocation(location)->MakeWeak(parameter, weak_callback);
504 }
505 
506 
508  return Node::FromLocation(location)->ClearWeakness();
509 }
510 
511 
513  Node::FromLocation(location)->MarkIndependent();
514 }
515 
516 
519 }
520 
521 
523  return Node::FromLocation(location)->is_independent();
524 }
525 
526 
528  return Node::FromLocation(location)->IsNearDeath();
529 }
530 
531 
532 bool GlobalHandles::IsWeak(Object** location) {
533  return Node::FromLocation(location)->IsWeak();
534 }
535 
536 
537 void GlobalHandles::IterateWeakRoots(ObjectVisitor* v) {
538  for (NodeIterator it(this); !it.done(); it.Advance()) {
539  if (it.node()->IsWeakRetainer()) v->VisitPointer(it.node()->location());
540  }
541 }
542 
543 
545  for (NodeIterator it(this); !it.done(); it.Advance()) {
546  if (it.node()->IsWeak() && f(it.node()->location())) {
547  it.node()->MarkPending();
548  }
549  }
550 }
551 
552 
554  for (int i = 0; i < new_space_nodes_.length(); ++i) {
555  Node* node = new_space_nodes_[i];
556  if (node->IsStrongRetainer() ||
557  (node->IsWeakRetainer() && !node->is_independent() &&
558  !node->is_partially_dependent())) {
559  v->VisitPointer(node->location());
560  }
561  }
562 }
563 
564 
567  for (int i = 0; i < new_space_nodes_.length(); ++i) {
568  Node* node = new_space_nodes_[i];
569  ASSERT(node->is_in_new_space_list());
570  if ((node->is_independent() || node->is_partially_dependent()) &&
571  node->IsWeak() && f(isolate_->heap(), node->location())) {
572  node->MarkPending();
573  }
574  }
575 }
576 
577 
579  for (int i = 0; i < new_space_nodes_.length(); ++i) {
580  Node* node = new_space_nodes_[i];
581  ASSERT(node->is_in_new_space_list());
582  if ((node->is_independent() || node->is_partially_dependent()) &&
583  node->IsWeakRetainer()) {
584  v->VisitPointer(node->location());
585  }
586  }
587 }
588 
589 
591  WeakSlotCallbackWithHeap can_skip) {
592  ComputeObjectGroupsAndImplicitReferences();
593  int last = 0;
594  bool any_group_was_visited = false;
595  for (int i = 0; i < object_groups_.length(); i++) {
596  ObjectGroup* entry = object_groups_.at(i);
597  ASSERT(entry != NULL);
598 
599  Object*** objects = entry->objects;
600  bool group_should_be_visited = false;
601  for (size_t j = 0; j < entry->length; j++) {
602  Object* object = *objects[j];
603  if (object->IsHeapObject()) {
604  if (!can_skip(isolate_->heap(), &object)) {
605  group_should_be_visited = true;
606  break;
607  }
608  }
609  }
610 
611  if (!group_should_be_visited) {
612  object_groups_[last++] = entry;
613  continue;
614  }
615 
616  // An object in the group requires visiting, so iterate over all
617  // objects in the group.
618  for (size_t j = 0; j < entry->length; ++j) {
619  Object* object = *objects[j];
620  if (object->IsHeapObject()) {
621  v->VisitPointer(&object);
622  any_group_was_visited = true;
623  }
624  }
625 
626  // Once the entire group has been iterated over, set the object
627  // group to NULL so it won't be processed again.
628  delete entry;
629  object_groups_.at(i) = NULL;
630  }
631  object_groups_.Rewind(last);
632  return any_group_was_visited;
633 }
634 
635 
637  GarbageCollector collector, GCTracer* tracer) {
638  // Process weak global handle callbacks. This must be done after the
639  // GC is completely done, because the callbacks may invoke arbitrary
640  // API functions.
641  ASSERT(isolate_->heap()->gc_state() == Heap::NOT_IN_GC);
642  const int initial_post_gc_processing_count = ++post_gc_processing_count_;
643  bool next_gc_likely_to_collect_more = false;
644  if (collector == SCAVENGER) {
645  for (int i = 0; i < new_space_nodes_.length(); ++i) {
646  Node* node = new_space_nodes_[i];
647  ASSERT(node->is_in_new_space_list());
648  if (!node->IsRetainer()) {
649  // Free nodes do not have weak callbacks. Do not use them to compute
650  // the next_gc_likely_to_collect_more.
651  continue;
652  }
653  // Skip dependent handles. Their weak callbacks might expect to be
654  // called between two global garbage collection callbacks which
655  // are not called for minor collections.
656  if (!node->is_independent() && !node->is_partially_dependent()) {
657  continue;
658  }
660  if (node->PostGarbageCollectionProcessing(isolate_)) {
661  if (initial_post_gc_processing_count != post_gc_processing_count_) {
662  // Weak callback triggered another GC and another round of
663  // PostGarbageCollection processing. The current node might
664  // have been deleted in that round, so we need to bail out (or
665  // restart the processing).
666  return next_gc_likely_to_collect_more;
667  }
668  }
669  if (!node->IsRetainer()) {
670  next_gc_likely_to_collect_more = true;
671  }
672  }
673  } else {
674  for (NodeIterator it(this); !it.done(); it.Advance()) {
675  if (!it.node()->IsRetainer()) {
676  // Free nodes do not have weak callbacks. Do not use them to compute
677  // the next_gc_likely_to_collect_more.
678  continue;
679  }
680  it.node()->clear_partially_dependent();
681  if (it.node()->PostGarbageCollectionProcessing(isolate_)) {
682  if (initial_post_gc_processing_count != post_gc_processing_count_) {
683  // See the comment above.
684  return next_gc_likely_to_collect_more;
685  }
686  }
687  if (!it.node()->IsRetainer()) {
688  next_gc_likely_to_collect_more = true;
689  }
690  }
691  }
692  // Update the list of new space nodes.
693  int last = 0;
694  for (int i = 0; i < new_space_nodes_.length(); ++i) {
695  Node* node = new_space_nodes_[i];
696  ASSERT(node->is_in_new_space_list());
697  if (node->IsRetainer()) {
698  if (isolate_->heap()->InNewSpace(node->object())) {
699  new_space_nodes_[last++] = node;
700  tracer->increment_nodes_copied_in_new_space();
701  } else {
702  node->set_in_new_space_list(false);
703  tracer->increment_nodes_promoted();
704  }
705  } else {
706  node->set_in_new_space_list(false);
707  tracer->increment_nodes_died_in_new_space();
708  }
709  }
710  new_space_nodes_.Rewind(last);
711  return next_gc_likely_to_collect_more;
712 }
713 
714 
715 void GlobalHandles::IterateStrongRoots(ObjectVisitor* v) {
716  for (NodeIterator it(this); !it.done(); it.Advance()) {
717  if (it.node()->IsStrongRetainer()) {
718  v->VisitPointer(it.node()->location());
719  }
720  }
721 }
722 
723 
724 void GlobalHandles::IterateAllRoots(ObjectVisitor* v) {
725  for (NodeIterator it(this); !it.done(); it.Advance()) {
726  if (it.node()->IsRetainer()) {
727  v->VisitPointer(it.node()->location());
728  }
729  }
730 }
731 
732 
734  for (NodeIterator it(this); !it.done(); it.Advance()) {
735  if (it.node()->IsRetainer() && it.node()->has_wrapper_class_id()) {
736  v->VisitEmbedderReference(it.node()->location(),
737  it.node()->wrapper_class_id());
738  }
739  }
740 }
741 
742 
744  for (int i = 0; i < new_space_nodes_.length(); ++i) {
745  Node* node = new_space_nodes_[i];
746  if (node->IsRetainer() && node->has_wrapper_class_id()) {
747  v->VisitEmbedderReference(node->location(),
748  node->wrapper_class_id());
749  }
750  }
751 }
752 
753 
755  int count = 0;
756  for (NodeIterator it(this); !it.done(); it.Advance()) {
757  if (it.node()->IsWeakRetainer()) {
758  count++;
759  }
760  }
761  return count;
762 }
763 
764 
766  int count = 0;
767  for (NodeIterator it(this); !it.done(); it.Advance()) {
768  if (it.node()->IsWeakRetainer() &&
769  it.node()->object()->IsJSGlobalObject()) {
770  count++;
771  }
772  }
773  return count;
774 }
775 
776 
778  *stats->global_handle_count = 0;
779  *stats->weak_global_handle_count = 0;
780  *stats->pending_global_handle_count = 0;
781  *stats->near_death_global_handle_count = 0;
782  *stats->free_global_handle_count = 0;
783  for (NodeIterator it(this); !it.done(); it.Advance()) {
784  *stats->global_handle_count += 1;
785  if (it.node()->state() == Node::WEAK) {
786  *stats->weak_global_handle_count += 1;
787  } else if (it.node()->state() == Node::PENDING) {
788  *stats->pending_global_handle_count += 1;
789  } else if (it.node()->state() == Node::NEAR_DEATH) {
790  *stats->near_death_global_handle_count += 1;
791  } else if (it.node()->state() == Node::FREE) {
792  *stats->free_global_handle_count += 1;
793  }
794  }
795 }
796 
797 #ifdef DEBUG
798 
799 void GlobalHandles::PrintStats() {
800  int total = 0;
801  int weak = 0;
802  int pending = 0;
803  int near_death = 0;
804  int destroyed = 0;
805 
806  for (NodeIterator it(this); !it.done(); it.Advance()) {
807  total++;
808  if (it.node()->state() == Node::WEAK) weak++;
809  if (it.node()->state() == Node::PENDING) pending++;
810  if (it.node()->state() == Node::NEAR_DEATH) near_death++;
811  if (it.node()->state() == Node::FREE) destroyed++;
812  }
813 
814  PrintF("Global Handle Statistics:\n");
815  PrintF(" allocated memory = %" V8_PTR_PREFIX "dB\n", sizeof(Node) * total);
816  PrintF(" # weak = %d\n", weak);
817  PrintF(" # pending = %d\n", pending);
818  PrintF(" # near_death = %d\n", near_death);
819  PrintF(" # free = %d\n", destroyed);
820  PrintF(" # total = %d\n", total);
821 }
822 
823 
824 void GlobalHandles::Print() {
825  PrintF("Global handles:\n");
826  for (NodeIterator it(this); !it.done(); it.Advance()) {
827  PrintF(" handle %p to %p%s\n",
828  reinterpret_cast<void*>(it.node()->location()),
829  reinterpret_cast<void*>(it.node()->object()),
830  it.node()->IsWeak() ? " (weak)" : "");
831  }
832 }
833 
834 #endif
835 
836 
837 
839  size_t length,
841 #ifdef DEBUG
842  for (size_t i = 0; i < length; ++i) {
843  ASSERT(!Node::FromLocation(handles[i])->is_independent());
844  }
845 #endif
846  if (length == 0) {
847  if (info != NULL) info->Dispose();
848  return;
849  }
850  ObjectGroup* group = new ObjectGroup(length);
851  for (size_t i = 0; i < length; ++i)
852  group->objects[i] = handles[i];
853  group->info = info;
854  object_groups_.Add(group);
855 }
856 
857 
859  UniqueId id) {
860  object_group_connections_.Add(ObjectGroupConnection(id, handle));
861 }
862 
863 
866  retainer_infos_.Add(ObjectGroupRetainerInfo(id, info));
867 }
868 
869 
871  Object*** children,
872  size_t length) {
873 #ifdef DEBUG
874  ASSERT(!Node::FromLocation(BitCast<Object**>(parent))->is_independent());
875  for (size_t i = 0; i < length; ++i) {
876  ASSERT(!Node::FromLocation(children[i])->is_independent());
877  }
878 #endif
879  if (length == 0) return;
880  ImplicitRefGroup* group = new ImplicitRefGroup(parent, length);
881  for (size_t i = 0; i < length; ++i)
882  group->children[i] = children[i];
883  implicit_ref_groups_.Add(group);
884 }
885 
886 
888  ASSERT(!Node::FromLocation(child)->is_independent());
889  implicit_ref_connections_.Add(ObjectGroupConnection(id, child));
890 }
891 
892 
894  ASSERT(!Node::FromLocation(child)->is_independent());
895  ImplicitRefGroup* group = new ImplicitRefGroup(parent, 1);
896  group->children[0] = child;
897  implicit_ref_groups_.Add(group);
898 }
899 
900 
902  for (int i = 0; i < object_groups_.length(); i++)
903  delete object_groups_.at(i);
904  object_groups_.Clear();
905  for (int i = 0; i < retainer_infos_.length(); ++i)
906  retainer_infos_[i].info->Dispose();
907  retainer_infos_.Clear();
908  object_group_connections_.Clear();
909  object_group_connections_.Initialize(kObjectGroupConnectionsCapacity);
910 }
911 
912 
914  for (int i = 0; i < implicit_ref_groups_.length(); i++) {
915  delete implicit_ref_groups_.at(i);
916  }
917  implicit_ref_groups_.Clear();
918  implicit_ref_connections_.Clear();
919 }
920 
921 
923  // TODO(1428): invoke weak callbacks.
924 }
925 
926 
// Converts the flat, id-tagged connection lists built up via
// SetObjectGroupId() and SetReferenceFromGroup() into explicit ObjectGroup
// and ImplicitRefGroup structures.  The three lists are sorted by id, then
// scanned in lockstep; each run of equal ids defines one group.  All three
// input lists are consumed (cleared) by this call, and any RetainedObjectInfo
// that ends up unused is Dispose()d here.
void GlobalHandles::ComputeObjectGroupsAndImplicitReferences() {
  if (object_group_connections_.length() == 0) {
    // No object groups at all: just release the retainer infos and drop any
    // stray implicit-ref connections.
    for (int i = 0; i < retainer_infos_.length(); ++i)
      retainer_infos_[i].info->Dispose();
    retainer_infos_.Clear();
    implicit_ref_connections_.Clear();
    return;
  }

  object_group_connections_.Sort();
  retainer_infos_.Sort();
  implicit_ref_connections_.Sort();

  int info_index = 0;  // For iterating retainer_infos_.
  UniqueId current_group_id(0);
  int current_group_start = 0;

  int current_implicit_refs_start = 0;
  int current_implicit_refs_end = 0;
  // Note: the loop bound is inclusive (<=) so that the final group, which
  // ends at the end of the array, is also flushed.
  for (int i = 0; i <= object_group_connections_.length(); ++i) {
    if (i == 0)
      current_group_id = object_group_connections_[i].id;
    if (i == object_group_connections_.length() ||
        current_group_id != object_group_connections_[i].id) {
      // Group detected: objects in indices [current_group_start, i[.

      // Find out which implicit references are related to this group. (We want
      // to ignore object groups which only have 1 object, but that object is
      // needed as a representative object for the implicit reference group.)
      while (current_implicit_refs_start < implicit_ref_connections_.length() &&
             implicit_ref_connections_[current_implicit_refs_start].id <
                 current_group_id)
        ++current_implicit_refs_start;
      current_implicit_refs_end = current_implicit_refs_start;
      while (current_implicit_refs_end < implicit_ref_connections_.length() &&
             implicit_ref_connections_[current_implicit_refs_end].id ==
                 current_group_id)
        ++current_implicit_refs_end;

      if (current_implicit_refs_end > current_implicit_refs_start) {
        // Find a representative object for the implicit references.
        HeapObject** representative = NULL;
        for (int j = current_group_start; j < i; ++j) {
          Object** object = object_group_connections_[j].object;
          if ((*object)->IsHeapObject()) {
            representative = reinterpret_cast<HeapObject**>(object);
            break;
          }
        }
        if (representative) {
          ImplicitRefGroup* group = new ImplicitRefGroup(
              representative,
              current_implicit_refs_end - current_implicit_refs_start);
          for (int j = current_implicit_refs_start;
               j < current_implicit_refs_end;
               ++j) {
            group->children[j - current_implicit_refs_start] =
                implicit_ref_connections_[j].object;
          }
          implicit_ref_groups_.Add(group);
        }
        current_implicit_refs_start = current_implicit_refs_end;
      }

      // Find a RetainedObjectInfo for the group.  Infos with ids smaller
      // than the current group belong to no surviving group and are
      // disposed as we skip past them.
      RetainedObjectInfo* info = NULL;
      while (info_index < retainer_infos_.length() &&
             retainer_infos_[info_index].id < current_group_id) {
        retainer_infos_[info_index].info->Dispose();
        ++info_index;
      }
      if (info_index < retainer_infos_.length() &&
          retainer_infos_[info_index].id == current_group_id) {
        // This object group has an associated ObjectGroupRetainerInfo.
        info = retainer_infos_[info_index].info;
        ++info_index;
      }

      // Ignore groups which only contain one object.
      if (i > current_group_start + 1) {
        ObjectGroup* group = new ObjectGroup(i - current_group_start);
        for (int j = current_group_start; j < i; ++j) {
          group->objects[j - current_group_start] =
              object_group_connections_[j].object;
        }
        group->info = info;
        object_groups_.Add(group);
      } else if (info) {
        // Singleton group: its info is unused, so release it.
        info->Dispose();
      }

      // Start the next group, unless we just flushed the last one.
      if (i < object_group_connections_.length()) {
        current_group_id = object_group_connections_[i].id;
        current_group_start = i;
      }
    }
  }
  object_group_connections_.Clear();
  object_group_connections_.Initialize(kObjectGroupConnectionsCapacity);
  retainer_infos_.Clear();
  implicit_ref_connections_.Clear();
}
1029 
1030 
1032  for (unsigned i = 0; i < ARRAY_SIZE(singleton_handles_); i++) {
1033  singleton_handles_[i] = kInvalidIndex;
1034  }
1035 }
1036 
1037 
1039  for (int i = 0; i < blocks_.length(); i++) delete[] blocks_[i];
1040 }
1041 
1042 
1043 void EternalHandles::IterateAllRoots(ObjectVisitor* visitor) {
1044  int limit = size_;
1045  for (int i = 0; i < blocks_.length(); i++) {
1046  ASSERT(limit > 0);
1047  Object** block = blocks_[i];
1048  visitor->VisitPointers(block, block + Min(limit, kSize));
1049  limit -= kSize;
1050  }
1051 }
1052 
1053 
1054 void EternalHandles::IterateNewSpaceRoots(ObjectVisitor* visitor) {
1055  for (int i = 0; i < new_space_indices_.length(); i++) {
1056  visitor->VisitPointer(GetLocation(new_space_indices_[i]));
1057  }
1058 }
1059 
1060 
1062  int last = 0;
1063  for (int i = 0; i < new_space_indices_.length(); i++) {
1064  int index = new_space_indices_[i];
1065  if (heap->InNewSpace(*GetLocation(index))) {
1066  new_space_indices_[last++] = index;
1067  }
1068  }
1069  new_space_indices_.Rewind(last);
1070 }
1071 
1072 
// Allocates a new eternal-handle slot holding |object| and stores the slot's
// index in |*index|.  |*index| must be kInvalidIndex on entry; a NULL
// |object| returns without assigning a slot.  Slots live in fixed-size
// blocks of kSize pointers, initialized to the hole value.
void EternalHandles::Create(Isolate* isolate, Object* object, int* index) {
  ASSERT_EQ(kInvalidIndex, *index);
  if (object == NULL) return;
  ASSERT_NE(isolate->heap()->the_hole_value(), object);
  // size_ counts slots handed out so far; split it into a block number and
  // an offset within that block.
  int block = size_ >> kShift;
  int offset = size_ & kMask;
  // need to resize
  if (offset == 0) {
    Object** next_block = new Object*[kSize];
    Object* the_hole = isolate->heap()->the_hole_value();
    MemsetPointer(next_block, the_hole, kSize);
    blocks_.Add(next_block);
  }
  // The slot must still be unoccupied (hole) before we claim it.
  ASSERT_EQ(isolate->heap()->the_hole_value(), blocks_[block][offset]);
  blocks_[block][offset] = object;
  // Track new-space handles separately so IterateNewSpaceRoots() can visit
  // just those slots.
  if (isolate->heap()->InNewSpace(object)) {
    new_space_indices_.Add(size_);
  }
  *index = size_++;
}
1093 
1094 
1095 } } // namespace v8::internal
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter NULL
Definition: flags.cc:269
v8::RetainedObjectInfo * info
bool(* WeakSlotCallbackWithHeap)(Heap *heap, Object **pointer)
Definition: v8globals.h:171
static void Destroy(Object **location)
void PrintF(const char *format,...)
Definition: v8utils.cc:40
bool InNewSpace(Object *object)
Definition: heap-inl.h:307
void IdentifyNewSpaceWeakIndependentHandles(WeakSlotCallbackWithHeap f)
void MakeWeak(void *parameter, WeakCallback weak_callback)
bool PostGarbageCollectionProcessing(Isolate *isolate)
void PostGarbageCollectionProcessing(Heap *heap)
void AddImplicitReferences(HeapObject **parent, Object ***children, size_t length)
static ExternalTwoByteString * cast(Object *obj)
static bool IsNearDeath(Object **location)
void SetReferenceFromGroup(UniqueId id, Object **child)
void IterateWeakRoots(ObjectVisitor *v)
#define ASSERT(condition)
Definition: checks.h:329
void Create(Isolate *isolate, Object *object, int *index)
static Node * FromLocation(Object **location)
unsigned short uint16_t
Definition: unicode.cc:46
void IterateStrongRoots(ObjectVisitor *v)
#define CHECK(condition)
Definition: checks.h:75
static ExternalAsciiString * cast(Object *obj)
void AddObjectGroup(Object ***handles, size_t length, v8::RetainedObjectInfo *info)
int * pending_global_handle_count
Definition: heap.h:2577
static const int kNodeStateIsNearDeathValue
Definition: v8.h:5581
int * near_death_global_handle_count
Definition: heap.h:2578
static const int kNodeFlagsOffset
Definition: v8.h:5577
static const int kNodeStateIsPendingValue
Definition: v8.h:5580
void PutNodesOnFreeList(Node **first_free)
STATIC_ASSERT(sizeof(CPURegister)==sizeof(Register))
static const int kNodeStateIsWeakValue
Definition: v8.h:5579
#define OFFSET_OF(type, field)
Definition: globals.h:325
static const uint16_t kPersistentHandleNoClassId
Definition: v8-profiler.h:514
NodeIterator(GlobalHandles *global_handles)
Handle< Object > Create(Object *value)
int * global_handle_count
Definition: heap.h:2575
static bool IsIndependent(Object **location)
#define DISALLOW_COPY_AND_ASSIGN(TypeName)
Definition: globals.h:359
static void * ClearWeakness(Object **location)
static Handle< Object > CopyGlobal(Object **location)
HeapState gc_state()
Definition: heap.h:1508
void IterateAllRoots(ObjectVisitor *v)
#define V8_PTR_PREFIX
Definition: globals.h:220
void SetReference(HeapObject **parent, Object **child)
void set_parameter(void *parameter)
int * free_global_handle_count
Definition: heap.h:2579
void IterateAllRootsWithClassIds(ObjectVisitor *v)
void IterateNewSpaceRoots(ObjectVisitor *visitor)
static Local< Context > ToLocal(v8::internal::Handle< v8::internal::Context > obj)
void IterateNewSpaceStrongAndDependentRoots(ObjectVisitor *v)
bool IterateObjectGroups(ObjectVisitor *v, WeakSlotCallbackWithHeap can_skip)
static const int kNodeIsIndependentShift
Definition: v8.h:5582
void SetRetainedObjectInfo(UniqueId id, RetainedObjectInfo *info)
bool PostGarbageCollectionProcessing(GarbageCollector collector, GCTracer *tracer)
static const int kNodeIsPartiallyDependentShift
Definition: v8.h:5583
int * weak_global_handle_count
Definition: heap.h:2576
virtual void Dispose()=0
Handle< T > handle(T *t, Isolate *isolate)
Definition: handles.h:103
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function info
Definition: flags.cc:317
NodeBlock(GlobalHandles *global_handles, NodeBlock *next)
void IdentifyWeakHandles(WeakSlotCallback f)
void RecordStats(HeapStats *stats)
static void MakeWeak(Object **location, void *parameter, WeakCallback weak_callback)
static void MarkPartiallyDependent(Object **location)
void MemsetPointer(T **dest, U *value, int counter)
Definition: v8utils.h:198
const Address kGlobalHandleZapValue
Definition: v8globals.h:84
void SetObjectGroupId(Object **handle, UniqueId id)
#define ASSERT_EQ(v1, v2)
Definition: checks.h:330
void IterateAllRoots(ObjectVisitor *visitor)
Counters * counters()
Definition: isolate.h:859
#define ASSERT_NE(v1, v2)
Definition: checks.h:331
static void MarkIndependent(Object **location)
static bool IsWeak(Object **location)
void Print(const v8::FunctionCallbackInfo< v8::Value > &args)
void IterateAllRootsInNewSpaceWithClassIds(ObjectVisitor *v)
void IterateNewSpaceWeakIndependentRoots(ObjectVisitor *v)
void Add(const T &element, AllocationPolicy allocator=AllocationPolicy())
Definition: list-inl.h:39
static const int kNodeStateMask
Definition: v8.h:5578
WeakCallbackData< v8::Value, void >::Callback WeakCallback
T Min(T a, T b)
Definition: utils.h:234
#define ARRAY_SIZE(a)
Definition: globals.h:333
bool(* WeakSlotCallback)(Object **pointer)
Definition: v8globals.h:169
static const int kNodeClassIdOffset
Definition: v8.h:5576
void Initialize(int index, Node **first_free)