v8 3.25.30 (node 0.11.13)
V8 is Google's open source JavaScript engine.
objects-visiting-inl.h
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_OBJECTS_VISITING_INL_H_
#define V8_OBJECTS_VISITING_INL_H_


namespace v8 {
namespace internal {

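// StaticNewSpaceVisitor fills a static dispatch table that maps each visitor
// id to a specialized visit function. Each entry returns the visited object's
// size in bytes (hence the int result type), which lets new-space iteration
// advance object by object. Initialize() must run once per template
// instantiation before the table is used.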
template<typename StaticVisitor>
void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
  table_.Register(kVisitShortcutCandidate,
                  &FixedBodyVisitor<StaticVisitor,
                  ConsString::BodyDescriptor,
                  int>::Visit);

  table_.Register(kVisitConsString,
                  &FixedBodyVisitor<StaticVisitor,
                  ConsString::BodyDescriptor,
                  int>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor,
                  SlicedString::BodyDescriptor,
                  int>::Visit);

  table_.Register(kVisitSymbol,
                  &FixedBodyVisitor<StaticVisitor,
                  Symbol::BodyDescriptor,
                  int>::Visit);

  table_.Register(kVisitFixedArray,
                  &FlexibleBodyVisitor<StaticVisitor,
                  FixedArray::BodyDescriptor,
                  int>::Visit);

  table_.Register(kVisitFixedDoubleArray, &VisitFixedDoubleArray);
  table_.Register(kVisitFixedTypedArray, &VisitFixedTypedArray);
  table_.Register(kVisitFixedFloat64Array, &VisitFixedTypedArray);

  table_.Register(kVisitNativeContext,
                  &FixedBodyVisitor<StaticVisitor,
                  Context::ScavengeBodyDescriptor,
                  int>::Visit);

  table_.Register(kVisitByteArray, &VisitByteArray);

  table_.Register(kVisitSharedFunctionInfo,
                  &FixedBodyVisitor<StaticVisitor,
                  SharedFunctionInfo::BodyDescriptor,
                  int>::Visit);

  table_.Register(kVisitSeqOneByteString, &VisitSeqOneByteString);

  table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);

  table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);

  table_.Register(kVisitJSDataView, &VisitJSDataView);

  table_.Register(kVisitFreeSpace, &VisitFreeSpace);

  table_.Register(kVisitJSWeakMap, &JSObjectVisitor::Visit);

  table_.Register(kVisitJSWeakSet, &JSObjectVisitor::Visit);

  table_.Register(kVisitJSRegExp, &JSObjectVisitor::Visit);

  table_.template RegisterSpecializations<DataObjectVisitor,
                                          kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor,
                                          kVisitJSObject,
                                          kVisitJSObjectGeneric>();
  table_.template RegisterSpecializations<StructVisitor,
                                          kVisitStruct,
                                          kVisitStructGeneric>();
}


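// The three visitors below handle JSArrayBuffer, JSTypedArray and JSDataView.
// They visit all pointer fields except the weak-list links (kWeakNextOffset
// and, for array buffers, the weak first-view field), so the buffer and view
// lists are not treated as strong references.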
template<typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  STATIC_ASSERT(
      JSArrayBuffer::kWeakFirstViewOffset ==
      JSArrayBuffer::kWeakNextOffset + kPointerSize);
  VisitPointers(
      heap,
      HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
  VisitPointers(
      heap,
      HeapObject::RawField(object,
          JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize),
      HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
  return JSArrayBuffer::kSizeWithInternalFields;
}


template<typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSTypedArray(
    Map* map, HeapObject* object) {
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSTypedArray::kWeakNextOffset));
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object,
          JSTypedArray::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
  return JSTypedArray::kSizeWithInternalFields;
}


template<typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSDataView(
    Map* map, HeapObject* object) {
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset));
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object,
          JSDataView::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
  return JSDataView::kSizeWithInternalFields;
}


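// StaticMarkingVisitor fills the analogous table used during full marking
// (mark-compact and incremental marking). Entries return void, and object
// types that need weak or otherwise special treatment (weak collections,
// maps, code, shared function info, functions, property cells, allocation
// sites) get dedicated handlers defined further down in this file.
//
// A concrete visitor supplies the StaticVisitor CRTP parameter, roughly like
// this (sketch; the class name is made up for illustration):
//
//   class MyMarkingVisitor : public StaticMarkingVisitor<MyMarkingVisitor> {
//    public:
//     static void MarkObject(Heap* heap, HeapObject* object);
//     static bool MarkObjectWithoutPush(Heap* heap, HeapObject* object);
//     static void VisitPointers(Heap* heap, Object** start, Object** end);
//     // ...plus the other hooks referenced below.
//   };
//   MyMarkingVisitor::Initialize();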
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::Initialize() {
  table_.Register(kVisitShortcutCandidate,
                  &FixedBodyVisitor<StaticVisitor,
                  ConsString::BodyDescriptor,
                  void>::Visit);

  table_.Register(kVisitConsString,
                  &FixedBodyVisitor<StaticVisitor,
                  ConsString::BodyDescriptor,
                  void>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor,
                  SlicedString::BodyDescriptor,
                  void>::Visit);

  table_.Register(kVisitSymbol,
                  &FixedBodyVisitor<StaticVisitor,
                  Symbol::BodyDescriptor,
                  void>::Visit);

  table_.Register(kVisitFixedArray, &FixedArrayVisitor::Visit);

  table_.Register(kVisitFixedDoubleArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitFixedTypedArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitFixedFloat64Array, &DataObjectVisitor::Visit);

  table_.Register(kVisitConstantPoolArray, &VisitConstantPoolArray);

  table_.Register(kVisitNativeContext, &VisitNativeContext);

  table_.Register(kVisitAllocationSite, &VisitAllocationSite);

  table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqOneByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitJSWeakMap, &StaticVisitor::VisitWeakCollection);

  table_.Register(kVisitJSWeakSet, &StaticVisitor::VisitWeakCollection);

  table_.Register(kVisitOddball,
                  &FixedBodyVisitor<StaticVisitor,
                  Oddball::BodyDescriptor,
                  void>::Visit);

  table_.Register(kVisitMap, &VisitMap);

  table_.Register(kVisitCode, &VisitCode);

  table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);

  table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);

  table_.Register(kVisitJSDataView, &VisitJSDataView);

  // Registration for kVisitJSRegExp is done by StaticVisitor.

  table_.Register(kVisitCell,
                  &FixedBodyVisitor<StaticVisitor,
                  Cell::BodyDescriptor,
                  void>::Visit);

  table_.Register(kVisitPropertyCell, &VisitPropertyCell);

  table_.template RegisterSpecializations<DataObjectVisitor,
                                          kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor,
                                          kVisitJSObject,
                                          kVisitJSObjectGeneric>();

  table_.template RegisterSpecializations<StructObjectVisitor,
                                          kVisitStruct,
                                          kVisitStructGeneric>();
}


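// The visitors below handle code entries and RelocInfo slots inside code
// objects. Where the slot may move during compaction they record it with the
// mark-compact collector before marking the target (VisitCell is the
// exception: cell space is never compacted, so no slot is recorded).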
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeEntry(
    Heap* heap, Address entry_address) {
  Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
  heap->mark_compact_collector()->RecordCodeEntrySlot(entry_address, code);
  StaticVisitor::MarkObject(heap, code);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitEmbeddedPointer(
    Heap* heap, RelocInfo* rinfo) {
  ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
  ASSERT(!rinfo->target_object()->IsConsString());
  HeapObject* object = HeapObject::cast(rinfo->target_object());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, object);
  // TODO(ulan): It could be better to record slots only for strongly embedded
  // objects here and record slots for weakly embedded object during clearing
  // of non-live references in mark-compact.
  if (!rinfo->host()->IsWeakObject(object)) {
    StaticVisitor::MarkObject(heap, object);
  }
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCell(
    Heap* heap, RelocInfo* rinfo) {
  ASSERT(rinfo->rmode() == RelocInfo::CELL);
  Cell* cell = rinfo->target_cell();
  // No need to record slots because the cell space is not compacted during GC.
  if (!rinfo->host()->IsWeakObject(cell)) {
    StaticVisitor::MarkObject(heap, cell);
  }
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitDebugTarget(
    Heap* heap, RelocInfo* rinfo) {
  ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
          rinfo->IsPatchedReturnSequence()) ||
         (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
          rinfo->IsPatchedDebugBreakSlotSequence()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget(
    Heap* heap, RelocInfo* rinfo) {
  ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  // Monomorphic ICs are preserved when possible, but need to be flushed
  // when they might be keeping a Context alive, or when the heap is about
  // to be serialized.
  if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub()
      && (target->ic_state() == MEGAMORPHIC || target->ic_state() == GENERIC ||
          target->ic_state() == POLYMORPHIC || heap->flush_monomorphic_ics() ||
          Serializer::enabled() || target->ic_age() != heap->global_ic_age())) {
    IC::Clear(target->GetIsolate(), rinfo->pc(),
              rinfo->host()->constant_pool());
    target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  }
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeAgeSequence(
    Heap* heap, RelocInfo* rinfo) {
  ASSERT(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
  Code* target = rinfo->code_age_stub();
  ASSERT(target != NULL);
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


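// Native contexts keep their slots from FIRST_WEAK_SLOT onwards weak: the
// strong prefix is visited through the fixed body descriptor, while the weak
// tail is only recorded as slots so the collector can update or clear them
// later without keeping the referenced objects alive.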
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitNativeContext(
    Map* map, HeapObject* object) {
  FixedBodyVisitor<StaticVisitor,
                   Context::MarkCompactBodyDescriptor,
                   void>::Visit(map, object);

  MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();
  for (int idx = Context::FIRST_WEAK_SLOT;
       idx < Context::NATIVE_CONTEXT_SLOTS;
       ++idx) {
    Object** slot = Context::cast(object)->RawFieldOfElementAt(idx);
    collector->RecordSlot(slot, slot, *slot);
  }
}


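// Maps are treated specially when map collection is enabled: transitions,
// back pointers and dependent code must stay weak so that unused transition
// trees can be collected. MarkMapContents (further down) implements that;
// otherwise the map's pointer fields are simply visited strongly.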
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitMap(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  Map* map_object = Map::cast(object);

  // Clears the cache of ICs related to this map.
  if (FLAG_cleanup_code_caches_at_gc) {
    map_object->ClearCodeCache(heap);
  }

  // When map collection is enabled we have to mark through map's transitions
  // and back pointers in a special way to make these links weak.
  if (FLAG_collect_maps && map_object->CanTransition()) {
    MarkMapContents(heap, map_object);
  } else {
    StaticVisitor::VisitPointers(heap,
        HeapObject::RawField(object, Map::kPointerFieldsBeginOffset),
        HeapObject::RawField(object, Map::kPointerFieldsEndOffset));
  }
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitPropertyCell(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  Object** slot =
      HeapObject::RawField(object, PropertyCell::kDependentCodeOffset);
  if (FLAG_collect_maps) {
    // Mark property cell dependent codes array but do not push it onto marking
    // stack, this will make references from it weak. We will clean dead
    // codes when we iterate over property cells in ClearNonLiveReferences.
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  } else {
    StaticVisitor::VisitPointer(heap, slot);
  }

  StaticVisitor::VisitPointers(heap,
      HeapObject::RawField(object, PropertyCell::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, PropertyCell::kPointerFieldsEndOffset));
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitAllocationSite(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  Object** slot =
      HeapObject::RawField(object, AllocationSite::kDependentCodeOffset);
  if (FLAG_collect_maps) {
    // Mark allocation site dependent codes array but do not push it onto
    // marking stack, this will make references from it weak. We will clean
    // dead codes when we iterate over allocation sites in
    // ClearNonLiveReferences.
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  } else {
    StaticVisitor::VisitPointer(heap, slot);
  }

  StaticVisitor::VisitPointers(heap,
      HeapObject::RawField(object, AllocationSite::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, AllocationSite::kPointerFieldsEndOffset));
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCode(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  Code* code = Code::cast(object);
  if (FLAG_cleanup_code_caches_at_gc) {
    code->ClearTypeFeedbackInfo(heap);
  }
  if (FLAG_age_code && !Serializer::enabled()) {
    code->MakeOlder(heap->mark_compact_collector()->marking_parity());
  }
  code->CodeIterateBody<StaticVisitor>(heap);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
  if (shared->ic_age() != heap->global_ic_age()) {
    shared->ResetForNewContext(heap->global_ic_age());
  }
  if (FLAG_cache_optimized_code &&
      FLAG_flush_optimized_code_cache &&
      !shared->optimized_code_map()->IsSmi()) {
    // Always flush the optimized code map if requested by flag.
    shared->ClearOptimizedCodeMap();
  }
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
      // Add the shared function info holding an optimized code map to
      // the code flusher for processing of code maps after marking.
      collector->code_flusher()->AddOptimizedCodeMap(shared);
      // Treat all references within the code map weakly by marking the
      // code map itself but not pushing it onto the marking deque.
      FixedArray* code_map = FixedArray::cast(shared->optimized_code_map());
      StaticVisitor::MarkObjectWithoutPush(heap, code_map);
    }
    if (IsFlushable(heap, shared)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(shared);
      // Treat the reference to the code object weakly.
      VisitSharedFunctionInfoWeakCode(heap, object);
      return;
    }
  } else {
    if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
      // Flush optimized code map on major GCs without code flushing,
      // needed because cached code doesn't contain breakpoints.
      shared->ClearOptimizedCodeMap();
    }
  }
  VisitSharedFunctionInfoStrongCode(heap, object);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitConstantPoolArray(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  ConstantPoolArray* constant_pool = ConstantPoolArray::cast(object);
  for (int i = 0; i < constant_pool->count_of_code_ptr_entries(); i++) {
    int index = constant_pool->first_code_ptr_index() + i;
    Address code_entry =
        reinterpret_cast<Address>(constant_pool->RawFieldOfElementAt(index));
    StaticVisitor::VisitCodeEntry(heap, code_entry);
  }
  for (int i = 0; i < constant_pool->count_of_heap_ptr_entries(); i++) {
    int index = constant_pool->first_heap_ptr_index() + i;
    StaticVisitor::VisitPointer(heap,
                                constant_pool->RawFieldOfElementAt(index));
  }
}


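// VisitJSFunction is where code-flushing candidates are chosen: when the
// collector has code flushing enabled and IsFlushable() agrees, the function
// is queued on the code flusher and its code reference is visited weakly;
// otherwise the unoptimized code (and the code of any inlined functions) is
// marked to keep it alive.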
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSFunction* function = JSFunction::cast(object);
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (IsFlushable(heap, function)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(function);
      // Visit shared function info immediately to avoid double checking
      // of its flushability later. This is just an optimization because
      // the shared function info would eventually be visited.
      SharedFunctionInfo* shared = function->shared();
      if (StaticVisitor::MarkObjectWithoutPush(heap, shared)) {
        StaticVisitor::MarkObject(heap, shared->map());
        VisitSharedFunctionInfoWeakCode(heap, shared);
      }
      // Treat the reference to the code object weakly.
      VisitJSFunctionWeakCode(heap, object);
      return;
    } else {
      // Visit all unoptimized code objects to prevent flushing them.
      StaticVisitor::MarkObject(heap, function->shared()->code());
      if (function->code()->kind() == Code::OPTIMIZED_FUNCTION) {
        MarkInlinedFunctionsCode(heap, function->code());
      }
    }
  }
  VisitJSFunctionStrongCode(heap, object);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(
    Map* map, HeapObject* object) {
  int last_property_offset =
      JSRegExp::kSize + kPointerSize * map->inobject_properties();
  StaticVisitor::VisitPointers(map->GetHeap(),
      HeapObject::RawField(object, JSRegExp::kPropertiesOffset),
      HeapObject::RawField(object, last_property_offset));
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  STATIC_ASSERT(
      JSArrayBuffer::kWeakFirstViewOffset ==
      JSArrayBuffer::kWeakNextOffset + kPointerSize);
  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object,
          JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize),
      HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSTypedArray(
    Map* map, HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSTypedArray::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object,
          JSTypedArray::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSDataView(
    Map* map, HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object,
          JSDataView::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(
    Heap* heap, Map* map) {
  // Make sure that the back pointer stored either in the map itself or
  // inside its transitions array is marked. Skip recording the back
  // pointer slot since map space is not compacted.
  StaticVisitor::MarkObject(heap, HeapObject::cast(map->GetBackPointer()));

  // Treat pointers in the transitions array as weak and also mark that
  // array to prevent visiting it later. Skip recording the transition
  // array slot, since it will be implicitly recorded when the pointer
  // fields of this map are visited.
  TransitionArray* transitions = map->unchecked_transition_array();
  if (transitions->IsTransitionArray()) {
    MarkTransitionArray(heap, transitions);
  } else {
    // Already marked by marking map->GetBackPointer() above.
    ASSERT(transitions->IsMap() || transitions->IsUndefined());
  }

  // Since descriptor arrays are potentially shared, ensure that only the
  // descriptors that belong to this map are marked. The first time a
  // non-empty descriptor array is marked, its header is also visited. The slot
  // holding the descriptor array will be implicitly recorded when the pointer
  // fields of this map are visited.
  DescriptorArray* descriptors = map->instance_descriptors();
  if (StaticVisitor::MarkObjectWithoutPush(heap, descriptors) &&
      descriptors->length() > 0) {
    StaticVisitor::VisitPointers(heap,
        descriptors->GetFirstElementAddress(),
        descriptors->GetDescriptorEndSlot(0));
  }
  int start = 0;
  int end = map->NumberOfOwnDescriptors();
  if (start < end) {
    StaticVisitor::VisitPointers(heap,
        descriptors->GetDescriptorStartSlot(start),
        descriptors->GetDescriptorEndSlot(end));
  }

  // Mark prototype dependent codes array but do not push it onto marking
  // stack, this will make references from it weak. We will clean dead
  // codes when we iterate over maps in ClearNonLiveTransitions.
  Object** slot = HeapObject::RawField(map, Map::kDependentCodeOffset);
  HeapObject* obj = HeapObject::cast(*slot);
  heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
  StaticVisitor::MarkObjectWithoutPush(heap, obj);

  // Mark the pointer fields of the Map. Since the transitions array has
  // been marked already, it is fine that one of these fields contains a
  // pointer to it.
  StaticVisitor::VisitPointers(heap,
      HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
      HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkTransitionArray(
    Heap* heap, TransitionArray* transitions) {
  if (!StaticVisitor::MarkObjectWithoutPush(heap, transitions)) return;

  // Simple transitions do not have keys nor prototype transitions.
  if (transitions->IsSimpleTransition()) return;

  if (transitions->HasPrototypeTransitions()) {
    // Mark prototype transitions array but do not push it onto marking
    // stack, this will make references from it weak. We will clean dead
    // prototype transitions in ClearNonLiveTransitions.
    Object** slot = transitions->GetPrototypeTransitionsSlot();
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  }

  for (int i = 0; i < transitions->number_of_transitions(); ++i) {
    StaticVisitor::VisitPointer(heap, transitions->GetKeySlot(i));
  }
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkInlinedFunctionsCode(
    Heap* heap, Code* code) {
  // For optimized functions we should retain both non-optimized version
  // of its code and non-optimized version of all inlined functions.
  // This is required to support bailing out from inlined code.
  DeoptimizationInputData* data =
      DeoptimizationInputData::cast(code->deoptimization_data());
  FixedArray* literals = data->LiteralArray();
  for (int i = 0, count = data->InlinedFunctionCount()->value();
       i < count;
       i++) {
    JSFunction* inlined = JSFunction::cast(literals->get(i));
    StaticVisitor::MarkObject(heap, inlined->shared()->code());
  }
}


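// Helpers for the IsFlushable() predicates below. Code can only be flushed if
// the function could be lazily recompiled later, hence the checks for a
// regular (non-builtin) context and for the script source still being
// available.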
inline static bool IsValidNonBuiltinContext(Object* context) {
  return context->IsContext() &&
      !Context::cast(context)->global_object()->IsJSBuiltinsObject();
}


inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
  Object* undefined = heap->undefined_value();
  return (info->script() != undefined) &&
      (reinterpret_cast<Script*>(info->script())->source() != undefined);
}


template<typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
    Heap* heap, JSFunction* function) {
  SharedFunctionInfo* shared_info = function->shared();

  // Code is either on stack, in compilation cache or referenced
  // by optimized version of function.
  MarkBit code_mark = Marking::MarkBitFrom(function->code());
  if (code_mark.Get()) {
    return false;
  }

  // The function must have a valid context and not be a builtin.
  if (!IsValidNonBuiltinContext(function->context())) {
    return false;
  }

  // We do not (yet) flush code for optimized functions.
  if (function->code() != shared_info->code()) {
    return false;
  }

  // Check age of optimized code.
  if (FLAG_age_code && !function->code()->IsOld()) {
    return false;
  }

  return IsFlushable(heap, shared_info);
}


template<typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
    Heap* heap, SharedFunctionInfo* shared_info) {
  // Code is either on stack, in compilation cache or referenced
  // by optimized version of function.
  MarkBit code_mark = Marking::MarkBitFrom(shared_info->code());
  if (code_mark.Get()) {
    return false;
  }

  // The function must be compiled and have the source code available,
  // to be able to recompile it in case we need the function again.
  if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
    return false;
  }

  // We never flush code for API functions.
  Object* function_data = shared_info->function_data();
  if (function_data->IsFunctionTemplateInfo()) {
    return false;
  }

  // Only flush code for functions.
  if (shared_info->code()->kind() != Code::FUNCTION) {
    return false;
  }

  // Function must be lazy compilable.
  if (!shared_info->allows_lazy_compilation()) {
    return false;
  }

  // We do not (yet?) flush code for generator functions, because we don't know
  // if there are still live activations (generator objects) on the heap.
  if (shared_info->is_generator()) {
    return false;
  }

  // If this is a full script wrapped in a function we do not flush the code.
  if (shared_info->is_toplevel()) {
    return false;
  }

  // If this is a function initialized with %SetCode then the one-to-one
  // relation between SharedFunctionInfo and Code is broken.
  if (shared_info->dont_flush()) {
    return false;
  }

  // Check age of code. If code aging is disabled we never flush.
  if (!FLAG_age_code || !shared_info->code()->IsOld()) {
    return false;
  }

  return true;
}


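// The *StrongCode/*WeakCode pairs below differ only in whether the code field
// (kCodeOffset for SharedFunctionInfo, the kCodeEntryOffset entry for
// JSFunction) is visited. The weak variants skip it so the code object is not
// kept alive by this reference alone; flushed candidates are then dealt with
// by the code flusher after marking.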
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoStrongCode(
    Heap* heap, HeapObject* object) {
  StaticVisitor::BeforeVisitingSharedFunctionInfo(object);
  Object** start_slot =
      HeapObject::RawField(object,
                           SharedFunctionInfo::BodyDescriptor::kStartOffset);
  Object** end_slot =
      HeapObject::RawField(object,
                           SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoWeakCode(
    Heap* heap, HeapObject* object) {
  StaticVisitor::BeforeVisitingSharedFunctionInfo(object);
  Object** name_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kNameOffset);
  StaticVisitor::VisitPointer(heap, name_slot);

  // Skip visiting kCodeOffset as it is treated weakly here.
  STATIC_ASSERT(SharedFunctionInfo::kNameOffset + kPointerSize ==
                SharedFunctionInfo::kCodeOffset);
  STATIC_ASSERT(SharedFunctionInfo::kCodeOffset + kPointerSize ==
                SharedFunctionInfo::kOptimizedCodeMapOffset);

  Object** start_slot =
      HeapObject::RawField(object,
                           SharedFunctionInfo::kOptimizedCodeMapOffset);
  Object** end_slot =
      HeapObject::RawField(object,
                           SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);

  VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
                JSFunction::kPrototypeOrInitialMapOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot =
      HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);

  // Skip visiting kCodeEntryOffset as it is treated weakly here.
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
                JSFunction::kPrototypeOrInitialMapOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot =
      HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


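// Two equivalent iterations over a code object's body follow: the
// ObjectVisitor-based version used by generic heap iteration, and the
// templated version used by the static visitors. As their comments note, the
// two must be kept in sync; both visit the code object's pointer fields and
// then every RelocInfo entry matching mode_mask.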
void Code::CodeIterateBody(ObjectVisitor* v) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);

  // There are two places where we iterate code bodies: here and the
  // templated CodeIterateBody (below). They should be kept in sync.
  IteratePointer(v, kRelocationInfoOffset);
  IteratePointer(v, kHandlerTableOffset);
  IteratePointer(v, kDeoptimizationDataOffset);
  IteratePointer(v, kTypeFeedbackInfoOffset);
  IterateNextCodeLink(v, kNextCodeLinkOffset);
  IteratePointer(v, kConstantPoolOffset);

  RelocIterator it(this, mode_mask);
  Isolate* isolate = this->GetIsolate();
  for (; !it.done(); it.next()) {
    it.rinfo()->Visit(isolate, v);
  }
}


template<typename StaticVisitor>
void Code::CodeIterateBody(Heap* heap) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);

  // There are two places where we iterate code bodies: here and the non-
  // templated CodeIterateBody (above). They should be kept in sync.
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kRelocationInfoOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kHandlerTableOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kDeoptimizationDataOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kTypeFeedbackInfoOffset));
  StaticVisitor::VisitNextCodeLink(
      heap,
      reinterpret_cast<Object**>(this->address() + kNextCodeLinkOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kConstantPoolOffset));


  RelocIterator it(this, mode_mask);
  for (; !it.done(); it.next()) {
    it.rinfo()->template Visit<StaticVisitor>(heap);
  }
}


} }  // namespace v8::internal

#endif  // V8_OBJECTS_VISITING_INL_H_