objects-visiting.h
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_OBJECTS_VISITING_H_
#define V8_OBJECTS_VISITING_H_

#include "allocation.h"

// This file provides base classes and auxiliary methods for defining
// static object visitors used during GC.
// Visiting a HeapObject body with a normal ObjectVisitor requires two
// switches on the object's instance type (to determine object size and
// layout) plus one or more virtual method calls on the visitor itself.
// A static visitor is different: it provides a dispatch table of pointers
// to specialized visit functions, and each map has a visitor_id field
// holding the index of the specialized visitor to use.
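//
// For illustration only (a sketch, not code from this file): with a static
// visitor, dispatch over an object's body reduces to one table lookup and
// one indirect call,
//
//   Callback visit = table_.GetVisitor(map);  // indexed by map->visitor_id()
//   int size = visit(map, object);            // no virtual dispatch
//
// instead of the instance-type switches and virtual calls a plain
// ObjectVisitor performs for every object.
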
namespace v8 {
namespace internal {


// Base class for all static visitors.
class StaticVisitorBase : public AllStatic {
 public:
#define VISITOR_ID_LIST(V)    \
  V(SeqOneByteString)         \
  V(SeqTwoByteString)         \
  V(ShortcutCandidate)        \
  V(ByteArray)                \
  V(FreeSpace)                \
  V(FixedArray)               \
  V(FixedDoubleArray)         \
  V(FixedTypedArray)          \
  V(FixedFloat64Array)        \
  V(ConstantPoolArray)        \
  V(NativeContext)            \
  V(AllocationSite)           \
  V(DataObject2)              \
  V(DataObject3)              \
  V(DataObject4)              \
  V(DataObject5)              \
  V(DataObject6)              \
  V(DataObject7)              \
  V(DataObject8)              \
  V(DataObject9)              \
  V(DataObjectGeneric)        \
  V(JSObject2)                \
  V(JSObject3)                \
  V(JSObject4)                \
  V(JSObject5)                \
  V(JSObject6)                \
  V(JSObject7)                \
  V(JSObject8)                \
  V(JSObject9)                \
  V(JSObjectGeneric)          \
  V(Struct2)                  \
  V(Struct3)                  \
  V(Struct4)                  \
  V(Struct5)                  \
  V(Struct6)                  \
  V(Struct7)                  \
  V(Struct8)                  \
  V(Struct9)                  \
  V(StructGeneric)            \
  V(ConsString)               \
  V(SlicedString)             \
  V(Symbol)                   \
  V(Oddball)                  \
  V(Code)                     \
  V(Map)                      \
  V(Cell)                     \
  V(PropertyCell)             \
  V(SharedFunctionInfo)       \
  V(JSFunction)               \
  V(JSWeakMap)                \
  V(JSWeakSet)                \
  V(JSArrayBuffer)            \
  V(JSTypedArray)             \
  V(JSDataView)               \
  V(JSRegExp)

  // For data objects, JS objects and structs, along with the generic visitor
  // (which can visit an object of any size), we provide visitors specialized
  // by object size in words.
  // Ids of specialized visitors are declared in a linear order (without
  // holes), starting from the id of the visitor specialized for 2-word
  // objects (the base visitor id) and ending with the id of the generic
  // visitor.
  // The method GetVisitorIdForSize depends on this ordering to calculate the
  // visitor id of a specialized visitor from a given instance size, base
  // visitor id and generic visitor id.
  enum VisitorId {
#define VISITOR_ID_ENUM_DECL(id)  kVisit##id,
    VISITOR_ID_LIST(VISITOR_ID_ENUM_DECL)
#undef VISITOR_ID_ENUM_DECL
    kVisitorIdCount,
    kVisitDataObject = kVisitDataObject2,
    kVisitJSObject = kVisitJSObject2,
    kVisitStruct = kVisitStruct2,
    kMinObjectSizeInWords = 2
  };
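
  // For example, given the list above the JSObject family expands to the
  // contiguous ids kVisitJSObject2, kVisitJSObject3, ..., kVisitJSObject9,
  // kVisitJSObjectGeneric; GetVisitorIdForSize below relies on exactly this
  // layout to compute a specialized id by plain arithmetic.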

  // Visitor ID should fit in one byte.
  STATIC_ASSERT(kVisitorIdCount <= 256);

  // Determine which specialized visitor should be used for the given
  // instance type and instance size.
  static VisitorId GetVisitorId(int instance_type, int instance_size);

  static VisitorId GetVisitorId(Map* map) {
    return GetVisitorId(map->instance_type(), map->instance_size());
  }

  // For visitors that allow specialization by size, calculate the VisitorId
  // from the object size, the base visitor id and the generic visitor id.
  static VisitorId GetVisitorIdForSize(VisitorId base,
                                       VisitorId generic,
                                       int object_size) {
    ASSERT((base == kVisitDataObject) ||
           (base == kVisitStruct) ||
           (base == kVisitJSObject));
    ASSERT(IsAligned(object_size, kPointerSize));
    ASSERT(kMinObjectSizeInWords * kPointerSize <= object_size);
    ASSERT(object_size <= Page::kMaxRegularHeapObjectSize);

    const VisitorId specialization = static_cast<VisitorId>(
        base + (object_size >> kPointerSizeLog2) - kMinObjectSizeInWords);

    return Min(specialization, generic);
  }
};
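
// Worked example (assuming a 64-bit build where kPointerSize == 8 and
// kPointerSizeLog2 == 3): a 40-byte JSObject spans 5 words, so
//
//   GetVisitorIdForSize(kVisitJSObject, kVisitJSObjectGeneric, 40)
//       == kVisitJSObject2 + 5 - kMinObjectSizeInWords  // == kVisitJSObject5
//
// while any size past 9 words is clamped to kVisitJSObjectGeneric by Min().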


template<typename Callback>
class VisitorDispatchTable {
 public:
  void CopyFrom(VisitorDispatchTable* other) {
    // We are not using memcpy, to guarantee that during the update every
    // element of the callbacks_ array remains a correct pointer (memcpy
    // might be implemented as a byte-copying loop).
    for (int i = 0; i < StaticVisitorBase::kVisitorIdCount; i++) {
      NoBarrier_Store(&callbacks_[i], other->callbacks_[i]);
    }
  }

  inline Callback GetVisitorById(StaticVisitorBase::VisitorId id) {
    return reinterpret_cast<Callback>(callbacks_[id]);
  }

  inline Callback GetVisitor(Map* map) {
    return reinterpret_cast<Callback>(callbacks_[map->visitor_id()]);
  }

  void Register(StaticVisitorBase::VisitorId id, Callback callback) {
    ASSERT(id < StaticVisitorBase::kVisitorIdCount);  // id is unsigned.
    callbacks_[id] = reinterpret_cast<AtomicWord>(callback);
  }

  template<typename Visitor,
           StaticVisitorBase::VisitorId base,
           StaticVisitorBase::VisitorId generic,
           int object_size_in_words>
  void RegisterSpecialization() {
    static const int size = object_size_in_words * kPointerSize;
    Register(StaticVisitorBase::GetVisitorIdForSize(base, generic, size),
             &Visitor::template VisitSpecialized<size>);
  }


  template<typename Visitor,
           StaticVisitorBase::VisitorId base,
           StaticVisitorBase::VisitorId generic>
  void RegisterSpecializations() {
    STATIC_ASSERT(
        (generic - base + StaticVisitorBase::kMinObjectSizeInWords) == 10);
    RegisterSpecialization<Visitor, base, generic, 2>();
    RegisterSpecialization<Visitor, base, generic, 3>();
    RegisterSpecialization<Visitor, base, generic, 4>();
    RegisterSpecialization<Visitor, base, generic, 5>();
    RegisterSpecialization<Visitor, base, generic, 6>();
    RegisterSpecialization<Visitor, base, generic, 7>();
    RegisterSpecialization<Visitor, base, generic, 8>();
    RegisterSpecialization<Visitor, base, generic, 9>();
    Register(generic, &Visitor::Visit);
  }
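
  // Illustrative use (a sketch; the actual registrations live in the
  // Initialize() methods in objects-visiting-inl.h and differ per visitor):
  //
  //   table_.Register(StaticVisitorBase::kVisitByteArray, &VisitByteArray);
  //   table_.RegisterSpecializations<DataObjectVisitor,
  //                                  StaticVisitorBase::kVisitDataObject,
  //                                  StaticVisitorBase::kVisitDataObjectGeneric>();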

 private:
  AtomicWord callbacks_[StaticVisitorBase::kVisitorIdCount];
};


template<typename StaticVisitor>
class BodyVisitorBase : public AllStatic {
 public:
  INLINE(static void IteratePointers(Heap* heap,
                                     HeapObject* object,
                                     int start_offset,
                                     int end_offset)) {
    Object** start_slot = reinterpret_cast<Object**>(object->address() +
                                                     start_offset);
    Object** end_slot = reinterpret_cast<Object**>(object->address() +
                                                   end_offset);
    StaticVisitor::VisitPointers(heap, start_slot, end_slot);
  }
};


template<typename StaticVisitor, typename BodyDescriptor, typename ReturnType>
class FlexibleBodyVisitor : public BodyVisitorBase<StaticVisitor> {
 public:
  INLINE(static ReturnType Visit(Map* map, HeapObject* object)) {
    int object_size = BodyDescriptor::SizeOf(map, object);
    BodyVisitorBase<StaticVisitor>::IteratePointers(
        map->GetHeap(),
        object,
        BodyDescriptor::kStartOffset,
        object_size);
    return static_cast<ReturnType>(object_size);
  }

  template<int object_size>
  static inline ReturnType VisitSpecialized(Map* map, HeapObject* object) {
    ASSERT(BodyDescriptor::SizeOf(map, object) == object_size);
    BodyVisitorBase<StaticVisitor>::IteratePointers(
        map->GetHeap(),
        object,
        BodyDescriptor::kStartOffset,
        object_size);
    return static_cast<ReturnType>(object_size);
  }
};


template<typename StaticVisitor, typename BodyDescriptor, typename ReturnType>
class FixedBodyVisitor : public BodyVisitorBase<StaticVisitor> {
 public:
  INLINE(static ReturnType Visit(Map* map, HeapObject* object)) {
    BodyVisitorBase<StaticVisitor>::IteratePointers(
        map->GetHeap(),
        object,
        BodyDescriptor::kStartOffset,
        BodyDescriptor::kEndOffset);
    return static_cast<ReturnType>(BodyDescriptor::kSize);
  }
};


// Base class for visitors used for a linear new space iteration.
// IterateBody returns the size of the visited object.
// Certain types of objects (e.g. Code objects) are not handled by the
// dispatch table of this visitor because they cannot appear in the new
// space.
//
// This class is intended to be used in the following way:
//
//   class SomeVisitor : public StaticNewSpaceVisitor<SomeVisitor> {
//     ...
//   }
//
// This is an example of the Curiously Recurring Template Pattern
// (see http://en.wikipedia.org/wiki/Curiously_recurring_template_pattern).
// We use CRTP to guarantee aggressive compile-time optimizations (i.e.
// inlining and specialization of StaticVisitor::VisitPointers methods).
template<typename StaticVisitor>
class StaticNewSpaceVisitor : public StaticVisitorBase {
 public:
  static void Initialize();

  INLINE(static int IterateBody(Map* map, HeapObject* obj)) {
    return table_.GetVisitor(map)(map, obj);
  }

  INLINE(static void VisitPointers(Heap* heap, Object** start, Object** end)) {
    for (Object** p = start; p < end; p++) StaticVisitor::VisitPointer(heap, p);
  }

 private:
  INLINE(static int VisitJSFunction(Map* map, HeapObject* object)) {
    Heap* heap = map->GetHeap();
    VisitPointers(heap,
                  HeapObject::RawField(object, JSFunction::kPropertiesOffset),
                  HeapObject::RawField(object, JSFunction::kCodeEntryOffset));

    // Don't visit code entry. We are using this visitor only during scavenges.

    VisitPointers(
        heap,
        HeapObject::RawField(object,
                             JSFunction::kCodeEntryOffset + kPointerSize),
        HeapObject::RawField(object,
                             JSFunction::kNonWeakFieldsEndOffset));
    return JSFunction::kSize;
  }

  INLINE(static int VisitByteArray(Map* map, HeapObject* object)) {
    return reinterpret_cast<ByteArray*>(object)->ByteArraySize();
  }

  INLINE(static int VisitFixedDoubleArray(Map* map, HeapObject* object)) {
    int length = reinterpret_cast<FixedDoubleArray*>(object)->length();
    return FixedDoubleArray::SizeFor(length);
  }

  INLINE(static int VisitFixedTypedArray(Map* map, HeapObject* object)) {
    return reinterpret_cast<FixedTypedArrayBase*>(object)->size();
  }

  INLINE(static int VisitJSObject(Map* map, HeapObject* object)) {
    return JSObjectVisitor::Visit(map, object);
  }

  INLINE(static int VisitSeqOneByteString(Map* map, HeapObject* object)) {
    return SeqOneByteString::cast(object)->
        SeqOneByteStringSize(map->instance_type());
  }

  INLINE(static int VisitSeqTwoByteString(Map* map, HeapObject* object)) {
    return SeqTwoByteString::cast(object)->
        SeqTwoByteStringSize(map->instance_type());
  }

  INLINE(static int VisitFreeSpace(Map* map, HeapObject* object)) {
    return FreeSpace::cast(object)->Size();
  }

  INLINE(static int VisitJSArrayBuffer(Map* map, HeapObject* object));
  INLINE(static int VisitJSTypedArray(Map* map, HeapObject* object));
  INLINE(static int VisitJSDataView(Map* map, HeapObject* object));

  class DataObjectVisitor {
   public:
    template<int object_size>
    static inline int VisitSpecialized(Map* map, HeapObject* object) {
      return object_size;
    }

    INLINE(static int Visit(Map* map, HeapObject* object)) {
      return map->instance_size();
    }
  };

  typedef FlexibleBodyVisitor<StaticVisitor,
                              StructBodyDescriptor,
                              int> StructVisitor;

  typedef FlexibleBodyVisitor<StaticVisitor,
                              JSObject::BodyDescriptor,
                              int> JSObjectVisitor;

  typedef int (*Callback)(Map* map, HeapObject* object);

  static VisitorDispatchTable<Callback> table_;
};


template<typename StaticVisitor>
VisitorDispatchTable<typename StaticNewSpaceVisitor<StaticVisitor>::Callback>
    StaticNewSpaceVisitor<StaticVisitor>::table_;


// Base class for visitors used to transitively mark the entire heap.
// IterateBody returns nothing.
// Certain types of objects might not be handled by this base class and
// no visitor function is registered by the generic initialization. A
// specialized visitor function needs to be provided by the inheriting
// class itself for those cases.
//
// This class is intended to be used in the following way:
//
//   class SomeVisitor : public StaticMarkingVisitor<SomeVisitor> {
//     ...
//   }
//
// This is an example of the Curiously Recurring Template Pattern.
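//
// A sketch of a concrete marker (illustrative; the name MarkingPass is
// hypothetical):
//
//   class MarkingPass : public StaticMarkingVisitor<MarkingPass> {
//    public:
//     INLINE(static void VisitPointer(Heap* heap, Object** p)) {
//       // Mark the object *p points to, e.g. by pushing it on a work list.
//     }
//   };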
template<typename StaticVisitor>
class StaticMarkingVisitor : public StaticVisitorBase {
 public:
  static void Initialize();

  INLINE(static void IterateBody(Map* map, HeapObject* obj)) {
    table_.GetVisitor(map)(map, obj);
  }

  INLINE(static void VisitPropertyCell(Map* map, HeapObject* object));
  INLINE(static void VisitAllocationSite(Map* map, HeapObject* object));
  INLINE(static void VisitCodeEntry(Heap* heap, Address entry_address));
  INLINE(static void VisitEmbeddedPointer(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitCell(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitDebugTarget(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitCodeTarget(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitCodeAgeSequence(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitExternalReference(RelocInfo* rinfo)) { }
  INLINE(static void VisitRuntimeEntry(RelocInfo* rinfo)) { }
  // Skip the weak next code link in a code object.
  INLINE(static void VisitNextCodeLink(Heap* heap, Object** slot)) { }

  // TODO(mstarzinger): This should be made protected once refactoring is done.
  // Mark non-optimized code for functions inlined into the given optimized
  // code. This will prevent it from being flushed.
  static void MarkInlinedFunctionsCode(Heap* heap, Code* code);

 protected:
  INLINE(static void VisitMap(Map* map, HeapObject* object));
  INLINE(static void VisitCode(Map* map, HeapObject* object));
  INLINE(static void VisitSharedFunctionInfo(Map* map, HeapObject* object));
  INLINE(static void VisitConstantPoolArray(Map* map, HeapObject* object));
  INLINE(static void VisitJSFunction(Map* map, HeapObject* object));
  INLINE(static void VisitJSRegExp(Map* map, HeapObject* object));
  INLINE(static void VisitJSArrayBuffer(Map* map, HeapObject* object));
  INLINE(static void VisitJSTypedArray(Map* map, HeapObject* object));
  INLINE(static void VisitJSDataView(Map* map, HeapObject* object));
  INLINE(static void VisitNativeContext(Map* map, HeapObject* object));

  // Mark pointers in a Map and its TransitionArray together, possibly
  // treating transitions or back pointers as weak.
  static void MarkMapContents(Heap* heap, Map* map);
  static void MarkTransitionArray(Heap* heap, TransitionArray* transitions);

  // Code flushing support.
  INLINE(static bool IsFlushable(Heap* heap, JSFunction* function));
  INLINE(static bool IsFlushable(Heap* heap, SharedFunctionInfo* shared_info));

  // Helpers used by code flushing support that visit pointer fields and treat
  // references to code objects either strongly or weakly.
  static void VisitSharedFunctionInfoStrongCode(Heap* heap, HeapObject* object);
  static void VisitSharedFunctionInfoWeakCode(Heap* heap, HeapObject* object);
  static void VisitJSFunctionStrongCode(Heap* heap, HeapObject* object);
  static void VisitJSFunctionWeakCode(Heap* heap, HeapObject* object);

  class DataObjectVisitor {
   public:
    template<int size>
    static inline void VisitSpecialized(Map* map, HeapObject* object) {
    }

    INLINE(static void Visit(Map* map, HeapObject* object)) {
    }
  };

  typedef FlexibleBodyVisitor<StaticVisitor,
                              FixedArray::BodyDescriptor,
                              void> FixedArrayVisitor;

  typedef FlexibleBodyVisitor<StaticVisitor,
                              JSObject::BodyDescriptor,
                              void> JSObjectVisitor;

  typedef FlexibleBodyVisitor<StaticVisitor,
                              StructBodyDescriptor,
                              void> StructObjectVisitor;

  typedef void (*Callback)(Map* map, HeapObject* object);

  static VisitorDispatchTable<Callback> table_;
};


template<typename StaticVisitor>
VisitorDispatchTable<typename StaticMarkingVisitor<StaticVisitor>::Callback>
    StaticMarkingVisitor<StaticVisitor>::table_;


} }  // namespace v8::internal

#endif  // V8_OBJECTS_VISITING_H_