V8 3.14.5 (node 0.10.28)
V8 is Google's open source JavaScript engine.
objects-visiting-inl.h
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_OBJECTS_VISITING_INL_H_
#define V8_OBJECTS_VISITING_INL_H_


namespace v8 {
namespace internal {

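// The two Initialize() functions below populate the static visitor dispatch
// tables used by the GC: StaticNewSpaceVisitor drives the scavenger and its
// visitors return the visited object's size (int), while StaticMarkingVisitor
// drives mark-compact marking and its visitors return void.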
template<typename StaticVisitor>
void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
  table_.Register(kVisitShortcutCandidate,
                  &FixedBodyVisitor<StaticVisitor,
                  ConsString::BodyDescriptor,
                  int>::Visit);

  table_.Register(kVisitConsString,
                  &FixedBodyVisitor<StaticVisitor,
                  ConsString::BodyDescriptor,
                  int>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor,
                  SlicedString::BodyDescriptor,
                  int>::Visit);

  table_.Register(kVisitFixedArray,
                  &FlexibleBodyVisitor<StaticVisitor,
                  FixedArray::BodyDescriptor,
                  int>::Visit);

  table_.Register(kVisitFixedDoubleArray, &VisitFixedDoubleArray);

  table_.Register(kVisitNativeContext,
                  &FixedBodyVisitor<StaticVisitor,
                  Context::ScavengeBodyDescriptor,
                  int>::Visit);

  table_.Register(kVisitByteArray, &VisitByteArray);

  table_.Register(kVisitSharedFunctionInfo,
                  &FixedBodyVisitor<StaticVisitor,
                  SharedFunctionInfo::BodyDescriptor,
                  int>::Visit);

  table_.Register(kVisitSeqAsciiString, &VisitSeqAsciiString);

  table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  table_.Register(kVisitFreeSpace, &VisitFreeSpace);

  table_.Register(kVisitJSWeakMap, &JSObjectVisitor::Visit);

  table_.Register(kVisitJSRegExp, &JSObjectVisitor::Visit);

  table_.template RegisterSpecializations<DataObjectVisitor,
                                          kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor,
                                          kVisitJSObject,
                                          kVisitJSObjectGeneric>();
  table_.template RegisterSpecializations<StructVisitor,
                                          kVisitStruct,
                                          kVisitStructGeneric>();
}

template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::Initialize() {
  table_.Register(kVisitShortcutCandidate,
                  &FixedBodyVisitor<StaticVisitor,
                  ConsString::BodyDescriptor,
                  void>::Visit);

  table_.Register(kVisitConsString,
                  &FixedBodyVisitor<StaticVisitor,
                  ConsString::BodyDescriptor,
                  void>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor,
                  SlicedString::BodyDescriptor,
                  void>::Visit);

  table_.Register(kVisitFixedArray,
                  &FlexibleBodyVisitor<StaticVisitor,
                  FixedArray::BodyDescriptor,
                  void>::Visit);

  table_.Register(kVisitFixedDoubleArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitNativeContext, &VisitNativeContext);

  table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqAsciiString, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitJSWeakMap, &StaticVisitor::VisitJSWeakMap);

  table_.Register(kVisitOddball,
                  &FixedBodyVisitor<StaticVisitor,
                  Oddball::BodyDescriptor,
                  void>::Visit);

  table_.Register(kVisitMap, &VisitMap);

  table_.Register(kVisitCode, &VisitCode);

  table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  // Registration for kVisitJSRegExp is done by StaticVisitor.

  table_.Register(kVisitPropertyCell,
                  &FixedBodyVisitor<StaticVisitor,
                  JSGlobalPropertyCell::BodyDescriptor,
                  void>::Visit);

  table_.template RegisterSpecializations<DataObjectVisitor,
                                          kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor,
                                          kVisitJSObject,
                                          kVisitJSObjectGeneric>();

  table_.template RegisterSpecializations<StructObjectVisitor,
                                          kVisitStruct,
                                          kVisitStructGeneric>();
}

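// The Visit* helpers below mark objects reachable from code: code entries in
// JSFunctions, embedded heap pointers, global property cells, and debug/call
// targets found in relocation info. Most of them also record the visited slot
// with the mark-compact collector so it can be updated if the target moves.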
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeEntry(
    Heap* heap, Address entry_address) {
  Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
  heap->mark_compact_collector()->RecordCodeEntrySlot(entry_address, code);
  StaticVisitor::MarkObject(heap, code);
}

template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitEmbeddedPointer(
    Heap* heap, RelocInfo* rinfo) {
  ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
  ASSERT(!rinfo->target_object()->IsConsString());
  HeapObject* object = HeapObject::cast(rinfo->target_object());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, object);
  StaticVisitor::MarkObject(heap, object);
}

template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitGlobalPropertyCell(
    Heap* heap, RelocInfo* rinfo) {
  ASSERT(rinfo->rmode() == RelocInfo::GLOBAL_PROPERTY_CELL);
  JSGlobalPropertyCell* cell = rinfo->target_cell();
  StaticVisitor::MarkObject(heap, cell);
}

template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitDebugTarget(
    Heap* heap, RelocInfo* rinfo) {
  ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
          rinfo->IsPatchedReturnSequence()) ||
         (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
          rinfo->IsPatchedDebugBreakSlotSequence()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}

template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget(
    Heap* heap, RelocInfo* rinfo) {
  ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  // Monomorphic ICs are preserved when possible, but need to be flushed
  // when they might be keeping a Context alive, or when the heap is about
  // to be serialized.
  if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub()
      && (target->ic_state() == MEGAMORPHIC || heap->flush_monomorphic_ics() ||
          Serializer::enabled() || target->ic_age() != heap->global_ic_age())) {
    IC::Clear(rinfo->pc());
    target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  }
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}

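// Native contexts are visited through a body descriptor that covers only the
// strong slots; the weak slots (FIRST_WEAK_SLOT and beyond) are recorded for
// pointer updating below but deliberately not marked through.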
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitNativeContext(
    Map* map, HeapObject* object) {
  FixedBodyVisitor<StaticVisitor,
                   Context::MarkCompactBodyDescriptor,
                   void>::Visit(map, object);

  MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();
  for (int idx = Context::FIRST_WEAK_SLOT;
       idx < Context::NATIVE_CONTEXT_SLOTS;
       ++idx) {
    Object** slot =
        HeapObject::RawField(object, FixedArray::OffsetOfElementAt(idx));
    collector->RecordSlot(slot, slot, *slot);
  }
}

template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitMap(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  Map* map_object = Map::cast(object);

  // Clears the cache of ICs related to this map.
  if (FLAG_cleanup_code_caches_at_gc) {
    map_object->ClearCodeCache(heap);
  }

  // When map collection is enabled we have to mark through map's
  // transitions and back pointers in a special way to make these links
  // weak. Only maps for subclasses of JSReceiver can have transitions.
  STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
  if (FLAG_collect_maps &&
      map_object->instance_type() >= FIRST_JS_RECEIVER_TYPE) {
    MarkMapContents(heap, map_object);
  } else {
    StaticVisitor::VisitPointers(heap,
        HeapObject::RawField(object, Map::kPointerFieldsBeginOffset),
        HeapObject::RawField(object, Map::kPointerFieldsEndOffset));
  }
}

template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCode(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  Code* code = Code::cast(object);
  if (FLAG_cleanup_code_caches_at_gc) {
    code->ClearTypeFeedbackCells(heap);
  }
  code->CodeIterateBody<StaticVisitor>(heap);
}

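// SharedFunctionInfo and JSFunction get special treatment when code flushing
// is enabled: flushable candidates are handed to the code flusher and their
// code reference is visited weakly (see the *WeakCode visitors below).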
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
  if (shared->ic_age() != heap->global_ic_age()) {
    shared->ResetForNewContext(heap->global_ic_age());
  }
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (IsFlushable(heap, shared)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(shared);
      // Treat the reference to the code object weakly.
      VisitSharedFunctionInfoWeakCode(heap, object);
      return;
    }
  }
  VisitSharedFunctionInfoStrongCode(heap, object);
}

template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSFunction* function = JSFunction::cast(object);
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (IsFlushable(heap, function)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(function);
      // Visit shared function info immediately to avoid double checking
      // of its flushability later. This is just an optimization because
      // the shared function info would eventually be visited.
      SharedFunctionInfo* shared = function->unchecked_shared();
      if (StaticVisitor::MarkObjectWithoutPush(heap, shared)) {
        StaticVisitor::MarkObject(heap, shared->map());
        VisitSharedFunctionInfoWeakCode(heap, shared);
      }
      // Treat the reference to the code object weakly.
      VisitJSFunctionWeakCode(heap, object);
      return;
    } else {
      // Visit all unoptimized code objects to prevent flushing them.
      StaticVisitor::MarkObject(heap, function->shared()->code());
      if (function->code()->kind() == Code::OPTIMIZED_FUNCTION) {
        MarkInlinedFunctionsCode(heap, function->code());
      }
    }
  }
  VisitJSFunctionStrongCode(heap, object);
}

template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(
    Map* map, HeapObject* object) {
  int last_property_offset =
      JSRegExp::kSize + kPointerSize * map->inobject_properties();
  StaticVisitor::VisitPointers(map->GetHeap(),
      HeapObject::RawField(object, JSRegExp::kPropertiesOffset),
      HeapObject::RawField(object, last_property_offset));
}

template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(
    Heap* heap, Map* map) {
  // Make sure that the back pointer stored either in the map itself or
  // inside its transitions array is marked. Skip recording the back
  // pointer slot since map space is not compacted.
  StaticVisitor::MarkObject(heap, HeapObject::cast(map->GetBackPointer()));

  // Treat pointers in the transitions array as weak and also mark that
  // array to prevent visiting it later. Skip recording the transition
  // array slot, since it will be implicitly recorded when the pointer
  // fields of this map are visited.
  TransitionArray* transitions = map->unchecked_transition_array();
  if (transitions->IsTransitionArray()) {
    MarkTransitionArray(heap, transitions);
  } else {
    // Already marked by marking map->GetBackPointer() above.
    ASSERT(transitions->IsMap() || transitions->IsUndefined());
  }

  // Mark the pointer fields of the Map. Since the transitions array has
  // been marked already, it is fine that one of these fields contains a
  // pointer to it.
  StaticVisitor::VisitPointers(heap,
      HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
      HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
}

template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkTransitionArray(
    Heap* heap, TransitionArray* transitions) {
  if (!StaticVisitor::MarkObjectWithoutPush(heap, transitions)) return;

  // Simple transitions do not have keys nor prototype transitions.
  if (transitions->IsSimpleTransition()) return;

  if (transitions->HasPrototypeTransitions()) {
    // Mark prototype transitions array but do not push it onto marking
    // stack, this will make references from it weak. We will clean dead
    // prototype transitions in ClearNonLiveTransitions.
    Object** slot = transitions->GetPrototypeTransitionsSlot();
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  }

  for (int i = 0; i < transitions->number_of_transitions(); ++i) {
    StaticVisitor::VisitPointer(heap, transitions->GetKeySlot(i));
  }
}

template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkInlinedFunctionsCode(
    Heap* heap, Code* code) {
  // For optimized functions we should retain both non-optimized version
  // of its code and non-optimized version of all inlined functions.
  // This is required to support bailing out from inlined code.
  DeoptimizationInputData* data =
      DeoptimizationInputData::cast(code->deoptimization_data());
  FixedArray* literals = data->LiteralArray();
  for (int i = 0, count = data->InlinedFunctionCount()->value();
       i < count;
       i++) {
    JSFunction* inlined = JSFunction::cast(literals->get(i));
    StaticVisitor::MarkObject(heap, inlined->shared()->code());
  }
}

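// Helpers used by the code flushing heuristics (IsFlushable) below.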
inline static bool IsValidNonBuiltinContext(Object* context) {
  return context->IsContext() &&
      !Context::cast(context)->global_object()->IsJSBuiltinsObject();
}


inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
  Object* undefined = heap->undefined_value();
  return (info->script() != undefined) &&
      (reinterpret_cast<Script*>(info->script())->source() != undefined);
}

template<typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
    Heap* heap, JSFunction* function) {
  SharedFunctionInfo* shared_info = function->unchecked_shared();

  // Code is either on stack, in compilation cache or referenced
  // by optimized version of function.
  MarkBit code_mark = Marking::MarkBitFrom(function->code());
  if (code_mark.Get()) {
    if (!Marking::MarkBitFrom(shared_info).Get()) {
      shared_info->set_code_age(0);
    }
    return false;
  }

  // The function must have a valid context and not be a builtin.
  if (!IsValidNonBuiltinContext(function->unchecked_context())) {
    return false;
  }

  // We do not flush code for optimized functions.
  if (function->code() != shared_info->code()) {
    return false;
  }

  return IsFlushable(heap, shared_info);
}

template<typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
    Heap* heap, SharedFunctionInfo* shared_info) {
  // Code is either on stack, in compilation cache or referenced
  // by optimized version of function.
  MarkBit code_mark = Marking::MarkBitFrom(shared_info->code());
  if (code_mark.Get()) {
    return false;
  }

  // The function must be compiled and have the source code available,
  // to be able to recompile it in case we need the function again.
  if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
    return false;
  }

  // We never flush code for API functions.
  Object* function_data = shared_info->function_data();
  if (function_data->IsFunctionTemplateInfo()) {
    return false;
  }

  // Only flush code for functions.
  if (shared_info->code()->kind() != Code::FUNCTION) {
    return false;
  }

  // Function must be lazy compilable.
  if (!shared_info->allows_lazy_compilation()) {
    return false;
  }

  // If this is a full script wrapped in a function we do not flush the code.
  if (shared_info->is_toplevel()) {
    return false;
  }

  // TODO(mstarzinger): The following will soon be replaced by a new way of
  // aging code, that is based on an aging stub in the function prologue.

  // How many collections newly compiled code object will survive before being
  // flushed.
  static const int kCodeAgeThreshold = 5;

  // Age this shared function info.
  if (shared_info->code_age() < kCodeAgeThreshold) {
    shared_info->set_code_age(shared_info->code_age() + 1);
    return false;
  }

  return true;
}

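// The *StrongCode visitors below visit all pointer fields, including the code
// reference; the *WeakCode variants skip SharedFunctionInfo::kCodeOffset and
// JSFunction::kCodeEntryOffset so the code flusher can treat those references
// weakly.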
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoStrongCode(
    Heap* heap, HeapObject* object) {
  StaticVisitor::BeforeVisitingSharedFunctionInfo(object);
  Object** start_slot =
      HeapObject::RawField(object,
                           SharedFunctionInfo::BodyDescriptor::kStartOffset);
  Object** end_slot =
      HeapObject::RawField(object,
                           SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}

template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoWeakCode(
    Heap* heap, HeapObject* object) {
  StaticVisitor::BeforeVisitingSharedFunctionInfo(object);
  Object** name_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kNameOffset);
  StaticVisitor::VisitPointer(heap, name_slot);

  // Skip visiting kCodeOffset as it is treated weakly here.
  STATIC_ASSERT(SharedFunctionInfo::kNameOffset + kPointerSize ==
      SharedFunctionInfo::kCodeOffset);
  STATIC_ASSERT(SharedFunctionInfo::kCodeOffset + kPointerSize ==
      SharedFunctionInfo::kOptimizedCodeMapOffset);

  Object** start_slot =
      HeapObject::RawField(object,
                           SharedFunctionInfo::kOptimizedCodeMapOffset);
  Object** end_slot =
      HeapObject::RawField(object,
                           SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}

template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);

  VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
      JSFunction::kPrototypeOrInitialMapOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot =
      HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}

template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);

  // Skip visiting kCodeEntryOffset as it is treated weakly here.
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
      JSFunction::kPrototypeOrInitialMapOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot =
      HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}

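// Code::CodeIterateBody visits the pointer-holding header fields of a Code
// object (relocation info, handler table, deoptimization data and type
// feedback info) and then every RelocInfo entry selected by mode_mask.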
void Code::CodeIterateBody(ObjectVisitor* v) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::GLOBAL_PROPERTY_CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);

  // There are two places where we iterate code bodies: here and the
  // templated CodeIterateBody (below). They should be kept in sync.
  IteratePointer(v, kRelocationInfoOffset);
  IteratePointer(v, kHandlerTableOffset);
  IteratePointer(v, kDeoptimizationDataOffset);
  IteratePointer(v, kTypeFeedbackInfoOffset);

  RelocIterator it(this, mode_mask);
  for (; !it.done(); it.next()) {
    it.rinfo()->Visit(v);
  }
}

template<typename StaticVisitor>
void Code::CodeIterateBody(Heap* heap) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::GLOBAL_PROPERTY_CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);

  // There are two places where we iterate code bodies: here and the
  // non-templated CodeIterateBody (above). They should be kept in sync.
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kRelocationInfoOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kHandlerTableOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kDeoptimizationDataOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kTypeFeedbackInfoOffset));

  RelocIterator it(this, mode_mask);
  for (; !it.done(); it.next()) {
    it.rinfo()->template Visit<StaticVisitor>(heap);
  }
}

} }  // namespace v8::internal

#endif  // V8_OBJECTS_VISITING_INL_H_