#ifndef V8_OBJECTS_VISITING_INL_H_
#define V8_OBJECTS_VISITING_INL_H_

namespace v8 {
namespace internal {

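// Visitor dispatch for garbage collection: StaticNewSpaceVisitor drives the
// scavenger and its visitors return the object size, StaticMarkingVisitor
// drives mark-compact marking and its visitors return void. Both dispatch
// through a VisitorDispatchTable indexed by the visitor id stored in each
// object's map. A rough usage sketch (assuming the IterateBody() entry point
// that objects-visiting.h declares for these classes):
//
//   int size = StaticNewSpaceVisitor<MyVisitor>::IterateBody(map, obj);
//
// which resolves through table_.GetVisitor(map) to the handler registered
// below for that map's visitor id.
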
template<typename StaticVisitor>
void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
  table_.Register(kVisitShortcutCandidate,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    int>::Visit);
  table_.Register(kVisitConsString,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    int>::Visit);
  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor,
                                    SlicedString::BodyDescriptor,
                                    int>::Visit);
  table_.Register(kVisitSymbol,
                  &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor,
                                    int>::Visit);
  table_.Register(kVisitFixedArray,
                  &FlexibleBodyVisitor<StaticVisitor,
                                       FixedArray::BodyDescriptor,
                                       int>::Visit);
  table_.Register(kVisitFixedDoubleArray, &VisitFixedDoubleArray);
  table_.Register(kVisitFixedTypedArray, &VisitFixedTypedArray);
  table_.Register(kVisitFixedFloat64Array, &VisitFixedTypedArray);
  table_.Register(kVisitNativeContext,
                  &FixedBodyVisitor<StaticVisitor,
                                    Context::ScavengeBodyDescriptor,
                                    int>::Visit);
  table_.Register(kVisitByteArray, &VisitByteArray);
  table_.Register(kVisitSharedFunctionInfo,
                  &FixedBodyVisitor<StaticVisitor,
                                    SharedFunctionInfo::BodyDescriptor,
                                    int>::Visit);
  table_.Register(kVisitSeqOneByteString, &VisitSeqOneByteString);
  table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString);
  table_.Register(kVisitJSFunction, &VisitJSFunction);
  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);
  table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);
  table_.Register(kVisitJSDataView, &VisitJSDataView);
  table_.Register(kVisitFreeSpace, &VisitFreeSpace);
  table_.Register(kVisitJSWeakMap, &JSObjectVisitor::Visit);
  table_.Register(kVisitJSWeakSet, &JSObjectVisitor::Visit);
  table_.Register(kVisitJSRegExp, &JSObjectVisitor::Visit);

  table_.template RegisterSpecializations<DataObjectVisitor,
                                          kVisitDataObject,
                                          kVisitDataObjectGeneric>();
  table_.template RegisterSpecializations<JSObjectVisitor,
                                          kVisitJSObject,
                                          kVisitJSObjectGeneric>();
  table_.template RegisterSpecializations<StructVisitor,
                                          kVisitStruct,
                                          kVisitStructGeneric>();
}


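// The next three visitors special-case the array-buffer family in new space:
// each of these objects embeds weak next/views links, so the fields before
// and after the weak slots are visited as strong pointers while the weak
// slots themselves are skipped.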
template<typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  STATIC_ASSERT(
      JSArrayBuffer::kWeakFirstViewOffset ==
      JSArrayBuffer::kWeakNextOffset + kPointerSize);
  VisitPointers(
      heap,
      HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
  VisitPointers(
      heap,
      HeapObject::RawField(object,
          JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize),
      HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
  return JSArrayBuffer::kSizeWithInternalFields;
}


template<typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSTypedArray(
    Map* map, HeapObject* object) {
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSTypedArray::kWeakNextOffset));
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object,
          JSTypedArray::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
  return JSTypedArray::kSizeWithInternalFields;
}


template<typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSDataView(
    Map* map, HeapObject* object) {
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset));
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object,
          JSDataView::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
  return JSDataView::kSizeWithInternalFields;
}


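// The mark-compact table differs from the new-space table above: visitors
// return void, purely data-carrying objects (byte arrays, free space,
// sequential strings) are routed to DataObjectVisitor since they contain no
// pointers, and JSWeakMap/JSWeakSet defer to the concrete visitor's
// VisitWeakCollection so that their entries are treated weakly.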
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::Initialize() {
  table_.Register(kVisitShortcutCandidate,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    void>::Visit);
  table_.Register(kVisitConsString,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    void>::Visit);
  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor,
                                    SlicedString::BodyDescriptor,
                                    void>::Visit);
  table_.Register(kVisitSymbol,
                  &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor,
                                    void>::Visit);
  table_.Register(kVisitFixedArray, &FixedArrayVisitor::Visit);
  table_.Register(kVisitFixedDoubleArray, &DataObjectVisitor::Visit);
  table_.Register(kVisitFixedTypedArray, &DataObjectVisitor::Visit);
  table_.Register(kVisitFixedFloat64Array, &DataObjectVisitor::Visit);
  table_.Register(kVisitConstantPoolArray, &VisitConstantPoolArray);
  table_.Register(kVisitNativeContext, &VisitNativeContext);
  table_.Register(kVisitAllocationSite, &VisitAllocationSite);
  table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);
  table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit);
  table_.Register(kVisitSeqOneByteString, &DataObjectVisitor::Visit);
  table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);
  table_.Register(kVisitJSWeakMap, &StaticVisitor::VisitWeakCollection);
  table_.Register(kVisitJSWeakSet, &StaticVisitor::VisitWeakCollection);
  table_.Register(kVisitOddball,
                  &FixedBodyVisitor<StaticVisitor, Oddball::BodyDescriptor,
                                    void>::Visit);
  table_.Register(kVisitMap, &VisitMap);
  table_.Register(kVisitCode, &VisitCode);
  table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);
  table_.Register(kVisitJSFunction, &VisitJSFunction);
  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);
  table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);
  table_.Register(kVisitJSDataView, &VisitJSDataView);

  // Registration for kVisitJSRegExp is done by StaticVisitor.

  table_.Register(kVisitCell,
                  &FixedBodyVisitor<StaticVisitor, Cell::BodyDescriptor,
                                    void>::Visit);
  table_.Register(kVisitPropertyCell, &VisitPropertyCell);

  table_.template RegisterSpecializations<DataObjectVisitor,
                                          kVisitDataObject,
                                          kVisitDataObjectGeneric>();
  table_.template RegisterSpecializations<JSObjectVisitor,
                                          kVisitJSObject,
                                          kVisitJSObjectGeneric>();
  table_.template RegisterSpecializations<StructObjectVisitor,
                                          kVisitStruct,
                                          kVisitStructGeneric>();
}


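// A code entry is a raw instruction-start address rather than a tagged
// pointer, so the slot is recorded with the collector (for pointer updating
// during compaction) before the corresponding code object is marked.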
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeEntry(
    Heap* heap, Address entry_address) {
  Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
  heap->mark_compact_collector()->RecordCodeEntrySlot(entry_address, code);
  StaticVisitor::MarkObject(heap, code);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitEmbeddedPointer(
    Heap* heap, RelocInfo* rinfo) {
  ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
  ASSERT(!rinfo->target_object()->IsConsString());
  HeapObject* object = HeapObject::cast(rinfo->target_object());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, object);
  // Objects embedded weakly in code are only marked if their host is not
  // treating them as weak references.
  if (!rinfo->host()->IsWeakObject(object)) {
    StaticVisitor::MarkObject(heap, object);
  }
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCell(
    Heap* heap, RelocInfo* rinfo) {
  ASSERT(rinfo->rmode() == RelocInfo::CELL);
  Cell* cell = rinfo->target_cell();
  // No need to record slots because the cell space is not compacted during GC.
  if (!rinfo->host()->IsWeakObject(cell)) {
    StaticVisitor::MarkObject(heap, cell);
  }
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitDebugTarget(
    Heap* heap, RelocInfo* rinfo) {
  ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
          rinfo->IsPatchedReturnSequence()) ||
         (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
          rinfo->IsPatchedDebugBreakSlotSequence()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget(
    Heap* heap, RelocInfo* rinfo) {
  ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  // Monomorphic ICs are preserved when possible, but need to be flushed
  // when they might be keeping a Context alive, or when the heap is about
  // to be serialized.
  if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub()
      && (target->ic_state() == MEGAMORPHIC || target->ic_state() == GENERIC ||
          target->ic_state() == POLYMORPHIC || heap->flush_monomorphic_ics() ||
          Serializer::enabled() || target->ic_age() != heap->global_ic_age())) {
    IC::Clear(target->GetIsolate(), rinfo->pc(),
              rinfo->host()->constant_pool());
    target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  }
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


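// The code-age stub is invoked by the patched code-age sequence and must
// therefore survive the collection like any other code target.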
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeAgeSequence(
    Heap* heap, RelocInfo* rinfo) {
  ASSERT(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
  Code* target = rinfo->code_age_stub();
  ASSERT(target != NULL);
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitNativeContext(
    Map* map, HeapObject* object) {
  FixedBodyVisitor<StaticVisitor,
                   Context::MarkCompactBodyDescriptor,
                   void>::Visit(map, object);

  MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();
  for (int idx = Context::FIRST_WEAK_SLOT;
       idx < Context::NATIVE_CONTEXT_SLOTS;
       ++idx) {
    Object** slot = Context::cast(object)->RawFieldOfElementAt(idx);
    collector->RecordSlot(slot, slot, *slot);
  }
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitMap(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  Map* map_object = Map::cast(object);

  // Clears the cache of ICs related to this map.
  if (FLAG_cleanup_code_caches_at_gc) {
    map_object->ClearCodeCache(heap);
  }

  // When map collection is enabled we have to mark through map's transitions
  // and back pointers in a special way to make these links weak.
  if (FLAG_collect_maps && map_object->CanTransition()) {
    MarkMapContents(heap, map_object);
  } else {
    StaticVisitor::VisitPointers(heap,
        HeapObject::RawField(object, Map::kPointerFieldsBeginOffset),
        HeapObject::RawField(object, Map::kPointerFieldsEndOffset));
  }
}


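// Property cells and allocation sites carry a dependent-code list. With map
// collection enabled it is treated weakly below: the list is marked without
// being pushed onto the marking deque, so code it references can still die
// and be cleared when non-live references are processed.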
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitPropertyCell(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  Object** slot =
      HeapObject::RawField(object, PropertyCell::kDependentCodeOffset);
  if (FLAG_collect_maps) {
    // Mark the dependent codes array but do not push it onto the marking
    // stack; references from it will be treated weakly.
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  } else {
    StaticVisitor::VisitPointer(heap, slot);
  }

  StaticVisitor::VisitPointers(heap,
      HeapObject::RawField(object, PropertyCell::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, PropertyCell::kPointerFieldsEndOffset));
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitAllocationSite(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  Object** slot =
      HeapObject::RawField(object, AllocationSite::kDependentCodeOffset);
  if (FLAG_collect_maps) {
    // Mark the dependent codes array but do not push it onto the marking
    // stack; references from it will be treated weakly.
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  } else {
    StaticVisitor::VisitPointer(heap, slot);
  }

  StaticVisitor::VisitPointers(heap,
      HeapObject::RawField(object, AllocationSite::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, AllocationSite::kPointerFieldsEndOffset));
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCode(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  Code* code = Code::cast(object);
  if (FLAG_cleanup_code_caches_at_gc) {
    code->ClearTypeFeedbackInfo(heap);
  }
  if (FLAG_age_code && !Serializer::enabled()) {
    code->MakeOlder(heap->mark_compact_collector()->marking_parity());
  }
  code->CodeIterateBody<StaticVisitor>(heap);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
  if (shared->ic_age() != heap->global_ic_age()) {
    shared->ResetForNewContext(heap->global_ic_age());
  }
  if (FLAG_cache_optimized_code &&
      FLAG_flush_optimized_code_cache &&
      !shared->optimized_code_map()->IsSmi()) {
    // Always flush the optimized code map if requested by flag.
    shared->ClearOptimizedCodeMap();
  }
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
      // Add the shared function info holding an optimized code map to
      // the code flusher for processing of code maps after marking.
      collector->code_flusher()->AddOptimizedCodeMap(shared);
      // Treat all references within the code map weakly by marking the
      // code map itself but not pushing it onto the marking deque.
      FixedArray* code_map = FixedArray::cast(shared->optimized_code_map());
      StaticVisitor::MarkObjectWithoutPush(heap, code_map);
    }
    if (IsFlushable(heap, shared)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(shared);
      // Treat the reference to the code object weakly.
      VisitSharedFunctionInfoWeakCode(heap, object);
      return;
    }
  } else {
    if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
      // Flush optimized code map on major GCs without code flushing,
      // needed because cached code doesn't contain breakpoints.
      shared->ClearOptimizedCodeMap();
    }
  }
  VisitSharedFunctionInfoStrongCode(heap, object);
}


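// Constant pool arrays mix raw code-entry addresses with tagged heap
// pointers, so each of the two ranges is walked with the matching visitor.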
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitConstantPoolArray(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  ConstantPoolArray* constant_pool = ConstantPoolArray::cast(object);
  for (int i = 0; i < constant_pool->count_of_code_ptr_entries(); i++) {
    int index = constant_pool->first_code_ptr_index() + i;
    Address code_entry = reinterpret_cast<Address>(
        constant_pool->RawFieldOfElementAt(index));
    StaticVisitor::VisitCodeEntry(heap, code_entry);
  }
  for (int i = 0; i < constant_pool->count_of_heap_ptr_entries(); i++) {
    int index = constant_pool->first_heap_ptr_index() + i;
    StaticVisitor::VisitPointer(heap,
                                constant_pool->RawFieldOfElementAt(index));
  }
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSFunction* function = JSFunction::cast(object);
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (IsFlushable(heap, function)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(function);
      // Visit shared function info immediately to avoid double checking
      // of its flushability later. This is just an optimization because
      // the shared function info would eventually be visited.
      SharedFunctionInfo* shared = function->shared();
      if (StaticVisitor::MarkObjectWithoutPush(heap, shared)) {
        StaticVisitor::MarkObject(heap, shared->map());
        VisitSharedFunctionInfoWeakCode(heap, shared);
      }
      // Treat the reference to the code object weakly.
      VisitJSFunctionWeakCode(heap, object);
      return;
    } else {
      // Visit all unoptimized code objects to prevent flushing them.
      StaticVisitor::MarkObject(heap, function->shared()->code());
      if (function->code()->kind() == Code::OPTIMIZED_FUNCTION) {
        MarkInlinedFunctionsCode(heap, function->code());
      }
    }
  }
  VisitJSFunctionStrongCode(heap, object);
}


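// JSRegExp objects are visited from the properties field through the last
// in-object property; the end offset depends on the number of in-object
// properties recorded in the map.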
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(
    Map* map, HeapObject* object) {
  int last_property_offset =
      JSRegExp::kSize + kPointerSize * map->inobject_properties();
  StaticVisitor::VisitPointers(map->GetHeap(),
      HeapObject::RawField(object, JSRegExp::kPropertiesOffset),
      HeapObject::RawField(object, last_property_offset));
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  STATIC_ASSERT(
      JSArrayBuffer::kWeakFirstViewOffset ==
      JSArrayBuffer::kWeakNextOffset + kPointerSize);
  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object,
          JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize),
      HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSTypedArray(
    Map* map, HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSTypedArray::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object,
          JSTypedArray::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSDataView(
    Map* map, HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object,
          JSDataView::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(
    Heap* heap, Map* map) {
  // Mark the back pointer without recording a slot; map space is not
  // compacted.
  StaticVisitor::MarkObject(heap, HeapObject::cast(map->GetBackPointer()));

  // Treat pointers in the transitions array as weak.
  TransitionArray* transitions = map->unchecked_transition_array();
  if (transitions->IsTransitionArray()) {
    MarkTransitionArray(heap, transitions);
  } else {
    // Already marked by marking map->GetBackPointer() above.
    ASSERT(transitions->IsMap() || transitions->IsUndefined());
  }

  // Descriptor arrays may be shared; visit only the descriptors that belong
  // to this map, plus the array header the first time the array is seen.
  DescriptorArray* descriptors = map->instance_descriptors();
  if (StaticVisitor::MarkObjectWithoutPush(heap, descriptors) &&
      descriptors->length() > 0) {
    StaticVisitor::VisitPointers(heap,
        descriptors->GetFirstElementAddress(),
        descriptors->GetDescriptorStartSlot(0));
  }
  int end = map->NumberOfOwnDescriptors();
  if (end > 0) {
    StaticVisitor::VisitPointers(heap,
        descriptors->GetDescriptorStartSlot(0),
        descriptors->GetDescriptorEndSlot(end));
  }

  // Mark the dependent codes array weakly.
  Object** slot = HeapObject::RawField(map, Map::kDependentCodeOffset);
  HeapObject* obj = HeapObject::cast(*slot);
  heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
  StaticVisitor::MarkObjectWithoutPush(heap, obj);

  // Mark the remaining pointer fields of the map strongly.
  StaticVisitor::VisitPointers(heap,
      HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
      HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkTransitionArray(
    Heap* heap, TransitionArray* transitions) {
  if (!StaticVisitor::MarkObjectWithoutPush(heap, transitions)) return;

  // Simple transitions do not have keys nor prototype transitions.
  if (transitions->IsSimpleTransition()) return;

  if (transitions->HasPrototypeTransitions()) {
    // Mark the prototype transitions array but do not push it onto the
    // marking stack, keeping references from it weak.
    Object** slot = transitions->GetPrototypeTransitionsSlot();
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  }

  for (int i = 0; i < transitions->number_of_transitions(); ++i) {
    StaticVisitor::VisitPointer(heap, transitions->GetKeySlot(i));
  }
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkInlinedFunctionsCode(
    Heap* heap, Code* code) {
  // For optimized functions we should retain both non-optimized version
  // of their code and non-optimized version of all inlined functions.
  // This is required to support bailing out from inlined code.
  DeoptimizationInputData* data =
      DeoptimizationInputData::cast(code->deoptimization_data());
  FixedArray* literals = data->LiteralArray();
  for (int i = 0, count = data->InlinedFunctionCount()->value();
       i < count;
       i++) {
    JSFunction* inlined = JSFunction::cast(literals->get(i));
    StaticVisitor::MarkObject(heap, inlined->shared()->code());
  }
}


inline static bool IsValidNonBuiltinContext(Object* context) {
  return context->IsContext() &&
         !Context::cast(context)->global_object()->IsJSBuiltinsObject();
}


inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
  Object* undefined = heap->undefined_value();
  return (info->script() != undefined) &&
         (reinterpret_cast<Script*>(info->script())->source() != undefined);
}


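// Code flushing heuristics: the IsFlushable() predicates below decide
// whether the unoptimized code of a function may be thrown away and lazily
// recompiled on the next call. Every early return names one reason the code
// has to stay alive.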
template<typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
    Heap* heap, JSFunction* function) {
  SharedFunctionInfo* shared_info = function->shared();

  // Code is either on stack, in compilation cache or referenced
  // by an optimized version of the function.
  MarkBit code_mark = Marking::MarkBitFrom(function->code());
  if (code_mark.Get()) {
    return false;
  }

  // The function must have a valid context and not be a builtin.
  if (!IsValidNonBuiltinContext(function->context())) {
    return false;
  }

  // We do not (yet) flush code for optimized functions.
  if (function->code() != shared_info->code()) {
    return false;
  }

  // Check age of optimized code.
  if (FLAG_age_code && !function->code()->IsOld()) {
    return false;
  }

  return IsFlushable(heap, shared_info);
}


template<typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
    Heap* heap, SharedFunctionInfo* shared_info) {
  // Code is either on stack, in compilation cache or referenced
  // by an optimized version of the function.
  MarkBit code_mark = Marking::MarkBitFrom(shared_info->code());
  if (code_mark.Get()) {
    return false;
  }

  // The function must be compiled and have the source code available,
  // to be able to recompile it in case we need the function again.
  if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
    return false;
  }

  // We never flush code for API functions.
  Object* function_data = shared_info->function_data();
  if (function_data->IsFunctionTemplateInfo()) {
    return false;
  }

  // Only flush code for functions.
  if (shared_info->code()->kind() != Code::FUNCTION) {
    return false;
  }

  // Function must be lazy compilable.
  if (!shared_info->allows_lazy_compilation()) {
    return false;
  }

  // We do not (yet?) flush code for generator functions, because we don't
  // know if there are still live activations on the heap.
  if (shared_info->is_generator()) {
    return false;
  }

  // If this is a full script wrapped in a function we do not flush the code.
  if (shared_info->is_toplevel()) {
    return false;
  }

  // If this is a function initialized with %SetCode then the one-to-one
  // relation between SharedFunctionInfo and Code is broken.
  if (shared_info->dont_flush()) {
    return false;
  }

  // Check age of code. If code aging is disabled we never flush.
  if (!FLAG_age_code || !shared_info->code()->IsOld()) {
    return false;
  }

  return true;
}


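// The following "strong" and "weak" body visitors differ only in whether the
// code-holding slots (SharedFunctionInfo::kCodeOffset together with the
// optimized code map, and JSFunction::kCodeEntryOffset) are visited or
// skipped; the skipped slots are exactly the ones the code flusher may later
// clear.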
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoStrongCode(
    Heap* heap, HeapObject* object) {
  StaticVisitor::BeforeVisitingSharedFunctionInfo(object);
  Object** start_slot =
      HeapObject::RawField(object,
                           SharedFunctionInfo::BodyDescriptor::kStartOffset);
  Object** end_slot =
      HeapObject::RawField(object,
                           SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoWeakCode(
    Heap* heap, HeapObject* object) {
  StaticVisitor::BeforeVisitingSharedFunctionInfo(object);
  Object** name_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kNameOffset);
  StaticVisitor::VisitPointer(heap, name_slot);

  // Skip visiting kCodeOffset as it is treated weakly here.
  STATIC_ASSERT(SharedFunctionInfo::kNameOffset + kPointerSize ==
      SharedFunctionInfo::kCodeOffset);
  STATIC_ASSERT(SharedFunctionInfo::kCodeOffset + kPointerSize ==
      SharedFunctionInfo::kOptimizedCodeMapOffset);

  Object** start_slot =
      HeapObject::RawField(object,
                           SharedFunctionInfo::kOptimizedCodeMapOffset);
  Object** end_slot =
      HeapObject::RawField(object,
                           SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);

  VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
      JSFunction::kPrototypeOrInitialMapOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot =
      HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);

  // Skip visiting kCodeEntryOffset as it is treated weakly here.
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
      JSFunction::kPrototypeOrInitialMapOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot =
      HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


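// Code body iteration comes in two flavors: a dynamic variant taking an
// ObjectVisitor and a static, templated variant used by the marking
// visitors. The mode mask picks which relocation entries are visited; the
// two functions must be kept in sync.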
void Code::CodeIterateBody(ObjectVisitor* v) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);

  IteratePointer(v, kRelocationInfoOffset);
  IteratePointer(v, kHandlerTableOffset);
  IteratePointer(v, kDeoptimizationDataOffset);
  IteratePointer(v, kTypeFeedbackInfoOffset);
  IterateNextCodeLink(v, kNextCodeLinkOffset);
  IteratePointer(v, kConstantPoolOffset);

  RelocIterator it(this, mode_mask);
  Isolate* isolate = this->GetIsolate();
  for (; !it.done(); it.next()) {
    it.rinfo()->Visit(isolate, v);
  }
}


template<typename StaticVisitor>
void Code::CodeIterateBody(Heap* heap) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);

  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kRelocationInfoOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kHandlerTableOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kDeoptimizationDataOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kTypeFeedbackInfoOffset));
  StaticVisitor::VisitNextCodeLink(
      heap,
      reinterpret_cast<Object**>(this->address() + kNextCodeLinkOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kConstantPoolOffset));

  RelocIterator it(this, mode_mask);
  for (; !it.done(); it.next()) {
    it.rinfo()->template Visit<StaticVisitor>(heap);
  }
}


} }  // namespace v8::internal

#endif  // V8_OBJECTS_VISITING_INL_H_