#ifndef V8_MARK_COMPACT_H_
#define V8_MARK_COMPACT_H_
// Impossible markbits: next bit set without the mark bit (01).
static inline bool IsImpossible(MarkBit mark_bit) {
  return !mark_bit.Get() && mark_bit.Next().Get();
}

// Black markbits: mark bit set, next bit clear (10).
static inline bool IsBlack(MarkBit mark_bit) {
  return mark_bit.Get() && !mark_bit.Next().Get();
}

// White markbits: mark bit clear (00).
static inline bool IsWhite(MarkBit mark_bit) {
  return !mark_bit.Get();
}

// Grey markbits: both bits set (11).
static inline bool IsGrey(MarkBit mark_bit) {
  return mark_bit.Get() && mark_bit.Next().Get();
}
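// Illustrative sketch (not part of the header): a minimal stand-in for the
// two-bit mark encoding that the four predicates above test. The struct and
// function names here are assumptions made up for illustration; only the bit
// patterns mirror the real predicates (white = 00, black = 10, grey = 11,
// impossible = 01, reading the mark bit first and the "next" bit second).
struct ToyMarkBits {
  bool mark;  // models mark_bit.Get()
  bool next;  // models mark_bit.Next().Get()
};

static bool ToyIsWhite(ToyMarkBits b)      { return !b.mark; }            // 0x (mark bit clear)
static bool ToyIsBlack(ToyMarkBits b)      { return b.mark && !b.next; }  // 10
static bool ToyIsGrey(ToyMarkBits b)       { return b.mark && b.next; }   // 11
static bool ToyIsImpossible(ToyMarkBits b) { return !b.mark && b.next; }  // 01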
static const char* ColorName(ObjectColor color) {
  switch (color) {
    case BLACK_OBJECT: return "black";
    case WHITE_OBJECT: return "white";
    case GREY_OBJECT: return "grey";
    case IMPOSSIBLE_COLOR: return "impossible";
  }
  return "error";
}
static ObjectColor Color(HeapObject* obj) {
  return Color(MarkBitFrom(obj));
}

static ObjectColor Color(MarkBit mark_bit) {
  if (IsBlack(mark_bit)) return BLACK_OBJECT;
  if (IsWhite(mark_bit)) return WHITE_OBJECT;
  if (IsGrey(mark_bit)) return GREY_OBJECT;
  return IMPOSSIBLE_COLOR;
}
// From TransferColor(HeapObject* from, HeapObject* to):
bool is_black = false;
if (from_mark_bit.Get()) {
  to_mark_bit.Set();
  is_black = true;   // Looks black so far.
}
if (from_mark_bit.Next().Get()) {
  to_mark_bit.Next().Set();
  is_black = false;  // Was actually grey.
}
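// Worked example of the two checks above (an editorial note, not V8 source):
// if the source object was black (mark bit only), the first branch runs and
// the copy stays black (returns true); if it was grey (both bits), the second
// branch also runs and resets is_black, so the copy ends up grey (returns
// false); if it was white, neither branch runs and the copy stays white
// (returns false).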
MarkingDeque()
    : array_(NULL), top_(0), bottom_(0), mask_(0), overflowed_(false) { }
inline bool IsFull() { return ((top_ + 1) & mask_) == bottom_; }

inline bool IsEmpty() { return top_ == bottom_; }
// In PushBlack(HeapObject* object):
ASSERT(object->IsHeapObject());
array_[top_] = object;
top_ = ((top_ + 1) & mask_);

// In PushGrey(HeapObject* object):
ASSERT(object->IsHeapObject());
array_[top_] = object;
top_ = ((top_ + 1) & mask_);
// In Pop():
top_ = ((top_ - 1) & mask_);
HeapObject* object = array_[top_];
ASSERT(object->IsHeapObject());

// In UnshiftGrey(HeapObject* object):
ASSERT(object->IsHeapObject());
bottom_ = ((bottom_ - 1) & mask_);
array_[bottom_] = object;
int top() { return top_; }
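// Illustrative sketch (not part of the header): a self-contained model of the
// ring-buffer arithmetic used by the marking deque above. Because the backing
// array has a power-of-two capacity, "& mask_" wraps indices in place of a
// modulo; pushes advance top_, Pop() retreats top_, and UnshiftGrey-style
// inserts retreat bottom_. All names below are made up for this sketch.
#include <cassert>
#include <cstdint>

class ToyRingDeque {
 public:
  ToyRingDeque(intptr_t* array, int capacity_pow2)
      : array_(array), top_(0), bottom_(0), mask_(capacity_pow2 - 1) {
    assert((capacity_pow2 & mask_) == 0);  // capacity must be a power of two
  }
  bool IsFull()  { return ((top_ + 1) & mask_) == bottom_; }
  bool IsEmpty() { return top_ == bottom_; }
  void Push(intptr_t value) {      // like PushBlack/PushGrey, minus overflow handling
    assert(!IsFull());
    array_[top_] = value;
    top_ = (top_ + 1) & mask_;
  }
  intptr_t Pop() {                 // like Pop(): removes the newest element
    assert(!IsEmpty());
    top_ = (top_ - 1) & mask_;
    return array_[top_];
  }
  void Unshift(intptr_t value) {   // like UnshiftGrey(): inserts at the bottom
    assert(!IsFull());
    bottom_ = (bottom_ - 1) & mask_;
    array_[bottom_] = value;
  }
 private:
  intptr_t* array_;
  int top_;
  int bottom_;
  int mask_;
};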
explicit SlotsBuffer(SlotsBuffer* next_buffer)
    : idx_(0), chain_length_(1), next_(next_buffer) {
  if (next_ != NULL) chain_length_ = next_->chain_length_ + 1;
}
// In Add(ObjectSlot slot):
slots_[idx_++] = slot;
310 return "EMBEDDED_OBJECT_SLOT";
312 return "RELOCATED_CODE_OBJECT";
314 return "CODE_TARGET_SLOT";
316 return "CODE_ENTRY_SLOT";
318 return "DEBUG_TARGET_SLOT";
320 return "JS_RETURN_SLOT";
322 return "NUMBER_OF_SLOT_TYPES";
324 return "UNKNOWN SlotType";
static int SizeOfChain(SlotsBuffer* buffer) {
  if (buffer == NULL) return 0;
  return static_cast<int>(buffer->idx_ +
                          (buffer->chain_length_ - 1) * kNumberOfElements);
}
static void UpdateSlotsRecordedIn(Heap* heap,
                                  SlotsBuffer* buffer,
                                  bool code_slots_filtering_required) {
  while (buffer != NULL) {
    if (code_slots_filtering_required) {
      buffer->UpdateSlotsWithFilter(heap);
    } else {
      buffer->UpdateSlots(heap);
    }
    buffer = buffer->next();
  }
}
static bool ChainLengthThresholdReached(SlotsBuffer* buffer) {
  return buffer != NULL && buffer->chain_length_ >= kChainLengthThreshold;
}
// In AddTo(allocator, buffer_address, slot, mode):
*buffer_address = buffer;
static const int kChainLengthThreshold = 15;

intptr_t chain_length_;
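// Illustrative sketch (not part of the header): how the chain bookkeeping
// above composes. Every buffer behind the head of the chain is completely
// full, so only the head contributes a partial count (idx_); the names and
// the capacity constant in this sketch are assumptions for illustration.
#include <cstdint>

struct ToySlotsBuffer {
  static const int kToyNumberOfElements = 1021;  // assumed per-buffer capacity
  int idx_;                    // slots used in this (head) buffer
  intptr_t chain_length_;      // buffers in the chain, including this one
  ToySlotsBuffer* next_;       // older, completely filled buffers
};

static int ToySizeOfChain(ToySlotsBuffer* buffer) {
  if (buffer == nullptr) return 0;
  return static_cast<int>(buffer->idx_ +
                          (buffer->chain_length_ - 1) *
                              ToySlotsBuffer::kToyNumberOfElements);
}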
explicit CodeFlusher(Isolate* isolate)
    : isolate_(isolate),
      jsfunction_candidates_head_(NULL),
      shared_function_info_candidates_head_(NULL) {}
void AddCandidate(SharedFunctionInfo* shared_info) {
  SetNextCandidate(shared_info, shared_function_info_candidates_head_);
  shared_function_info_candidates_head_ = shared_info;
}
void AddCandidate(JSFunction* function) {
  ASSERT(function->code() == function->shared()->code());
  ASSERT(function->next_function_link()->IsUndefined());
  SetNextCandidate(function, jsfunction_candidates_head_);
  jsfunction_candidates_head_ = function;
}
void ProcessCandidates() {
  ProcessSharedFunctionInfoCandidates();
  ProcessJSFunctionCandidates();
}
void ProcessJSFunctionCandidates();
void ProcessSharedFunctionInfoCandidates();
static JSFunction* GetNextCandidate(JSFunction* candidate) {
  Object* next_candidate = candidate->next_function_link();
  return reinterpret_cast<JSFunction*>(next_candidate);
}
static void SetNextCandidate(JSFunction* candidate,
                             JSFunction* next_candidate) {
  candidate->set_next_function_link(next_candidate);
}
static void ClearNextCandidate(JSFunction* candidate, Object* undefined) {
  ASSERT(undefined->IsUndefined());
  candidate->set_next_function_link(undefined, SKIP_WRITE_BARRIER);
}
static SharedFunctionInfo* GetNextCandidate(SharedFunctionInfo* candidate) {
  Object* next_candidate = candidate->code()->gc_metadata();
  return reinterpret_cast<SharedFunctionInfo*>(next_candidate);
}
static void SetNextCandidate(SharedFunctionInfo* candidate,
                             SharedFunctionInfo* next_candidate) {
  candidate->code()->set_gc_metadata(next_candidate);
}

static void ClearNextCandidate(SharedFunctionInfo* candidate) {
  candidate->code()->set_gc_metadata(NULL, SKIP_WRITE_BARRIER);
}
JSFunction* jsfunction_candidates_head_;
SharedFunctionInfo* shared_function_info_candidates_head_;
class ThreadLocalTop;
typedef MaybeObject* (*AllocationFunction)(Heap* heap,
                                           HeapObject* object,
                                           int object_size);
bool in_use() { return state_ > PREPARE_GC; }
bool are_map_pointers_encoded() { return state_ == UPDATE_POINTERS; }
void VerifyMarkbitsAreClean();
static void VerifyMarkbitsAreClean(PagedSpace* space);
static void VerifyMarkbitsAreClean(NewSpace* space);
INLINE(static bool ShouldSkipEvacuationSlotRecording(Object** anchor)) {
  return Page::FromAddress(reinterpret_cast<Address>(anchor))->
      ShouldSkipEvacuationSlotRecording();
}
INLINE(static bool ShouldSkipEvacuationSlotRecording(Object* host)) {
  return Page::FromAddress(reinterpret_cast<Address>(host))->
      ShouldSkipEvacuationSlotRecording();
}
INLINE(static bool IsOnEvacuationCandidate(Object* obj)) {
  return Page::FromAddress(reinterpret_cast<Address>(obj))->
      IsEvacuationCandidate();
}
void EvictEvacuationCandidate(Page* page) {
  if (FLAG_trace_fragmentation) {
    PrintF("Page %p is too popular. Disabling evacuation.\n",
           reinterpret_cast<void*>(page));
  }
  page->ClearEvacuationCandidate();
  evacuation_candidates_.RemoveElement(page);
}
void set_encountered_weak_maps(Object* weak_map) { encountered_weak_maps_ = weak_map; }
bool MarkInvalidatedCode();
void RemoveDeadInvalidatedCode();
void ProcessInvalidatedCode(ObjectVisitor* visitor);
enum CollectorState {
  // ... (states before and after elided in this excerpt)
  ENCODE_FORWARDING_ADDRESSES,
};

CollectorState state_;
bool sweep_precisely_;
bool reduce_memory_footprint_;
bool abort_incremental_marking_;
bool was_marked_incrementally_;

SlotsBufferAllocator slots_buffer_allocator_;
SlotsBuffer* migration_slots_buffer_;
void PrepareThreadForCodeFlushing(Isolate* isolate, ThreadLocalTop* top);
void PrepareForCodeFlushing();

void MarkLiveObjects();
void MarkSymbolTable();
void MarkObjectGroups();
void MarkImplicitRefGroups();
void ProcessExternalMarking();
void ProcessMarkingDeque();
void EmptyMarkingDeque();
void RefillMarkingDeque();
void ProcessMapCaches();
static bool IsUnmarkedHeapObject(Object** p);

void ClearNonLiveTransitions();
void ClearNonLivePrototypeTransitions(Map* map);
void ClearNonLiveMapTransitions(Map* map, MarkBit map_mark);
void ReattachInitialMaps();
void ProcessWeakMaps();
void ClearWeakMaps();

void EvacuateNewSpace();
void EvacuateLiveObjectsFromPage(Page* p);
void EvacuatePages();
void EvacuateNewSpaceAndCandidates();

friend class MarkObjectVisitor;
friend class UnmarkObjectVisitor;

Object* encountered_weak_maps_;
#endif  // V8_MARK_COMPACT_H_
static bool IsBlack(MarkBit mark_bit)
static void BlackToGrey(HeapObject *obj)
void ClearEvacuationCandidate()
static const char * kGreyBitPattern
static bool IsTypedSlot(ObjectSlot slot)
uint32_t RoundDownToPowerOf2(uint32_t x)
static void ReportDeleteIfNeeded(HeapObject *obj, Isolate *isolate)
void PrintF(const char *format,...)
void AddCandidate(JSFunction *function)
static const char * kWhiteBitPattern
void Prepare(GCTracer *tracer)
static const uint32_t kSingleFreeEncoding
static void WhiteToGrey(MarkBit markbit)
static MemoryChunk * FromAddress(Address a)
static void BlackToGrey(MarkBit markbit)
CodeFlusher * code_flusher()
static MarkBit MarkBitFrom(HeapObject *obj)
const char * AllocationSpaceName(AllocationSpace space)
void UpdateSlots(Heap *heap)
#define ASSERT(condition)
static void IncrementLiveBytesFromGC(Address address, int by)
bool StartCompaction(CompactionMode mode)
void UpdateSlotsWithFilter(Heap *heap)
static void AnyToGrey(MarkBit markbit)
static bool IsGrey(MarkBit mark_bit)
static const char * kBlackBitPattern
static bool IsWhite(MarkBit mark_bit)
void AddCandidate(SharedFunctionInfo *shared_info)
INLINE(static bool ShouldSkipEvacuationSlotRecording(Object *host))
static int SizeOfChain(SlotsBuffer *buffer)
static MarkBit MarkBitFrom(Address addr)
bool TryPromoteObject(HeapObject *object, int object_size)
INLINE(static bool TransferColor(HeapObject *from, HeapObject *to))
INLINE(static bool ShouldSkipEvacuationSlotRecording(Object **anchor))
static bool IsMarked(Object *obj)
SlotsBuffer * AllocateBuffer(SlotsBuffer *next_buffer)
bool HasSpaceForTypedSlot()
void(* EncodingFunction)(Heap *heap, HeapObject *old_object, int object_size, Object *new_object, int *offset)
SlotsBuffer(SlotsBuffer *next_buffer)
void AddEvacuationCandidate(Page *p)
void PushBlack(HeapObject *object)
void Initialize(Address low, Address high)
void CollectEvacuationCandidates(PagedSpace *space)
void InvalidateCode(Code *code)
static const uint32_t kMultiFreeEncoding
bool TransferMark(Address old_start, Address new_start)
static void MarkBlack(MarkBit mark_bit)
static void GreyToBlack(MarkBit markbit)
void DeallocateBuffer(SlotsBuffer *buffer)
void(* ProcessNonLiveFunction)(HeapObject *object, Isolate *isolate)
static const int kNumberOfElements
static bool IsImpossible(MarkBit mark_bit)
static intptr_t SweepConservatively(PagedSpace *space, Page *p)
void RecordRelocSlot(RelocInfo *rinfo, Object *target)
void DeallocateChain(SlotsBuffer **buffer_address)
void MigrateObject(Address dst, Address src, int size, AllocationSpace to_old_space)
bool(* IsAliveFunction)(HeapObject *obj, int *size, int *offset)
bool is_code_flushing_enabled() const
bool is_compacting() const
void Add(ObjectSlot slot)
void set_encountered_weak_maps(Object *weak_map)
static bool ChainLengthThresholdReached(SlotsBuffer *buffer)
void RecordCodeTargetPatch(Address pc, Code *target)
static const char * SlotTypeToString(SlotType type)
void EvictEvacuationCandidate(Page *page)
void PushGrey(HeapObject *object)
static const char * kImpossibleBitPattern
CodeFlusher(Isolate *isolate)
static void UpdateSlotsRecordedIn(Heap *heap, SlotsBuffer *buffer, bool code_slots_filtering_required)
INLINE(static bool IsOnEvacuationCandidate(Object *obj))
static bool AddTo(SlotsBufferAllocator *allocator, SlotsBuffer **buffer_address, ObjectSlot slot, AdditionMode mode)
void EnableCodeFlushing(bool enable)
AllocationSpace identity()
void UnshiftGrey(HeapObject *object)
void RecordCodeEntrySlot(Address slot, Code *target)
int(MarkCompactCollector::* LiveObjectCallback)(HeapObject *obj)
Object * encountered_weak_maps()