MarkCompactCollector::MarkCompactCollector() :
      sweep_precisely_(false),
      reduce_memory_footprint_(false),
      abort_incremental_marking_(false),
      was_marked_incrementally_(false),
      flush_monomorphic_ics_(false),
      migration_slots_buffer_(NULL),
      encountered_weak_maps_(NULL),
      marker_(this, this) { }
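
// Heap verification support (FLAG_verify_heap): VerifyMarkingVisitor walks
// object fields and asserts that every referenced heap object has been
// marked by the collector.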
class VerifyMarkingVisitor: public ObjectVisitor {
    for (Object** current = start; current < end; current++) {
      if ((*current)->IsHeapObject()) {
        ASSERT(HEAP->mark_compact_collector()->IsMarked(object));

  VerifyMarkingVisitor visitor;
    ASSERT(current >= next_object_must_be_here_or_later);
    object->Iterate(&visitor);
    next_object_must_be_here_or_later = current + object->Size();


static void VerifyMarking(NewSpace* space) {
  NewSpacePageIterator it(space->bottom(), end);

  while (it.has_next()) {
    NewSpacePage* page = it.next();
    Address limit = it.has_next() ? page->area_end() : end;
    ASSERT(limit == end || !page->Contains(end));
    VerifyMarking(page->area_start(), limit);


static void VerifyMarking(PagedSpace* space) {
  PageIterator it(space);

  while (it.has_next()) {
    VerifyMarking(p->area_start(), p->area_end());


static void VerifyMarking(Heap* heap) {
  VerifyMarking(heap->old_pointer_space());
  VerifyMarking(heap->old_data_space());
  VerifyMarking(heap->code_space());
  VerifyMarking(heap->cell_space());
  VerifyMarking(heap->map_space());
  VerifyMarking(heap->new_space());

  VerifyMarkingVisitor visitor;

  LargeObjectIterator it(heap->lo_space());
  for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) {
    obj->Iterate(&visitor);
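
// The evacuation verifiers mirror the marking verifiers: after compaction
// they check that no surviving object still points into an evacuation
// candidate page.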
class VerifyEvacuationVisitor: public ObjectVisitor {
    for (Object** current = start; current < end; current++) {
      if ((*current)->IsHeapObject()) {
        CHECK(!MarkCompactCollector::IsOnEvacuationCandidate(object));

  VerifyEvacuationVisitor visitor;
    ASSERT(current >= next_object_must_be_here_or_later);
    object->Iterate(&visitor);
    next_object_must_be_here_or_later = current + object->Size();


static void VerifyEvacuation(NewSpace* space) {
  NewSpacePageIterator it(space->bottom(), space->top());
  VerifyEvacuationVisitor visitor;

  while (it.has_next()) {
    NewSpacePage* page = it.next();
    Address current = page->area_start();
    Address limit = it.has_next() ? page->area_end() : space->top();
    ASSERT(limit == space->top() || !page->Contains(space->top()));
    while (current < limit) {
      object->Iterate(&visitor);
      current += object->Size();


static void VerifyEvacuation(PagedSpace* space) {
  PageIterator it(space);

  while (it.has_next()) {
    if (p->IsEvacuationCandidate()) continue;
    VerifyEvacuation(p->area_start(), p->area_end());


static void VerifyEvacuation(Heap* heap) {
  VerifyEvacuation(heap->old_pointer_space());
  VerifyEvacuation(heap->old_data_space());
  VerifyEvacuation(heap->code_space());
  VerifyEvacuation(heap->cell_space());
  VerifyEvacuation(heap->map_space());
  VerifyEvacuation(heap->new_space());

  VerifyEvacuationVisitor visitor;
  heap->IterateStrongRoots(&visitor, VISIT_ALL);


  evacuation_candidates_.Add(p);


static void TraceFragmentation(PagedSpace* space) {
  intptr_t reserved = (number_of_pages * space->AreaSize());

  PrintF("[%s]: %d pages, %d (%.1f%%) free\n",
         static_cast<int>(free),
         static_cast<double>(free) * 100 / reserved);


  ASSERT(evacuation_candidates_.length() == 0);
  } else if (FLAG_trace_fragmentation) {
    TraceFragmentation(heap()->code_space());

  if (FLAG_trace_fragmentation) {
    TraceFragmentation(heap()->map_space());
    TraceFragmentation(heap()->cell_space());

  compacting_ = evacuation_candidates_.length() > 0;

  ASSERT(state_ == PREPARE_GC);
  if (FLAG_collect_maps) ClearNonLiveTransitions();

  if (FLAG_verify_heap) {
    VerifyMarking(heap_);

  if (!FLAG_collect_maps) ReattachInitialMaps();


void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpace* space) {
  PageIterator it(space);

  while (it.has_next()) {


void MarkCompactCollector::VerifyMarkbitsAreClean(NewSpace* space) {
  NewSpacePageIterator it(space->bottom(), space->top());

  while (it.has_next()) {
    NewSpacePage* p = it.next();
    CHECK(p->markbits()->IsClean());


void MarkCompactCollector::VerifyMarkbitsAreClean() {
  VerifyMarkbitsAreClean(heap_->map_space());
  VerifyMarkbitsAreClean(heap_->new_space());

  LargeObjectIterator it(heap_->lo_space());
  for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) {


static void ClearMarkbitsInPagedSpace(PagedSpace* space) {
  PageIterator it(space);

  while (it.has_next()) {


static void ClearMarkbitsInNewSpace(NewSpace* space) {
  NewSpacePageIterator it(space->ToSpaceStart(), space->ToSpaceEnd());

  while (it.has_next()) {


  ClearMarkbitsInPagedSpace(heap_->code_space());
  ClearMarkbitsInPagedSpace(heap_->map_space());
  ClearMarkbitsInPagedSpace(heap_->cell_space());
  ClearMarkbitsInNewSpace(heap_->new_space());


  if (old_start == new_start) return false;

  ObjectColor old_color = Color(old_mark_bit);

    old_mark_bit.Clear();
    old_mark_bit.Clear();

  ObjectColor new_color = Color(new_mark_bit);
  ASSERT(new_color == old_color);
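
// FreeListFragmentation() scores a swept page by how much of its free space
// sits in medium/large free-list chunks relative to the page area; a score
// above the ratio threshold marks the page as "[fragmented]" and feeds the
// evacuation-candidate selection below.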
static int FreeListFragmentation(PagedSpace* space, Page* p) {
  if (!p->WasSwept()) {
    if (FLAG_trace_fragmentation) {
      PrintF("%p [%s]: %d bytes live (unswept)\n",
             reinterpret_cast<void*>(p),

  FreeList::SizeStats sizes;
  space->CountFreeListItems(p, &sizes);

  intptr_t ratio_threshold;
  intptr_t area_size = space->AreaSize();
    ratio = (sizes.medium_size_ * 10 + sizes.large_size_ * 2) * 100 /
    ratio_threshold = 10;
    ratio = (sizes.small_size_ * 5 + sizes.medium_size_) * 100 /
    ratio_threshold = 15;

  if (FLAG_trace_fragmentation) {
    PrintF("%p [%s]: %d (%.2f%%) %d (%.2f%%) %d (%.2f%%) %d (%.2f%%) %s\n",
           reinterpret_cast<void*>(p),
           static_cast<int>(sizes.small_size_),
           static_cast<double>(sizes.small_size_ * 100) /
           static_cast<int>(sizes.medium_size_),
           static_cast<double>(sizes.medium_size_ * 100) /
           static_cast<int>(sizes.large_size_),
           static_cast<double>(sizes.large_size_ * 100) /
           static_cast<int>(sizes.huge_size_),
           static_cast<double>(sizes.huge_size_ * 100) /
           (ratio > ratio_threshold) ? "[fragmented]" : "");

  if (FLAG_always_compact && sizes.Total() != area_size) {

  if (ratio <= ratio_threshold) return 0;

  return static_cast<int>(ratio - ratio_threshold);


  const int kMaxMaxEvacuationCandidates = 1000;
  int max_evacuation_candidates =
      Min(kMaxMaxEvacuationCandidates,
          static_cast<int>(sqrt(static_cast<double>(number_of_pages / 2)) + 1));

  if (FLAG_stress_compaction || FLAG_always_compact) {
    max_evacuation_candidates = kMaxMaxEvacuationCandidates;

    Candidate() : fragmentation_(0), page_(NULL) { }
    Candidate(int f, Page* p) : fragmentation_(f), page_(p) { }

    int fragmentation() { return fragmentation_; }
    Page* page() { return page_; }
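
  // Candidate selection: in REDUCE_MEMORY_FOOTPRINT mode pages are chosen by
  // their raw free percentage (kFreenessThreshold) until roughly 3/4 of the
  // estimated over-reserved memory would be released; otherwise the
  // FreeListFragmentation() score is used. Only the max_evacuation_candidates
  // most fragmented pages are kept.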
    REDUCE_MEMORY_FOOTPRINT

  intptr_t reserved = number_of_pages * space->AreaSize();

  static const intptr_t kFreenessThreshold = 50;

  if (over_reserved >= 2 * space->AreaSize() &&
      reduce_memory_footprint_) {
    mode = REDUCE_MEMORY_FOOTPRINT;

    max_evacuation_candidates += 2;

    if (FLAG_trace_fragmentation) {
      PrintF("Estimated over reserved memory: %.1f MB (setting threshold %d)\n",
             static_cast<double>(over_reserved) / MB,
             static_cast<int>(kFreenessThreshold));

  intptr_t estimated_release = 0;

  Candidate candidates[kMaxMaxEvacuationCandidates];

  int fragmentation = 0;
  Candidate* least = NULL;

  PageIterator it(space);
  if (it.has_next()) it.next();

  while (it.has_next()) {
    if (FLAG_stress_compaction) {
      uintptr_t page_number = reinterpret_cast<uintptr_t>(p) >> kPageSizeBits;
      if ((counter & 1) == (page_number & 1)) fragmentation = 1;
    } else if (mode == REDUCE_MEMORY_FOOTPRINT) {
      if (estimated_release >= ((over_reserved * 3) / 4)) {

      intptr_t free_bytes = 0;
        FreeList::SizeStats sizes;
        free_bytes = sizes.Total();

      int free_pct = static_cast<int>(free_bytes * 100) / p->area_size();

      if (free_pct >= kFreenessThreshold) {
        estimated_release += 2 * p->area_size() - free_bytes;
        fragmentation = free_pct;

      if (FLAG_trace_fragmentation) {
        PrintF("%p [%s]: %d (%.2f%%) free %s\n",
               reinterpret_cast<void*>(p),
               static_cast<int>(free_bytes),
               static_cast<double>(free_bytes * 100) / p->area_size(),
               (fragmentation > 0) ? "[fragmented]" : "");

      fragmentation = FreeListFragmentation(space, p);

    if (fragmentation != 0) {
      if (count < max_evacuation_candidates) {
        candidates[count++] = Candidate(fragmentation, p);
        for (int i = 0; i < max_evacuation_candidates; i++) {
            candidates[i].fragmentation() < least->fragmentation()) {
            least = candidates + i;
        if (least->fragmentation() < fragmentation) {
          *least = Candidate(fragmentation, p);

  for (int i = 0; i < count; i++) {

  if (count > 0 && FLAG_trace_fragmentation) {
    PrintF("Collected %d evacuation candidates for space %s\n",


  int npages = evacuation_candidates_.length();
  for (int i = 0; i < npages; i++) {
    Page* p = evacuation_candidates_[i];

  evacuation_candidates_.Rewind(0);
  invalidated_code_.Rewind(0);
  ASSERT_EQ(0, evacuation_candidates_.length());

  flush_monomorphic_ics_ =

  ASSERT(!FLAG_never_compact || !FLAG_always_compact);

#ifdef ENABLE_GDB_JIT_INTERFACE
    compacting_collection_ = false;

  if (was_marked_incrementally_ && abort_incremental_marking_) {
    was_marked_incrementally_ = false;

  if (!FLAG_never_compact && !was_marked_incrementally_) {

       space = spaces.next()) {
    space->PrepareForMarkCompact();

  if (!was_marked_incrementally_ && FLAG_verify_heap) {
    VerifyMarkbitsAreClean();


void MarkCompactCollector::Finish() {
  ASSERT(state_ == SWEEP_SPACES || state_ == RELOCATE_OBJECTS);


      jsfunction_candidates_head_(NULL),
      shared_function_info_candidates_head_(NULL) {}

    SetNextCandidate(shared_info, shared_function_info_candidates_head_);
    shared_function_info_candidates_head_ = shared_info;

    ASSERT(function->code() == function->shared()->code());
    SetNextCandidate(function, jsfunction_candidates_head_);
    jsfunction_candidates_head_ = function;

    ProcessSharedFunctionInfoCandidates();
    ProcessJSFunctionCandidates();
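
  // Candidate lists are threaded through the objects themselves: for
  // SharedFunctionInfos the link is stored in the code object's gc_metadata
  // slot (see SetNextCandidate below). Candidates whose code was not marked
  // have their code replaced by the kLazyCompile builtin.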
  void ProcessJSFunctionCandidates() {
    JSFunction* candidate = jsfunction_candidates_head_;

    while (candidate != NULL) {
      next_candidate = GetNextCandidate(candidate);

      Code* code = shared->code();
      if (!code_mark.Get()) {
        shared->set_code(lazy_compile);

      candidate->set_code(shared->code());

      RecordCodeEntrySlot(slot, target);

      RecordSharedFunctionInfoCodeSlot(shared);

      candidate = next_candidate;

    jsfunction_candidates_head_ = NULL;


  void ProcessSharedFunctionInfoCandidates() {
    Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kLazyCompile);

    SharedFunctionInfo* candidate = shared_function_info_candidates_head_;
    SharedFunctionInfo* next_candidate;
    while (candidate != NULL) {
      next_candidate = GetNextCandidate(candidate);
      SetNextCandidate(candidate, NULL);

      Code* code = candidate->code();
      if (!code_mark.Get()) {
        candidate->set_code(lazy_compile);

      RecordSharedFunctionInfoCodeSlot(candidate);

      candidate = next_candidate;

    shared_function_info_candidates_head_ = NULL;

  void RecordSharedFunctionInfoCodeSlot(SharedFunctionInfo* shared) {

  static JSFunction** GetNextCandidateField(JSFunction* candidate) {
    return reinterpret_cast<JSFunction**>(

  static JSFunction* GetNextCandidate(JSFunction* candidate) {
    return *GetNextCandidateField(candidate);

  static void SetNextCandidate(JSFunction* candidate,
                               JSFunction* next_candidate) {
    *GetNextCandidateField(candidate) = next_candidate;

  static SharedFunctionInfo** GetNextCandidateField(
      SharedFunctionInfo* candidate) {
    Code* code = candidate->code();
    return reinterpret_cast<SharedFunctionInfo**>(

  static SharedFunctionInfo* GetNextCandidate(SharedFunctionInfo* candidate) {
    return reinterpret_cast<SharedFunctionInfo*>(
        candidate->code()->gc_metadata());

  static void SetNextCandidate(SharedFunctionInfo* candidate,
                               SharedFunctionInfo* next_candidate) {
    candidate->code()->set_gc_metadata(next_candidate);

  JSFunction* jsfunction_candidates_head_;
  SharedFunctionInfo* shared_function_info_candidates_head_;


MarkCompactCollector::~MarkCompactCollector() {
  if (code_flusher_ != NULL) {
    delete code_flusher_;
    code_flusher_ = NULL;


static inline HeapObject* ShortCircuitConsString(Object** p) {
  if (!FLAG_clever_optimizations) return object;
  Map* map = object->map();

  Object* second = reinterpret_cast<ConsString*>(object)->unchecked_second();
  Heap* heap = map->GetHeap();
  if (second != heap->empty_string()) {

  Object* first = reinterpret_cast<ConsString*>(object)->unchecked_first();
  if (!heap->InNewSpace(object) && heap->InNewSpace(first)) return object;
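
// StaticMarkingVisitor dispatches on the object's map via table_; the
// specialized visitors registered below (SharedFunctionInfo, JSFunction,
// JSRegExp) layer code-flushing logic on top of the generic body visitors.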
    table_.GetVisitor(map)(map, obj);

                    &VisitSharedFunctionInfoAndFlushCode);

                    &VisitJSFunctionAndFlushCode);

                    &VisitRegExpAndFlushCode);

    table_.RegisterSpecializations<DataObjectVisitor,

    const int kMinRangeForMarkingRecursion = 64;
    if (end - start >= kMinRangeForMarkingRecursion) {

    for (Object** p = start; p < end; p++) {
      MarkObjectByPointer(collector, start, p);

    ASSERT(rinfo->rmode() == RelocInfo::GLOBAL_PROPERTY_CELL);

    ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);

    ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));

    ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
            rinfo->IsPatchedReturnSequence()) ||
           (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
            rinfo->IsPatchedDebugBreakSlotSequence()));

    if (!(*p)->IsHeapObject()) return;
    HeapObject* object = ShortCircuitConsString(p);
    collector->RecordSlot(anchor_slot, p, object);
    collector->MarkObject(object, mark);

    ASSERT(Isolate::Current()->heap()->Contains(obj));
    ASSERT(!HEAP->mark_compact_collector()->IsMarked(obj));

    if (check.HasOverflowed()) return false;

    for (Object** p = start; p < end; p++) {
      if (!o->IsHeapObject()) continue;
      collector->RecordSlot(start, p, o);
      if (mark.Get()) continue;
      VisitUnmarkedObject(collector, obj);

  class DataObjectVisitor {
    static void VisitSpecialized(Map* map, HeapObject* object) {

    static void Visit(Map* map, HeapObject* object) {

  typedef FlexibleBodyVisitor<StaticMarkingVisitor,
                              JSObject::BodyDescriptor,
                              void> JSObjectVisitor;

  typedef FlexibleBodyVisitor<StaticMarkingVisitor,
                              StructBodyDescriptor,
                              void> StructObjectVisitor;

  static void VisitJSWeakMap(Map* map, HeapObject* object) {
    MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();
    JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(object);

    weak_map->set_next(collector->encountered_weak_maps());
    collector->set_encountered_weak_maps(weak_map);

    BodyVisitorBase<StaticMarkingVisitor>::IteratePointers(

    BodyVisitorBase<StaticMarkingVisitor>::IteratePointers(

    Object* table_object = weak_map->table();
    if (!table_object->IsHashTable()) return;

    collector->RecordSlot(table_slot, table_slot, table);
    if (!table_mark.Get()) collector->SetMark(table, table_mark);

  static void VisitCode(Map* map, HeapObject* object) {
    Heap* heap = map->GetHeap();
    Code* code = reinterpret_cast<Code*>(object);
    if (FLAG_cleanup_code_caches_at_gc) {
      code->ClearTypeFeedbackCells(heap);
    code->CodeIterateBody<StaticMarkingVisitor>(heap);

  static const int kCodeAgeThreshold = 5;

  static const int kRegExpCodeThreshold = 5;

  inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
    Object* undefined = heap->undefined_value();
    return (info->script() != undefined) &&
        (reinterpret_cast<Script*>(info->script())->source() != undefined);

  inline static bool IsCompiled(JSFunction* function) {
    return function->code() !=
        function->GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);

  inline static bool IsCompiled(SharedFunctionInfo* function) {
    return function->code() !=
        function->GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);

  inline static bool IsFlushable(Heap* heap, JSFunction* function) {
    SharedFunctionInfo* shared_info = function->unchecked_shared();

    if (code_mark.Get()) {
      shared_info->set_code_age(0);

    if (function->code() != shared_info->code()) {

    return IsFlushable(heap, shared_info);

  inline static bool IsFlushable(Heap* heap, SharedFunctionInfo* shared_info) {
    if (code_mark.Get()) {

    if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {

    Object* function_data = shared_info->function_data();
    if (function_data->IsFunctionTemplateInfo()) {

    if (!shared_info->allows_lazy_compilation()) {

    if (shared_info->is_toplevel()) {

    if (shared_info->code_age() < kCodeAgeThreshold) {
      shared_info->set_code_age(shared_info->code_age() + 1);
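
  // Flushing heuristic, per the checks above: code is only flushed when the
  // SharedFunctionInfo is compiled from available source, allows lazy
  // compilation, is neither top-level nor backed by a FunctionTemplateInfo,
  // and its code_age has reached kCodeAgeThreshold; a marked code object
  // resets the age instead.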

  static bool FlushCodeForFunction(Heap* heap, JSFunction* function) {
    if (!IsFlushable(heap, function)) return false;

    heap->mark_compact_collector()->code_flusher()->AddCandidate(function);

  static inline bool IsValidNotBuiltinContext(Object* ctx) {
    return ctx->IsContext() &&

  static void VisitSharedFunctionInfoGeneric(Map* map, HeapObject* object) {
    SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(object);

    if (shared->IsInobjectSlackTrackingInProgress()) shared->DetachInitialMap();

    FixedBodyVisitor<StaticMarkingVisitor,
                     void>::Visit(map, object);

  static void UpdateRegExpCodeAgeAndFlush(Heap* heap,
    if (!code->IsSmi() &&
      heap->mark_compact_collector()->
          RecordSlot(slot, slot, code);
    } else if (code->IsSmi()) {
      if (value == ((heap->sweep_generation() - kRegExpCodeThreshold) & 0xff)) {

  static void VisitRegExpAndFlushCode(Map* map, HeapObject* object) {
    Heap* heap = map->GetHeap();
    MarkCompactCollector* collector = heap->mark_compact_collector();
    if (!collector->is_code_flushing_enabled()) {
      VisitJSRegExpFields(map, object);

    JSRegExp* re = reinterpret_cast<JSRegExp*>(object);

    UpdateRegExpCodeAgeAndFlush(heap, re, true);
    UpdateRegExpCodeAgeAndFlush(heap, re, false);

    VisitJSRegExpFields(map, object);

  static void VisitSharedFunctionInfoAndFlushCode(Map* map,
                                                  HeapObject* object) {
    Heap* heap = map->GetHeap();
    SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(object);
    if (shared->ic_age() != heap->global_ic_age()) {
      shared->ResetForNewContext(heap->global_ic_age());

    MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();
    if (!collector->is_code_flushing_enabled()) {
      VisitSharedFunctionInfoGeneric(map, object);

    VisitSharedFunctionInfoAndFlushCodeGeneric(map, object, false);

  static void VisitSharedFunctionInfoAndFlushCodeGeneric(
      Map* map, HeapObject* object, bool known_flush_code_candidate) {
    Heap* heap = map->GetHeap();
    SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(object);

    if (shared->IsInobjectSlackTrackingInProgress()) shared->DetachInitialMap();

    if (!known_flush_code_candidate) {
      known_flush_code_candidate = IsFlushable(heap, shared);
      if (known_flush_code_candidate) {
        heap->mark_compact_collector()->code_flusher()->AddCandidate(shared);

    VisitSharedFunctionInfoFields(heap, object, known_flush_code_candidate);

  static void VisitCodeEntry(Heap* heap, Address entry_address) {
    heap->mark_compact_collector()->MarkObject(code, mark);
    heap->mark_compact_collector()->
        RecordCodeEntrySlot(entry_address, code);

  static void VisitGlobalContext(Map* map, HeapObject* object) {
    FixedBodyVisitor<StaticMarkingVisitor,
                     void>::Visit(map, object);

    MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();
      collector->RecordSlot(slot, slot, *slot);

  static void VisitJSFunctionAndFlushCode(Map* map, HeapObject* object) {
    Heap* heap = map->GetHeap();
    MarkCompactCollector* collector = heap->mark_compact_collector();
    if (!collector->is_code_flushing_enabled()) {
      VisitJSFunction(map, object);

    JSFunction* jsfunction = reinterpret_cast<JSFunction*>(object);

    bool flush_code_candidate = false;
    if (IsValidNotBuiltinContext(jsfunction->unchecked_context())) {
      flush_code_candidate = FlushCodeForFunction(heap, jsfunction);

    if (!flush_code_candidate) {
      Code* code = jsfunction->shared()->code();
      collector->MarkObject(code, code_mark);

        collector->MarkInlinedFunctionsCode(jsfunction->code());

    VisitJSFunctionFields(map,
                          reinterpret_cast<JSFunction*>(object),
                          flush_code_candidate);

  static void VisitJSFunction(Map* map, HeapObject* object) {
    VisitJSFunctionFields(map,
                          reinterpret_cast<JSFunction*>(object),

#define SLOT_ADDR(obj, offset) \
  reinterpret_cast<Object**>((obj)->address() + offset)

  static inline void VisitJSFunctionFields(Map* map,
                                           bool flush_code_candidate) {
    Heap* heap = map->GetHeap();

    if (!flush_code_candidate) {

      SharedFunctionInfo* shared_info = object->unchecked_shared();
      if (!shared_info_mark.Get()) {
        Map* shared_info_map = shared_info->map();
        MarkBit shared_info_map_mark =
        heap->mark_compact_collector()->SetMark(shared_info, shared_info_mark);
        heap->mark_compact_collector()->MarkObject(shared_info_map,
                                                   shared_info_map_mark);
        VisitSharedFunctionInfoAndFlushCodeGeneric(shared_info_map,

  static inline void VisitJSRegExpFields(Map* map,
                                         HeapObject* object) {
    int last_property_offset =
    VisitPointers(map->GetHeap(),
                  SLOT_ADDR(object, last_property_offset));

  static void VisitSharedFunctionInfoFields(Heap* heap,
                                            bool flush_code_candidate) {
    if (!flush_code_candidate) {

  typedef void (*Callback)(Map* map, HeapObject* object);

  static VisitorDispatchTable<Callback> table_;


VisitorDispatchTable<StaticMarkingVisitor::Callback>
    StaticMarkingVisitor::table_;

    StaticMarkingVisitor::VisitPointer(heap_, p);

    StaticMarkingVisitor::VisitPointers(heap_, start, end);

      : collector_(collector) {}

    collector_->PrepareThreadForCodeFlushing(isolate, top);

      : collector_(collector) {}

    if (obj->IsSharedFunctionInfo()) {
      collector_->MarkObject(shared->code(), code_mark);
      collector_->MarkObject(shared, shared_mark);

void MarkCompactCollector::MarkInlinedFunctionsCode(Code* code) {
  DeoptimizationInputData* data =

  FixedArray* literals = data->LiteralArray();

  for (int i = 0, count = data->InlinedFunctionCount()->value();
    Code* inlined_code = inlined->shared()->code();
    MarkObject(inlined_code, inlined_code_mark);


void MarkCompactCollector::PrepareThreadForCodeFlushing(Isolate* isolate,
                                                        ThreadLocalTop* top) {
  for (StackFrameIterator it(isolate, top); !it.done(); it.Advance()) {
    Code* code = frame->unchecked_code();
    MarkObject(code, code_mark);
    if (frame->is_optimized()) {
      MarkInlinedFunctionsCode(frame->LookupCode());


void MarkCompactCollector::PrepareForCodeFlushing() {
  if (!FLAG_flush_code || was_marked_incrementally_) {

#ifdef ENABLE_DEBUGGER_SUPPORT
  if (heap()->isolate()->debug()->IsLoaded() ||
      heap()->isolate()->debug()->has_break_points()) {

  HeapObject* descriptor_array = heap()->empty_descriptor_array();
  MarkObject(descriptor_array, descriptor_array_mark);

  ASSERT(this == heap()->mark_compact_collector());
  PrepareThreadForCodeFlushing(heap()->isolate(),
                               heap()->isolate()->thread_local_top());

      &code_marking_visitor);

  ProcessMarkingDeque();


      : collector_(heap->mark_compact_collector()) { }

    MarkObjectByPointer(p);

    for (Object** p = start; p < end; p++) MarkObjectByPointer(p);

  void MarkObjectByPointer(Object** p) {
    if (!(*p)->IsHeapObject()) return;

    HeapObject* object = ShortCircuitConsString(p);
    if (mark_bit.Get()) return;

    Map* map = object->map();
    collector_->SetMark(object, mark_bit);

    collector_->MarkObject(map, map_mark);

    collector_->EmptyMarkingDeque();

  MarkCompactCollector* collector_;
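
// SymbolTableCleaner runs over the symbol table after marking: entries whose
// strings are no longer marked are overwritten with the hole value (external
// strings are finalized first), and PointersRemoved() reports how many
// entries were dropped.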
      : heap_(heap), pointers_removed_(0) { }

    for (Object** p = start; p < end; p++) {
      if (o->IsHeapObject() &&
        if (o->IsExternalString()) {
        *p = heap_->the_hole_value();
        pointers_removed_++;

    return pointers_removed_;

  int pointers_removed_;


void MarkCompactCollector::ProcessNewlyMarkedObject(HeapObject* object) {
  if (object->IsMap()) {
    marker_.MarkMapContents(map);


  Object** proto_trans_slot =
  if (prototype_transitions->IsFixedArray()) {
    mark_compact_collector()->RecordSlot(proto_trans_slot,
                                         prototype_transitions);
                              prototype_transitions->Size());

  Object** descriptor_array_slot =
  Object* descriptor_array = *descriptor_array_slot;
  if (!descriptor_array->IsSmi()) {
    MarkDescriptorArray(reinterpret_cast<DescriptorArray*>(descriptor_array));

  for (Object** slot = start_slot; slot < end_slot; slot++) {
      mark_compact_collector()->RecordSlot(start_slot, slot, obj);
      base_marker()->MarkObjectAndPush(reinterpret_cast<HeapObject*>(obj));

  ASSERT(descriptors != descriptors->GetHeap()->empty_descriptor_array());

  if (!base_marker()->MarkObjectWithoutPush(descriptors)) return;

  Object* enum_cache = *enum_cache_slot;
  base_marker()->MarkObjectAndPush(
      reinterpret_cast<HeapObject*>(enum_cache));
  mark_compact_collector()->RecordSlot(descriptor_start,

  if (descriptors->elements_transition_map() != NULL) {
    Object* transitions = *transitions_slot;
    base_marker()->MarkObjectAndPush(
        reinterpret_cast<HeapObject*>(transitions));
    mark_compact_collector()->RecordSlot(descriptor_start,

    if (key->IsHeapObject()) {
      base_marker()->MarkObjectAndPush(reinterpret_cast<HeapObject*>(key));
      mark_compact_collector()->RecordSlot(descriptor_start, key_slot, key);

    if (!(*value_slot)->IsHeapObject()) continue;
    mark_compact_collector()->RecordSlot(descriptor_start,

    PropertyDetails details(descriptors->GetDetails(i));

    switch (details.type()) {
        base_marker()->MarkObjectAndPush(value);
        if (!value->IsAccessorPair()) {
          base_marker()->MarkObjectAndPush(value);
        } else if (base_marker()->MarkObjectWithoutPush(value)) {

  if (accessor->IsMap()) return;
  mark_compact_collector()->RecordSlot(slot, slot, accessor);
  base_marker()->MarkObjectAndPush(accessor);


static void DiscoverGreyObjectsWithIterator(Heap* heap,
  Map* filler_map = heap->one_pointer_filler_map();
       object = it->Next()) {
    if (marking_deque->IsFull()) return;
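
// When the marking deque overflows, objects that are grey (marked but not
// yet scanned) are left behind in the heap. DiscoverGreyObjectsOnPage()
// rescans a page's mark bitmap cell by cell and re-pushes those objects so
// marking can make progress again.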
static inline int MarkWordToObjectStarts(uint32_t mark_bits, int* starts);


static void DiscoverGreyObjectsOnPage(MarkingDeque* marking_deque, Page* p) {
  ASSERT(!marking_deque->IsFull());

  int last_cell_index =
      Bitmap::IndexToCell(
          Bitmap::CellAlignIndex(
              p->AddressToMarkbitIndex(p->area_end())));

  Address cell_base = p->area_start();
  int cell_index = Bitmap::IndexToCell(
      Bitmap::CellAlignIndex(
          p->AddressToMarkbitIndex(cell_base)));

       cell_index < last_cell_index;
    ASSERT((unsigned)cell_index ==
        Bitmap::IndexToCell(
            Bitmap::CellAlignIndex(
                p->AddressToMarkbitIndex(cell_base))));

    if (current_cell == 0) continue;

    while (grey_objects != 0) {
      int trailing_zeros = CompilerIntrinsics::CountTrailingZeros(grey_objects);
      grey_objects >>= trailing_zeros;
      offset += trailing_zeros;
      MarkBit markbit(&cells[cell_index], 1 << offset, false);

      marking_deque->PushBlack(object);
      if (marking_deque->IsFull()) return;


static void DiscoverGreyObjectsInSpace(Heap* heap,
                                       MarkingDeque* marking_deque,
                                       PagedSpace* space) {
  if (!space->was_swept_conservatively()) {
    HeapObjectIterator it(space);
    DiscoverGreyObjectsWithIterator(heap, marking_deque, &it);
  } else {
    PageIterator it(space);
    while (it.has_next()) {
      Page* p = it.next();
      DiscoverGreyObjectsOnPage(marking_deque, p);
      if (marking_deque->IsFull()) return;


bool MarkCompactCollector::IsUnmarkedHeapObject(Object** p) {
  if (!o->IsHeapObject()) return false;


void MarkCompactCollector::MarkSymbolTable() {
  SymbolTable* symbol_table = heap()->symbol_table();
  SetMark(symbol_table, symbol_table_mark);
  symbol_table->IteratePrefix(&marker);
  ProcessMarkingDeque();


void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) {
    RefillMarkingDeque();
    EmptyMarkingDeque();

void MarkCompactCollector::MarkObjectGroups() {
  List<ObjectGroup*>* object_groups =

  for (int i = 0; i < object_groups->length(); i++) {
    ObjectGroup* entry = object_groups->at(i);

    Object*** objects = entry->objects_;
    bool group_marked = false;
    for (size_t j = 0; j < entry->length_; j++) {
      Object* object = *objects[j];
      if (object->IsHeapObject()) {
        group_marked = true;

    if (!group_marked) {
      (*object_groups)[last++] = entry;

    for (size_t j = 0; j < entry->length_; ++j) {
      Object* object = *objects[j];
      if (object->IsHeapObject()) {
        MarkObject(heap_object, mark);

    object_groups->at(i) = NULL;

  object_groups->Rewind(last);


void MarkCompactCollector::MarkImplicitRefGroups() {
  List<ImplicitRefGroup*>* ref_groups =

  for (int i = 0; i < ref_groups->length(); i++) {
    ImplicitRefGroup* entry = ref_groups->at(i);
      (*ref_groups)[last++] = entry;

    Object*** children = entry->children_;

    for (size_t j = 0; j < entry->length_; ++j) {
      if ((*children[j])->IsHeapObject()) {
        MarkObject(child, mark);

  ref_groups->Rewind(last);


void MarkCompactCollector::EmptyMarkingDeque() {
  while (!marking_deque_.IsEmpty()) {
    while (!marking_deque_.IsEmpty()) {
      HeapObject* object = marking_deque_.Pop();
      ASSERT(object->IsHeapObject());

      Map* map = object->map();
      MarkObject(map, map_mark);
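
// RefillMarkingDeque() is the slow path after a deque overflow: it rescans
// every space (new, old pointer, old data, code, map, cell and large object
// space) for grey objects and pushes them until the deque fills up again or
// no grey objects remain.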
void MarkCompactCollector::RefillMarkingDeque() {
  SemiSpaceIterator new_it(heap()->new_space());
  DiscoverGreyObjectsWithIterator(heap(), &marking_deque_, &new_it);
  if (marking_deque_.IsFull()) return;

  DiscoverGreyObjectsInSpace(heap(),
                             heap()->old_pointer_space());
  if (marking_deque_.IsFull()) return;

  DiscoverGreyObjectsInSpace(heap(),
                             heap()->old_data_space());
  if (marking_deque_.IsFull()) return;

  DiscoverGreyObjectsInSpace(heap(),
                             heap()->code_space());
  if (marking_deque_.IsFull()) return;

  DiscoverGreyObjectsInSpace(heap(),
                             heap()->map_space());
  if (marking_deque_.IsFull()) return;

  DiscoverGreyObjectsInSpace(heap(),
                             heap()->cell_space());
  if (marking_deque_.IsFull()) return;

  LargeObjectIterator lo_it(heap()->lo_space());
  DiscoverGreyObjectsWithIterator(heap(),
  if (marking_deque_.IsFull()) return;


void MarkCompactCollector::ProcessMarkingDeque() {
  EmptyMarkingDeque();
    RefillMarkingDeque();
    EmptyMarkingDeque();


void MarkCompactCollector::ProcessExternalMarking() {
  bool work_to_do = true;
  while (work_to_do) {
    MarkImplicitRefGroups();
    work_to_do = !marking_deque_.IsEmpty();
    ProcessMarkingDeque();


void MarkCompactCollector::MarkLiveObjects() {
  GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_MARK);

  PostponeInterruptsScope postpone(heap()->isolate());

  bool incremental_marking_overflowed = false;
  if (was_marked_incrementally_) {
    incremental_marking_overflowed =
        incremental_marking->marking_deque()->overflowed();
    incremental_marking->marking_deque()->ClearOverflowed();
    incremental_marking->Abort();

  ASSERT(state_ == PREPARE_GC);
  state_ = MARK_LIVE_OBJECTS;

  if (FLAG_force_marking_deque_overflows) {
    marking_deque_end = marking_deque_start + 64 * kPointerSize;
  marking_deque_.Initialize(marking_deque_start,

  if (incremental_marking_overflowed) {

  PrepareForCodeFlushing();

  if (was_marked_incrementally_) {
    HeapObjectIterator cell_iterator(heap()->cell_space());
    while ((cell = cell_iterator.Next()) != NULL) {
      ASSERT(cell->IsJSGlobalPropertyCell());
        StaticMarkingVisitor::VisitPointer(
            reinterpret_cast<Object**>(cell->address() + offset));

  MarkRoots(&root_visitor);

  ProcessExternalMarking();

      &IsUnmarkedHeapObject);

    RefillMarkingDeque();
    EmptyMarkingDeque();

  ProcessExternalMarking();
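
// Once the marking fixpoint is reached, AfterMarking() cleans dead entries
// out of the symbol table with SymbolTableCleaner and processes weak
// references through MarkCompactWeakObjectRetainer.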
void MarkCompactCollector::AfterMarking() {
  SymbolTable* symbol_table = heap()->symbol_table();
  SymbolTableCleaner v(heap());
  symbol_table->IterateElements(&v);
  symbol_table->ElementsRemoved(v.PointersRemoved());

  MarkCompactWeakObjectRetainer mark_compact_object_retainer;

  if (!FLAG_watch_ic_patching) {


void MarkCompactCollector::ProcessMapCaches() {
  Object* raw_context = heap()->global_contexts_list_;
  while (raw_context != heap()->undefined_value()) {
    Context* context = reinterpret_cast<Context*>(raw_context);
      HeapObject* raw_map_cache =
          raw_map_cache != heap()->undefined_value()) {
        MapCache* map_cache = reinterpret_cast<MapCache*>(raw_map_cache);
        int existing_elements = map_cache->NumberOfElements();
        int used_elements = 0;
             i < map_cache->length();
          Object* raw_key = map_cache->get(i);
          if (raw_key == heap()->undefined_value() ||
              raw_key == heap()->the_hole_value()) continue;
          Object* raw_map = map_cache->get(i + 1);
          if (raw_map->IsHeapObject() && IsMarked(raw_map)) {
            ASSERT(raw_map->IsMap());
            map_cache->set_the_hole(i);
            map_cache->set_the_hole(i + 1);
        if (used_elements == 0) {
          map_cache->ElementsRemoved(existing_elements - used_elements);
          MarkObject(map_cache, map_cache_markbit);

  ProcessMarkingDeque();


void MarkCompactCollector::ReattachInitialMaps() {
  HeapObjectIterator map_iterator(heap()->map_space());
  for (HeapObject* obj = map_iterator.Next();
       obj = map_iterator.Next()) {
    if (obj->IsFreeSpace()) continue;

    if (map->attached_to_shared_function_info()) {


void MarkCompactCollector::ClearNonLiveTransitions() {
  HeapObjectIterator map_iterator(heap()->map_space());
  for (HeapObject* obj = map_iterator.Next();
       obj != NULL; obj = map_iterator.Next()) {
    Map* map = reinterpret_cast<Map*>(obj);
    if (map->IsFreeSpace()) continue;

    if (map_mark.Get() &&
        map->attached_to_shared_function_info()) {
      map->unchecked_constructor()->unchecked_shared()->AttachInitialMap(map);

    ClearNonLivePrototypeTransitions(map);
    ClearNonLiveMapTransitions(map, map_mark);


void MarkCompactCollector::ClearNonLivePrototypeTransitions(Map* map) {
  int number_of_transitions = map->NumberOfProtoTransitions();
  FixedArray* prototype_transitions = map->prototype_transitions();

  int new_number_of_transitions = 0;
  for (int i = 0; i < number_of_transitions; i++) {
    Object* prototype = prototype_transitions->get(proto_offset + i * step);
    Object* cached_map = prototype_transitions->get(map_offset + i * step);
      int proto_index = proto_offset + new_number_of_transitions * step;
      int map_index = map_offset + new_number_of_transitions * step;
      if (new_number_of_transitions != i) {
        prototype_transitions->set_unchecked(
        prototype_transitions->set_unchecked(
      RecordSlot(slot, slot, prototype);
      new_number_of_transitions++;

  if (new_number_of_transitions != number_of_transitions) {
    map->SetNumberOfProtoTransitions(new_number_of_transitions);

  for (int i = new_number_of_transitions * step;
       i < number_of_transitions * step;
    prototype_transitions->set_undefined(heap_, header + i);


void MarkCompactCollector::ClearNonLiveMapTransitions(Map* map,
  Object* potential_parent = map->GetBackPointer();
  if (!potential_parent->IsMap()) return;
  Map* parent = Map::cast(potential_parent);

  bool current_is_alive = map_mark.Get();
  if (!current_is_alive && parent_is_alive) {
    parent->ClearNonLiveTransitions(heap());


void MarkCompactCollector::ProcessWeakMaps() {
    JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(weak_map_obj);
    Object** anchor = reinterpret_cast<Object**>(table->address());
    for (int i = 0; i < table->Capacity(); i++) {
        RecordSlot(anchor, key_slot, *key_slot);
            ObjectHashTable::EntryToValueIndex(i)));
        StaticMarkingVisitor::MarkObjectByPointer(this, anchor, value_slot);
    weak_map_obj = weak_map->next();


void MarkCompactCollector::ClearWeakMaps() {
    JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(weak_map_obj);
    for (int i = 0; i < table->Capacity(); i++) {
        table->RemoveEntry(i);
    weak_map_obj = weak_map->next();

    for (int remaining = size / kPointerSize; remaining > 0; remaining--) {
      } else if (value->IsHeapObject() && IsOnEvacuationCandidate(value)) {
          &migration_slots_buffer_,
          reinterpret_cast<Object**>(dst_slot),

        &migration_slots_buffer_,

    PROFILE(heap()->isolate(), CodeMoveEvent(src, dst));
        &migration_slots_buffer_,


    for (Object** p = start; p < end; p++) UpdatePointer(p);

    ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
    Object* target = rinfo->target_object();
    rinfo->set_target_object(target);

    ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
    rinfo->set_target_address(Code::cast(target)->instruction_start());

    ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
            rinfo->IsPatchedReturnSequence()) ||
           (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
            rinfo->IsPatchedDebugBreakSlotSequence()));
    rinfo->set_call_address(Code::cast(target)->instruction_start());

    if (!obj->IsHeapObject()) return;

    MapWord map_word = heap_obj->map_word();
    if (map_word.IsForwardingAddress()) {
          MarkCompactCollector::IsOnEvacuationCandidate(heap_obj));
      HeapObject* target = map_word.ToForwardingAddress();
          !MarkCompactCollector::IsOnEvacuationCandidate(target));

  inline void UpdatePointer(Object** p) {


static void UpdatePointer(HeapObject** p, HeapObject* object) {
  Address old_addr = object->address();

  if (new_addr != NULL) {
    *p = reinterpret_cast<HeapObject*>(Smi::FromInt(0x0f100d00 >> 1));


static String* UpdateReferenceInExternalStringTableEntry(Heap* heap,
  if (map_word.IsForwardingAddress()) {

  MaybeObject* maybe_result =
  if (maybe_result->ToObject(&result)) {
      increment_promoted_objects_size(object_size);

  ASSERT(target_space == heap()->old_pointer_space() ||
         target_space == heap()->old_data_space());
  MaybeObject* maybe_result = target_space->AllocateRaw(object_size);
  if (maybe_result->ToObject(&result)) {
      increment_promoted_objects_size(object_size);


void MarkCompactCollector::EvacuateNewSpace() {
  int survivors_size = 0;

       object = from_it.Next()) {
    if (mark_bit.Get()) {
      int size = object->Size();
      survivors_size += size;

      MaybeObject* allocation = new_space->AllocateRaw(size);
      if (allocation->IsFailure()) {
        allocation = new_space->AllocateRaw(size);
        ASSERT(!allocation->IsFailure());
      Object* target = allocation->ToObjectUnchecked();

void MarkCompactCollector::EvacuateLiveObjectsFromPage(Page* p) {
  AlwaysAllocateScope always_allocate;
  PagedSpace* space = static_cast<PagedSpace*>(p->owner());
  ASSERT(p->IsEvacuationCandidate() && !p->WasSwept());
  p->MarkSweptPrecisely();

  int last_cell_index =
      Bitmap::IndexToCell(
          Bitmap::CellAlignIndex(
              p->AddressToMarkbitIndex(p->area_end())));

  Address cell_base = p->area_start();
  int cell_index = Bitmap::IndexToCell(
      Bitmap::CellAlignIndex(
          p->AddressToMarkbitIndex(cell_base)));

       cell_index < last_cell_index;
    ASSERT((unsigned)cell_index ==
        Bitmap::IndexToCell(
            Bitmap::CellAlignIndex(
                p->AddressToMarkbitIndex(cell_base))));
    if (cells[cell_index] == 0) continue;

    int live_objects = MarkWordToObjectStarts(cells[cell_index], offsets);
    for (int i = 0; i < live_objects; i++) {
      int size = object->Size();

      MaybeObject* target = space->AllocateRaw(size);
      if (target->IsFailure()) {
      Object* target_object = target->ToObjectUnchecked();
      ASSERT(object->map_word().IsForwardingAddress());

    cells[cell_index] = 0;
  p->ResetLiveBytes();
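
// EvacuatePages() moves every live object off each evacuation candidate page
// while the owning space can still expand; once expansion is no longer
// possible the remaining candidates are pessimistically abandoned and their
// slots buffers are deallocated.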
void MarkCompactCollector::EvacuatePages() {
  int npages = evacuation_candidates_.length();
  for (int i = 0; i < npages; i++) {
    Page* p = evacuation_candidates_[i];
    ASSERT(p->IsEvacuationCandidate() ||
    if (p->IsEvacuationCandidate()) {
      if (static_cast<PagedSpace*>(p->owner())->CanExpand()) {
        EvacuateLiveObjectsFromPage(p);
        for (int j = i; j < npages; j++) {
          Page* page = evacuation_candidates_[j];
          slots_buffer_allocator_.DeallocateChain(page->slots_buffer_address());
          page->ClearEvacuationCandidate();


    if (object->IsHeapObject()) {
      MapWord map_word = heap_object->map_word();
      if (map_word.IsForwardingAddress()) {
        return map_word.ToForwardingAddress();


static inline void UpdateSlot(ObjectVisitor* v,
  switch (slot_type) {
      RelocInfo rinfo(addr, RelocInfo::CODE_TARGET, 0, NULL);
      v->VisitCodeEntry(addr);
      RelocInfo rinfo(addr, RelocInfo::DEBUG_BREAK_SLOT, 0, NULL);
      if (rinfo.IsPatchedDebugBreakSlotSequence()) rinfo.Visit(v);
      RelocInfo rinfo(addr, RelocInfo::JS_RETURN, 0, NULL);
      if (rinfo.IsPatchedReturnSequence()) rinfo.Visit(v);
      RelocInfo rinfo(addr, RelocInfo::EMBEDDED_OBJECT, 0, NULL);


template<SweepingMode sweeping_mode, SkipListRebuildingMode skip_list_mode>
static void SweepPrecisely(PagedSpace* space,
  ASSERT(!p->IsEvacuationCandidate() && !p->WasSwept());
  p->MarkSweptPrecisely();

  int last_cell_index =
      Bitmap::IndexToCell(
          Bitmap::CellAlignIndex(
              p->AddressToMarkbitIndex(p->area_end())));

  Address free_start = p->area_start();
      Bitmap::IndexToCell(
          Bitmap::CellAlignIndex(
              p->AddressToMarkbitIndex(free_start)));

  ASSERT(reinterpret_cast<intptr_t>(free_start) % (32 * kPointerSize) == 0);
  Address object_address = free_start;

  SkipList* skip_list = p->skip_list();
  int curr_region = -1;

       cell_index < last_cell_index;
    ASSERT((unsigned)cell_index ==
        Bitmap::IndexToCell(
            Bitmap::CellAlignIndex(
                p->AddressToMarkbitIndex(object_address))));
    int live_objects = MarkWordToObjectStarts(cells[cell_index], offsets);

    for ( ; live_objects != 0; live_objects--) {
      if (free_end != free_start) {
        space->Free(free_start, static_cast<int>(free_end - free_start));
      Map* map = live_object->map();
      int size = live_object->SizeFromMap(map);
        live_object->IterateBody(map->instance_type(), size, v);
        int new_region_start =
        int new_region_end =
        if (new_region_start != curr_region ||
            new_region_end != curr_region) {
          skip_list->AddObject(free_end, size);
          curr_region = new_region_end;
      free_start = free_end + size;
    cells[cell_index] = 0;

  if (free_start != p->area_end()) {
    space->Free(free_start, static_cast<int>(p->area_end() - free_start));
  p->ResetLiveBytes();
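
// Invalidated code support: mark bits are set over the address range of each
// invalidated Code object so that slot updating can filter out slots lying
// inside such objects; ProcessInvalidatedCode() later revisits the surviving
// code objects and clears those bits again.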
static bool SetMarkBitsUnderInvalidatedCode(Code* code, bool value) {
  if (p->IsEvacuationCandidate() ||

  Address code_start = code->address();
  Address code_end = code_start + code->Size();

  uint32_t end_index =

  Bitmap* b = p->markbits();

  MarkBit start_mark_bit = b->MarkBitFromIndex(start_index);
  MarkBit end_mark_bit = b->MarkBitFromIndex(end_index);

    if (start_cell == end_cell) {
      *start_cell |= start_mask & end_mask;
      *start_cell |= start_mask;
      *end_cell |= end_mask;


static bool IsOnInvalidatedCodeObject(Address addr) {
  if (p->owner()->identity() != CODE_SPACE) return false;

  return mark_bit.Get();


      !ShouldSkipEvacuationSlotRecording(code)) {
    invalidated_code_.Add(code);


bool MarkCompactCollector::MarkInvalidatedCode() {
  bool code_marked = false;

  int length = invalidated_code_.length();
  for (int i = 0; i < length; i++) {
    Code* code = invalidated_code_[i];

    if (SetMarkBitsUnderInvalidatedCode(code, true)) {


void MarkCompactCollector::RemoveDeadInvalidatedCode() {
  int length = invalidated_code_.length();
  for (int i = 0; i < length; i++) {
    if (!IsMarked(invalidated_code_[i])) invalidated_code_[i] = NULL;


void MarkCompactCollector::ProcessInvalidatedCode(ObjectVisitor* visitor) {
  int length = invalidated_code_.length();
  for (int i = 0; i < length; i++) {
    Code* code = invalidated_code_[i];
      code->Iterate(visitor);
      SetMarkBitsUnderInvalidatedCode(code, false);

  invalidated_code_.Rewind(0);

void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
  bool code_slots_filtering_required;
  { GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP_NEWSPACE);
    code_slots_filtering_required = MarkInvalidatedCode();

  { GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_EVACUATE_PAGES);

  PointersUpdatingVisitor updating_visitor(heap());

  { GCTracer::Scope gc_scope(tracer_,
                             GCTracer::Scope::MC_UPDATE_NEW_TO_NEW_POINTERS);
    SemiSpaceIterator to_it(heap()->new_space()->bottom(),
                            heap()->new_space()->top());
    for (HeapObject* object = to_it.Next();
         object = to_it.Next()) {
      Map* map = object->map();
      object->IterateBody(map->instance_type(),
                          object->SizeFromMap(map),

  { GCTracer::Scope gc_scope(tracer_,
                             GCTracer::Scope::MC_UPDATE_ROOT_TO_NEW_POINTERS);

  { GCTracer::Scope gc_scope(tracer_,
                             GCTracer::Scope::MC_UPDATE_OLD_TO_NEW_POINTERS);
    StoreBufferRebuildScope scope(heap_,
                                  &Heap::ScavengeStoreBufferCallback);

  { GCTracer::Scope gc_scope(tracer_,
                             GCTracer::Scope::MC_UPDATE_POINTERS_TO_EVACUATED);
                              migration_slots_buffer_,
                              code_slots_filtering_required);
    if (FLAG_trace_fragmentation) {
      PrintF("  migration slots buffer: %d\n",

  if (compacting_ && was_marked_incrementally_) {
    LargeObjectIterator it(heap_->lo_space());
    for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) {
      obj->Iterate(&updating_visitor);

  int npages = evacuation_candidates_.length();
  { GCTracer::Scope gc_scope(
      tracer_, GCTracer::Scope::MC_UPDATE_POINTERS_BETWEEN_EVACUATED);
    for (int i = 0; i < npages; i++) {
      Page* p = evacuation_candidates_[i];
      ASSERT(p->IsEvacuationCandidate() ||

      if (p->IsEvacuationCandidate()) {
                                   code_slots_filtering_required);
        if (FLAG_trace_fragmentation) {
          PrintF("  page %p slots buffer: %d\n",
                 reinterpret_cast<void*>(p),

        SkipList* list = p->skip_list();
        if (list != NULL) list->Clear();
        if (FLAG_gc_verbose) {
                 reinterpret_cast<intptr_t>(p));
        PagedSpace* space = static_cast<PagedSpace*>(p->owner());

        switch (space->identity()) {
            SweepPrecisely<SWEEP_AND_VISIT_LIVE_OBJECTS, IGNORE_SKIP_LIST>(
                space, p, &updating_visitor);
            SweepPrecisely<SWEEP_AND_VISIT_LIVE_OBJECTS, REBUILD_SKIP_LIST>(
                space, p, &updating_visitor);

  GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_UPDATE_MISC_POINTERS);

  HeapObjectIterator cell_iterator(heap_->cell_space());
  for (HeapObject* cell = cell_iterator.Next();
       cell = cell_iterator.Next()) {
    if (cell->IsJSGlobalPropertyCell()) {
          reinterpret_cast<Address>(cell) +
      updating_visitor.VisitPointer(reinterpret_cast<Object**>(value_address));

  heap_->symbol_table()->Iterate(&updating_visitor);
      &UpdateReferenceInExternalStringTableEntry);

  if (!FLAG_watch_ic_patching) {

  EvacuationWeakObjectRetainer evacuation_object_retainer;

  ProcessInvalidatedCode(&updating_visitor);

  if (FLAG_verify_heap) {
    VerifyEvacuation(heap_);

  for (int i = 0; i < npages; i++) {
    Page* p = evacuation_candidates_[i];
    if (!p->IsEvacuationCandidate()) continue;
    PagedSpace* space = static_cast<PagedSpace*>(p->owner());
    space->Free(p->area_start(), p->area_size());
    p->set_scan_on_scavenge(false);
    p->ResetLiveBytes();
    space->ReleasePage(p);

  evacuation_candidates_.Rewind(0);
  compacting_ = false;

static const int kStartTableEntriesPerLine = 5;
static const int kStartTableLines = 171;
static const int kStartTableInvalidLine = 127;
static const int kStartTableUnusedEntry = 126;

#define _ kStartTableUnusedEntry
#define X kStartTableInvalidLine


static inline int MarkWordToObjectStarts(uint32_t mark_bits, int* starts) {
  ASSERT((mark_bits & 0x180) != 0x180);
  ASSERT((mark_bits & 0x18000) != 0x18000);
  ASSERT((mark_bits & 0x1800000) != 0x1800000);

  while (mark_bits != 0) {
    int byte = (mark_bits & 0xff);
    ASSERT(byte < kStartTableLines);
    char* table = kStartTable + byte * kStartTableEntriesPerLine;
    int objects_in_these_8_words = table[0];
    ASSERT(objects_in_these_8_words != kStartTableInvalidLine);
    ASSERT(objects_in_these_8_words < kStartTableEntriesPerLine);
    for (int i = 0; i < objects_in_these_8_words; i++) {
      starts[objects++] = offset + table[1 + i];


static inline Address DigestFreeStart(Address approximate_free_start,
                                      uint32_t free_start_cell) {
  ASSERT(free_start_cell != 0);

  ASSERT((free_start_cell & (free_start_cell << 1)) == 0);

  uint32_t cell = free_start_cell;
  int offset_of_last_live;
  if ((cell & 0x80000000u) != 0) {
    offset_of_last_live = 31;
    cell = (cell + 1) >> 1;
    int live_objects = MarkWordToObjectStarts(cell, offsets);
    ASSERT(live_objects == 1);
    offset_of_last_live = offsets[live_objects - 1];

      approximate_free_start + offset_of_last_live * kPointerSize;
  Address free_start = last_live_start + last_live->Size();


static inline Address StartOfLiveObject(Address block_address, uint32_t cell) {
  ASSERT((cell & (cell << 1)) == 0);

  if (cell == 0x80000000u) {

  uint32_t first_set_bit = ((cell ^ (cell - 1)) + 1) >> 1;
  ASSERT((first_set_bit & cell) == first_set_bit);
  int live_objects = MarkWordToObjectStarts(first_set_bit, offsets);
  ASSERT(live_objects == 1);
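
// Conservative sweeping frees whole blocks between live objects: the mark
// bitmap is scanned one 32-bit cell at a time (one bit per pointer-sized
// word), DigestFreeStart() and StartOfLiveObject() pin down the exact
// boundaries of a free run, and runs longer than 32 * kPointerSize are
// returned to the space's free list.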
  int last_cell_index =
      Bitmap::IndexToCell(
          Bitmap::CellAlignIndex(

      Bitmap::IndexToCell(
          Bitmap::CellAlignIndex(

  intptr_t freed_bytes = 0;

       cell_index < last_cell_index;
    if (cells[cell_index] != 0) break;

  size_t size = block_address - p->area_start();
  if (cell_index == last_cell_index) {
                                static_cast<int>(size)));

  Address free_end = StartOfLiveObject(block_address, cells[cell_index]);
                     static_cast<int>(size));

  Address free_start = block_address;
  uint32_t free_start_cell = cells[cell_index];

       cell_index < last_cell_index;
    ASSERT((unsigned)cell_index ==
        Bitmap::IndexToCell(
            Bitmap::CellAlignIndex(
    uint32_t cell = cells[cell_index];
      if (block_address - free_start > 32 * kPointerSize) {
        free_start = DigestFreeStart(free_start, free_start_cell);
        if (block_address - free_start > 32 * kPointerSize) {
        free_end = StartOfLiveObject(block_address, cell);
        freed_bytes += space->Free(free_start,
                                   static_cast<int>(free_end - free_start));
      free_start = block_address;
      free_start_cell = cell;
    cells[cell_index] = 0;

  if (block_address - free_start > 32 * kPointerSize) {
    free_start = DigestFreeStart(free_start, free_start_cell);
    freed_bytes += space->Free(free_start,
                               static_cast<int>(block_address - free_start));


void MarkCompactCollector::SweepSpace(PagedSpace* space, SweeperType sweeper) {
  PageIterator it(space);

  intptr_t freed_bytes = 0;
  int pages_swept = 0;
  bool lazy_sweeping_active = false;
  bool unused_page_present = false;

  intptr_t space_left =
      Min(heap()->OldGenPromotionLimit(old_space_size),
          heap()->OldGenAllocationLimit(old_space_size)) - old_space_size;

  while (it.has_next()) {
    Page* p = it.next();
      ASSERT(evacuation_candidates_.length() > 0);

    if (unused_page_present) {
      if (FLAG_gc_verbose) {
               reinterpret_cast<intptr_t>(p));
      unused_page_present = true;

    if (lazy_sweeping_active) {
      if (FLAG_gc_verbose) {
               reinterpret_cast<intptr_t>(p));

        if (FLAG_gc_verbose) {
                 reinterpret_cast<intptr_t>(p));

        if (FLAG_gc_verbose) {
                 reinterpret_cast<intptr_t>(p));
        if (space_left + freed_bytes > newspace_size) {
          lazy_sweeping_active = true;
          if (FLAG_gc_verbose) {

        if (FLAG_gc_verbose) {
                 reinterpret_cast<intptr_t>(p));
          SweepPrecisely<SWEEP_ONLY, REBUILD_SKIP_LIST>(space, p, NULL);
          SweepPrecisely<SWEEP_ONLY, IGNORE_SKIP_LIST>(space, p, NULL);

  if (FLAG_gc_verbose) {
    PrintF("SweepSpace: %s (%d pages swept)\n",
void MarkCompactCollector::SweepSpaces() {
  GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP);

  state_ = SWEEP_SPACES;

  if (sweep_precisely_) how_to_sweep = PRECISE;

  SweepSpace(heap()->old_pointer_space(), how_to_sweep);
  SweepSpace(heap()->old_data_space(), how_to_sweep);

  RemoveDeadInvalidatedCode();

  EvacuateNewSpaceAndCandidates();


  if (code_flusher_ != NULL) return;

  if (code_flusher_ == NULL) return;
  delete code_flusher_;
  code_flusher_ = NULL;


#ifdef ENABLE_GDB_JIT_INTERFACE
  if (obj->IsCode()) {
    GDBJITInterface::RemoveCode(reinterpret_cast<Code*>(obj));

  if (obj->IsCode()) {


  *buffer_address = buffer;

  buffer->Add(reinterpret_cast<ObjectSlot>(type));
  buffer->Add(reinterpret_cast<ObjectSlot>(addr));


  if (RelocInfo::IsCodeTarget(rmode)) {
  } else if (RelocInfo::IsEmbeddedObject(rmode)) {
  } else if (RelocInfo::IsDebugBreakSlot(rmode)) {
  } else if (RelocInfo::IsJSReturn(rmode)) {

      (rinfo->host() == NULL ||
       !ShouldSkipEvacuationSlotRecording(rinfo->host()))) {
        SlotTypeForRMode(rinfo->rmode()),

      !ShouldSkipEvacuationSlotRecording(reinterpret_cast<Object**>(slot))) {

  for (int slot_idx = 0; slot_idx < idx_; ++slot_idx) {
                 DecodeSlotType(slot),
                 reinterpret_cast<Address>(slots_[slot_idx]));

  for (int slot_idx = 0; slot_idx < idx_; ++slot_idx) {
      if (!IsOnInvalidatedCodeObject(reinterpret_cast<Address>(slot))) {
      if (!IsOnInvalidatedCodeObject(pc)) {
                   DecodeSlotType(slot),
                   reinterpret_cast<Address>(slots_[slot_idx]));

  while (buffer != NULL) {
    buffer = next_buffer;

  *buffer_address = NULL;
virtual intptr_t SizeOfObjects()
bool context_exit_happened()
static void VisitEmbeddedPointer(Heap *heap, RelocInfo *rinfo)
v8::Handle< v8::Object > bottom
void Initialize(Address low, Address high)
void CollectEvacuationCandidates(PagedSpace *space)
void EvictEvacuationCandidatesFromFreeLists()
static bool VisitUnmarkedObjects(Heap *heap, Object **start, Object **end)
static Address & Address_at(Address addr)
void CodeIterateBody(ObjectVisitor *v)
void MarkEvacuationCandidate()
INLINE(static void MarkObjectByPointer(MarkCompactCollector *collector, Object **anchor_slot, Object **p))
void InvalidateCode(Code *code)
bool IsAligned(T value, U alignment)
void CountFreeListItems(Page *p, FreeList::SizeStats *sizes)
GlobalHandles * global_handles()
void IncrementYoungSurvivorsCounter(int survived)
char kStartTable[kStartTableLines *kStartTableEntriesPerLine]
virtual Object * RetainAs(Object *object)
static void VisitRuntimeEntry(RelocInfo *rinfo)
PointersUpdatingVisitor(Heap *heap)
const uint32_t kShortcutTypeMask
static const int kNameOffset
static void IterateBody(Map *map, HeapObject *obj)
INLINE(static void VisitUnmarkedObject(MarkCompactCollector *collector, HeapObject *obj))
OldSpace * old_pointer_space()
static const int kPropertiesOffset
bool TransferMark(Address old_start, Address new_start)
static void MarkBlack(MarkBit mark_bit)
static Code * GetCodeFromTargetAddress(Address address)
static void VisitDebugTarget(Heap *heap, RelocInfo *rinfo)
bool is_inline_cache_stub()
void set_age_mark(Address mark)
static void GreyToBlack(MarkBit markbit)
void VisitThread(Isolate *isolate, ThreadLocalTop *top)
static const int kMaxNonCodeHeapObjectSize
Object ** global_contexts_list_address()
void DeallocateBuffer(SlotsBuffer *buffer)
void Iterate(ObjectVisitor *v)
void UpdateSamplesAfterCompact(ObjectVisitor *visitor)
MarkingVisitor(Heap *heap)
void MarkDescriptorArray(DescriptorArray *descriptors)
PropertyDetails GetDetails(int descriptor_number)
InlineCacheState ic_state()
LargeObjectSpace * lo_space()
int Free(Address start, int size_in_bytes)
void ReleasePage(Page *page)
void MarkSweptConservatively()
static const int kNonWeakFieldsEndOffset
static int OffsetOfElementAt(int index)
static intptr_t SweepConservatively(PagedSpace *space, Page *p)
virtual Object * RetainAs(Object *object)
#define T(name, string, precedence)
void RecordRelocSlot(RelocInfo *rinfo, Object *target)
static const int kProtoTransitionHeaderSize
void DeallocateChain(SlotsBuffer **buffer_address)
void UpdateReferencesInExternalStringTable(ExternalStringTableUpdaterCallback updater_func)
void MigrateObject(Address dst, Address src, int size, AllocationSpace to_old_space)
List< ObjectGroup * > * object_groups()
void ProcessWeakReferences(WeakObjectRetainer *retainer)
bool is_code_flushing_enabled() const
void Add(ObjectSlot slot)
InnerPointerToCodeCache * inner_pointer_to_code_cache()
void VisitPointers(Object **start, Object **end)
void WhiteToGreyAndPush(HeapObject *obj, MarkBit mark_bit)
void IterateRoots(ObjectVisitor *v, VisitMode mode)
friend class MarkingVisitor
static ObjectHashTable * cast(Object *obj)
void VisitPointers(Object **start, Object **end)
void set_encountered_weak_maps(Object *weak_map)
void CheckNewSpaceExpansionCriteria()
void IdentifyWeakHandles(WeakSlotCallback f)
#define SLOT_ADDR(obj, offset)
static JSGlobalPropertyCell * cast(Object *obj)
IncrementalMarking * incremental_marking()
Object * GetBackPointer()
static bool ChainLengthThresholdReached(SlotsBuffer *buffer)
SlotsBuffer ** slots_buffer_address()
void Iterate(v8::internal::ObjectVisitor *v)
static void UpdateSlot(Heap *heap, Object **slot)
static int code_index(bool is_ascii)
int number_of_descriptors()
static const int kProtoTransitionPrototypeOffset
#define ASSERT_EQ(v1, v2)
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination trace on stack replacement optimize closures functions with arguments object optimize functions containing for in loops profiler considers IC stability primitive functions trigger their own optimization re try self optimization if it failed insert an interrupt check at function exit execution budget before interrupt is triggered call count before self optimization self_optimization count_based_interrupts weighted_back_edges trace_opt emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 enable use of ARMv7 instructions if enable use of MIPS FPU instructions if NULL
void IterateFunctions(ObjectVisitor *v)
InstanceType instance_type()
bool IsEvacuationCandidate()
static HeapObject * FromAddress(Address address)
static FixedArray * cast(Object *obj)
void set_was_swept_conservatively(bool b)
void RemoveObjectGroups()
intptr_t PromotedSpaceSizeOfObjects()
static const int kCompilationErrorValue
void EvictEvacuationCandidate(Page *page)
void SetPagesToSweep(Page *first)
void RestartIfNotMarking()
static const char * kImpossibleBitPattern
CodeFlusher(Isolate *isolate)
void MarkAccessorPairSlot(AccessorPair *accessors, int offset)
void MarkMapContents(Map *map)
void set_code(Code *code)
static const int kUninitializedValue
MUST_USE_RESULT MaybeObject * AllocateRaw(int object_size, Executability executable)
void VisitDebugTarget(RelocInfo *rinfo)
void VisitPointer(Object **p)
void check(i::Vector< const char > string)
static void FatalProcessOutOfMemory(const char *location, bool take_snapshot=false)
Object ** GetValueSlot(int descriptor_number)
static void UpdateSlotsRecordedIn(Heap *heap, SlotsBuffer *buffer, bool code_slots_filtering_required)
static int saved_code_index(bool is_ascii)
uint32_t AddressToMarkbitIndex(Address addr)
static void ProcessNonLive(HeapObject *obj)
static const int kValueOffset
static bool AddTo(SlotsBufferAllocator *allocator, SlotsBuffer **buffer_address, ObjectSlot slot, AdditionMode mode)
static void VisitExternalReference(Address *p)
OldSpace * old_data_space()
static void Clear(Address address)
MarkCompactCollector * mark_compact_collector()
void EnableCodeFlushing(bool enable)
static int RegionNumber(Address addr)
FixedBodyDescriptor< kHeaderSize, kHeaderSize+FIRST_WEAK_SLOT *kPointerSize, kSize > MarkCompactBodyDescriptor
AllocationSpace identity()
void RecordCodeEntrySlot(Address slot, Code *target)
static const int kPointerFieldsBeginOffset
Object * encountered_weak_maps()
Address FromSpacePageLow()
static JSFunction * cast(Object *obj)