58 MarkCompactCollector::MarkCompactCollector() :  
 
   62       sweep_precisely_(false),
   63       reduce_memory_footprint_(false),
   64       abort_incremental_marking_(false),
   66       was_marked_incrementally_(false),
   68       migration_slots_buffer_(NULL),
   71       encountered_weak_maps_(NULL) { }
 
   75 class VerifyMarkingVisitor: public ObjectVisitor {
   78     for (Object** current = start; current < end; current++) {
   79       if ((*current)->IsHeapObject()) {
   81         CHECK(HEAP->mark_compact_collector()->IsMarked(object));
 
   89   VerifyMarkingVisitor visitor;
 
   98       CHECK(current >= next_object_must_be_here_or_later);
 
   99       object->Iterate(&visitor);
 
  100       next_object_must_be_here_or_later = current + object->Size();
 
  106 static void VerifyMarking(NewSpace* space) {
 
  108   NewSpacePageIterator it(space->bottom(), end);
 
  113   while (it.has_next()) {
 
  114     NewSpacePage* page = it.next();
 
  115     Address limit = it.has_next() ? page->area_end() : end;
 
  116     CHECK(limit == end || !page->Contains(end));
 
  117     VerifyMarking(page->area_start(), limit);
 
  122 static void VerifyMarking(PagedSpace* space) {
 
  123   PageIterator it(space);
 
  125   while (it.has_next()) {
 
  127     VerifyMarking(p->area_start(), p->area_end());
 
  132 static void VerifyMarking(Heap* heap) {
 
  133   VerifyMarking(heap->old_pointer_space());
 
  134   VerifyMarking(heap->old_data_space());
 
  135   VerifyMarking(heap->code_space());
 
  136   VerifyMarking(heap->cell_space());
 
  137   VerifyMarking(heap->map_space());
 
  138   VerifyMarking(heap->new_space());
 
  140   VerifyMarkingVisitor visitor;
 
  142   LargeObjectIterator it(heap->lo_space());
 
  143   for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) {
 
  145       obj->Iterate(&visitor);
 
  153 class VerifyEvacuationVisitor: public ObjectVisitor {
  156     for (Object** current = start; current < end; current++) {
  157       if ((*current)->IsHeapObject()) {
  159         CHECK(!MarkCompactCollector::IsOnEvacuationCandidate(object));
 
  167   VerifyEvacuationVisitor visitor;
 
  176       CHECK(current >= next_object_must_be_here_or_later);
 
  177       object->Iterate(&visitor);
 
  178       next_object_must_be_here_or_later = current + object->Size();
 
  184 static void VerifyEvacuation(NewSpace* space) {
 
  185   NewSpacePageIterator it(space->bottom(), space->top());
 
  186   VerifyEvacuationVisitor visitor;
 
  188   while (it.has_next()) {
 
  189     NewSpacePage* page = it.next();
 
  190     Address current = page->area_start();
 
  191     Address limit = it.has_next() ? page->area_end() : space->top();
 
  192     CHECK(limit == space->top() || !page->Contains(space->top()));
 
  193     while (current < limit) {
 
  195       object->Iterate(&visitor);
 
  196       current += object->Size();
 
  202 static void VerifyEvacuation(PagedSpace* space) {
 
  203   PageIterator it(space);
 
  205   while (it.has_next()) {
 
  207     if (p->IsEvacuationCandidate()) continue;
 
  208     VerifyEvacuation(p->area_start(), p->area_end());
 
  213 static void VerifyEvacuation(Heap* heap) {
 
  214   VerifyEvacuation(heap->old_pointer_space());
 
  215   VerifyEvacuation(heap->old_data_space());
 
  216   VerifyEvacuation(heap->code_space());
 
  217   VerifyEvacuation(heap->cell_space());
 
  218   VerifyEvacuation(heap->map_space());
 
  219   VerifyEvacuation(heap->new_space());
 
  221   VerifyEvacuationVisitor visitor;
 
  222   heap->IterateStrongRoots(&visitor, VISIT_ALL);
 
  224 #endif  // VERIFY_HEAP 
  228 class VerifyNativeContextSeparationVisitor: public ObjectVisitor {
  230   VerifyNativeContextSeparationVisitor() : current_native_context_(NULL) {}
  233     for (Object** current = start; current < end; current++) {
  234       if ((*current)->IsHeapObject()) {
  236         if (object->IsString()) continue;
 
  237         switch (object->map()->instance_type()) {
 
  259             if (object->IsContext()) {
 
  260               CheckContext(object);
 
  263               int length = array->length();
 
  266               array->set_length(0);
 
  269               array->set_length(length);
 
  276             object->Iterate(this);
  297   void CheckContext(Object* context) {
  298     if (!context->IsContext()) return;
  300     if (current_native_context_ == NULL) {
 
  301       current_native_context_ = native_context;
 
  303       CHECK_EQ(current_native_context_, native_context);
 
  307   Context* current_native_context_;
 
  311 static void VerifyNativeContextSeparation(Heap* heap) {
 
  312   HeapObjectIterator it(heap->code_space());
 
  314   for (Object* object = it.Next(); object != NULL; object = it.Next()) {
 
  315     VerifyNativeContextSeparationVisitor visitor;
 
  324   evacuation_candidates_.Add(p);
 
  328 static void TraceFragmentation(PagedSpace* space) {
  330   intptr_t reserved = (number_of_pages * space->AreaSize());
  332   PrintF("[%s]: %d pages, %d (%.1f%%) free\n",
 
  335          static_cast<int>(free),
 
  336          static_cast<double>(free) * 100 / reserved);
 
  342     ASSERT(evacuation_candidates_.length() == 0);
 
  344 #ifdef ENABLE_GDB_JIT_INTERFACE 
  346     if (FLAG_gdbjit) return false;
 
  352     if (FLAG_compact_code_space &&
 
  354          FLAG_incremental_code_compaction)) {
 
  356     } else if (FLAG_trace_fragmentation) {
  357       TraceFragmentation(heap()->code_space());
 
  360     if (FLAG_trace_fragmentation) {
 
  361       TraceFragmentation(heap()->map_space());
  362       TraceFragmentation(heap()->cell_space());
 
  369     compacting_ = evacuation_candidates_.length() > 0;
 
  379   ASSERT(state_ == PREPARE_GC);
 
  385   if (FLAG_collect_maps) ClearNonLiveTransitions();
 
  390   if (FLAG_verify_heap) {
 
  391     VerifyMarking(heap_);
 
  397   if (!FLAG_collect_maps) ReattachInitialMaps();
 
  400   if (FLAG_verify_native_context_separation) {
 
  401     VerifyNativeContextSeparation(heap_);
 
  412 void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpace* space) {
 
  413   PageIterator it(space);
 
  415   while (it.has_next()) {
 
  423 void MarkCompactCollector::VerifyMarkbitsAreClean(NewSpace* space) {
 
  424   NewSpacePageIterator it(space->bottom(), space->top());
 
  426   while (it.has_next()) {
 
  427     NewSpacePage* p = it.next();
 
  428     CHECK(p->markbits()->IsClean());
 
  434 void MarkCompactCollector::VerifyMarkbitsAreClean() {
 
  439   VerifyMarkbitsAreClean(heap_->map_space());
  440   VerifyMarkbitsAreClean(heap_->new_space());
  442   LargeObjectIterator it(heap_->lo_space());
  443   for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) {
 
  449 #endif  // VERIFY_HEAP 
  452 static void ClearMarkbitsInPagedSpace(PagedSpace* space) {
 
  453   PageIterator it(space);
 
  455   while (it.has_next()) {
 
  461 static void ClearMarkbitsInNewSpace(NewSpace* space) {
 
  462   NewSpacePageIterator it(space->ToSpaceStart(), space->ToSpaceEnd());
 
  464   while (it.has_next()) {
 
  471   ClearMarkbitsInPagedSpace(heap_->code_space());
  472   ClearMarkbitsInPagedSpace(heap_->map_space());
  475   ClearMarkbitsInPagedSpace(heap_->cell_space());
  476   ClearMarkbitsInNewSpace(heap_->new_space());
  496   if (old_start == new_start) return false;
 
  502   ObjectColor old_color = Color(old_mark_bit);
 
  506     old_mark_bit.Clear();
  512     old_mark_bit.Clear();
 
  521   ObjectColor new_color = Color(new_mark_bit);
 
  522   ASSERT(new_color == old_color);
 
  549 static int FreeListFragmentation(PagedSpace* space, Page* p) {
 
  551   if (!p->WasSwept()) {
 
  552     if (FLAG_trace_fragmentation) {
 
  553       PrintF("%p [%s]: %d bytes live (unswept)\n",
 
  554              reinterpret_cast<void*>(p),
 
  561   FreeList::SizeStats sizes;
 
  562   space->CountFreeListItems(p, &sizes);
 
  565   intptr_t ratio_threshold;
 
  566   intptr_t area_size = space->AreaSize();
 
  568     ratio = (sizes.medium_size_ * 10 + sizes.large_size_ * 2) * 100 /
 
  570     ratio_threshold = 10;
 
  572     ratio = (sizes.small_size_ * 5 + sizes.medium_size_) * 100 /
 
  574     ratio_threshold = 15;
 
  577   if (FLAG_trace_fragmentation) {
 
  578     PrintF("%p [%s]: %d (%.2f%%) %d (%.2f%%) %d (%.2f%%) %d (%.2f%%) %s\n",
 
  579            reinterpret_cast<void*>(p),
 
  581            static_cast<int>(sizes.small_size_),
 
  582            static_cast<double>(sizes.small_size_ * 100) /
 
  584            static_cast<int>(sizes.medium_size_),
 
  585            static_cast<double>(sizes.medium_size_ * 100) /
 
  587            static_cast<int>(sizes.large_size_),
 
  588            static_cast<double>(sizes.large_size_ * 100) /
 
  590            static_cast<int>(sizes.huge_size_),
 
  591            static_cast<double>(sizes.huge_size_ * 100) /
 
  593            (ratio > ratio_threshold) ? "[fragmented]" : "");
  596   if (FLAG_always_compact && sizes.Total() != area_size) {
  600   if (ratio <= ratio_threshold) return 0;
 
  602   return static_cast<int>(ratio - ratio_threshold);
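The scoring above weights the free-list buckets unevenly (small and medium chunks count more than large ones, since they are the ones that fragment future allocation), expresses the weighted free space as a percentage of the page area, and reports only how far a page is over the threshold. A minimal standalone sketch of that heuristic, under stated assumptions: SizeStats mirrors the fields used in the listing, the boolean selector stands in for the elided condition that picks between the two weightings, and the example numbers are invented.

#include <cstdint>
#include <cstdio>

// Stand-in for FreeList::SizeStats from the listing (only the fields used).
struct SizeStats {
  intptr_t small_size_, medium_size_, large_size_, huge_size_;
};

// Weighted-fragmentation heuristic: 0 means "not worth evacuating",
// otherwise the score is how many percentage points above the threshold.
static int FragmentationScore(const SizeStats& sizes, intptr_t area_size,
                              bool weight_medium_and_large) {
  intptr_t ratio, ratio_threshold;
  if (weight_medium_and_large) {
    ratio = (sizes.medium_size_ * 10 + sizes.large_size_ * 2) * 100 / area_size;
    ratio_threshold = 10;
  } else {
    ratio = (sizes.small_size_ * 5 + sizes.medium_size_) * 100 / area_size;
    ratio_threshold = 15;
  }
  return ratio <= ratio_threshold ? 0 : static_cast<int>(ratio - ratio_threshold);
}

int main() {
  SizeStats sizes = {64 * 1024, 128 * 1024, 0, 0};  // invented example numbers
  std::printf("score = %d\n", FragmentationScore(sizes, 1 << 20, false));  // prints 28
}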
 
  611   static const int kMaxMaxEvacuationCandidates = 1000;
 
  613   int max_evacuation_candidates =
 
  614       static_cast<int>(sqrt(number_of_pages / 2.0) + 1);
 
  616   if (FLAG_stress_compaction || FLAG_always_compact) {
 
  617     max_evacuation_candidates = kMaxMaxEvacuationCandidates;
 
  622     Candidate() : fragmentation_(0), page_(NULL) { }
  623     Candidate(int f, Page* p) : fragmentation_(f), page_(p) { }
  625     int fragmentation() { return fragmentation_; }
  626     Page* page() { return page_; }
 
  635     REDUCE_MEMORY_FOOTPRINT
 
  640   intptr_t reserved = number_of_pages * space->AreaSize();
  642   static const intptr_t kFreenessThreshold = 50;
  644   if (reduce_memory_footprint_ && over_reserved >= space->AreaSize()) {
 
  648     mode = REDUCE_MEMORY_FOOTPRINT;
 
  649     max_evacuation_candidates += 2;
 
  653   if (over_reserved > reserved / 3 && over_reserved >= 2 * space->AreaSize()) {
 
  657     mode = REDUCE_MEMORY_FOOTPRINT;
 
  658     max_evacuation_candidates *= 2;
 
  661   if (FLAG_trace_fragmentation && mode == REDUCE_MEMORY_FOOTPRINT) {
 
  662     PrintF("Estimated over reserved memory: %.1f / %.1f MB (threshold %d)\n",
  663            static_cast<double>(over_reserved) / MB,
  664            static_cast<double>(reserved) / MB,
 
  665            static_cast<int>(kFreenessThreshold));
 
  668   intptr_t estimated_release = 0;
 
  670   Candidate candidates[kMaxMaxEvacuationCandidates];
 
  672   max_evacuation_candidates =
 
  673       Min(kMaxMaxEvacuationCandidates, max_evacuation_candidates);
 
  676   int fragmentation = 0;
 
  677   Candidate* least = NULL;
 
  679   PageIterator it(space);
 
  680   if (it.has_next()) it.next();  
 
  682   while (it.has_next()) {
 
  686     if (FLAG_stress_compaction) {
 
  688       uintptr_t page_number = reinterpret_cast<uintptr_t>(p) >> kPageSizeBits;
  689       if ((counter & 1) == (page_number & 1)) fragmentation = 1;
  690     } else if (mode == REDUCE_MEMORY_FOOTPRINT) {
 
  692       if (estimated_release >= ((over_reserved * 3) / 4)) {
 
  696       intptr_t free_bytes = 0;
 
  701         FreeList::SizeStats sizes;
 
  703         free_bytes = sizes.Total();
 
  706       int free_pct = static_cast<int>(free_bytes * 100) / p->area_size();
  708       if (free_pct >= kFreenessThreshold) {
  709         estimated_release += 2 * p->area_size() - free_bytes;
 
  710         fragmentation = free_pct;
 
  715       if (FLAG_trace_fragmentation) {
 
  716         PrintF("%p [%s]: %d (%.2f%%) free %s\n",
 
  717                reinterpret_cast<void*>(p),
 
  719                static_cast<int>(free_bytes),
 
  720                static_cast<double>(free_bytes * 100) / p->area_size(),
  721                (fragmentation > 0) ? "[fragmented]" : "");
 
  724       fragmentation = FreeListFragmentation(space, p);
 
  727     if (fragmentation != 0) {
 
  728       if (count < max_evacuation_candidates) {
 
  729         candidates[count++] = Candidate(fragmentation, p);
 
  732           for (int i = 0; i < max_evacuation_candidates; i++) {
 
  734                 candidates[i].fragmentation() < least->fragmentation()) {
 
  735               least = candidates + i;
 
  739         if (least->fragmentation() < fragmentation) {
 
  740           *least = Candidate(fragmentation, p);
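The block above is the second half of a bounded top-N selection: while the candidates array still has room every fragmented page is appended, and once it is full the least fragmented entry is located and overwritten only when the new page scores worse. A small self-contained sketch of that policy, with Candidate reduced to a struct and every other name invented for illustration (the listing caches the "least" pointer between iterations; the sketch rescans each time for brevity):

#include <cstdio>

struct Candidate {
  int fragmentation;
  int page_id;  // stands in for the Page* in the listing
};

// Keep the N most fragmented pages seen so far in a fixed-size array.
static void AddCandidate(Candidate* candidates, int capacity, int* count,
                         Candidate incoming) {
  if (*count < capacity) {            // array not full yet: just append
    candidates[(*count)++] = incoming;
    return;
  }
  Candidate* least = &candidates[0];  // full: find the least fragmented entry
  for (int i = 1; i < capacity; i++) {
    if (candidates[i].fragmentation < least->fragmentation) least = &candidates[i];
  }
  if (least->fragmentation < incoming.fragmentation) *least = incoming;
}

int main() {
  Candidate kept[3];
  int count = 0;
  const int scores[] = {5, 40, 12, 90, 7, 33};
  for (int id = 0; id < 6; id++) AddCandidate(kept, 3, &count, Candidate{scores[id], id});
  for (int i = 0; i < count; i++)
    std::printf("page %d, score %d\n", kept[i].page_id, kept[i].fragmentation);  // 90, 40, 33 remain
}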
 
  747   for (int i = 0; i < count; i++) {
  751   if (count > 0 && FLAG_trace_fragmentation) {
  752     PrintF("Collected %d evacuation candidates for space %s\n",
 
  761     int npages = evacuation_candidates_.length();
 
  762     for (int i = 0; i < npages; i++) {
 
  763       Page* p = evacuation_candidates_[i];
 
  769     evacuation_candidates_.Rewind(0);
 
  770     invalidated_code_.Rewind(0);
 
  772   ASSERT_EQ(0, evacuation_candidates_.length());
 
  788   ASSERT(!FLAG_never_compact || !FLAG_always_compact);
 
  791   if (was_marked_incrementally_ && abort_incremental_marking_) {
 
  795     was_marked_incrementally_ = false;
 
  800   if (!FLAG_never_compact && !was_marked_incrementally_) {
 
  807        space = spaces.next()) {
 
  808     space->PrepareForMarkCompact();
 
  812   if (!was_marked_incrementally_ && FLAG_verify_heap) {
 
  813     VerifyMarkbitsAreClean();
 
  819 void MarkCompactCollector::Finish() {
 
  821   ASSERT(state_ == SWEEP_SPACES || state_ == RELOCATE_OBJECTS);
 
  862 void CodeFlusher::ProcessJSFunctionCandidates() {
 
  863   Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kLazyCompile);
  864   Object* undefined = isolate_->heap()->undefined_value();
 
  866   JSFunction* candidate = jsfunction_candidates_head_;
 
  867   JSFunction* next_candidate;
 
  868   while (candidate != NULL) {
 
  869     next_candidate = GetNextCandidate(candidate);
 
  870     ClearNextCandidate(candidate, undefined);
 
  872     SharedFunctionInfo* shared = candidate->shared();
 
  874     Code* code = shared->code();
 
  876     if (!code_mark.Get()) {
 
  877       shared->set_code(lazy_compile);
 
  878       candidate->set_code(lazy_compile);
 
  879     } else if (code == lazy_compile) {
 
  880       candidate->set_code(lazy_compile);
 
  888         RecordCodeEntrySlot(slot, target);
 
  890     Object** shared_code_slot =
 
  893         RecordSlot(shared_code_slot, shared_code_slot, *shared_code_slot);
 
  895     candidate = next_candidate;
 
  898   jsfunction_candidates_head_ = NULL;
 
  902 void CodeFlusher::ProcessSharedFunctionInfoCandidates() {
 
  903   Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kLazyCompile);
 
  905   SharedFunctionInfo* candidate = shared_function_info_candidates_head_;
 
  906   SharedFunctionInfo* next_candidate;
 
  907   while (candidate != NULL) {
 
  908     next_candidate = GetNextCandidate(candidate);
 
  909     ClearNextCandidate(candidate);
 
  911     Code* code = candidate->code();
 
  913     if (!code_mark.Get()) {
 
  914       candidate->set_code(lazy_compile);
 
  920         RecordSlot(code_slot, code_slot, *code_slot);
 
  922     candidate = next_candidate;
 
  925   shared_function_info_candidates_head_ = NULL;
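Both flusher loops above follow the same pattern: the candidate list is intrusive (the next pointer lives in the candidate itself), each link is cleared as it is visited, unmarked code is swapped back to the lazy-compile stub, and the head is reset to NULL once the list is consumed. A compact sketch of that traversal with plain structs; the field and stub names are invented, only the shape follows the listing.

#include <cstdio>

struct Function {
  bool code_is_marked;       // stands in for the mark bit on the Code object
  const char* code;          // stands in for the Code pointer
  Function* next_candidate;  // intrusive link threaded through the candidates
};

static void ProcessCandidates(Function** head, const char* lazy_compile_stub) {
  Function* candidate = *head;
  while (candidate != nullptr) {
    Function* next = candidate->next_candidate;
    candidate->next_candidate = nullptr;    // clear the link as we go
    if (!candidate->code_is_marked) {
      candidate->code = lazy_compile_stub;  // flush: recompile lazily on next call
    }
    candidate = next;
  }
  *head = nullptr;                          // the list has been consumed
}

int main() {
  Function b = {true, "compiled_b", nullptr};
  Function a = {false, "compiled_a", &b};
  Function* head = &a;
  ProcessCandidates(&head, "lazy_compile_stub");
  std::printf("a -> %s, b -> %s\n", a.code, b.code);  // a flushed, b kept
}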
 
  929 MarkCompactCollector::~MarkCompactCollector() {
 
  930   if (code_flusher_ != NULL) {
  931     delete code_flusher_;
  932     code_flusher_ = NULL;
  937 static inline HeapObject* ShortCircuitConsString(Object** p) {
  951   if (!FLAG_clever_optimizations) return object;
  952   Map* map = object->map();
  956   Object* second = reinterpret_cast<ConsString*>(object)->unchecked_second();
 
  957   Heap* heap = map->GetHeap();
 
  958   if (second != heap->empty_string()) {
 
  965   Object* first = reinterpret_cast<ConsString*>(object)->unchecked_first();
  966   if (!heap->InNewSpace(object) && heap->InNewSpace(first)) return object;
  984   template<MarkCompactMarkingVisitor::VisitorId id>
 
  998     const int kMinRangeForMarkingRecursion = 64;
 
  999     if (end - start >= kMinRangeForMarkingRecursion) {
 
 1004     for (Object** p = start; p < end; p++) {
 1005       MarkObjectByPointer(collector, start, p);
 1019     if (!mark_bit.Get()) {
 1030     if (!(*p)->IsHeapObject()) return;
 1031     HeapObject* object = ShortCircuitConsString(p);
 1032     collector->RecordSlot(anchor_slot, p, object);
 1034     collector->MarkObject(object, mark);
 
 1042     ASSERT(Isolate::Current()->heap()->Contains(obj));
 
 1043     ASSERT(!HEAP->mark_compact_collector()->IsMarked(obj));
 1062     if (check.HasOverflowed()) return false;
 1066     for (Object** p = start; p < end; p++) {
 1068       if (!o->IsHeapObject()) continue;
 1069       collector->RecordSlot(start, p, o);
 1072       if (mark.Get()) continue;
 
 1073       VisitUnmarkedObject(collector, obj);
 
 1107     Object* table_object = weak_map->table();
 
 1108     if (!table_object->IsHashTable()) return;
 
 1113     collector->RecordSlot(table_slot, table_slot, table);
 
 1114     if (!table_mark.Get()) collector->SetMark(table, table_mark);
 1122   static inline void TrackObjectStatsAndVisit(Map* map, HeapObject* obj);
 1126   static const int kRegExpCodeThreshold = 5;
 1128   static void UpdateRegExpCodeAgeAndFlush(Heap* heap,
 
 1140     if (!code->IsSmi() &&
 
 1154           RecordSlot(slot, slot, code);
 
 1160     } else if (code->IsSmi()) {
 1186   static void VisitRegExpAndFlushCode(Map* map, HeapObject* object) {
 
 1187     Heap* heap = map->GetHeap();
 
 1189     if (!collector->is_code_flushing_enabled()) {
 
 1193     JSRegExp* re = reinterpret_cast<JSRegExp*>(object);
 1195     UpdateRegExpCodeAgeAndFlush(heap, re, true);
 1196     UpdateRegExpCodeAgeAndFlush(heap, re, false);
 1201   static VisitorDispatchTable<Callback> non_count_table_;
 1210   if (fixed_array->map() != heap->fixed_cow_array_map() &&
 1211       fixed_array->map() != heap->fixed_double_array_map() &&
 
 1212       fixed_array != heap->empty_fixed_array()) {
 
 1213     if (fixed_array->IsDictionary()) {
 
 1216                               fixed_array->Size());
 1220                               fixed_array->Size());
 1229   int object_size = obj->Size();
 1231   non_count_table_.GetVisitorById(id)(map, obj);
 
 1232   if (obj->IsJSObject()) {
 
 1235                                DICTIONARY_ELEMENTS_SUB_TYPE,
 
 1236                                FAST_ELEMENTS_SUB_TYPE);
 
 1238                                DICTIONARY_PROPERTIES_SUB_TYPE,
 
 1239                                FAST_PROPERTIES_SUB_TYPE);
 
 1244 template<MarkCompactMarkingVisitor::VisitorId id>
 1247   ObjectStatsVisitBase(id, map, obj);
 1261         array != heap->empty_descriptor_array()) {
 1262       int fixed_array_size = array->Size();
 
 1264                               DESCRIPTOR_ARRAY_SUB_TYPE,
 
 1268       int fixed_array_size = map_obj->transitions()->Size();
 
 1270                               TRANSITION_ARRAY_SUB_TYPE,
 
 1273     if (map_obj->code_cache() != heap->empty_fixed_array()) {
 
 1276           MAP_CODE_CACHE_SUB_TYPE,
 
 1290     int object_size = obj->Size();
 
 1305     if (sfi->scope_info() != heap->empty_fixed_array()) {
 
 1308           SCOPE_INFO_SUB_TYPE,
 
 1323     if (fixed_array == heap->symbol_table()) {
 
 1326           SYMBOL_TABLE_SUB_TYPE,
 
 1327           fixed_array->Size());
 
 1337   table_.Register(kVisitJSRegExp,
 
 1338                   &VisitRegExpAndFlushCode);
 
 1340   if (FLAG_track_gc_object_stats) {
 
 1342     non_count_table_.CopyFrom(&table_);
 
 1343 #define VISITOR_ID_COUNT_FUNCTION(id)                                   \ 
 1344     table_.Register(kVisit##id, ObjectStatsTracker<kVisit##id>::Visit); 
 1346 #undef VISITOR_ID_COUNT_FUNCTION 
 1352     MarkCompactMarkingVisitor::non_count_table_;
 
 1360     MarkCompactMarkingVisitor::VisitPointer(heap_, p);
 
 1364     MarkCompactMarkingVisitor::VisitPointers(heap_, start, end);
 
 1375       : collector_(collector) {}
 
 1378     collector_->PrepareThreadForCodeFlushing(isolate, top);
 
 1389       : collector_(collector) {}
 
 1397     if (obj->IsSharedFunctionInfo()) {
 
 1401       collector_->MarkObject(shared->code(), code_mark);
 
 1402       collector_->MarkObject(shared, shared_mark);
 
 1411 void MarkCompactCollector::PrepareThreadForCodeFlushing(Isolate* isolate,
 
 1412                                                         ThreadLocalTop* top) {
 
 1413   for (StackFrameIterator it(isolate, top); !it.done(); it.Advance()) {
 
 1419     Code* code = frame->unchecked_code();
 
 1421     MarkObject(code, code_mark);
 
 1422     if (frame->is_optimized()) {
 
 1424                                                           frame->LookupCode());
 
 1430 void MarkCompactCollector::PrepareForCodeFlushing() {
 
 1434   if (!FLAG_flush_code || was_marked_incrementally_) {
 
 1439 #ifdef ENABLE_DEBUGGER_SUPPORT 
 1440   if (heap()->isolate()->debug()->IsLoaded() ||
 1441       heap()->isolate()->debug()->has_break_points()) {
 1451   HeapObject* descriptor_array = heap()->empty_descriptor_array();
 
 1453   MarkObject(descriptor_array, descriptor_array_mark);
 
 1456   ASSERT(this == heap()->mark_compact_collector());
 1457   PrepareThreadForCodeFlushing(heap()->isolate(),
 
 1458                                heap()->isolate()->thread_local_top());
 
 1464       &code_marking_visitor);
 
 1470   ProcessMarkingDeque();
 
 1478     : collector_(heap->mark_compact_collector()) { }
 
 1481     MarkObjectByPointer(p);
 
 1485     for (Object** p = start; p < end; p++) MarkObjectByPointer(p);
 1489   void MarkObjectByPointer(Object** p) {
 1490     if (!(*p)->IsHeapObject()) return;
 
 1493     HeapObject* object = ShortCircuitConsString(p);
 1495     if (mark_bit.Get()) return;
 1497     Map* map = object->map();
 1499     collector_->SetMark(object, mark_bit);
 
 1503     collector_->MarkObject(map, map_mark);
 
 1508     collector_->EmptyMarkingDeque();
 
 1511   MarkCompactCollector* collector_;
 
 1519     : heap_(heap), pointers_removed_(0) { }
 
 1523     for (Object** p = start; p < end; p++) {
 
 1525       if (o->IsHeapObject() &&
 
 1532         if (o->IsExternalString()) {
 
 1536         *p = heap_->the_hole_value();
 
 1537         pointers_removed_++;
 
 1543     return pointers_removed_;
 
 1548   int pointers_removed_;
 
 1570 static void DiscoverGreyObjectsWithIterator(Heap* heap,
 
 1571                                             MarkingDeque* marking_deque,
 
 1575   ASSERT(!marking_deque->IsFull());
 
 1577   Map* filler_map = heap->one_pointer_filler_map();
 
 1578   for (HeapObject* object = it->Next();
 
 1580        object = it->Next()) {
 
 1585       marking_deque->PushBlack(object);
 1586       if (marking_deque->IsFull()) return;
 1592 static inline int MarkWordToObjectStarts(uint32_t mark_bits, int* starts);
 
 1595 static void DiscoverGreyObjectsOnPage(MarkingDeque* marking_deque, Page* p) {
 
 1596   ASSERT(!marking_deque->IsFull());
 
 1604   int last_cell_index =
 
 1605       Bitmap::IndexToCell(
 
 1606           Bitmap::CellAlignIndex(
 
 1607               p->AddressToMarkbitIndex(p->area_end())));
 
 1609   Address cell_base = p->area_start();
 
 1610   int cell_index = Bitmap::IndexToCell(
 
 1611           Bitmap::CellAlignIndex(
 
 1612               p->AddressToMarkbitIndex(cell_base)));
 
 1616        cell_index < last_cell_index;
 
 1618     ASSERT((unsigned)cell_index ==
 
 1619         Bitmap::IndexToCell(
 
 1620             Bitmap::CellAlignIndex(
 
 1621                 p->AddressToMarkbitIndex(cell_base))));
 
 1624     if (current_cell == 0) continue;
 
 1631     while (grey_objects != 0) {
 
 1632       int trailing_zeros = CompilerIntrinsics::CountTrailingZeros(grey_objects);
 
 1633       grey_objects >>= trailing_zeros;
 
 1634       offset += trailing_zeros;
 
 1635       MarkBit markbit(&cells[cell_index], 1 << offset, false);
 1641       marking_deque->PushBlack(object);
 1642       if (marking_deque->IsFull()) return;
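The grey-object scan above consumes a 32-bit mark cell with CountTrailingZeros: skip the zero bits, handle the object at the current bit offset, and repeat until the cell is empty. The following self-contained illustration of that enumeration pattern uses a portable trailing-zero count in place of V8's CompilerIntrinsics and simply prints each offset.

#include <cstdint>
#include <cstdio>

// Portable stand-in for CompilerIntrinsics::CountTrailingZeros (x must be nonzero).
static int CountTrailingZeros(uint32_t x) {
  int n = 0;
  while ((x & 1) == 0) { x >>= 1; n++; }
  return n;
}

// Enumerate the bit offsets of set bits in a mark cell, lowest first,
// the same way the page scan above visits grey objects.
static void ForEachSetBit(uint32_t cell) {
  int offset = 0;
  while (cell != 0) {
    int trailing_zeros = CountTrailingZeros(cell);
    cell >>= trailing_zeros;
    offset += trailing_zeros;
    std::printf("object starts at bit offset %d\n", offset);
    cell >>= 1;   // consume the bit we just handled
    offset += 1;
  }
}

int main() { ForEachSetBit(0x00090012u); }  // prints offsets 1, 4, 16, 19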
 
 1652 static void DiscoverGreyObjectsInSpace(Heap* heap,
 
 1653                                        MarkingDeque* marking_deque,
 
 1654                                        PagedSpace* space) {
 
 1655   if (!space->was_swept_conservatively()) {
 
 1656     HeapObjectIterator it(space);
 
 1657     DiscoverGreyObjectsWithIterator(heap, marking_deque, &it);
 
 1659     PageIterator it(space);
 
 1660     while (it.has_next()) {
 
 1661       Page* p = it.next();
 
 1662       DiscoverGreyObjectsOnPage(marking_deque, p);
 
 1663       if (marking_deque->IsFull()) return;
 1669 bool MarkCompactCollector::IsUnmarkedHeapObject(Object** p) {
 1671   if (!o->IsHeapObject()) return false;
 
 1678 void MarkCompactCollector::MarkSymbolTable() {
 
 1679   SymbolTable* symbol_table = heap()->symbol_table();
 
 1682   SetMark(symbol_table, symbol_table_mark);
 
 1685   symbol_table->IteratePrefix(&marker);
 
 1686   ProcessMarkingDeque();
 
 1690 void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) {
 
 1700     RefillMarkingDeque();
 
 1701     EmptyMarkingDeque();
 
 1706 void MarkCompactCollector::MarkObjectGroups() {
 
 1707   List<ObjectGroup*>* object_groups =
 
 1711   for (int i = 0; i < object_groups->length(); i++) {
 
 1712     ObjectGroup* entry = object_groups->at(i);
 
 1715     Object*** objects = entry->objects_;
 
 1716     bool group_marked = false;
 1717     for (size_t j = 0; j < entry->length_; j++) {
 1718       Object* object = *objects[j];
 1719       if (object->IsHeapObject()) {
 1723           group_marked = true;
 
 1729     if (!group_marked) {
 
 1730       (*object_groups)[last++] = entry;
 
 1736     for (size_t j = 0; j < entry->length_; ++j) {
 1737       Object* object = *objects[j];
 1738       if (object->IsHeapObject()) {
 1741         MarkObject(heap_object, mark);
 1748     object_groups->at(i) = NULL;
 
 1750   object_groups->Rewind(last);
 
 1754 void MarkCompactCollector::MarkImplicitRefGroups() {
 
 1755   List<ImplicitRefGroup*>* ref_groups =
 
 1759   for (int i = 0; i < ref_groups->length(); i++) {
 
 1760     ImplicitRefGroup* entry = ref_groups->at(i);
 
 1764       (*ref_groups)[last++] = entry;
 
 1768     Object*** children = entry->children_;
 
 1770     for (size_t j = 0; j < entry->length_; ++j) {
 
 1771       if ((*children[j])->IsHeapObject()) {
 
 1774         MarkObject(child, mark);
 
 1782   ref_groups->Rewind(last);
 
 1790 void MarkCompactCollector::EmptyMarkingDeque() {
 
 1791   while (!marking_deque_.IsEmpty()) {
 1792     while (!marking_deque_.IsEmpty()) {
 1793       HeapObject* object = marking_deque_.Pop();
 1794       ASSERT(object->IsHeapObject());
 1798       Map* map = object->map();
 
 1800       MarkObject(map, map_mark);
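EmptyMarkingDeque is the classic work-list loop of a tracing collector: pop an object, mark its map, and push any unmarked children until the deque drains. A toy version over a plain graph shows the shape of that loop; nothing here is V8 API, it is only the traversal pattern.

#include <cstdio>
#include <deque>
#include <vector>

struct Node {
  bool marked = false;
  std::vector<Node*> children;
};

// Work-list marking: everything reachable from the roots ends up marked.
static void MarkLiveObjects(const std::vector<Node*>& roots) {
  std::deque<Node*> marking_deque;
  for (Node* root : roots) {
    if (!root->marked) { root->marked = true; marking_deque.push_back(root); }
  }
  while (!marking_deque.empty()) {
    Node* object = marking_deque.back();
    marking_deque.pop_back();
    for (Node* child : object->children) {
      if (!child->marked) {            // unmarked child: mark and enqueue
        child->marked = true;
        marking_deque.push_back(child);
      }
    }
  }
}

int main() {
  Node a, b, c, unreachable;
  a.children = {&b};
  b.children = {&c, &a};               // cycles are fine
  MarkLiveObjects({&a});
  std::printf("a=%d b=%d c=%d dead=%d\n", a.marked, b.marked, c.marked,
              unreachable.marked);     // prints 1 1 1 0
}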
 
 1817 void MarkCompactCollector::RefillMarkingDeque() {
 
 1820   SemiSpaceIterator new_it(heap()->new_space());
 1821   DiscoverGreyObjectsWithIterator(heap(), &marking_deque_, &new_it);
 1822   if (marking_deque_.IsFull()) return;
 1824   DiscoverGreyObjectsInSpace(heap(),
 1826                              heap()->old_pointer_space());
 1827   if (marking_deque_.IsFull()) return;
 1829   DiscoverGreyObjectsInSpace(heap(),
 1831                              heap()->old_data_space());
 1832   if (marking_deque_.IsFull()) return;
 1834   DiscoverGreyObjectsInSpace(heap(),
 1836                              heap()->code_space());
 1837   if (marking_deque_.IsFull()) return;
 1839   DiscoverGreyObjectsInSpace(heap(),
 1841                              heap()->map_space());
 1842   if (marking_deque_.IsFull()) return;
 1844   DiscoverGreyObjectsInSpace(heap(),
 1846                              heap()->cell_space());
 1847   if (marking_deque_.IsFull()) return;
 1849   LargeObjectIterator lo_it(heap()->lo_space());
 1850   DiscoverGreyObjectsWithIterator(heap(),
 1853   if (marking_deque_.IsFull()) return;
 
 1863 void MarkCompactCollector::ProcessMarkingDeque() {
 
 1864   EmptyMarkingDeque();
 
 1866     RefillMarkingDeque();
 
 1867     EmptyMarkingDeque();
 
 1872 void MarkCompactCollector::ProcessExternalMarking() {
 
 1873   bool work_to_do = true;
 1875   while (work_to_do) {
 1877     MarkImplicitRefGroups();
 1878     work_to_do = !marking_deque_.IsEmpty();
 
 1879     ProcessMarkingDeque();
 
 1884 void MarkCompactCollector::MarkLiveObjects() {
 
 1885   GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_MARK);
 
 1889   PostponeInterruptsScope postpone(heap()->isolate());
 1891   bool incremental_marking_overflowed = false;
 
 1893   if (was_marked_incrementally_) {
 
 1901     incremental_marking_overflowed =
 
 1902         incremental_marking->marking_deque()->overflowed();
 
 1903     incremental_marking->marking_deque()->ClearOverflowed();
 
 1906     incremental_marking->Abort();
 
 1910   ASSERT(state_ == PREPARE_GC);
 
 1911   state_ = MARK_LIVE_OBJECTS;
 
 1917   if (FLAG_force_marking_deque_overflows) {
 
 1918     marking_deque_end = marking_deque_start + 64 * kPointerSize;
 1920   marking_deque_.Initialize(marking_deque_start,
 
 1924   if (incremental_marking_overflowed) {
 
 1929   PrepareForCodeFlushing();
 
 1931   if (was_marked_incrementally_) {
 
 1935       HeapObjectIterator cell_iterator(heap()->cell_space());
 1937       while ((cell = cell_iterator.Next()) != NULL) {
 
 1938         ASSERT(cell->IsJSGlobalPropertyCell());
 
 1941           MarkCompactMarkingVisitor::VisitPointer(
 
 1943               reinterpret_cast<Object**>(cell->address() + offset));
 
 1950   MarkRoots(&root_visitor);
 
 1955   ProcessExternalMarking();
 
 1964       &IsUnmarkedHeapObject);
 
 1968     RefillMarkingDeque();
 
 1969     EmptyMarkingDeque();
 
 1974   ProcessExternalMarking();
 
 1980 void MarkCompactCollector::AfterMarking() {
 
 1990   SymbolTable* symbol_table = heap()->symbol_table();
 1991   SymbolTableCleaner v(heap());
 
 1992   symbol_table->IterateElements(&v);
 
 1993   symbol_table->ElementsRemoved(v.PointersRemoved());
 
 1998   MarkCompactWeakObjectRetainer mark_compact_object_retainer;
 
 2013   if (!FLAG_watch_ic_patching) {
 
 2018   if (FLAG_track_gc_object_stats) {
 
 2024 void MarkCompactCollector::ProcessMapCaches() {
 
 2025   Object* raw_context = heap()->native_contexts_list_;
 2026   while (raw_context != heap()->undefined_value()) {
 2027     Context* context = reinterpret_cast<Context*>(raw_context);
 
 2029       HeapObject* raw_map_cache =
 
 2035           raw_map_cache != heap()->undefined_value()) {
 2036         MapCache* map_cache = reinterpret_cast<MapCache*>(raw_map_cache);
 
 2037         int existing_elements = map_cache->NumberOfElements();
 
 2038         int used_elements = 0;
 
 2040              i < map_cache->length();
 
 2042           Object* raw_key = map_cache->get(i);
 
 2043           if (raw_key == heap()->undefined_value() ||
 2044               raw_key == heap()->the_hole_value()) continue;
 2046           Object* raw_map = map_cache->get(i + 1);
 2047           if (raw_map->IsHeapObject() && IsMarked(raw_map)) {
 
 2051             ASSERT(raw_map->IsMap());
 
 2052             map_cache->set_the_hole(i);
 
 2053             map_cache->set_the_hole(i + 1);
 
 2056         if (used_elements == 0) {
 
 2062           map_cache->ElementsRemoved(existing_elements - used_elements);
 
 2064           MarkObject(map_cache, map_cache_markbit);
 
 2071   ProcessMarkingDeque();
 
 2075 void MarkCompactCollector::ReattachInitialMaps() {
 
 2076   HeapObjectIterator map_iterator(heap()->map_space());
 2077   for (HeapObject* obj = map_iterator.Next();
 2079        obj = map_iterator.Next()) {
 2080     if (obj->IsFreeSpace()) continue;
 
 2086     if (map->attached_to_shared_function_info()) {
 
 2093 void MarkCompactCollector::ClearNonLiveTransitions() {
 
 2094   HeapObjectIterator map_iterator(heap()->map_space());
 2098   for (HeapObject* obj = map_iterator.Next();
 2099        obj != NULL; obj = map_iterator.Next()) {
 2100     Map* map = reinterpret_cast<Map*>(obj);
 2102     if (map->IsFreeSpace()) continue;
 
 2109     if (map_mark.Get() &&
 
 2110         map->attached_to_shared_function_info()) {
 
 2114       map->unchecked_constructor()->unchecked_shared()->AttachInitialMap(map);
 
 2117     ClearNonLivePrototypeTransitions(map);
 
 2118     ClearNonLiveMapTransitions(map, map_mark);
 
 2123 void MarkCompactCollector::ClearNonLivePrototypeTransitions(Map* map) {
 
 2124   int number_of_transitions = map->NumberOfProtoTransitions();
 
 2125   FixedArray* prototype_transitions = map->GetPrototypeTransitions();
 
 2127   int new_number_of_transitions = 0;
 
 2132   for (int i = 0; i < number_of_transitions; i++) {
 
 2133     Object* prototype = prototype_transitions->get(proto_offset + i * step);
 
 2134     Object* cached_map = prototype_transitions->get(map_offset + i * step);
 
 2136       int proto_index = proto_offset + new_number_of_transitions * step;
 
 2137       int map_index = map_offset + new_number_of_transitions * step;
 
 2138       if (new_number_of_transitions != i) {
 
 2139         prototype_transitions->set_unchecked(
 
 2144         prototype_transitions->set_unchecked(
 
 2153       RecordSlot(slot, slot, prototype);
 
 2154       new_number_of_transitions++;
 
 2158   if (new_number_of_transitions != number_of_transitions) {
 
 2159     map->SetNumberOfProtoTransitions(new_number_of_transitions);
 
 2163   for (int i = new_number_of_transitions * step;
 
 2164        i < number_of_transitions * step;
 
 2166     prototype_transitions->set_undefined(heap_, header + i);
 
 2171 void MarkCompactCollector::ClearNonLiveMapTransitions(Map* map,
 
 2173   Object* potential_parent = map->GetBackPointer();
 
 2174   if (!potential_parent->IsMap()) return;
 2175   Map* parent = Map::cast(potential_parent);
 
 2179   bool current_is_alive = map_mark.Get();
 
 2181   if (!current_is_alive && parent_is_alive) {
 
 2182     parent->ClearNonLiveTransitions(heap());
 
 2187 void MarkCompactCollector::ProcessWeakMaps() {
 
 2191     JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(weak_map_obj);
 2193     Object** anchor = reinterpret_cast<Object**>(table->address());
 2194     for (int i = 0; i < table->Capacity(); i++) {
 
 2199         RecordSlot(anchor, key_slot, *key_slot);
 
 2202                 ObjectHashTable::EntryToValueIndex(i)));
 
 2203         MarkCompactMarkingVisitor::MarkObjectByPointer(
 
 2204             this, anchor, value_slot);
 
 2207     weak_map_obj = weak_map->next();
 
 2212 void MarkCompactCollector::ClearWeakMaps() {
 
 2216     JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(weak_map_obj);
 2218     for (int i = 0; i < table->Capacity(); i++) {
 
 2220         table->RemoveEntry(i);
 
 2223     weak_map_obj = weak_map->next();
 
 2254     for (int remaining = size / kPointerSize; remaining > 0; remaining--) {
 2261       } else if (value->IsHeapObject() && IsOnEvacuationCandidate(value)) {
 
 2263                            &migration_slots_buffer_,
 
 2264                            reinterpret_cast<Object**>(dst_slot),
 
 2278                            &migration_slots_buffer_,
 
 2285     PROFILE(heap()->isolate(), CodeMoveEvent(src, dst));
 2288                        &migration_slots_buffer_,
 2312     for (Object** p = start; p < end; p++) UpdatePointer(p);
 
 2316     ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
 
 2317     Object* target = rinfo->target_object();
 
 2318     Object* old_target = target;
 
 2322     if (target != old_target) {
 
 2323       rinfo->set_target_object(target);
 
 2328     ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
 
 2330     Object* old_target = target;
 
 2332     if (target != old_target) {
 
 2333       rinfo->set_target_address(Code::cast(target)->instruction_start());
 
 2338     ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
 
 2339             rinfo->IsPatchedReturnSequence()) ||
 
 2340            (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
 
 2341             rinfo->IsPatchedDebugBreakSlotSequence()));
 
 2344     rinfo->set_call_address(Code::cast(target)->instruction_start());
 2350     if (!obj->IsHeapObject()) return;
 2354     MapWord map_word = heap_obj->map_word();
 
 2355     if (map_word.IsForwardingAddress()) {
 
 2357              MarkCompactCollector::IsOnEvacuationCandidate(heap_obj));
 
 2358       HeapObject* target = map_word.ToForwardingAddress();
 
 2361              !MarkCompactCollector::IsOnEvacuationCandidate(target));
 
 2366   inline void UpdatePointer(Object** p) {
 2374 static void UpdatePointer(HeapObject** p, HeapObject* object) {
 2377   Address old_addr = object->address();
 2384   if (new_addr != NULL) {
 2392     *p = reinterpret_cast<HeapObject*>(Smi::FromInt(0x0f100d00 >> 1));
 
 2397 static String* UpdateReferenceInExternalStringTableEntry(Heap* heap,
 
 2401   if (map_word.IsForwardingAddress()) {
 
 2414     MaybeObject* maybe_result =
 
 2416     if (maybe_result->ToObject(&result)) {
 
 2423           increment_promoted_objects_size(object_size);
 
 2429     ASSERT(target_space == heap()->old_pointer_space() ||
 2430            target_space == heap()->old_data_space());
 2431     MaybeObject* maybe_result = target_space->AllocateRaw(object_size);
 
 2432     if (maybe_result->ToObject(&result)) {
 
 2439           increment_promoted_objects_size(object_size);
 
 2448 void MarkCompactCollector::EvacuateNewSpace() {
 
 2466   int survivors_size = 0;
 
 2475        object = from_it.Next()) {
 
 2477     if (mark_bit.Get()) {
 2481       int size = object->Size();
 
 2482       survivors_size += size;
 
 2490       MaybeObject* allocation = new_space->AllocateRaw(size);
 
 2491       if (allocation->IsFailure()) {
 
 2498         allocation = new_space->AllocateRaw(size);
 
 2499         ASSERT(!allocation->IsFailure());
 
 2501       Object* target = allocation->ToObjectUnchecked();
 
 2521 void MarkCompactCollector::EvacuateLiveObjectsFromPage(Page* p) {
 
 2522   AlwaysAllocateScope always_allocate;
 
 2523   PagedSpace* space = static_cast<PagedSpace*>(p->owner());
 
 2524   ASSERT(p->IsEvacuationCandidate() && !p->WasSwept());
 
 2526   p->MarkSweptPrecisely();
 
 2528   int last_cell_index =
 
 2529       Bitmap::IndexToCell(
 
 2530           Bitmap::CellAlignIndex(
 
 2531               p->AddressToMarkbitIndex(p->area_end())));
 
 2533   Address cell_base = p->area_start();
 
 2534   int cell_index = Bitmap::IndexToCell(
 
 2535           Bitmap::CellAlignIndex(
 
 2536               p->AddressToMarkbitIndex(cell_base)));
 
 2541        cell_index < last_cell_index;
 
 2543     ASSERT((unsigned)cell_index ==
 
 2544         Bitmap::IndexToCell(
 
 2545             Bitmap::CellAlignIndex(
 
 2546                 p->AddressToMarkbitIndex(cell_base))));
 
 2547     if (cells[cell_index] == 0) continue;
 2549     int live_objects = MarkWordToObjectStarts(cells[cell_index], offsets);
 2550     for (int i = 0; i < live_objects; i++) {
 2555       int size = object->Size();
 
 2557       MaybeObject* target = space->AllocateRaw(size);
 
 2558       if (target->IsFailure()) {
 
 2564       Object* target_object = target->ToObjectUnchecked();
 
 2570       ASSERT(object->map_word().IsForwardingAddress());
 
 2574     cells[cell_index] = 0;
 
 2576   p->ResetLiveBytes();
 
 2580 void MarkCompactCollector::EvacuatePages() {
 
 2581   int npages = evacuation_candidates_.length();
 
 2582   for (int i = 0; i < npages; i++) {
 
 2583     Page* p = evacuation_candidates_[i];
 
 2584     ASSERT(p->IsEvacuationCandidate() ||
 
 2586     if (p->IsEvacuationCandidate()) {
 
 2589       if (static_cast<PagedSpace*>(p->owner())->CanExpand()) {
 
 2590         EvacuateLiveObjectsFromPage(p);
 
 2594         for (int j = i; j < npages; j++) {
 2595           Page* page = evacuation_candidates_[j];
 2596           slots_buffer_allocator_.DeallocateChain(page->slots_buffer_address());
 
 2597           page->ClearEvacuationCandidate();
 
 2610     if (object->IsHeapObject()) {
 
 2612       MapWord map_word = heap_object->map_word();
 
 2613       if (map_word.IsForwardingAddress()) {
 
 2614         return map_word.ToForwardingAddress();
 
 2622 static inline void UpdateSlot(ObjectVisitor* v,
 
 2625   switch (slot_type) {
 
 2627       RelocInfo rinfo(addr, RelocInfo::CODE_TARGET, 0, NULL);
 
 2632       v->VisitCodeEntry(addr);
 
 2641       RelocInfo rinfo(addr, RelocInfo::DEBUG_BREAK_SLOT, 0, NULL);
 2642       if (rinfo.IsPatchedDebugBreakSlotSequence()) rinfo.Visit(v);
 2646       RelocInfo rinfo(addr, RelocInfo::JS_RETURN, 0, NULL);
 2647       if (rinfo.IsPatchedReturnSequence()) rinfo.Visit(v);
 2651       RelocInfo rinfo(addr, RelocInfo::EMBEDDED_OBJECT, 0, NULL);
 
 2680 template<SweepingMode sweeping_mode, SkipListRebuildingMode skip_list_mode>
 
 2681 static void SweepPrecisely(PagedSpace* space,
 
 2684   ASSERT(!p->IsEvacuationCandidate() && !p->WasSwept());
 
 2690   p->MarkSweptPrecisely();
 
 2692   int last_cell_index =
 
 2693       Bitmap::IndexToCell(
 
 2694           Bitmap::CellAlignIndex(
 
 2695               p->AddressToMarkbitIndex(p->area_end())));
 
 2697   Address free_start = p->area_start();
 
 2699       Bitmap::IndexToCell(
 
 2700           Bitmap::CellAlignIndex(
 
 2701               p->AddressToMarkbitIndex(free_start)));
 
 2703   ASSERT(reinterpret_cast<intptr_t>(free_start) % (32 * kPointerSize) == 0);
 
 2704   Address object_address = free_start;
 
 2707   SkipList* skip_list = p->skip_list();
 
 2708   int curr_region = -1;
 
 2714        cell_index < last_cell_index;
 
 2716     ASSERT((unsigned)cell_index ==
 
 2717         Bitmap::IndexToCell(
 
 2718             Bitmap::CellAlignIndex(
 
 2719                 p->AddressToMarkbitIndex(object_address))));
 
 2720     int live_objects = MarkWordToObjectStarts(cells[cell_index], offsets);
 
 2722     for ( ; live_objects != 0; live_objects--) {
 
 2724       if (free_end != free_start) {
 
 2725         space->Free(free_start, static_cast<int>(free_end - free_start));
 
 2729       Map* map = live_object->map();
 
 2730       int size = live_object->SizeFromMap(map);
 
 2732         live_object->IterateBody(map->instance_type(), size, v);
 
 2735         int new_region_start =
 
 2737         int new_region_end =
 
 2739         if (new_region_start != curr_region ||
 
 2740             new_region_end != curr_region) {
 
 2741           skip_list->AddObject(free_end, size);
 
 2742           curr_region = new_region_end;
 
 2745       free_start = free_end + size;
 
 2748     cells[cell_index] = 0;
 
 2750   if (free_start != p->area_end()) {
 
 2751     space->Free(free_start, static_cast<int>(p->area_end() - free_start));
 
 2753   p->ResetLiveBytes();
 
 2757 static bool SetMarkBitsUnderInvalidatedCode(Code* code, bool value) {
 
 2760   if (p->IsEvacuationCandidate() ||
 
 2765   Address code_start = code->address();
 
 2766   Address code_end = code_start + code->Size();
 
 2769   uint32_t end_index =
 
 2772   Bitmap* b = p->markbits();
 
 2774   MarkBit start_mark_bit = b->MarkBitFromIndex(start_index);
 
 2775   MarkBit end_mark_bit = b->MarkBitFromIndex(end_index);
 
 2784     if (start_cell == end_cell) {
 
 2785       *start_cell |= start_mask & end_mask;
 
 2787       *start_cell |= start_mask;
 
 2791       *end_cell |= end_mask;
 
 2803 static bool IsOnInvalidatedCodeObject(Address addr) {
 2811   if (p->owner()->identity() != CODE_SPACE) return false;
 2818   return mark_bit.Get();
 
 2824       !ShouldSkipEvacuationSlotRecording(code)) {
 
 2831     invalidated_code_.Add(code);
 
 2836 bool MarkCompactCollector::MarkInvalidatedCode() {
 
 2837   bool code_marked = false;
 2839   int length = invalidated_code_.length();
 2840   for (int i = 0; i < length; i++) {
 
 2841     Code* code = invalidated_code_[i];
 
 2843     if (SetMarkBitsUnderInvalidatedCode(code, true)) {
 
 2852 void MarkCompactCollector::RemoveDeadInvalidatedCode() {
 
 2853   int length = invalidated_code_.length();
 
 2854   for (int i = 0; i < length; i++) {
 2855     if (!IsMarked(invalidated_code_[i])) invalidated_code_[i] = NULL;
 
 2860 void MarkCompactCollector::ProcessInvalidatedCode(ObjectVisitor* visitor) {
 
 2861   int length = invalidated_code_.length();
 
 2862   for (int i = 0; i < length; i++) {
 
 2863     Code* code = invalidated_code_[i];
 
 2865       code->Iterate(visitor);
 
 2866       SetMarkBitsUnderInvalidatedCode(code, false);
 
 2869   invalidated_code_.Rewind(0);
 
 2873 void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
 
 2874   Heap::RelocationLock relocation_lock(heap());
 
 2876   bool code_slots_filtering_required;
 
 2877   { GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP_NEWSPACE);
 
 2878     code_slots_filtering_required = MarkInvalidatedCode();
 
 2884   { GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_EVACUATE_PAGES);
 
 2889   PointersUpdatingVisitor updating_visitor(heap());
 
 2891   { GCTracer::Scope gc_scope(tracer_,
 
 2892                              GCTracer::Scope::MC_UPDATE_NEW_TO_NEW_POINTERS);
 
 2894     SemiSpaceIterator to_it(heap()->new_space()->bottom(),
 2895                             heap()->new_space()->top());
 2896     for (HeapObject* object = to_it.Next();
 2898          object = to_it.Next()) {
 2899       Map* map = object->map();
 
 2900       object->IterateBody(map->instance_type(),
 
 2901                           object->SizeFromMap(map),
 
 2906   { GCTracer::Scope gc_scope(tracer_,
 
 2907                              GCTracer::Scope::MC_UPDATE_ROOT_TO_NEW_POINTERS);
 
 2913   { GCTracer::Scope gc_scope(tracer_,
 
 2914                              GCTracer::Scope::MC_UPDATE_OLD_TO_NEW_POINTERS);
 
 2915     StoreBufferRebuildScope scope(heap_,
 
 2917                                   &Heap::ScavengeStoreBufferCallback);
 
 2921   { GCTracer::Scope gc_scope(tracer_,
 
 2922                              GCTracer::Scope::MC_UPDATE_POINTERS_TO_EVACUATED);
 
 2924                                        migration_slots_buffer_,
 
 2925                                        code_slots_filtering_required);
 
 2926     if (FLAG_trace_fragmentation) {
 
 2927       PrintF("  migration slots buffer: %d\n",
 
 2931     if (compacting_ && was_marked_incrementally_) {
 
 2933       LargeObjectIterator it(heap_->lo_space());
 2934       for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) {
 
 2941           obj->Iterate(&updating_visitor);
 
 2948   int npages = evacuation_candidates_.length();
 
 2949   { GCTracer::Scope gc_scope(
 
 2950       tracer_, GCTracer::Scope::MC_UPDATE_POINTERS_BETWEEN_EVACUATED);
 
 2951     for (int i = 0; i < npages; i++) {
 
 2952       Page* p = evacuation_candidates_[i];
 
 2953       ASSERT(p->IsEvacuationCandidate() ||
 
 2956       if (p->IsEvacuationCandidate()) {
 
 2959                                            code_slots_filtering_required);
 
 2960         if (FLAG_trace_fragmentation) {
 
 2961           PrintF("  page %p slots buffer: %d\n",
 
 2962                  reinterpret_cast<void*>(p),
 
 2969         SkipList* list = p->skip_list();
 
 2970         if (list != NULL) list->Clear();
 
 2972         if (FLAG_gc_verbose) {
 
 2974                  reinterpret_cast<intptr_t>(p));
 
 2976         PagedSpace* space = static_cast<PagedSpace*>(p->owner());
 
 2979         switch (space->identity()) {
 
 2984             SweepPrecisely<SWEEP_AND_VISIT_LIVE_OBJECTS, IGNORE_SKIP_LIST>(
 
 2985                 space, p, &updating_visitor);
 
 2988             SweepPrecisely<SWEEP_AND_VISIT_LIVE_OBJECTS, REBUILD_SKIP_LIST>(
 
 2989                 space, p, &updating_visitor);
 
 2999   GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_UPDATE_MISC_POINTERS);
 
 3002   HeapObjectIterator cell_iterator(heap_->cell_space());
 
 3003   for (HeapObject* cell = cell_iterator.Next();
 
 3005        cell = cell_iterator.Next()) {
 
 3006     if (cell->IsJSGlobalPropertyCell()) {
 
 3008           reinterpret_cast<Address>(cell) +
 
 3010       updating_visitor.VisitPointer(reinterpret_cast<Object**>(value_address));
 
 3017   heap_->symbol_table()->Iterate(&updating_visitor);
 
 3021       &UpdateReferenceInExternalStringTableEntry);
 
 3023   if (!FLAG_watch_ic_patching) {
 
 3029   EvacuationWeakObjectRetainer evacuation_object_retainer;
 
 3034   ProcessInvalidatedCode(&updating_visitor);
 
 3039   if (FLAG_verify_heap) {
 
 3040     VerifyEvacuation(heap_);
 
 3046   for (int i = 0; i < npages; i++) {
 3047     Page* p = evacuation_candidates_[i];
 3048     if (!p->IsEvacuationCandidate()) continue;
 3049     PagedSpace* space = static_cast<PagedSpace*>(p->owner());
 
 3050     space->Free(p->area_start(), p->area_size());
 
 3051     p->set_scan_on_scavenge(false);
 
 3053     p->ResetLiveBytes();
 
 3054     space->ReleasePage(p);
 
 3056   evacuation_candidates_.Rewind(0);
 
 3057   compacting_ = false;
 
 3061 static const int kStartTableEntriesPerLine = 5;
 
 3062 static const int kStartTableLines = 171;
 
 3063 static const int kStartTableInvalidLine = 127;
 
 3064 static const int kStartTableUnusedEntry = 126;
 
 3066 #define _ kStartTableUnusedEntry 
 3067 #define X kStartTableInvalidLine 
 3256 static inline int MarkWordToObjectStarts(uint32_t mark_bits, int* starts) {
 
 3261   ASSERT((mark_bits & 0x180) != 0x180);
 
 3262   ASSERT((mark_bits & 0x18000) != 0x18000);
 
 3263   ASSERT((mark_bits & 0x1800000) != 0x1800000);
 
 3265   while (mark_bits != 0) {
 
 3266     int byte = (mark_bits & 0xff);
 
 3269       ASSERT(byte < kStartTableLines);  
 
 3270       char* table = kStartTable + byte * kStartTableEntriesPerLine;
 
 3271       int objects_in_these_8_words = table[0];
 
 3272       ASSERT(objects_in_these_8_words != kStartTableInvalidLine);
 
 3273       ASSERT(objects_in_these_8_words < kStartTableEntriesPerLine);
 
 3274       for (int i = 0; i < objects_in_these_8_words; i++) {
 
 3275         starts[objects++] = offset + table[1 + i];
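MarkWordToObjectStarts consumes the mark word one byte at a time and looks each byte up in kStartTable, which stores how many object starts that byte contains followed by their offsets. Assuming every live object at this point contributes exactly one set bit at its start (grey objects would show up as two adjacent set bits, as in the grey-object scan earlier in the listing), the same result can be computed with a plain bit scan; the sketch below is that reference version, not the table-driven code.

#include <cstdint>
#include <cstdio>

// Reference version: each set bit in the 32-bit mark word is taken as the
// start of a live object, and its bit index is the offset (in pointer-sized
// words) from the start of the address range covered by the cell.
static int MarkWordToObjectStartsReference(uint32_t mark_bits, int* starts) {
  int objects = 0;
  for (int offset = 0; offset < 32; offset++) {
    if (mark_bits & (1u << offset)) starts[objects++] = offset;
  }
  return objects;
}

int main() {
  int starts[32];
  int n = MarkWordToObjectStartsReference(0x00090012u, starts);  // bits 1, 4, 16, 19
  for (int i = 0; i < n; i++) std::printf("start at word offset %d\n", starts[i]);
}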
 
 3284 static inline Address DigestFreeStart(Address approximate_free_start,
 
 3285                                       uint32_t free_start_cell) {
 
 3286   ASSERT(free_start_cell != 0);
 
 3289   ASSERT((free_start_cell & (free_start_cell << 1)) == 0);
 
 3292   uint32_t cell = free_start_cell;
 
 3293   int offset_of_last_live;
 
 3294   if ((cell & 0x80000000u) != 0) {
 
 3296     offset_of_last_live = 31;
 
 3305     cell = (cell + 1) >> 1;
 
 3306     int live_objects = MarkWordToObjectStarts(cell, offsets);
 
 3307     ASSERT(live_objects == 1);
 
 3308     offset_of_last_live = offsets[live_objects - 1];
 
 3311       approximate_free_start + offset_of_last_live * kPointerSize;
 
 3313   Address free_start = last_live_start + last_live->Size();
 
 3318 static inline Address StartOfLiveObject(Address block_address, uint32_t cell) {
 
 3322   ASSERT((cell & (cell << 1)) == 0);
 
 3325   if (cell == 0x80000000u) {  
 
 3328   uint32_t first_set_bit = ((cell ^ (cell - 1)) + 1) >> 1;
 
 3329   ASSERT((first_set_bit & cell) == first_set_bit);
 
 3330   int live_objects = MarkWordToObjectStarts(first_set_bit, offsets);
 
 3331   ASSERT(live_objects == 1);
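StartOfLiveObject isolates the lowest set bit of the cell with ((cell ^ (cell - 1)) + 1) >> 1 and then reuses MarkWordToObjectStarts on that single-bit word to obtain exactly one offset. The expression is the classic lowest-set-bit idiom, equivalent to cell & -cell; the quick check below spot-checks that equivalence and also shows why the listing special-cases cell == 0x80000000u, where the + 1 would overflow to zero.

#include <cassert>
#include <cstdint>
#include <cstdio>

int main() {
  for (uint32_t cell = 1; cell <= 1000000u; cell++) {
    // cell ^ (cell - 1) yields the lowest set bit plus every bit below it;
    // adding 1 gives that bit shifted left once, so >> 1 isolates it.
    uint32_t first_set_bit = ((cell ^ (cell - 1)) + 1) >> 1;
    assert(first_set_bit == (cell & (0u - cell)));  // same as cell & -cell
  }
  // Exception: for 0x80000000u the + 1 wraps to zero, hence the explicit
  // early-out for that value in the listing.
  std::printf("lowest-set-bit identity holds on the checked range\n");
}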
 
 3349   int last_cell_index =
 
 3350       Bitmap::IndexToCell(
 
 3351           Bitmap::CellAlignIndex(
 
 3355       Bitmap::IndexToCell(
 
 3356           Bitmap::CellAlignIndex(
 
 3359   intptr_t freed_bytes = 0;
 
 3366        cell_index < last_cell_index;
 
 3368     if (cells[cell_index] != 0) break;
 3370   size_t size = block_address - p->area_start();
 
 3371   if (cell_index == last_cell_index) {
 
 3373                                                 static_cast<int>(size)));
 
 3379   Address free_end = StartOfLiveObject(block_address, cells[cell_index]);
 
 3383                              static_cast<int>(size));
 
 3390   Address free_start = block_address;
 
 3391   uint32_t free_start_cell = cells[cell_index];
 
 3394        cell_index < last_cell_index;
 
 3396     ASSERT((unsigned)cell_index ==
 
 3397         Bitmap::IndexToCell(
 
 3398             Bitmap::CellAlignIndex(
 
 3400     uint32_t cell = cells[cell_index];
 
 3404       if (block_address - free_start > 32 * kPointerSize) {
 
 3405         free_start = DigestFreeStart(free_start, free_start_cell);
 
 3406         if (block_address - free_start > 32 * kPointerSize) {
 
 3411           free_end = StartOfLiveObject(block_address, cell);
 
 3412           freed_bytes += space->Free(free_start,
 
 3413                                      static_cast<int>(free_end - free_start));
 
 3417       free_start = block_address;
 
 3418       free_start_cell = cell;
 
 3420       cells[cell_index] = 0;
 
 3425   if (block_address - free_start > 32 * kPointerSize) {
 
 3426     free_start = DigestFreeStart(free_start, free_start_cell);
 
 3427     freed_bytes += space->Free(free_start,
 
 3428                                static_cast<int>(block_address - free_start));
 
 3436 void MarkCompactCollector::SweepSpace(PagedSpace* space, SweeperType sweeper) {
 
 3442   PageIterator it(space);
 
 3444   intptr_t freed_bytes = 0;
 
 3445   int pages_swept = 0;
 
 3447   bool lazy_sweeping_active = false;
 3448   bool unused_page_present = false;
 
 3450   while (it.has_next()) {
 
 3451     Page* p = it.next();
 
 3458       ASSERT(evacuation_candidates_.length() > 0);
 
 3469       if (unused_page_present) {
 
 3470         if (FLAG_gc_verbose) {
 
 3472                  reinterpret_cast<intptr_t>(p));
 
 3480       unused_page_present = true;
 
 3483     if (lazy_sweeping_active) {
 
 3484       if (FLAG_gc_verbose) {
 
 3486                reinterpret_cast<intptr_t>(p));
 
 3494         if (FLAG_gc_verbose) {
 
 3496                  reinterpret_cast<intptr_t>(p));
 
 3503         if (FLAG_gc_verbose) {
 
 3505                  reinterpret_cast<intptr_t>(p));
 
 3509         if (freed_bytes > 2 * newspace_size) {
 
 3511           lazy_sweeping_active = true;
 
 3513           if (FLAG_gc_verbose) {
 
 3521         if (FLAG_gc_verbose) {
 
 3523                  reinterpret_cast<intptr_t>(p));
 
 3526           SweepPrecisely<SWEEP_ONLY, REBUILD_SKIP_LIST>(space, p, NULL);
 3528           SweepPrecisely<SWEEP_ONLY, IGNORE_SKIP_LIST>(space, p, NULL);
 
 3539   if (FLAG_gc_verbose) {
 
 3540     PrintF("SweepSpace: %s (%d pages swept)\n",
 
 3550 void MarkCompactCollector::SweepSpaces() {
 
 3551   GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP);
 
 3553   state_ = SWEEP_SPACES;
 
 3558   if (sweep_precisely_) how_to_sweep = PRECISE;
 3564   SweepSpace(heap()->old_pointer_space(), how_to_sweep);
 3565   SweepSpace(heap()->old_data_space(), how_to_sweep);
 
 3567   RemoveDeadInvalidatedCode();
 
 3572   EvacuateNewSpaceAndCandidates();
 
 3586     if (code_flusher_ != NULL) return;
 3589     if (code_flusher_ == NULL) return;
 3590     delete code_flusher_;
 3591     code_flusher_ = NULL;
 
 3601 #ifdef ENABLE_GDB_JIT_INTERFACE 
 3602   if (obj->IsCode()) {
 
 3603     GDBJITInterface::RemoveCode(reinterpret_cast<Code*>(obj));
 
 3606   if (obj->IsCode()) {
 
 3635     *buffer_address = buffer;
 
 3638   buffer->Add(reinterpret_cast<ObjectSlot>(type));
 3639   buffer->Add(reinterpret_cast<ObjectSlot>(addr));
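In the two Add calls above a typed slot is recorded as a pair of buffer entries: first the slot type cast to the entry type, then the address itself, which is why the decoding loops later in the listing have to distinguish typed entries (DecodeSlotType) from plain object slots. A toy encoder/decoder shows the idea with a plain vector; SlotType, AddTypedSlot and IsTypeEntry are invented stand-ins, not the SlotsBuffer API.

#include <cstdint>
#include <cstdio>
#include <vector>

enum SlotType { EMBEDDED_OBJECT_SLOT = 1, CODE_TARGET_SLOT = 2 };
using ObjectSlot = void*;

// Untyped slots occupy one entry; typed slots occupy a (type, address) pair,
// with the type smuggled in as a pointer-sized value.
static void AddTypedSlot(std::vector<ObjectSlot>* buffer, SlotType type, void* addr) {
  buffer->push_back(reinterpret_cast<ObjectSlot>(static_cast<uintptr_t>(type)));
  buffer->push_back(addr);
}

static bool IsTypeEntry(ObjectSlot entry) {
  // Real slot addresses are aligned pointers; small integers mark a type tag.
  return reinterpret_cast<uintptr_t>(entry) < 0x100;
}

int main() {
  std::vector<ObjectSlot> buffer;
  int dummy;
  AddTypedSlot(&buffer, CODE_TARGET_SLOT, &dummy);
  for (size_t i = 0; i < buffer.size(); i++) {
    if (IsTypeEntry(buffer[i])) {
      std::printf("typed slot: type=%d addr=%p\n",
                  static_cast<int>(reinterpret_cast<uintptr_t>(buffer[i])), buffer[i + 1]);
      i++;  // skip the paired address entry
    }
  }
}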
 
 3645   if (RelocInfo::IsCodeTarget(rmode)) {
 
 3647   } else if (RelocInfo::IsEmbeddedObject(rmode)) {
 3649   } else if (RelocInfo::IsDebugBreakSlot(rmode)) {
 3651   } else if (RelocInfo::IsJSReturn(rmode)) {
 3662       (rinfo->host() == NULL ||
 
 3663        !ShouldSkipEvacuationSlotRecording(rinfo->host()))) {
 
 3666                             SlotTypeForRMode(rinfo->rmode()),
 
 3678       !ShouldSkipEvacuationSlotRecording(reinterpret_cast<Object**>(slot))) {
 
 3694         GcSafeFindCodeForInnerPointer(pc);
 
 3697       RelocInfo rinfo(pc, RelocInfo::CODE_TARGET, 0, host);
 
 3713   for (int slot_idx = 0; slot_idx < idx_; ++slot_idx) {
 
 3721                  DecodeSlotType(slot),
 
 3722                  reinterpret_cast<Address>(slots_[slot_idx]));
 
 3731   for (int slot_idx = 0; slot_idx < idx_; ++slot_idx) {
 
 3734       if (!IsOnInvalidatedCodeObject(reinterpret_cast<Address>(slot))) {
 
 3741       if (!IsOnInvalidatedCodeObject(pc)) {
 
 3743                    DecodeSlotType(slot),
 
 3744                    reinterpret_cast<Address>(slots_[slot_idx]));
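UpdateSlots walks every recorded entry, while UpdateSlotsWithFilter additionally skips entries that lie inside invalidated code objects. A minimal sketch of the two passes over a simplified, untyped slot log (the real buffer also decodes typed entries, omitted here):

// Minimal sketch (assumed simplification): same iteration, but the filtered
// variant drops slots that an invalidation predicate flags as stale.
#include <cstdint>
#include <functional>
#include <vector>

struct SlotLogSketch {
  std::vector<uintptr_t> slots;

  void UpdateSlots(const std::function<void(uintptr_t)>& update) {
    for (size_t i = 0; i < slots.size(); ++i) update(slots[i]);
  }

  void UpdateSlotsWithFilter(const std::function<bool(uintptr_t)>& on_invalidated_code,
                             const std::function<void(uintptr_t)>& update) {
    for (size_t i = 0; i < slots.size(); ++i) {
      if (on_invalidated_code(slots[i])) continue;  // stale slot; skip it
      update(slots[i]);
    }
  }
};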
 
 3763   while (buffer != NULL) {
 
 3766     buffer = next_buffer;
 
 3768   *buffer_address = NULL;
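DeallocateChain walks the singly linked chain of slots buffers, reading each buffer's next pointer before freeing it, and finally clears the caller's head pointer. A minimal sketch with a hypothetical BufferNode type and plain delete in place of the real buffer allocator:

// Minimal sketch: grab the next pointer before freeing the current node,
// then leave the owner with an empty chain.
struct BufferNode {
  BufferNode* next;
};

void DeallocateChainSketch(BufferNode** buffer_address) {
  BufferNode* buffer = *buffer_address;
  while (buffer != nullptr) {
    BufferNode* next_buffer = buffer->next;  // read before freeing
    delete buffer;
    buffer = next_buffer;
  }
  *buffer_address = nullptr;
}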
 
static bool IsBlack(MarkBit mark_bit)
 
RootMarkingVisitor(Heap *heap)
 
static int SizeOf(Map *map, HeapObject *object)
 
const uint32_t kShortcutTypeTag
 
void ClearEvacuationCandidate()
 
static const char * kGreyBitPattern
 
void RemoveImplicitRefGroups()
 
FixedArraySubInstanceType
 
static uint32_t FastAddressToMarkbitIndex(Address addr)
 
List< ImplicitRefGroup * > * implicit_ref_groups()
 
Code * builtin(Name name)
 
static bool IsTypedSlot(ObjectSlot slot)
 
static const int kCodeOffset
 
void VisitEmbeddedPointer(RelocInfo *rinfo)
 
#define CHECK_EQ(expected, value)
 
static Object *& Object_at(Address addr)
 
static const int kCodeEntryOffset
 
static const int kStartOffset
 
static VisitorDispatchTable< Callback > table_
 
static int EntryToIndex(int entry)
 
Object * DataAtUnchecked(int index)
 
static const int kElementsStartIndex
 
Address FromSpacePageHigh()
 
void VisitPointer(Object **p)
 
static void ReportDeleteIfNeeded(HeapObject *obj, Isolate *isolate)
 
CompilationCache * compilation_cache()
 
Object ** native_contexts_list_address()
 
void RecordObjectStats(InstanceType type, int sub_type, size_t size)
 
SharedFunctionInfoMarkingVisitor(MarkCompactCollector *collector)
 
void PrintF(const char *format,...)
 
bool InNewSpace(Object *object)
 
static String * cast(Object *obj)
 
static void VisitJSRegExp(Map *map, HeapObject *object)
 
friend class CodeMarkingVisitor
 
INLINE(static void MarkObject(Heap *heap, HeapObject *object))
 
HandleScopeImplementer * handle_scope_implementer()
 
static const char * kWhiteBitPattern
 
friend class RootMarkingVisitor
 
MUST_USE_RESULT MaybeObject * AllocateRaw(int size_in_bytes)
 
static void Visit(Map *map, HeapObject *obj)
 
static void MarkInlinedFunctionsCode(Heap *heap, Code *code)
 
static void Visit(Map *map, HeapObject *obj)
 
void Prepare(GCTracer *tracer)
 
static Smi * FromInt(int value)
 
void BeforeVisitingPointers()
 
static Object * GetObjectFromEntryAddress(Address location_of_address)
 
void FinalizeExternalString(String *string)
 
static MemoryChunk * FromAddress(Address a)
 
INLINE(static void VisitUnmarkedObject(MarkCompactCollector *collector, HeapObject *obj))
 
static HeapObject * cast(Object *obj)
 
static const int kProtoTransitionElementsPerEntry
 
static Map * cast(Object *obj)
 
INLINE(static void VisitPointer(Heap *heap, Object **p))
 
void SetDataAtUnchecked(int index, Object *value, Heap *heap)
 
void ResetAllocationInfo()
 
static void IterateElements(ObjectVisitor *v)
 
bool InFromSpace(Object *object)
 
static void Clear(MemoryChunk *chunk)
 
void Relocate(intptr_t delta)
 
virtual void VisitPointers(Object **start, Object **end)
 
static void MoveBlock(Address dst, Address src, int byte_size)
 
void IterateWeakRoots(ObjectVisitor *v)
 
const char * AllocationSpaceName(AllocationSpace space)
 
void UpdateSlots(Heap *heap)
 
void IterateStrongRoots(ObjectVisitor *v, VisitMode mode)
 
#define ASSERT(condition)
 
static const uint32_t kBitsPerCell
 
static void IncrementLiveBytesFromGC(Address address, int by)
 
bool StartCompaction(CompactionMode mode)
 
#define PROFILE(isolate, Call)
 
void VisitCodeTarget(RelocInfo *rinfo)
 
void UpdateSlotsWithFilter(Heap *heap)
 
OldSpace * TargetSpace(HeapObject *object)
 
void VisitPointers(Object **start, Object **end)
 
static bool IsGrey(MarkBit mark_bit)
 
static Context * cast(Object *context)
 
#define VISITOR_ID_LIST(V)
 
static const char * kBlackBitPattern
 
static bool IsWhite(MarkBit mark_bit)
 
ThreadManager * thread_manager()
 
static SharedFunctionInfo * cast(Object *obj)
 
friend class SharedFunctionInfoMarkingVisitor
 
static const int kTableOffset
 
static const int kEntrySize
 
static void ObjectStatsVisitBase(StaticVisitorBase::VisitorId id, Map *map, HeapObject *obj)
 
void FreeUnmarkedObjects()
 
static Code * cast(Object *obj)
 
void ClearSweptConservatively()
 
static Object ** RawField(HeapObject *obj, int offset)
 
StoreBuffer * store_buffer()
 
static Smi * cast(Object *object)
 
static int SizeOfChain(SlotsBuffer *buffer)
 
void IterateArchivedThreads(ThreadVisitor *v)
 
void IncreaseUnsweptFreeBytes(Page *p)
 
static MarkBit MarkBitFrom(Address addr)
 
bool TryPromoteObject(HeapObject *object, int object_size)
 
static bool IsMarked(Object *obj)
 
SlotsBuffer * AllocateBuffer(SlotsBuffer *next_buffer)
 
void ClearSweptPrecisely()
 
bool HasSpaceForTypedSlot()
 
SymbolTableCleaner(Heap *heap)
 
void VisitPointer(Object **slot)
 
void AddEvacuationCandidate(Page *p)
 
void VisitPointer(Object **p)
 
STATIC_ASSERT((FixedDoubleArray::kHeaderSize &kDoubleAlignmentMask)==0)
 
static JSGlobalProxy * cast(Object *obj)
 
void VisitPointers(Object **start, Object **end)
 
CodeMarkingVisitor(MarkCompactCollector *collector)
 
void IteratePointersToNewSpace(ObjectSlotCallback callback)
 
#define HEAP_PROFILE(heap, call)
 
static const int kProtoTransitionMapOffset
 
RuntimeProfiler * runtime_profiler()
 
static NewSpacePage * FromAddress(Address address_in_page)
 
Context * native_context()
 
virtual intptr_t SizeOfObjects()
 
v8::Handle< v8::Object > bottom
 
void Initialize(Address low, Address high)
 
void CollectEvacuationCandidates(PagedSpace *space)
 
void EvictEvacuationCandidatesFromFreeLists()
 
static void ObjectStatsCountFixedArray(FixedArrayBase *fixed_array, FixedArraySubInstanceType fast_type, FixedArraySubInstanceType dictionary_type)
 
static Address & Address_at(Address addr)
 
void CodeIterateBody(ObjectVisitor *v)
 
void MarkEvacuationCandidate()
 
void CheckpointObjectStats()
 
void InvalidateCode(Code *code)
 
bool IsAligned(T value, U alignment)
 
void CountFreeListItems(Page *p, FreeList::SizeStats *sizes)
 
GlobalHandles * global_handles()
 
void IncrementYoungSurvivorsCounter(int survived)
 
char kStartTable[kStartTableLines *kStartTableEntriesPerLine]
 
virtual Object * RetainAs(Object *object)
 
PointersUpdatingVisitor(Heap *heap)
 
const uint32_t kShortcutTypeMask
 
static void IterateBody(Map *map, HeapObject *obj)
 
OldSpace * old_pointer_space()
 
bool TransferMark(Address old_start, Address new_start)
 
static void MarkBlack(MarkBit mark_bit)
 
static Code * GetCodeFromTargetAddress(Address address)
 
void set_age_mark(Address mark)
 
static void GreyToBlack(MarkBit markbit)
 
void VisitThread(Isolate *isolate, ThreadLocalTop *top)
 
static const int kMaxNonCodeHeapObjectSize
 
void DeallocateBuffer(SlotsBuffer *buffer)
 
void Iterate(ObjectVisitor *v)
 
void UpdateSamplesAfterCompact(ObjectVisitor *visitor)
 
MarkingVisitor(Heap *heap)
 
static void Visit(Map *map, HeapObject *obj)
 
LargeObjectSpace * lo_space()
 
int Free(Address start, int size_in_bytes)
 
void ReleasePage(Page *page)
 
void MarkSweptConservatively()
 
static int OffsetOfElementAt(int index)
 
static intptr_t SweepConservatively(PagedSpace *space, Page *p)
 
virtual Object * RetainAs(Object *object)
 
#define T(name, string, precedence)
 
static int SizeFor(int length)
 
void RecordRelocSlot(RelocInfo *rinfo, Object *target)
 
static const int kProtoTransitionHeaderSize
 
void DeallocateChain(SlotsBuffer **buffer_address)
 
void UpdateReferencesInExternalStringTable(ExternalStringTableUpdaterCallback updater_func)
 
void MigrateObject(Address dst, Address src, int size, AllocationSpace to_old_space)
 
List< ObjectGroup * > * object_groups()
 
bool HasTransitionArray()
 
void ProcessWeakReferences(WeakObjectRetainer *retainer)
 
#define VISITOR_ID_COUNT_FUNCTION(id)
 
bool is_code_flushing_enabled() const 
 
bool is_compacting() const 
 
static const int kMapOffset
 
void Add(ObjectSlot slot)
 
InnerPointerToCodeCache * inner_pointer_to_code_cache()
 
void VisitPointers(Object **start, Object **end)
 
void WhiteToGreyAndPush(HeapObject *obj, MarkBit mark_bit)
 
void IterateRoots(ObjectVisitor *v, VisitMode mode)
 
friend class MarkingVisitor
 
static ObjectHashTable * cast(Object *obj)
 
void VisitPointers(Object **start, Object **end)
 
void set_encountered_weak_maps(Object *weak_map)
 
void CheckNewSpaceExpansionCriteria()
 
void IdentifyWeakHandles(WeakSlotCallback f)
 
IncrementalMarking * incremental_marking()
 
static bool ChainLengthThresholdReached(SlotsBuffer *buffer)
 
SlotsBuffer ** slots_buffer_address()
 
void Iterate(v8::internal::ObjectVisitor *v)
 
static void UpdateSlot(Heap *heap, Object **slot)
 
static int code_index(bool is_ascii)
 
INLINE(static void MarkObjectByPointer(MarkCompactCollector *collector, Object **anchor_slot, Object **p))
 
static const int kProtoTransitionPrototypeOffset
 
#define ASSERT_EQ(v1, v2)
 
void IterateFunctions(ObjectVisitor *v)
 
InstanceType instance_type()
 
bool IsEvacuationCandidate()
 
static HeapObject * FromAddress(Address address)
 
static const int kConstructorOffset
 
INLINE(static void BeforeVisitingSharedFunctionInfo(HeapObject *object))
 
INLINE(static bool MarkObjectWithoutPush(Heap *heap, HeapObject *object))
 
static FixedArray * cast(Object *obj)
 
void set_was_swept_conservatively(bool b)
 
void RemoveObjectGroups()
 
void RecordCodeTargetPatch(Address pc, Code *target)
 
static const int kCompilationErrorValue
 
static const int kPrototypeOffset
 
void EvictEvacuationCandidate(Page *page)
 
void SetPagesToSweep(Page *first)
 
static void Visit(Map *map, HeapObject *obj)
 
void RestartIfNotMarking()
 
static const char * kImpossibleBitPattern
 
static GlobalObject * cast(Object *obj)
 
static bool VisitUnmarkedObjects(Heap *heap, Object **start, Object **end)
 
static const int kUninitializedValue
 
MUST_USE_RESULT MaybeObject * AllocateRaw(int object_size, Executability executable)
 
void VisitDebugTarget(RelocInfo *rinfo)
 
void VisitPointer(Object **p)
 
void check(i::Vector< const char > string)
 
static void FatalProcessOutOfMemory(const char *location, bool take_snapshot=false)
 
static void UpdateSlotsRecordedIn(Heap *heap, SlotsBuffer *buffer, bool code_slots_filtering_required)
 
static int saved_code_index(bool is_ascii)
 
uint32_t AddressToMarkbitIndex(Address addr)
 
static void Visit(Map *map, HeapObject *obj)
 
static void ProcessNonLive(HeapObject *obj)
 
static const int kValueOffset
 
static bool AddTo(SlotsBufferAllocator *allocator, SlotsBuffer **buffer_address, ObjectSlot slot, AdditionMode mode)
 
static JSObject * cast(Object *obj)
 
OldSpace * old_data_space()
 
INLINE(static void VisitPointers(Heap *heap, Object **start, Object **end))
 
MarkCompactCollector * mark_compact_collector()
 
void EnableCodeFlushing(bool enable)
 
static int RegionNumber(Address addr)
 
AllocationSpace identity()
 
static void VisitJSWeakMap(Map *map, HeapObject *object)
 
void RecordCodeEntrySlot(Address slot, Code *target)
 
Object * encountered_weak_maps()
 
Address FromSpacePageLow()
 
static JSFunction * cast(Object *obj)