#if V8_TARGET_ARCH_ARM && !V8_INTERPRETED_REGEXP
#if V8_TARGET_ARCH_MIPS && !V8_INTERPRETED_REGEXP
#if defined(V8_TARGET_ARCH_X64)
      code_range_size_(512*MB),
      initial_semispace_size_(Page::kPageSize),
      max_old_generation_size_(192*MB),
      max_executable_size_(max_old_generation_size_),
      initial_semispace_size_(Page::kPageSize),
      survived_since_last_expansion_(0),
      always_allocate_scope_depth_(0),
      linear_allocation_scope_depth_(0),
      contexts_disposed_(0),
      scan_on_scavenge_pages_(0),
      old_pointer_space_(NULL),
      old_data_space_(NULL),
      gc_state_(NOT_IN_GC),
      gc_post_processing_depth_(0),
      remembered_unmapped_pages_index_(0),
      unflattened_strings_length_(0),
      allocation_allowed_(true),
      allocation_timeout_(0),
      disallow_allocation_failure_(false),
      new_space_high_promotion_mode_active_(false),
      old_gen_promotion_limit_(kMinimumPromotionLimit),
      old_gen_allocation_limit_(kMinimumAllocationLimit),
      old_gen_limit_factor_(1),
      size_of_old_gen_at_last_old_space_gc_(0),
      external_allocation_limit_(0),
      amount_of_external_allocated_memory_(0),
      amount_of_external_allocated_memory_at_last_global_gc_(0),
      old_gen_exhausted_(false),
      store_buffer_rebuilder_(store_buffer()),
      hidden_symbol_(NULL),
      global_gc_prologue_callback_(NULL),
      global_gc_epilogue_callback_(NULL),
      gc_safe_size_of_old_object_(NULL),
      total_regexp_code_generated_(0),
      young_survivors_after_last_gc_(0),
      high_survival_rate_period_length_(0),
      previous_survival_rate_trend_(Heap::STABLE),
      survival_rate_trend_(Heap::STABLE),
      max_alive_after_gc_(0),
      alive_after_last_gc_(0),
      last_gc_end_timestamp_(0.0),
      incremental_marking_(this),
      number_idle_notifications_(0),
      last_idle_notification_gc_count_(0),
      last_idle_notification_gc_count_init_(false),
      mark_sweeps_since_idle_round_started_(0),
      ms_count_at_last_idle_notification_(0),
      gc_count_at_last_idle_gc_(0),
      scavenges_since_last_idle_round_(kIdleScavengeThreshold),
      promotion_queue_(this),
      chunks_queued_for_free_(NULL) {
#if defined(V8_MAX_SEMISPACE_SIZE)
  max_semispace_size_ = reserved_semispace_size_ = V8_MAX_SEMISPACE_SIZE;

  if (max_virtual > 0) {
    if (code_range_size_ > 0) {
      code_range_size_ = Min(code_range_size_, max_virtual >> 3);

  memset(roots_, 0, sizeof(roots_[0]) * kRootListLength);
  global_contexts_list_ = NULL;
  mark_compact_collector_.heap_ = this;
  external_string_table_.heap_ = this;

  RememberUnmappedPage(NULL, false);
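  // As configured above: on x64 the code range defaults to 512 MB and the
  // old generation to 192 MB, with max_executable_size_ tied to the
  // old-generation limit. When the platform reports its virtual address
  // space size, the code range is further capped to one eighth of it
  // (max_virtual >> 3).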
  return old_pointer_space_ != NULL &&
         old_data_space_ != NULL &&
         code_space_ != NULL &&
         map_space_ != NULL &&
         cell_space_ != NULL &&
int Heap::GcSafeSizeOfOldObject(HeapObject* object) {
  return object->SizeFromMap(object->map());
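// Heap::SelectGarbageCollector (fragments below) prefers the scavenger and
// escalates to a full mark-compact only when an old-space GC is explicitly
// requested or forced by flags, the promotion limit is reached, the old
// generations are exhausted, or a scavenge might not succeed for lack of
// promotion room.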
                                          const char** reason) {
    isolate_->counters()->gc_compactor_caused_by_request()->Increment();
    *reason = "GC in old space requested";

  if (FLAG_gc_global || (FLAG_stress_compaction && (gc_count_ & 1) != 0)) {
    *reason = "GC in old space forced by flags";

    isolate_->counters()->gc_compactor_caused_by_promoted_data()->Increment();
    *reason = "promotion limit reached";

  if (old_gen_exhausted_) {
        gc_compactor_caused_by_oldspace_exhaustion()->Increment();
    *reason = "old generations exhausted";

        gc_compactor_caused_by_oldspace_exhaustion()->Increment();
    *reason = "scavenge might not succeed";
void Heap::ReportStatisticsBeforeGC() {
  if (FLAG_heap_stats) {
    ReportHeapStatistics("Before GC");
  } else if (FLAG_log_gc) {

  if (!FLAG_trace_gc_verbose) return;
  PrintF("New space, used: %8" V8_PTR_PREFIX "d"
             ", available: %8" V8_PTR_PREFIX "d\n",
         Heap::new_space_.Size(),
  PrintF("Old pointers, used: %8" V8_PTR_PREFIX "d"
             ", available: %8" V8_PTR_PREFIX "d"
             ", waste: %8" V8_PTR_PREFIX "d\n",
         old_pointer_space_->Size(),
         old_pointer_space_->Waste());
  PrintF("Old data space, used: %8" V8_PTR_PREFIX "d"
             ", available: %8" V8_PTR_PREFIX "d"
             ", waste: %8" V8_PTR_PREFIX "d\n",
         old_data_space_->Size(),
         old_data_space_->Waste());
  PrintF("Code space, used: %8" V8_PTR_PREFIX "d"
             ", available: %8" V8_PTR_PREFIX "d"
             ", waste: %8" V8_PTR_PREFIX "d\n",
         code_space_->Waste());
  PrintF("Map space, used: %8" V8_PTR_PREFIX "d"
             ", available: %8" V8_PTR_PREFIX "d"
             ", waste: %8" V8_PTR_PREFIX "d\n",
         map_space_->Waste());
  PrintF("Cell space, used: %8" V8_PTR_PREFIX "d"
             ", available: %8" V8_PTR_PREFIX "d"
             ", waste: %8" V8_PTR_PREFIX "d\n",
         cell_space_->Waste());
  PrintF("Large object space, used: %8" V8_PTR_PREFIX "d"
             ", available: %8" V8_PTR_PREFIX "d\n",
void Heap::ReportStatisticsAfterGC() {
  if (FLAG_heap_stats) {
    ReportHeapStatistics("After GC");
  } else if (FLAG_log_gc) {
  unflattened_strings_length_ = 0;

  allow_allocation(false);

  if (FLAG_verify_heap) {
  if (FLAG_gc_verbose) Print();

  ReportStatisticsBeforeGC();

  for (Space* space = spaces.next(); space != NULL; space = spaces.next()) {
    total += space->SizeOfObjects();

  allow_allocation(true);

  if (FLAG_verify_heap) {
  if (FLAG_print_global_handles) isolate_->global_handles()->Print();
  if (FLAG_print_handles) PrintHandles();
  if (FLAG_gc_verbose) Print();
  if (FLAG_code_stats) ReportCodeStatistics("After GC");

  isolate_->counters()->alive_after_last_gc()->Set(
  isolate_->counters()->symbol_table_capacity()->Set(
  isolate_->counters()->number_of_symbols()->Set(
      symbol_table()->NumberOfElements());

  ReportStatisticsAfterGC();
#ifdef ENABLE_DEBUGGER_SUPPORT
  isolate_->debug()->AfterGarbageCollection();
#endif  // ENABLE_DEBUGGER_SUPPORT
  mark_compact_collector_.SetFlags(flags);

  const int kMaxNumberOfAttempts = 7;
  for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) {
                          const char* gc_reason,
                          const char* collector_reason) {
  VMState state(isolate_, GC);

  allocation_timeout_ = Max(6, FLAG_gc_interval);

    if (FLAG_trace_incremental_marking) {
      PrintF("[IncrementalMarking] Scavenge during marking.\n");

      FLAG_incremental_marking_steps) {
    const intptr_t kStepSizeWhenDelayedByScavenge = 1 * MB;
      if (FLAG_trace_incremental_marking) {
        PrintF("[IncrementalMarking] Delaying MarkSweep.\n");
      collector_reason = "incremental marking delaying mark-sweep";

  bool next_gc_likely_to_collect_more = false;

    tracer.set_gc_count(gc_count_);
    tracer.set_collector(collector);
        ? isolate_->counters()->gc_scavenger()
        : isolate_->counters()->gc_compactor();
    next_gc_likely_to_collect_more =
        PerformGarbageCollection(collector, &tracer);

  return next_gc_likely_to_collect_more;
  PerformGarbageCollection(SCAVENGER, &tracer);
class SymbolTableVerifier : public ObjectVisitor {
    for (Object** p = start; p < end; p++) {
      if ((*p)->IsHeapObject()) {
        ASSERT((*p)->IsTheHole() || (*p)->IsUndefined() || (*p)->IsSymbol());

static void VerifySymbolTable() {
  SymbolTableVerifier verifier;
  HEAP->symbol_table()->IterateElements(&verifier);
static bool AbortIncrementalMarkingAndCollectGarbage(
    const char* gc_reason = NULL) {
  bool result = heap->CollectGarbage(space, gc_reason);
                        int pointer_space_size,
                        int large_object_size) {
  bool gc_performed = true;
  static const int kThreshold = 20;
  while (gc_performed && counter++ < kThreshold) {
    gc_performed = false;
          "failed to reserve space in the new space");
    if (!old_pointer_space->ReserveSpace(pointer_space_size)) {
          "failed to reserve space in the old pointer space");
          "failed to reserve space in the old data space");
      AbortIncrementalMarkingAndCollectGarbage(this, CODE_SPACE,
          "failed to reserve space in the code space");
      AbortIncrementalMarkingAndCollectGarbage(this, MAP_SPACE,
          "failed to reserve space in the map space");
      AbortIncrementalMarkingAndCollectGarbage(this, CELL_SPACE,
          "failed to reserve space in the cell space");
    large_object_size *= 2;
    large_object_size += cell_space_size + map_space_size + code_space_size +
        data_space_size + pointer_space_size;
      AbortIncrementalMarkingAndCollectGarbage(this, LO_SPACE,
          "failed to reserve space in the large object space");
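  // Each failed reservation aborts incremental marking, collects the
  // corresponding space, and retries, for at most kThreshold (20) rounds.
  // The large-object request is doubled and padded with the paged-space
  // sizes as a conservative allowance for fragmentation.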
  Object* context = global_contexts_list_;
  while (!context->IsUndefined()) {
    Object* caches_or_undefined =
    if (!caches_or_undefined->IsUndefined()) {
      int length = caches->length();
      for (int i = 0; i < length; i++) {

  Object* context = global_contexts_list_;
  while (!context->IsUndefined()) {
    if (!cache->IsUndefined()) {
void Heap::UpdateSurvivalRateTrend(int start_new_space_size) {
  double survival_rate =
      (static_cast<double>(young_survivors_after_last_gc_) * 100) /
      start_new_space_size;

  if (survival_rate > kYoungSurvivalRateHighThreshold) {
    high_survival_rate_period_length_++;
    high_survival_rate_period_length_ = 0;

  if (survival_rate < kYoungSurvivalRateLowThreshold) {
    low_survival_rate_period_length_++;
    low_survival_rate_period_length_ = 0;

  double survival_rate_diff = survival_rate_ - survival_rate;

  if (survival_rate_diff > kYoungSurvivalRateAllowedDeviation) {
    set_survival_rate_trend(DECREASING);
  } else if (survival_rate_diff < -kYoungSurvivalRateAllowedDeviation) {
    set_survival_rate_trend(INCREASING);
    set_survival_rate_trend(STABLE);

  survival_rate_ = survival_rate;
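  // Worked example: if 100000 bytes survive a scavenge out of a
  // start_new_space_size of 1000000 bytes, survival_rate is 10.0 (percent).
  // The trend only changes when the rate moves by more than
  // kYoungSurvivalRateAllowedDeviation relative to the previous scavenge.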
  bool next_gc_likely_to_collect_more = false;

    PROFILE(isolate_, CodeMovingGCEvent());

  if (FLAG_verify_heap) {

  if (collector == MARK_COMPACTOR && global_gc_prologue_callback_) {
    ASSERT(!allocation_allowed_);
    GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
    global_gc_prologue_callback_();

  for (int i = 0; i < gc_prologue_callbacks_.length(); ++i) {
    if (gc_type & gc_prologue_callbacks_[i].gc_type) {

    if (IsHighSurvivalRate()) {

    bool high_survival_rate_during_scavenges = IsHighSurvivalRate() &&
        IsStableOrIncreasingSurvivalTrend();

    UpdateSurvivalRateTrend(start_new_space_size);

    if (high_survival_rate_during_scavenges &&
        IsStableOrIncreasingSurvivalTrend()) {
      old_gen_limit_factor_ = 2;
      old_gen_limit_factor_ = 1;

    old_gen_promotion_limit_ =
    old_gen_allocation_limit_ =
    old_gen_exhausted_ = false;

    UpdateSurvivalRateTrend(start_new_space_size);

  if (!new_space_high_promotion_mode_active_ &&
      IsStableOrIncreasingSurvivalTrend() &&
      IsHighSurvivalRate()) {
    new_space_high_promotion_mode_active_ = true;
      PrintF("Limited new space size due to high promotion rate: %d MB\n",
  } else if (new_space_high_promotion_mode_active_ &&
      IsStableOrDecreasingSurvivalTrend() &&
      IsLowSurvivalRate()) {
    new_space_high_promotion_mode_active_ = false;
      PrintF("Unlimited new space size due to low promotion rate: %d MB\n",

  if (new_space_high_promotion_mode_active_ &&

  isolate_->counters()->objs_since_last_young()->Set(0);

  gc_post_processing_depth_++;
  { DisableAssertNoAllocation allow_allocation;
    GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
    next_gc_likely_to_collect_more =
  gc_post_processing_depth_--;

  Relocatable::PostGarbageCollectionProcessing();

    amount_of_external_allocated_memory_at_last_global_gc_ =
        amount_of_external_allocated_memory_;

  for (int i = 0; i < gc_epilogue_callbacks_.length(); ++i) {
    if (gc_type & gc_epilogue_callbacks_[i].gc_type) {
      gc_epilogue_callbacks_[i].callback(gc_type, callback_flags);

  if (collector == MARK_COMPACTOR && global_gc_epilogue_callback_) {
    ASSERT(!allocation_allowed_);
    GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
    global_gc_epilogue_callback_();
  if (FLAG_verify_heap) {

  return next_gc_likely_to_collect_more;
void Heap::MarkCompact(GCTracer* tracer) {
  LOG(isolate_, ResourceEvent("markcompact", "begin"));

  mark_compact_collector_.Prepare(tracer);

  tracer->set_full_gc_count(ms_count_);

  MarkCompactPrologue();

  LOG(isolate_, ResourceEvent("markcompact", "end"));

  isolate_->counters()->objs_since_last_full()->Set(0);

  contexts_disposed_ = 0;
void Heap::MarkCompactPrologue() {
  FlushNumberStringCache();
  if (FLAG_cleanup_code_caches_at_gc) {
    polymorphic_code_cache()->set_cache(undefined_value());

  GcSafeFindCodeForInnerPointer(a);
    for (Object** p = start; p < end; p++) ScavengePointer(p);

  void ScavengePointer(Object** p) {
        reinterpret_cast<HeapObject*>(object));
class VerifyNonPointerSpacePointersVisitor: public ObjectVisitor {
    for (Object** current = start; current < end; current++) {
      if ((*current)->IsHeapObject()) {

static void VerifyNonPointerSpacePointers() {
  VerifyNonPointerSpacePointersVisitor v;
  HeapObjectIterator code_it(HEAP->code_space());
  for (HeapObject* object = code_it.Next();
       object != NULL; object = code_it.Next())
    object->Iterate(&v);

  if (!HEAP->old_data_space()->was_swept_conservatively()) {
    HeapObjectIterator data_it(HEAP->old_data_space());
    for (HeapObject* object = data_it.Next();
         object != NULL; object = data_it.Next())
      object->Iterate(&v);
      survived_since_last_expansion_ > new_space_.Capacity() &&
      !new_space_high_promotion_mode_active_) {
    survived_since_last_expansion_ = 0;
static bool IsUnscavengedHeapObject(Heap* heap, Object** p) {

void Heap::ScavengeStoreBufferCallback(
  heap->store_buffer_rebuilder_.Callback(page, event);
    start_of_current_page_ = NULL;
    current_page_ = NULL;

    if (current_page_ != NULL) {
      store_buffer_->SetTop(start_of_current_page_);
    } else if (store_buffer_->Top() - start_of_current_page_ >=
               (store_buffer_->Limit() - store_buffer_->Top()) >> 2) {
      store_buffer_->SetTop(start_of_current_page_);

    start_of_current_page_ = store_buffer_->Top();
    current_page_ = page;

    if (current_page_ == NULL) {
      ASSERT(current_page_ == page);
      ASSERT(start_of_current_page_ != store_buffer_->Top());
      store_buffer_->SetTop(start_of_current_page_);
  emergency_stack_ = NULL;
void PromotionQueue::RelocateQueueHead() {
  Page* p = Page::FromAllocationTop(reinterpret_cast<Address>(rear_));
  intptr_t* head_start = rear_;
  intptr_t* head_end =
      Min(front_, reinterpret_cast<intptr_t*>(p->area_end()));

      static_cast<int>(head_end - head_start) / kEntrySizeInWords;

  emergency_stack_ = new List<Entry>(2 * entries_count);

  while (head_start != head_end) {
    int size = static_cast<int>(*(head_start++));
    emergency_stack_->Add(Entry(obj, size));
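// The promotion queue lives at the unused end of to-space and grows toward
// the allocation pointer; when the two are about to collide, the queue head
// is relocated into the heap-external emergency_stack_ allocated above, so
// scavenging can continue without corrupting newly allocated objects.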
    if (map_word.IsForwardingAddress()) {
      return map_word.ToForwardingAddress();
void Heap::Scavenge() {
  if (FLAG_verify_heap) VerifyNonPointerSpacePointers();

  LOG(isolate_, ResourceEvent("scavenge", "begin"));

  SelectScavengingVisitorsTable();

  ScavengeVisitor scavenge_visitor(this);

  StoreBufferRebuildScope scope(this,
                                &ScavengeStoreBufferCallback);

  HeapObjectIterator cell_iterator(cell_space_);
  for (HeapObject* cell = cell_iterator.Next();
       cell != NULL; cell = cell_iterator.Next()) {
    if (cell->IsJSGlobalPropertyCell()) {
          reinterpret_cast<Address>(cell) +
      scavenge_visitor.VisitPointer(reinterpret_cast<Object**>(value_address));

  scavenge_visitor.VisitPointer(BitCast<Object**>(&global_contexts_list_));

  new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
      &IsUnscavengedHeapObject);
  new_space_front = DoScavenge(&scavenge_visitor, new_space_front);

      &UpdateNewSpaceReferenceInExternalStringTableEntry);
  if (!FLAG_watch_ic_patching) {
  ScavengeWeakObjectRetainer weak_object_retainer(this);

  ASSERT(new_space_front == new_space_.top());

  LOG(isolate_, ResourceEvent("scavenge", "end"));

  scavenges_since_last_idle_round_++;
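// Scavenge is a Cheney-style copying collection of the new space: the roots,
// the store buffer, and the global-property cells are visited first, then
// DoScavenge repeatedly advances new_space_front toward the allocation top,
// scavenging the pointers inside freshly copied objects, until a fixed point
// is reached (asserted above as new_space_front == new_space_.top()).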
String* Heap::UpdateNewSpaceReferenceInExternalStringTableEntry(Heap* heap,
  if (!first_word.IsForwardingAddress()) {

  if (FLAG_verify_heap) {
    external_string_table_.Verify();

  if (external_string_table_.new_space_strings_.is_empty()) return;

  Object** start = &external_string_table_.new_space_strings_[0];
  Object** end = start + external_string_table_.new_space_strings_.length();

  for (Object** p = start; p < end; ++p) {
    String* target = updater_func(this, p);

    if (target == NULL) continue;

    ASSERT(target->IsExternalString());
      external_string_table_.AddOldString(target);

  external_string_table_.ShrinkNewStrings(static_cast<int>(last - start));
  if (external_string_table_.old_space_strings_.length() > 0) {
    Object** start = &external_string_table_.old_space_strings_[0];
    Object** end = start + external_string_table_.old_space_strings_.length();
    for (Object** p = start; p < end; ++p) *p = updater_func(this, p);
static Object* ProcessFunctionWeakReferences(Heap* heap,
                                             bool record_slots) {
  Object* undefined = heap->undefined_value();
  Object* head = undefined;
  Object* candidate = function;
  while (candidate != undefined) {
    if (retain != NULL) {
      if (head == undefined) {
        tail->set_next_function_link(retain);
            next_function, next_function, retain);

      candidate_function = reinterpret_cast<JSFunction*>(retain);
      tail = candidate_function;

      ASSERT(retain->IsUndefined() || retain->IsJSFunction());

      if (retain == undefined) break;

    candidate = candidate_function->next_function_link();

    tail->set_next_function_link(undefined);
  Object* undefined = undefined_value();
  Object* head = undefined;
  Object* candidate = global_contexts_list_;
  while (candidate != undefined) {
    Context* candidate_context = reinterpret_cast<Context*>(candidate);
    if (retain != NULL) {
      if (head == undefined) {
          next_context, next_context, retain);

      candidate_context = reinterpret_cast<Context*>(retain);
      tail = candidate_context;

      if (retain == undefined) break;

      Object* function_list_head =
          ProcessFunctionWeakReferences(
      Object** optimized_functions =
          optimized_functions, optimized_functions, function_list_head);
      Heap::undefined_value(),

  global_contexts_list_ = head;
  class VisitorAdapter : public ObjectVisitor {
        : visitor_(visitor) {}
    virtual void VisitPointers(Object** start, Object** end) {
      for (Object** p = start; p < end; p++) {
        if ((*p)->IsExternalString()) {
  } visitor_adapter(visitor);
  external_string_table_.Iterate(&visitor_adapter);
        reinterpret_cast<HeapObject*>(object));
Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
    while (new_space_front != new_space_.top()) {

      StoreBufferRebuildScope scope(this,
                                    &ScavengeStoreBufferCallback);

        ASSERT(!target->IsMap());
            target->address() + size,

  } while (new_space_front != new_space_.top());

  return new_space_front;
INLINE(static HeapObject* EnsureDoubleAligned(Heap* heap,

static HeapObject* EnsureDoubleAligned(Heap* heap,
    heap->CreateFillerObjectAt(object->address(), kPointerSize);
    heap->CreateFillerObjectAt(object->address() + size - kPointerSize,
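// A FixedDoubleArray payload needs 8-byte alignment on 32-bit targets, so
// EnsureDoubleAligned is handed one extra kPointerSize of space and plants a
// one-word filler either at the start or at the end of the object, depending
// on which address is misaligned.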
                    &ObjectEvacuationStrategy<POINTER_OBJECT>::
                        template VisitSpecialized<Context::kSize>);

                    &ObjectEvacuationStrategy<POINTER_OBJECT>::
                        template VisitSpecialized<ConsString::kSize>);

                    &ObjectEvacuationStrategy<POINTER_OBJECT>::
                        template VisitSpecialized<SlicedString::kSize>);

                    &ObjectEvacuationStrategy<POINTER_OBJECT>::
                        template VisitSpecialized<SharedFunctionInfo::kSize>);

                    &ObjectEvacuationStrategy<POINTER_OBJECT>::

                    &ObjectEvacuationStrategy<POINTER_OBJECT>::

                    &ObjectEvacuationStrategy<POINTER_OBJECT>::
                        template VisitSpecialized<JSFunction::kSize>);
  enum ObjectContents { DATA_OBJECT, POINTER_OBJECT };
  enum SizeRestriction { SMALL, UNKNOWN_SIZE };
  static void RecordCopiedObject(Heap* heap, HeapObject* obj) {
    bool should_record = false;
    should_record = FLAG_heap_stats;
    should_record = should_record || FLAG_log_gc;
    if (should_record) {
      if (heap->new_space()->Contains(obj)) {
        heap->new_space()->RecordAllocation(obj);
        heap->new_space()->RecordPromotion(obj);
  INLINE(static void MigrateObject(Heap* heap,
    heap->CopyBlock(target->address(), source->address(), size);

    source->set_map_word(MapWord::FromForwardingAddress(target));

      RecordCopiedObject(heap, target);
      HEAP_PROFILE(heap, ObjectMoveEvent(source->address(), target->address()));
      Isolate* isolate = heap->isolate();
      if (isolate->logger()->is_logging() ||
          CpuProfiler::is_profiling(isolate)) {
        if (target->IsSharedFunctionInfo()) {
          PROFILE(isolate, SharedFunctionInfoMoveEvent(
              source->address(), target->address()));

      if (Marking::TransferColor(source, target)) {
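  // After the raw copy, the source's map word is overwritten with a
  // forwarding address, so any later visit of the same object resolves to
  // the new copy; logging, profiling, and incremental-marking color state
  // travel with it.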
  template<ObjectContents object_contents,
           SizeRestriction size_restriction,
  static inline void EvacuateObject(Map* map,
    int allocation_size = object_size;

    Heap* heap = map->GetHeap();
    if (heap->ShouldBePromoted(object->address(), object_size)) {
      MaybeObject* maybe_result;

      if ((size_restriction != SMALL) &&
        maybe_result = heap->lo_space()->AllocateRaw(allocation_size,
        if (object_contents == DATA_OBJECT) {
          maybe_result = heap->old_data_space()->AllocateRaw(allocation_size);
              heap->old_pointer_space()->AllocateRaw(allocation_size);

      if (maybe_result->ToObject(&result)) {
          target = EnsureDoubleAligned(heap, target, allocation_size);

        MigrateObject(heap, object, target, object_size);

        if (object_contents == POINTER_OBJECT) {
            heap->promotion_queue()->insert(
            heap->promotion_queue()->insert(target, object_size);

        heap->tracer()->increment_promoted_objects_size(object_size);

    MaybeObject* allocation = heap->new_space()->AllocateRaw(allocation_size);
    heap->promotion_queue()->SetNewLimit(heap->new_space()->top());
    Object* result = allocation->ToObjectUnchecked();
      target = EnsureDoubleAligned(heap, target, allocation_size);

    MigrateObject(heap, object, target, object_size);
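  // Promotion policy in EvacuateObject: survivors that pass ShouldBePromoted
  // are moved to old data or old pointer space (or the large-object space
  // when the size is not statically SMALL); everything else is copied within
  // new space. Promoted POINTER_OBJECTs are queued so their slots can be
  // scavenged later from the promotion queue.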
  static inline void EvacuateJSFunction(Map* map,
                                        HeapObject* object) {
    ObjectEvacuationStrategy<POINTER_OBJECT>::
        template VisitSpecialized<JSFunction::kSize>(map, slot, object);

    HeapObject* target = *slot;

      map->GetHeap()->mark_compact_collector()->
          RecordCodeEntrySlot(code_entry_slot, code);
  static inline void EvacuateFixedArray(Map* map,
                                        HeapObject* object) {
    EvacuateObject<POINTER_OBJECT, UNKNOWN_SIZE, kObjectAlignment>(map,

  static inline void EvacuateFixedDoubleArray(Map* map,
                                              HeapObject* object) {
    int length = reinterpret_cast<FixedDoubleArray*>(object)->length();
    EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kDoubleAlignment>(

  static inline void EvacuateByteArray(Map* map,
                                       HeapObject* object) {
    int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize();
    EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kObjectAlignment>(
        map, slot, object, object_size);

  static inline void EvacuateSeqAsciiString(Map* map,
                                            HeapObject* object) {
        SeqAsciiStringSize(map->instance_type());
    EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kObjectAlignment>(
        map, slot, object, object_size);

  static inline void EvacuateSeqTwoByteString(Map* map,
                                              HeapObject* object) {
        SeqTwoByteStringSize(map->instance_type());
    EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kObjectAlignment>(
        map, slot, object, object_size);
  static inline bool IsShortcutCandidate(int type) {
  static inline void EvacuateShortcutCandidate(Map* map,
                                               HeapObject* object) {
    ASSERT(IsShortcutCandidate(map->instance_type()));

    Heap* heap = map->GetHeap();
        heap->empty_string()) {

      if (!heap->InNewSpace(first)) {
        object->set_map_word(MapWord::FromForwardingAddress(first));

      MapWord first_word = first->map_word();
      if (first_word.IsForwardingAddress()) {
        HeapObject* target = first_word.ToForwardingAddress();
        object->set_map_word(MapWord::FromForwardingAddress(target));

      heap->DoScavengeObject(first->map(), slot, first);
      object->set_map_word(MapWord::FromForwardingAddress(*slot));

    EvacuateObject<POINTER_OBJECT, SMALL, kObjectAlignment>(
        map, slot, object, object_size);
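  // Shortcut candidates are cons strings whose second component is the empty
  // string: the pair collapses to its first component during scavenge
  // (following an existing forwarding address if the first component was
  // already copied), instead of evacuating the wrapper.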
  template<ObjectContents object_contents>
  class ObjectEvacuationStrategy {
    template<int object_size>
    static inline void VisitSpecialized(Map* map,
                                        HeapObject* object) {
      EvacuateObject<object_contents, SMALL, kObjectAlignment>(
          map, slot, object, object_size);

    static inline void Visit(Map* map,
                             HeapObject* object) {
      int object_size = map->instance_size();
      EvacuateObject<object_contents, SMALL, kObjectAlignment>(
          map, slot, object, object_size);

  static VisitorDispatchTable<ScavengingCallback> table_;
VisitorDispatchTable<ScavengingCallback>
    ScavengingVisitor<marks_handling, logging_and_profiling_mode>::table_;

static void InitializeScavengingVisitorsTables() {
void Heap::SelectScavengingVisitorsTable() {
  bool logging_and_profiling =
      CpuProfiler::is_profiling(isolate()) ||
       isolate()->heap_profiler()->is_profiling());

    if (!logging_and_profiling) {
      scavenging_visitors_table_.CopyFrom(
      scavenging_visitors_table_.CopyFrom(
    if (!logging_and_profiling) {
      scavenging_visitors_table_.CopyFrom(
      scavenging_visitors_table_.CopyFrom(

      scavenging_visitors_table_.Register(
void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
  MapWord first_word = object->map_word();
  Map* map = first_word.ToMap();
  map->GetHeap()->DoScavengeObject(map, p, object);
                                  int instance_size) {
  { MaybeObject* maybe_result = AllocateRawMap();
    if (!maybe_result->ToObject(&result)) return maybe_result;

  reinterpret_cast<Map*>(result)->set_map(raw_unchecked_meta_map());
  reinterpret_cast<Map*>(result)->set_instance_type(instance_type);
  reinterpret_cast<Map*>(result)->set_instance_size(instance_size);
  reinterpret_cast<Map*>(result)->set_visitor_id(
  reinterpret_cast<Map*>(result)->set_inobject_properties(0);
  reinterpret_cast<Map*>(result)->set_pre_allocated_property_fields(0);
  reinterpret_cast<Map*>(result)->set_unused_property_fields(0);
  reinterpret_cast<Map*>(result)->set_bit_field(0);
  reinterpret_cast<Map*>(result)->set_bit_field2(0);
  { MaybeObject* maybe_result = AllocateRawMap();
    if (!maybe_result->ToObject(&result)) return maybe_result;

  Map* map = reinterpret_cast<Map*>(result);

    if (!maybe_code_cache->To(&code_cache)) return maybe_code_cache;

    if (!maybe_accessors->To(&accessors)) return maybe_accessors;

    if (!maybe_info->To(&info)) return maybe_info;

    if (!maybe_entry->To(&entry)) return maybe_entry;
const Heap::StringTypeTable Heap::string_type_table[] = {
#define STRING_TYPE_ELEMENT(type, size, name, camel_name) \
  {type, size, k##camel_name##MapRootIndex},
#undef STRING_TYPE_ELEMENT

const Heap::ConstantSymbolTable Heap::constant_symbol_table[] = {
#define CONSTANT_SYMBOL_ELEMENT(name, contents) \
  {contents, k##name##RootIndex},
#undef CONSTANT_SYMBOL_ELEMENT

const Heap::StructTable Heap::struct_table[] = {
#define STRUCT_TABLE_ELEMENT(NAME, Name, name) \
  { NAME##_TYPE, Name::kSize, k##Name##MapRootIndex },
#undef STRUCT_TABLE_ELEMENT
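// These tables are produced by expanding the corresponding list macros once
// per element; CreateInitialMaps and CreateInitialObjects below iterate them
// with ARRAY_SIZE to allocate one map (or symbol) per entry and record it in
// the root list.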
bool Heap::CreateInitialMaps() {
    if (!maybe_obj->ToObject(&obj)) return false;

  Map* new_meta_map = reinterpret_cast<Map*>(obj);
  set_meta_map(new_meta_map);
  new_meta_map->set_map(new_meta_map);

  { MaybeObject* maybe_obj =
    if (!maybe_obj->ToObject(&obj)) return false;

    if (!maybe_obj->ToObject(&obj)) return false;

  { MaybeObject* maybe_obj = AllocateEmptyFixedArray();
    if (!maybe_obj->ToObject(&obj)) return false;

    if (!maybe_obj->ToObject(&obj)) return false;

    if (!maybe_obj->ToObject(&obj)) return false;

  { MaybeObject* maybe_obj = AllocateEmptyFixedArray();
    if (!maybe_obj->ToObject(&obj)) return false;

  meta_map()->init_instance_descriptors();
  meta_map()->set_code_cache(empty_fixed_array());
  meta_map()->init_prototype_transitions(undefined_value());

  fixed_array_map()->init_instance_descriptors();
  fixed_array_map()->set_code_cache(empty_fixed_array());
  fixed_array_map()->init_prototype_transitions(undefined_value());

  oddball_map()->init_instance_descriptors();
  oddball_map()->set_code_cache(empty_fixed_array());
  oddball_map()->init_prototype_transitions(undefined_value());

  meta_map()->set_prototype(null_value());
  meta_map()->set_constructor(null_value());

  fixed_array_map()->set_prototype(null_value());
  fixed_array_map()->set_constructor(null_value());

  oddball_map()->set_prototype(null_value());
  oddball_map()->set_constructor(null_value());

  { MaybeObject* maybe_obj =
    if (!maybe_obj->ToObject(&obj)) return false;
  set_fixed_cow_array_map(Map::cast(obj));
  ASSERT(fixed_array_map() != fixed_cow_array_map());

  { MaybeObject* maybe_obj =
    if (!maybe_obj->ToObject(&obj)) return false;

    if (!maybe_obj->ToObject(&obj)) return false;

    if (!maybe_obj->ToObject(&obj)) return false;

  for (unsigned i = 0; i < ARRAY_SIZE(string_type_table); i++) {
    const StringTypeTable& entry = string_type_table[i];
    { MaybeObject* maybe_obj = AllocateMap(entry.type, entry.size);
      if (!maybe_obj->ToObject(&obj)) return false;

    if (!maybe_obj->ToObject(&obj)) return false;
  set_undetectable_string_map(Map::cast(obj));

  { MaybeObject* maybe_obj =
    if (!maybe_obj->ToObject(&obj)) return false;
  set_undetectable_ascii_string_map(Map::cast(obj));

  { MaybeObject* maybe_obj =
    if (!maybe_obj->ToObject(&obj)) return false;
  set_fixed_double_array_map(Map::cast(obj));

  { MaybeObject* maybe_obj =
    if (!maybe_obj->ToObject(&obj)) return false;

  { MaybeObject* maybe_obj =
    if (!maybe_obj->ToObject(&obj)) return false;

    if (!maybe_obj->ToObject(&obj)) return false;

  { MaybeObject* maybe_obj =
    if (!maybe_obj->ToObject(&obj)) return false;
  set_external_pixel_array_map(Map::cast(obj));

    if (!maybe_obj->ToObject(&obj)) return false;
  set_external_byte_array_map(Map::cast(obj));

    if (!maybe_obj->ToObject(&obj)) return false;
  set_external_unsigned_byte_array_map(Map::cast(obj));

    if (!maybe_obj->ToObject(&obj)) return false;
  set_external_short_array_map(Map::cast(obj));

    if (!maybe_obj->ToObject(&obj)) return false;
  set_external_unsigned_short_array_map(Map::cast(obj));

    if (!maybe_obj->ToObject(&obj)) return false;
  set_external_int_array_map(Map::cast(obj));

    if (!maybe_obj->ToObject(&obj)) return false;
  set_external_unsigned_int_array_map(Map::cast(obj));

    if (!maybe_obj->ToObject(&obj)) return false;
  set_external_float_array_map(Map::cast(obj));

  { MaybeObject* maybe_obj =
    if (!maybe_obj->ToObject(&obj)) return false;
  set_non_strict_arguments_elements_map(Map::cast(obj));

    if (!maybe_obj->ToObject(&obj)) return false;
  set_external_double_array_map(Map::cast(obj));

    if (!maybe_obj->ToObject(&obj)) return false;

    if (!maybe_obj->ToObject(&obj)) return false;
  set_global_property_cell_map(Map::cast(obj));

    if (!maybe_obj->ToObject(&obj)) return false;
  set_one_pointer_filler_map(Map::cast(obj));

    if (!maybe_obj->ToObject(&obj)) return false;
  set_two_pointer_filler_map(Map::cast(obj));

  for (unsigned i = 0; i < ARRAY_SIZE(struct_table); i++) {
    const StructTable& entry = struct_table[i];
    { MaybeObject* maybe_obj = AllocateMap(entry.type, entry.size);
      if (!maybe_obj->ToObject(&obj)) return false;

  { MaybeObject* maybe_obj =
    if (!maybe_obj->ToObject(&obj)) return false;

  { MaybeObject* maybe_obj =
    if (!maybe_obj->ToObject(&obj)) return false;
  set_function_context_map(Map::cast(obj));

  { MaybeObject* maybe_obj =
    if (!maybe_obj->ToObject(&obj)) return false;

  { MaybeObject* maybe_obj =
    if (!maybe_obj->ToObject(&obj)) return false;

  { MaybeObject* maybe_obj =
    if (!maybe_obj->ToObject(&obj)) return false;

  { MaybeObject* maybe_obj =
    if (!maybe_obj->ToObject(&obj)) return false;

  { MaybeObject* maybe_obj =
    if (!maybe_obj->ToObject(&obj)) return false;
  Map* global_context_map = Map::cast(obj);
  set_global_context_map(global_context_map);

    if (!maybe_obj->ToObject(&obj)) return false;
  set_shared_function_info_map(Map::cast(obj));

    if (!maybe_obj->ToObject(&obj)) return false;
  { MaybeObject* maybe_result =
    if (!maybe_result->ToObject(&result)) return maybe_result;

    if (!maybe_result->ToObject(&result)) return maybe_result;

  { MaybeObject* maybe_result = AllocateRawCell();
    if (!maybe_result->ToObject(&result)) return maybe_result;
      global_property_cell_map());
MaybeObject* Heap::CreateOddball(const char* to_string,
  if (!maybe_result->ToObject(&result)) return maybe_result;
    if (!maybe_obj->ToObject(&obj)) return false;
  set_neander_map(new_neander_map);

    if (!maybe_obj->ToObject(&obj)) return false;
    if (!maybe_elements->ToObject(&elements)) return false;
void Heap::CreateJSEntryStub() {
  set_js_entry_code(*stub.GetCode());

void Heap::CreateJSConstructEntryStub() {
  JSConstructEntryStub stub;
  set_js_construct_entry_code(*stub.GetCode());

void Heap::CreateFixedStubs() {
  Heap::CreateJSEntryStub();
  Heap::CreateJSConstructEntryStub();

  CodeStub::GenerateStubsAheadOfTime();
bool Heap::CreateInitialObjects() {
    if (!maybe_obj->ToObject(&obj)) return false;

    if (!maybe_obj->ToObject(&obj)) return false;

    if (!maybe_obj->ToObject(&obj)) return false;

  set_the_hole_value(reinterpret_cast<Oddball*>(Smi::FromInt(0)));

    if (!maybe_obj->ToObject(&obj)) return false;

  { MaybeObject* maybe_obj =
        undefined_value()->Initialize("undefined",
    if (!maybe_obj->ToObject(&obj)) return false;

  { MaybeObject* maybe_obj =
    if (!maybe_obj->ToObject(&obj)) return false;

  { MaybeObject* maybe_obj = CreateOddball("true",
    if (!maybe_obj->ToObject(&obj)) return false;

  { MaybeObject* maybe_obj = CreateOddball("false",
    if (!maybe_obj->ToObject(&obj)) return false;

  { MaybeObject* maybe_obj = CreateOddball("hole",
    if (!maybe_obj->ToObject(&obj)) return false;

  { MaybeObject* maybe_obj = CreateOddball("arguments_marker",
    if (!maybe_obj->ToObject(&obj)) return false;

  { MaybeObject* maybe_obj = CreateOddball("no_interceptor_result_sentinel",
    if (!maybe_obj->ToObject(&obj)) return false;
  set_no_interceptor_result_sentinel(obj);

  { MaybeObject* maybe_obj = CreateOddball("termination_exception",
    if (!maybe_obj->ToObject(&obj)) return false;
  set_termination_exception(obj);

    if (!maybe_obj->ToObject(&obj)) return false;

  for (unsigned i = 0; i < ARRAY_SIZE(constant_symbol_table); i++) {
    { MaybeObject* maybe_obj =
      if (!maybe_obj->ToObject(&obj)) return false;
    roots_[constant_symbol_table[i].index] = String::cast(obj);

  { MaybeObject* maybe_obj =
    if (!maybe_obj->ToObject(&obj)) return false;

  { MaybeObject* maybe_obj =
    if (!maybe_obj->ToObject(&obj)) return false;

    if (!maybe_obj->ToObject(&obj)) return false;

    if (!maybe_obj->ToObject(&obj)) return false;

    if (!maybe_obj->ToObject(&obj)) return false;

    if (!maybe_obj->ToObject(&obj)) return false;

    if (!maybe_obj->ToObject(&obj)) return false;

  { MaybeObject* maybe_obj = AllocateInitialNumberStringCache();
    if (!maybe_obj->ToObject(&obj)) return false;

  { MaybeObject* maybe_obj =
    if (!maybe_obj->ToObject(&obj)) return false;

  { MaybeObject* maybe_obj =
    if (!maybe_obj->ToObject(&obj)) return false;

    if (!maybe_obj->ToObject(&obj)) return false;

  set_last_script_id(undefined_value());
  if (!string->IsSymbol() || !pattern->IsSymbol()) return Smi::FromInt(0);
  uint32_t hash = string->Hash();
                    ~(kArrayEntriesPerCacheEntry - 1));
  if (cache->get(index + kStringOffset) == string &&
      cache->get(index + kPatternOffset) == pattern) {
    return cache->get(index + kArrayOffset);
  if (cache->get(index + kStringOffset) == string &&
      cache->get(index + kPatternOffset) == pattern) {
    return cache->get(index + kArrayOffset);

  if (!string->IsSymbol() || !pattern->IsSymbol()) return;
  uint32_t hash = string->Hash();
                    ~(kArrayEntriesPerCacheEntry - 1));
    cache->set(index + kStringOffset, string);
    cache->set(index + kPatternOffset, pattern);
    cache->set(index + kArrayOffset, array);
      cache->set(index2 + kStringOffset, string);
      cache->set(index2 + kPatternOffset, pattern);
      cache->set(index2 + kArrayOffset, array);
      cache->set(index + kStringOffset, string);
      cache->set(index + kPatternOffset, pattern);
      cache->set(index + kArrayOffset, array);

  if (array->length() < 100) {
    for (int i = 0; i < array->length(); i++) {
      if (maybe_symbol->ToObject(&symbol)) {
        array->set(i, symbol);
MaybeObject* Heap::AllocateInitialNumberStringCache() {
  MaybeObject* maybe_obj =

int Heap::FullSizeNumberStringCacheLength() {
  int number_string_cache_size = max_semispace_size_ / 512;
  number_string_cache_size = Max(kInitialNumberStringCacheSize * 2,
                                 Min(0x4000, number_string_cache_size));
  return number_string_cache_size * 2;
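  // Example: with an 8 MB max semispace, 8 MB / 512 = 16384 = 0x4000 entries,
  // which the clamp leaves unchanged; the result is doubled because every
  // cache entry occupies two FixedArray slots (number and string).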
void Heap::AllocateFullSizeNumberStringCache() {
  MaybeObject* maybe_obj =
  if (maybe_obj->ToObject(&new_cache)) {
void Heap::FlushNumberStringCache() {
  int len = number_string_cache()->length();
  for (int i = 0; i < len; i++) {
    number_string_cache()->set_undefined(this, i);
static inline int double_get_hash(double d) {
  DoubleRepresentation rep(d);
  return static_cast<int>(rep.bits) ^ static_cast<int>(rep.bits >> 32);

static inline int smi_get_hash(Smi* smi) {
  return smi->value();
  int mask = (number_string_cache()->length() >> 1) - 1;
  if (number->IsSmi()) {
    hash = smi_get_hash(Smi::cast(number)) & mask;
    hash = double_get_hash(number->Number()) & mask;
  Object* key = number_string_cache()->get(hash * 2);
  if (key == number) {
    return String::cast(number_string_cache()->get(hash * 2 + 1));
  } else if (key->IsHeapNumber() &&
             number->IsHeapNumber() &&
    return String::cast(number_string_cache()->get(hash * 2 + 1));
  return undefined_value();
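  // The number-string cache is direct-mapped: the hash (the Smi value, or
  // the xor-folded bits of a double) masked to the table size picks a single
  // two-slot entry holding the number and its cached string; misses simply
  // fall through to undefined_value(), and a later Set overwrites the entry.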
  int mask = (number_string_cache()->length() >> 1) - 1;
  if (number->IsSmi()) {
    hash = smi_get_hash(Smi::cast(number)) & mask;
    hash = double_get_hash(number->Number()) & mask;
  if (number_string_cache()->get(hash * 2) != undefined_value() &&
      number_string_cache()->length() != FullSizeNumberStringCacheLength()) {
    AllocateFullSizeNumberStringCache();
  number_string_cache()->set(hash * 2, number);
  number_string_cache()->set(hash * 2 + 1, string);
                                 bool check_number_string_cache) {
  isolate_->counters()->number_to_string_runtime()->Increment();
  if (check_number_string_cache) {
    if (cached != undefined_value()) {

  if (number->IsSmi()) {
    if (maybe_js_string->ToObject(&js_string)) {
  return maybe_js_string;

                                 bool check_number_string_cache) {
  if (!maybe->To<Object>(&number)) return maybe;
  switch (array_type) {
      return kExternalByteArrayMapRootIndex;
      return kExternalUnsignedByteArrayMapRootIndex;
      return kExternalShortArrayMapRootIndex;
      return kExternalUnsignedShortArrayMapRootIndex;
      return kExternalIntArrayMapRootIndex;
      return kExternalUnsignedIntArrayMapRootIndex;
      return kExternalFloatArrayMapRootIndex;
      return kExternalDoubleArrayMapRootIndex;
      return kExternalPixelArrayMapRootIndex;
      return kUndefinedValueRootIndex;
  int int_value = FastD2I(value);

  MaybeObject* maybe_result = Allocate(foreign_map(), space);
  if (!maybe_result->To(&result)) return maybe_result;
  share->set_name(name);
  share->set_code(illegal);
  Code* construct_stub =
  share->set_construct_stub(construct_stub);
  share->set_instance_class_name(Object_symbol());
  if (!maybe_result->ToObject(&result)) return maybe_result;
  message->set_type(type);
  message->set_arguments(arguments);
  message->set_script(script);
  message->set_stack_trace(stack_trace);
  message->set_stack_frames(stack_frames);
static inline bool Between(uint32_t character, uint32_t from, uint32_t to) {
  return character - from <= to - from;
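// The single unsigned comparison covers both bounds: when character < from,
// the subtraction wraps around to a huge value, so the test is equivalent to
// (from <= character && character <= to). For example, Between('5', '0', '9')
// computes 5 <= 9 and returns true.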
MUST_USE_RESULT static inline MaybeObject* MakeOrFindTwoCharacterString(
  if ((!Between(c1, '0', '9') || !Between(c2, '0', '9')) &&
      heap->symbol_table()->LookupTwoCharsSymbolIfExists(c1, c2, &symbol)) {

  { MaybeObject* maybe_result = heap->AllocateRawAsciiString(2);
    if (!maybe_result->ToObject(&result)) return maybe_result;

  { MaybeObject* maybe_result = heap->AllocateRawTwoByteString(2);
    if (!maybe_result->ToObject(&result)) return maybe_result;
  int first_length = first->length();
  if (first_length == 0) {

  int second_length = second->length();
  if (second_length == 0) {

  int length = first_length + second_length;

    unsigned c1 = first->Get(0);
    unsigned c2 = second->Get(0);
    return MakeOrFindTwoCharacterString(this, c1, c2);

  bool is_ascii = first_is_ascii && second_is_ascii;

  bool is_ascii_data_in_two_byte_string = false;
      is_ascii_data_in_two_byte_string =
      if (is_ascii_data_in_two_byte_string) {
        isolate_->counters()->string_add_runtime_ext_to_ascii()->Increment();

      if (!maybe_result->ToObject(&result)) return maybe_result;

      if (first->IsExternalString()) {
      for (int i = 0; i < first_length; i++) *dest++ = src[i];

      if (second->IsExternalString()) {
      for (int i = 0; i < second_length; i++) *dest++ = src[i];

    if (is_ascii_data_in_two_byte_string) {
        if (!maybe_result->ToObject(&result)) return maybe_result;
      isolate_->counters()->string_add_runtime_ext_to_ascii()->Increment();

      if (!maybe_result->ToObject(&result)) return maybe_result;

  Map* map = (is_ascii || is_ascii_data_in_two_byte_string) ?
      cons_ascii_string_map() : cons_string_map();

  if (!maybe_result->ToObject(&result)) return maybe_result;
  int length = end - start;
    return empty_string();
  } else if (length == 1) {
  } else if (length == 2) {
    unsigned c1 = buffer->Get(start);
    unsigned c2 = buffer->Get(start + 1);
    return MakeOrFindTwoCharacterString(this, c1, c2);

  if (!FLAG_string_slices ||
    { MaybeObject* maybe_result = is_ascii
      if (!maybe_result->ToObject(&result)) return maybe_result;

  if (FLAG_verify_heap) {
    buffer->StringVerify();

      ? sliced_ascii_string_map()
      : sliced_string_map();
    if (!maybe_result->ToObject(&result)) return maybe_result;

  if (buffer->IsConsString()) {
  } else if (buffer->IsSlicedString()) {
         sliced_string->parent()->IsExternalString());
  size_t length = resource->length();

  Map* map = external_ascii_string_map();
  if (!maybe_result->ToObject(&result)) return maybe_result;

  external_string->set_length(static_cast<int>(length));

  size_t length = resource->length();

  static const size_t kAsciiCheckLengthLimit = 32;
  bool is_ascii = length <= kAsciiCheckLengthLimit &&
  Map* map = is_ascii ?
      external_string_with_ascii_data_map() : external_string_map();
  if (!maybe_result->ToObject(&result)) return maybe_result;

  external_string->set_length(static_cast<int>(length));
    Object* value = single_character_string_cache()->get(code);
    if (value != undefined_value()) return value;

    buffer[0] = static_cast<char>(code);
    if (!maybe_result->ToObject(&result)) return maybe_result;
    single_character_string_cache()->set(code, result);

  if (!maybe_result->ToObject(&result)) return maybe_result;
  answer->Set(0, code);
    if (!maybe_result->ToObject(&result)) return maybe_result;

  reinterpret_cast<ByteArray*>(result)->set_map_no_write_barrier(
  reinterpret_cast<ByteArray*>(result)->set_length(length);

    if (!maybe_result->ToObject(&result)) return maybe_result;

  reinterpret_cast<ByteArray*>(result)->set_map_no_write_barrier(
  reinterpret_cast<ByteArray*>(result)->set_length(length);
  if (size == 0) return;
                                         void* external_pointer,
  if (!maybe_result->ToObject(&result)) return maybe_result;

  reinterpret_cast<ExternalArray*>(result)->set_map_no_write_barrier(
  reinterpret_cast<ExternalArray*>(result)->set_length(length);
  reinterpret_cast<ExternalArray*>(result)->set_external_pointer(
    if (!maybe_reloc_info->To(&reloc_info)) return maybe_reloc_info;

  MaybeObject* maybe_result;
  if (obj_size > code_space()->AreaSize() || immovable) {
    maybe_result = code_space_->AllocateRaw(obj_size);

  if (!maybe_result->ToObject(&result)) return maybe_result;

  code->set_relocation_info(reloc_info);

  if (!self_reference.is_null()) {
    *(self_reference.location()) = code;

  if (FLAG_verify_heap) {
  int obj_size = code->Size();
  MaybeObject* maybe_result;
    maybe_result = code_space_->AllocateRaw(obj_size);

  if (!maybe_result->ToObject(&result)) return maybe_result;

  CopyBlock(new_addr, old_addr, obj_size);

  new_code->Relocate(new_addr - old_addr);
  Object* reloc_info_array;
  { MaybeObject* maybe_reloc_info_array =
    if (!maybe_reloc_info_array->ToObject(&reloc_info_array)) {
      return maybe_reloc_info_array;

  size_t relocation_offset =

  MaybeObject* maybe_result;
    maybe_result = code_space_->AllocateRaw(new_obj_size);

  if (!maybe_result->ToObject(&result)) return maybe_result;

  memcpy(new_addr, old_addr, relocation_offset);

  new_code->Relocate(new_addr - old_addr);

  if (FLAG_verify_heap) {
  { MaybeObject* maybe_result =
    if (!maybe_result->ToObject(&result)) return maybe_result;
void Heap::InitializeFunction(JSFunction* function,
  ASSERT(!prototype->IsMap());
  function->initialize_properties();
  function->initialize_elements();
  function->set_shared(shared);
  function->set_code(shared->code());
  function->set_prototype_or_initial_map(prototype);
  function->set_context(undefined_value());
  function->set_literals_or_bindings(empty_fixed_array());
  function->set_next_function_link(undefined_value());
  { MaybeObject* maybe_map =
    if (!maybe_map->To<Map>(&new_map)) return maybe_map;

    if (!maybe_prototype->ToObject(&prototype)) return maybe_prototype;

  { MaybeObject* maybe_result =
        constructor_symbol(), function, DONT_ENUM);
    if (!maybe_result->ToObject(&result)) return maybe_result;

  { MaybeObject* maybe_result = Allocate(function_map, space);
    if (!maybe_result->ToObject(&result)) return maybe_result;
  int arguments_object_size;
  bool strict_mode_callee = callee->IsJSFunction() &&
  if (strict_mode_callee) {
        strict_mode_arguments_boilerplate();

  { MaybeObject* maybe_result =
    if (!maybe_result->ToObject(&result)) return maybe_result;

  if (!strict_mode_callee) {

  for (int i = 1; i != count; i++) {
    if (prev_key == current_key) return true;
    prev_key = current_key;
  int instance_size = fun->shared()->CalculateInstanceSize();
  int in_object_properties = fun->shared()->CalculateInObjectProperties();

  if (!maybe_map_obj->ToObject(&map_obj)) return maybe_map_obj;

    if (!maybe_prototype->ToObject(&prototype)) return maybe_prototype;

  map->set_prototype(prototype);

  if (fun->shared()->CanGenerateInlineConstructor(prototype)) {
    int count = fun->shared()->this_property_assignments_count();
    if (count > in_object_properties) {
      fun->shared()->ForbidInlineConstructor();

      { MaybeObject* maybe_descriptors_obj =
          return maybe_descriptors_obj;

      for (int i = 0; i < count; i++) {
        String* name = fun->shared()->GetThisPropertyAssignmentName(i);
        ASSERT(name->IsSymbol());
        field.SetEnumerationIndex(i);
        descriptors->Set(i, &field, witness);

      if (HasDuplicates(descriptors)) {
        fun->shared()->ForbidInlineConstructor();
        map->set_instance_descriptors(descriptors);

  fun->shared()->StartInobjectSlackTracking(map);
void Heap::InitializeJSObjectFromMap(JSObject* obj,
  obj->set_properties(properties);

  if (map->constructor()->IsJSFunction() &&
          IsInobjectSlackTrackingInProgress()) {
    filler = Heap::one_pointer_filler_map();
    filler = Heap::undefined_value();
    if (!maybe_properties->ToObject(&properties)) return maybe_properties;

  { MaybeObject* maybe_obj = Allocate(map, space);
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;

    if (!maybe_initial_map->ToObject(&initial_map)) return maybe_initial_map;
  Map::cast(initial_map)->set_constructor(constructor);

  ASSERT(!result->ToObject(&non_failure) || !non_failure->IsGlobalObject());
  if (!maybe_map->To(&map)) return maybe_map;

  ASSERT(capacity >= length);

  MaybeObject* maybe_array = AllocateJSArray(elements_kind, pretenure);
  if (!maybe_array->To(&array)) return maybe_array;

  if (capacity == 0) {
    array->set_elements(empty_fixed_array());

  MaybeObject* maybe_elms = NULL;
  if (!maybe_elms->To(&elms)) return maybe_elms;

  array->set_elements(elms);

  MaybeObject* maybe_array = AllocateJSArray(elements_kind, pretenure);
  if (!maybe_array->To(&array)) return maybe_array;

  array->set_elements(elements);
  if (!maybe_map_obj->To<Map>(&map)) return maybe_map_obj;
  map->set_prototype(prototype);

  if (!maybe_result->To<JSProxy>(&result)) return maybe_result;
  result->set_handler(handler);

  MaybeObject* maybe_map_obj =
  if (!maybe_map_obj->To<Map>(&map)) return maybe_map_obj;
  map->set_prototype(prototype);

  result->set_handler(handler);
  result->set_call_trap(call_trap);
  result->set_construct_trap(construct_trap);
  ASSERT(map->NextFreePropertyIndex() == 0);
  ASSERT(map->unused_property_fields() == 0);
  ASSERT(map->inobject_properties() == 0);

  { MaybeObject* maybe_obj =
        map->NumberOfDescribedProperties() * 2 + initial_size);
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;

    PropertyDetails details = descs->GetDetails(i);
        PropertyDetails(details.attributes(), CALLBACKS, details.index());
      if (!maybe_value->ToObject(&value)) return maybe_value;

    { MaybeObject* maybe_result = dictionary->Add(descs->GetKey(i), value, d);
      if (!maybe_result->ToObject(&result)) return maybe_result;

    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  InitializeJSObjectFromMap(global, dictionary, map);

  { MaybeObject* maybe_obj = map->CopyDropDescriptors();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;

  global->set_properties(dictionary);

  ASSERT(global->IsGlobalObject());
  Map* map = source->map();

  { MaybeObject* maybe_clone =
    if (!maybe_clone->ToObject(&clone)) return maybe_clone;

  { MaybeObject* maybe_clone = new_space_.AllocateRaw(object_size);
    if (!maybe_clone->ToObject(&clone)) return maybe_clone;

  if (elements->length() > 0) {
    { MaybeObject* maybe_elem;
      if (elements->map() == fixed_cow_array_map()) {
      if (!maybe_elem->ToObject(&elem)) return maybe_elem;

  if (properties->length() > 0) {
    if (!maybe_prop->ToObject(&prop)) return maybe_prop;
  if (!maybe->To<Map>(&map)) return maybe;

  int size_difference = object->map()->instance_size() - map->instance_size();
  ASSERT(size_difference >= 0);

  map->set_prototype(object->map()->prototype());

    if (!maybe->ToObject(&properties)) return maybe;

    if (!maybe->To<String>(&name)) return maybe;

  object->set_map(map);

      isolate()->context()->global_context());

  if (size_difference > 0) {

  ASSERT(map->instance_size() == object->map()->instance_size());
  ASSERT(map->instance_type() == object->map()->instance_type());

  int prop_size = map->unused_property_fields() - map->inobject_properties();
    if (!maybe_properties->ToObject(&properties)) return maybe_properties;
  if (string.length() == 1) {

  { MaybeObject* maybe_result =
    if (!maybe_result->ToObject(&result)) return maybe_result;

  for (int i = 0; i < string.length(); i++) {

  decoder->Reset(string.start(), string.length());
  while (decoder->has_more()) {
    uint32_t r = decoder->GetNext();

  if (!maybe_result->ToObject(&result)) return maybe_result;

  decoder->Reset(string.start(), string.length());
    uint32_t r = decoder->GetNext();
    string_result->Set(i++, r);
  MaybeObject* maybe_result;
  if (!maybe_result->ToObject(&result)) return maybe_result;

  for (int i = 0; i < string.length(); i++) {
    string_result->Set(i, string[i]);
      return external_symbol_with_ascii_data_map();
      return short_external_ascii_symbol_map();
      return short_external_symbol_with_ascii_data_map();
    default: return NULL;
    uint32_t hash_field) {
  bool is_ascii = true;

    map = ascii_symbol_map();

  if (!maybe_result->ToObject(&result)) return maybe_result;

  reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier(map);

    uint32_t character = buffer->GetNext();
    answer->Set(i++, character);
  if (size > kMaxObjectSizeInNewSpace) {

  { MaybeObject* maybe_result = AllocateRaw(size, space, retry_space);
    if (!maybe_result->ToObject(&result)) return maybe_result;

  if (FLAG_verify_heap) {

  if (size > kMaxObjectSizeInNewSpace) {

  { MaybeObject* maybe_result = AllocateRaw(size, space, retry_space);
    if (!maybe_result->ToObject(&result)) return maybe_result;
MaybeObject* Heap::AllocateJSArray(
  JSFunction* array_function = global_context->array_function();

  Object* maybe_map_array = global_context->js_array_maps();
  if (!maybe_map_array->IsUndefined()) {
    Object* maybe_transitioned_map =
    if (!maybe_transitioned_map->IsUndefined()) {
      map = Map::cast(maybe_transitioned_map);
MaybeObject* Heap::AllocateEmptyFixedArray() {
  { MaybeObject* maybe_result =
    if (!maybe_result->ToObject(&result)) return maybe_result;

  reinterpret_cast<FixedArray*>(result)->set_map_no_write_barrier(
  reinterpret_cast<FixedArray*>(result)->set_length(0);
  return size <= kMaxObjectSizeInNewSpace
      ? new_space_.AllocateRaw(size)

    if (!maybe_obj->ToObject(&obj)) return maybe_obj;

  for (int i = 0; i < len; i++) result->set(i, src->get(i), mode);

    if (!maybe_obj->ToObject(&obj)) return maybe_obj;

  if (length == 0) return empty_fixed_array();

  if (!maybe_result->ToObject(&result)) return maybe_result;

  if (space == NEW_SPACE && size > kMaxObjectSizeInNewSpace) {
  ASSERT(heap->empty_fixed_array()->IsFixedArray());
  if (length == 0) return heap->empty_fixed_array();

    if (!maybe_result->ToObject(&result)) return maybe_result;

  array->set_length(length);

  return AllocateFixedArrayWithFiller(this,

  return AllocateFixedArrayWithFiller(this,

  if (length == 0) return empty_fixed_array();

    if (!maybe_obj->ToObject(&obj)) return maybe_obj;

  reinterpret_cast<FixedArray*>(obj)->set_map_no_write_barrier(
MaybeObject* Heap::AllocateEmptyFixedDoubleArray() {
  { MaybeObject* maybe_result =
    if (!maybe_result->ToObject(&result)) return maybe_result;

  reinterpret_cast<FixedDoubleArray*>(result)->set_map_no_write_barrier(
      fixed_double_array_map());
  reinterpret_cast<FixedDoubleArray*>(result)->set_length(0);

  if (length == 0) return empty_fixed_array();

  if (!maybe_obj->ToObject(&elements_object)) return maybe_obj;

  if (length == 0) return empty_fixed_array();

  if (!maybe_obj->ToObject(&elements_object)) return maybe_obj;

  for (int i = 0; i < length; ++i) {
// From Heap::AllocateRawFixedDoubleArray:
#ifndef V8_HOST_ARCH_64_BIT
  size += kPointerSize;  // Reserve room so the payload can be realigned.
#endif
  // ...
  if (space == NEW_SPACE && size > kMaxObjectSizeInNewSpace) {
    // Too big for new space.
    space = LO_SPACE;
  }
  // ...
  HeapObject* object;
  { MaybeObject* maybe_object = AllocateRaw(size, space, retry_space);
    if (!maybe_object->To<HeapObject>(&object)) return maybe_object;
  }

  return EnsureDoubleAligned(this, object, size);
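// EnsureDoubleAligned is needed because on 32-bit targets the allocator only
// guarantees kPointerSize (4-byte) alignment while unboxed doubles want
// 8-byte alignment; the extra kPointerSize reserved above gives it room to
// shift the payload or leave a filler word. An illustrative sketch of the
// alignment arithmetic (standalone C++, kDoubleAlignment assumed to be 8;
// not the V8 routine itself):
//
//   static inline char* AlignToDouble(char* addr) {
//     const uintptr_t kDoubleAlignment = 8;
//     uintptr_t p = reinterpret_cast<uintptr_t>(addr);
//     return reinterpret_cast<char*>(
//         (p + kDoubleAlignment - 1) & ~(kDoubleAlignment - 1));
//   }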
// From Heap::AllocateHashTable: a hash table is a FixedArray with the
// hash-table map installed.
  { MaybeObject* maybe_result = AllocateFixedArray(length, pretenure);
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }
  reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier(
      hash_table_map());
  ASSERT(result->IsHashTable());
  return result;
MaybeObject* Heap::AllocateGlobalContext() {
  Object* result;
  { MaybeObject* maybe_result =
        AllocateFixedArray(Context::GLOBAL_CONTEXT_SLOTS);
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }
  Context* context = reinterpret_cast<Context*>(result);
  context->set_map_no_write_barrier(global_context_map());
  context->set_js_array_maps(undefined_value());
  ASSERT(context->IsGlobalContext());
  ASSERT(result->IsContext());
  return result;
}
// Context allocator fragments. The common shape: allocate a FixedArray of
// the required size, install the specific context map, then wire up the
// closure, previous, extension, and global fields.
  { MaybeObject* maybe_result = AllocateFixedArray(length);
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }
  // ...
  if (!maybe_result->ToObject(&result)) return maybe_result;
  // ...
  // From Heap::AllocateFunctionContext: the new context inherits its
  // closure's global object.
  context->set_global(function->context()->global());
// From Heap::AllocateCatchContext (one extra slot for the thrown object):
  { MaybeObject* maybe_result =
        AllocateFixedArray(Context::MIN_CONTEXT_SLOTS + 1);
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }
  // ...

// From Heap::AllocateWithContext:
  if (!maybe_result->ToObject(&result)) return maybe_result;
  // ...

// From Heap::AllocateBlockContext:
  { MaybeObject* maybe_result =
        AllocateFixedArrayWithHoles(scope_info->ContextLength());
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }
  // ...

// From Heap::AllocateScopeInfo:
  if (!maybe_scope_info->To(&scope_info)) return maybe_scope_info;
MaybeObject* Heap::AllocateStruct(InstanceType type) {
  Map* map;
  switch (type) {
#define MAKE_CASE(NAME, Name, name) \
    case NAME##_TYPE: map = name##_map(); break;
STRUCT_LIST(MAKE_CASE)
#undef MAKE_CASE
    default:
      UNREACHABLE();
      return Failure::InternalError();
  }
  // ...
  Object* result;
  { MaybeObject* maybe_result = Allocate(map, space);
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }
// From Heap::EnsureHeapIsIterable:
  ASSERT(IsAllocationAllowed());
void Heap::AdvanceIdleIncrementalMarking(intptr_t step_size) {
  incremental_marking()->Step(step_size,
                              IncrementalMarking::NO_GC_VIA_STACK_GUARD);

  if (incremental_marking()->IsComplete()) {
    bool uncommit = false;
    if (gc_count_at_last_idle_gc_ == gc_count_) {
      // No GC since the last full GC, the mutator is probably not active.
      isolate_->compilation_cache()->Clear();
      uncommit = true;
    }
    CollectAllGarbage(kNoGCFlags, "idle notification: finalize incremental");
    gc_count_at_last_idle_gc_ = gc_count_;
    if (uncommit) {
      new_space_.Shrink();
      UncommitFromSpace();
    }
  }
}
bool Heap::IdleNotification(int hint) {
  const int kMaxHint = 1000;
  intptr_t size_factor = Min(Max(hint, 20), kMaxHint) / 4;
  // The size factor is in the range [5..250]; the step size is derived
  // from it.
  // ...

  if (contexts_disposed_ > 0) {
    if (hint >= kMaxHint) {
      // The embedder is requesting a lot of GC work after context disposal;
      // age the inline caches so they don't keep objects from the old
      // context alive.
      // ...
    }
    int mark_sweep_time = Min(TimeMarkSweepWouldTakeInMs(), 1000);
    if (hint >= mark_sweep_time && !FLAG_expose_gc &&
        incremental_marking()->IsStopped()) {
      HistogramTimerScope scope(isolate_->counters()->gc_context());
      CollectAllGarbage(kReduceMemoryFootprintMask,
                        "idle notification: contexts disposed");
    } else {
      AdvanceIdleIncrementalMarking(step_size);
      contexts_disposed_ = 0;
    }
    // ...
  }

  // ...
  if (!FLAG_incremental_marking || FLAG_expose_gc || Serializer::enabled()) {
    return IdleGlobalGC();
  }
  if (mark_sweeps_since_idle_round_started_ >= kMaxMarkSweepsInIdleRound) {
    if (EnoughGarbageSinceLastIdleRound()) {
      StartIdleRound();
    } else {
      return true;
    }
  }

  int new_mark_sweeps = ms_count_ - ms_count_at_last_idle_notification_;
  mark_sweeps_since_idle_round_started_ += new_mark_sweeps;
  ms_count_at_last_idle_notification_ = ms_count_;

  if (mark_sweeps_since_idle_round_started_ >= kMaxMarkSweepsInIdleRound) {
    FinishIdleRound();
    return true;
  }

  // ...
  AdvanceIdleIncrementalMarking(step_size);
  return false;
}
bool Heap::IdleGlobalGC() {
  static const int kIdlesBeforeScavenge = 4;
  static const int kIdlesBeforeMarkSweep = 7;
  static const int kIdlesBeforeMarkCompact = 8;
  static const int kMaxIdleCount = kIdlesBeforeMarkCompact + 1;
  static const unsigned int kGCsBetweenCleanup = 4;

  if (!last_idle_notification_gc_count_init_) {
    last_idle_notification_gc_count_ = gc_count_;
    last_idle_notification_gc_count_init_ = true;
  }

  bool uncommit = true;
  bool finished = false;

  // Reset the number of idle notifications received when a number of
  // GCs have taken place. This allows another round of cleanup based
  // on idle notifications if enough work has been carried out to
  // provoke a number of garbage collections.
  if (gc_count_ - last_idle_notification_gc_count_ < kGCsBetweenCleanup) {
    number_idle_notifications_ =
        Min(number_idle_notifications_ + 1, kMaxIdleCount);
  } else {
    number_idle_notifications_ = 0;
    last_idle_notification_gc_count_ = gc_count_;
  }

  if (number_idle_notifications_ == kIdlesBeforeScavenge) {
    CollectGarbage(NEW_SPACE, "idle notification");
    new_space_.Shrink();
    last_idle_notification_gc_count_ = gc_count_;
  } else if (number_idle_notifications_ == kIdlesBeforeMarkSweep) {
    // Before doing a mark-sweep, clear the compilation cache to avoid
    // hanging on to source code and generated code for cached functions.
    isolate_->compilation_cache()->Clear();
    // ...
    last_idle_notification_gc_count_ = gc_count_;
  } else if (number_idle_notifications_ == kIdlesBeforeMarkCompact) {
    // ...
    last_idle_notification_gc_count_ = gc_count_;
    number_idle_notifications_ = 0;
    finished = true;
  } else if (number_idle_notifications_ > kIdlesBeforeMarkCompact) {
    // If more than kIdlesBeforeMarkCompact notifications arrive, no further
    // cleanup is expected to pay off.
    // ...
  }
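// The constants above form an escalation ladder: the 4th consecutive idle
// notification triggers a cheap scavenge, the 7th a mark-sweep that also
// drops the compilation cache, and the 8th a final collection after which
// the counter resets and "finished" is reported so the embedder can stop
// sending notifications. kGCsBetweenCleanup keeps the ladder from re-arming
// while the mutator is still allocating heavily.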
#ifdef DEBUG

void Heap::Print() {
  if (!HasBeenSetUp()) return;
  isolate()->PrintStack();
  AllSpaces spaces;
  for (Space* space = spaces.next(); space != NULL; space = spaces.next())
    space->Print();
}
void Heap::ReportCodeStatistics(const char* title) {
  PrintF(">>>>>> Code Stats (%s) >>>>>>\n", title);
  PagedSpace::ResetCodeStatistics();
  // We do not look for code in new space, map space, or old space.  If code
  // somehow ends up in those spaces, we would miss it here.
  code_space_->CollectCodeStatistics();
  lo_space_->CollectCodeStatistics();
  PagedSpace::ReportCodeStatistics();
}
void Heap::ReportHeapStatistics(const char* title) {
  PrintF(">>>>>> =============== %s (%d) =============== >>>>>>\n",
         title, gc_count_);
  PrintF("old_gen_promotion_limit_ %" V8_PTR_PREFIX "d\n",
         old_gen_promotion_limit_);
  PrintF("old_gen_allocation_limit_ %" V8_PTR_PREFIX "d\n",
         old_gen_allocation_limit_);
  PrintF("old_gen_limit_factor_ %d\n", old_gen_limit_factor_);

  // ...
  PrintF("Heap statistics : ");
  isolate_->memory_allocator()->ReportStatistics();
  PrintF("To space : ");
  new_space_.ReportStatistics();
  PrintF("Old pointer space : ");
  old_pointer_space_->ReportStatistics();
  PrintF("Old data space : ");
  old_data_space_->ReportStatistics();
  PrintF("Code space : ");
  code_space_->ReportStatistics();
  PrintF("Map space : ");
  map_space_->ReportStatistics();
  PrintF("Cell space : ");
  cell_space_->ReportStatistics();
  PrintF("Large object space : ");
  lo_space_->ReportStatistics();
  PrintF(">>>>>> ========================================= >>>>>>\n");
}
// From Heap::Contains: an address is in the heap if any space contains it.
  return HasBeenSetUp() &&
      (new_space_.ToSpaceContains(addr) ||
       old_pointer_space_->Contains(addr) ||
       // ... (the remaining spaces are checked the same way) ...
       lo_space_->SlowContains(addr));

// From Heap::InSpace(Address addr, AllocationSpace space):
    case OLD_POINTER_SPACE:
      return old_pointer_space_->Contains(addr);
    case OLD_DATA_SPACE:
      return old_data_space_->Contains(addr);
    case CODE_SPACE:
      return code_space_->Contains(addr);
    case MAP_SPACE:
      return map_space_->Contains(addr);
    case CELL_SPACE:
      return cell_space_->Contains(addr);
void Heap::Verify() {
  ASSERT(HasBeenSetUp());

  store_buffer()->Verify();

  VerifyPointersVisitor visitor;
  IterateRoots(&visitor, VISIT_ONLY_STRONG);

  new_space_.Verify();

  old_pointer_space_->Verify(&visitor);
  map_space_->Verify(&visitor);

  VerifyPointersVisitor no_dirty_regions_visitor;
  old_data_space_->Verify(&no_dirty_regions_visitor);
  code_space_->Verify(&no_dirty_regions_visitor);
  cell_space_->Verify(&no_dirty_regions_visitor);

  lo_space_->Verify();

  VerifyNoAccessorPairSharing();
}
void Heap::VerifyNoAccessorPairSharing() {
  // Verification is done in two phases: first all AccessorPairs are marked
  // (checking that only unmarked pairs are encountered), then all marks are
  // cleared again, restoring the initial state. The Smi tag of the getter
  // serves as the mark bit, because a getter can never be a Smi.
  for (int phase = 0; phase < 2; phase++) {
    HeapObjectIterator iter(map_space());
    for (HeapObject* obj = iter.Next(); obj != NULL; obj = iter.Next()) {
      if (obj->IsMap()) {
        DescriptorArray* descs = Map::cast(obj)->instance_descriptors();
        for (int i = 0; i < descs->number_of_descriptors(); i++) {
          if (descs->GetType(i) == CALLBACKS &&
              descs->GetValue(i)->IsAccessorPair()) {
            AccessorPair* accessors = AccessorPair::cast(descs->GetValue(i));
            uintptr_t before = reinterpret_cast<intptr_t>(accessors->getter());
            uintptr_t after = (phase == 0) ?
                ((before & ~kSmiTagMask) | kSmiTag) :
                ((before & ~kSmiTag) | kSmiTagMask);
            CHECK(before != after);
            accessors->set_getter(reinterpret_cast<Object*>(after));
          }
        }
      }
    }
  }
}
// The symbol-table lookups all follow the same pattern: the lookup may grow
// the table, so the (possibly new) table is re-installed in the roots array.
  { MaybeObject* maybe_new_table =
        symbol_table()->LookupSymbol(string, &symbol);
    if (!maybe_new_table->ToObject(&new_table)) return maybe_new_table;
  }
  // Can't use set_symbol_table because SymbolTable::cast knows that
  // SymbolTable is a singleton and checks for identity.
  roots_[kSymbolTableRootIndex] = new_table;

// Heap::LookupAsciiSymbol(Vector<const char> string):
  { MaybeObject* maybe_new_table =
        symbol_table()->LookupAsciiSymbol(string, &symbol);
    if (!maybe_new_table->ToObject(&new_table)) return maybe_new_table;
  }

// Heap::LookupAsciiSymbol(Handle<SeqAsciiString> string, int from, int len):
  { MaybeObject* maybe_new_table =
        symbol_table()->LookupSubStringAsciiSymbol(string,
                                                   from,
                                                   length,
                                                   &symbol);
    if (!maybe_new_table->ToObject(&new_table)) return maybe_new_table;
  }

// Heap::LookupTwoByteSymbol(Vector<const uc16> string):
  { MaybeObject* maybe_new_table =
        symbol_table()->LookupTwoByteSymbol(string, &symbol);
    if (!maybe_new_table->ToObject(&new_table)) return maybe_new_table;
  }

// Heap::LookupSymbol(String* string):
  if (string->IsSymbol()) return string;
  { MaybeObject* maybe_new_table =
        symbol_table()->LookupString(string, &symbol);
    if (!maybe_new_table->ToObject(&new_table)) return maybe_new_table;
  }
bool Heap::LookupSymbolIfExists(String* string, String** symbol) {
  if (string->IsSymbol()) {
    *symbol = string;
    return true;
  }
  return symbol_table()->LookupSymbolIfExists(string, symbol);
}
void Heap::ZapFromSpace() {
  NewSpacePageIterator it(new_space_.FromSpaceStart(),
                          new_space_.FromSpaceEnd());
  while (it.has_next()) {
    // ... (overwrite every word of the page with the zap value) ...
  }
}


void Heap::IterateAndMarkPointersToFromSpace(Address start,
                                             Address end,
                                             ObjectSlotCallback callback) {
  Address slot_address = start;

  // We are not collecting slots on new space objects during mutation, so we
  // have to scan for pointers to evacuation candidates when promoting
  // objects. But slots should only be recorded for black objects: grey
  // objects' slots would be rescanned, and white objects might not survive
  // the collection at all.
  bool record_slots = false;
  if (incremental_marking()->IsCompacting()) {
    MarkBit mark_bit = Marking::MarkBitFrom(HeapObject::FromAddress(start));
    record_slots = Marking::IsBlack(mark_bit);
  }

  while (slot_address < end) {
    Object** slot = reinterpret_cast<Object**>(slot_address);
    Object* object = *slot;
    if (object->IsHeapObject()) {
      if (Heap::InFromSpace(object)) {
        callback(reinterpret_cast<HeapObject**>(slot),
                 HeapObject::cast(object));
        Object* new_object = *slot;
        if (InNewSpace(new_object)) {
          // ...
          store_buffer_.EnterDirectlyIntoStoreBuffer(
              reinterpret_cast<Address>(slot));
        }
        SLOW_ASSERT(!MarkCompactCollector::IsOnEvacuationCandidate(new_object));
      } else if (record_slots &&
                 MarkCompactCollector::IsOnEvacuationCandidate(object)) {
        mark_compact_collector()->RecordSlot(slot, slot, object);
      }
    }
    slot_address += kPointerSize;
  }
}
#ifdef DEBUG
typedef bool (*CheckStoreBufferFilter)(Object** addr);


bool IsAMapPointerAddress(Object** addr) {
  uintptr_t a = reinterpret_cast<uintptr_t>(addr);
  int mod = a % Map::kSize;
  return mod >= Map::kPointerFieldsBeginOffset &&
         mod < Map::kPointerFieldsEndOffset;
}


bool EverythingsAPointer(Object** addr) {
  return true;
}


static void CheckStoreBuffer(Heap* heap,
                             Object** current,
                             Object** limit,
                             Object**** store_buffer_position,
                             Object*** store_buffer_top,
                             CheckStoreBufferFilter filter,
                             Address special_garbage_start,
                             Address special_garbage_end) {
  Map* free_space_map = heap->free_space_map();
  for ( ; current < limit; current++) {
    Object* o = *current;
    Address current_address = reinterpret_cast<Address>(current);
    // Skip free space.
    if (o == free_space_map) {
      FreeSpace* free_space =
          FreeSpace::cast(HeapObject::FromAddress(current_address));
      int skip = free_space->Size();
      ASSERT(current_address + skip <= reinterpret_cast<Address>(limit));
      ASSERT(skip > 0);
      current_address += skip - kPointerSize;
      current = reinterpret_cast<Object**>(current_address);
      continue;
    }
    // Skip the current linear allocation space between top and limit which is
    // unmarked with the free space map, but can contain junk.
    if (current_address == special_garbage_start &&
        special_garbage_end != special_garbage_start) {
      current_address = special_garbage_end - kPointerSize;
      current = reinterpret_cast<Object**>(current_address);
      continue;
    }
    if (!(*filter)(current)) continue;
    ASSERT(current_address < special_garbage_start ||
           current_address >= special_garbage_end);
    // We have to check that the pointer does not point into new space
    // without trying to cast it to a heap object, since the hash field of
    // a string can contain values like 1 and 3 which look like tagged null
    // pointers.
    if (!heap->InNewSpace(o)) continue;
    while (**store_buffer_position < current &&
           *store_buffer_position < store_buffer_top) {
      (*store_buffer_position)++;
    }
    if (**store_buffer_position != current ||
        *store_buffer_position == store_buffer_top) {
      Object** obj_start = current;
      while (!(*obj_start)->IsMap()) obj_start--;
      UNREACHABLE();
    }
  }
}
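// Invariant being checked: every old-space slot that points into new space
// must appear in the (sorted, deduplicated) store buffer. The scan walks a
// page word by word, skipping free-list nodes and the current
// linear-allocation gap, and advances a cursor into the store buffer in
// lock-step; a new-space pointer with no matching store-buffer entry walks
// backwards to the containing object's map (for debugging) and then hits
// UNREACHABLE().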
// Check that the store buffer contains all intergenerational pointers by
// scanning a page and ensuring that all pointers to young space are in the
// store buffer.
void Heap::OldPointerSpaceCheckStoreBuffer() {
  OldSpace* space = old_pointer_space();
  PageIterator pages(space);

  store_buffer()->SortUniq();

  while (pages.has_next()) {
    Page* page = pages.next();
    Object** current = reinterpret_cast<Object**>(page->area_start());

    Address end = page->area_end();

    Object*** store_buffer_position = store_buffer()->Start();
    Object*** store_buffer_top = store_buffer()->Top();

    Object** limit = reinterpret_cast<Object**>(end);
    CheckStoreBuffer(this,
                     current,
                     limit,
                     &store_buffer_position,
                     store_buffer_top,
                     &EverythingsAPointer,
                     space->top(),
                     space->limit());
  }
}
void Heap::MapSpaceCheckStoreBuffer() {
  MapSpace* space = map_space();
  PageIterator pages(space);

  store_buffer()->SortUniq();

  while (pages.has_next()) {
    Page* page = pages.next();
    Object** current = reinterpret_cast<Object**>(page->area_start());

    Address end = page->area_end();

    Object*** store_buffer_position = store_buffer()->Start();
    Object*** store_buffer_top = store_buffer()->Top();

    Object** limit = reinterpret_cast<Object**>(end);
    CheckStoreBuffer(this,
                     current,
                     limit,
                     &store_buffer_position,
                     store_buffer_top,
                     &IsAMapPointerAddress,
                     space->top(),
                     space->limit());
  }
}
void Heap::LargeObjectSpaceCheckStoreBuffer() {
  LargeObjectIterator it(lo_space());
  for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) {
    // We only have code, sequential strings, or fixed arrays in large
    // object space, and only fixed arrays can possibly contain pointers to
    // the young generation.
    if (object->IsFixedArray()) {
      Object*** store_buffer_position = store_buffer()->Start();
      Object*** store_buffer_top = store_buffer()->Top();
      Object** current = reinterpret_cast<Object**>(object->address());
      Object** limit =
          reinterpret_cast<Object**>(object->address() + object->Size());
      CheckStoreBuffer(this,
                       current,
                       limit,
                       &store_buffer_position,
                       store_buffer_top,
                       &EverythingsAPointer,
                       NULL,
                       NULL);
    }
  }
}
#endif  // DEBUG
void Heap::IterateWeakRoots(ObjectVisitor* v, VisitMode mode) {
  v->VisitPointer(reinterpret_cast<Object**>(&roots_[kSymbolTableRootIndex]));
  v->Synchronize(VisitorSynchronization::kSymbolTable);
  if (mode != VISIT_ALL_IN_SCAVENGE &&
      mode != VISIT_ALL_IN_SWEEP_NEWSPACE) {
    // Scavenge collections have special processing for this.
    external_string_table_.Iterate(v);
  }
  v->Synchronize(VisitorSynchronization::kExternalStringsTable);
}


void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) {
  v->VisitPointers(&roots_[0], &roots_[kStrongRootListLength]);
  v->Synchronize(VisitorSynchronization::kStrongRootList);

  v->VisitPointer(BitCast<Object**>(&hidden_symbol_));
  v->Synchronize(VisitorSynchronization::kSymbol);

  isolate_->bootstrapper()->Iterate(v);
  v->Synchronize(VisitorSynchronization::kBootstrapper);
  isolate_->Iterate(v);
  v->Synchronize(VisitorSynchronization::kTop);
  Relocatable::Iterate(v);
  v->Synchronize(VisitorSynchronization::kRelocatable);

#ifdef ENABLE_DEBUGGER_SUPPORT
  isolate_->debug()->Iterate(v);
  // ...
#endif
  v->Synchronize(VisitorSynchronization::kDebug);
  isolate_->compilation_cache()->Iterate(v);
  v->Synchronize(VisitorSynchronization::kCompilationCache);

  // Iterate over local handles in handle scopes.
  isolate_->handle_scope_implementer()->Iterate(v);
  v->Synchronize(VisitorSynchronization::kHandleScope);

  // Iterate over the builtin code objects and code stubs in the heap. Note
  // that it is not necessary to iterate over code objects on scavenges.
  if (mode != VISIT_ALL_IN_SCAVENGE) {
    isolate_->builtins()->IterateBuiltins(v);
  }
  v->Synchronize(VisitorSynchronization::kBuiltins);

  // ... (global handles, depending on the visit mode) ...
  v->Synchronize(VisitorSynchronization::kGlobalHandles);

  // Iterate over pointers being held by inactive threads.
  isolate_->thread_manager()->Iterate(v);
  v->Synchronize(VisitorSynchronization::kThreadManager);
  // ...
}
bool Heap::ConfigureHeap(int max_semispace_size,
                         intptr_t max_old_gen_size,
                         intptr_t max_executable_size) {
  if (HasBeenSetUp()) return false;

  if (FLAG_stress_compaction) {
    // This will cause more frequent GCs when stressing.
    max_semispace_size_ = Page::kPageSize;
  }

  if (max_semispace_size > 0) {
    if (max_semispace_size < Page::kPageSize) {
      max_semispace_size = Page::kPageSize;
      if (FLAG_trace_gc) {
        PrintF("Max semispace size cannot be less than %dkbytes\n",
               Page::kPageSize >> 10);
      }
    }
    max_semispace_size_ = max_semispace_size;
  }

  if (Snapshot::IsEnabled()) {
    // If we are using a snapshot we always reserve the default amount of
    // memory for each semispace, because code in the snapshot relies on the
    // size and alignment of new space.
    if (max_semispace_size_ > reserved_semispace_size_) {
      max_semispace_size_ = reserved_semispace_size_;
      if (FLAG_trace_gc) {
        PrintF("Max semispace size cannot be more than %dkbytes\n",
               reserved_semispace_size_ >> 10);
      }
    }
  } else {
    // If we are not using snapshots we reserve space for the actual
    // max semispace size.
    reserved_semispace_size_ = max_semispace_size_;
  }

  if (max_old_gen_size > 0) max_old_generation_size_ = max_old_gen_size;
  if (max_executable_size > 0) {
    max_executable_size_ = RoundUp(max_executable_size, Page::kPageSize);
  }

  // The max executable size must be less than or equal to the max old
  // generation size.
  if (max_executable_size_ > max_old_generation_size_) {
    max_executable_size_ = max_old_generation_size_;
  }

  // The new space size must be a power of two to support single-bit testing
  // for containment.
  max_semispace_size_ = RoundUpToPowerOf2(max_semispace_size_);
  reserved_semispace_size_ = RoundUpToPowerOf2(reserved_semispace_size_);
  initial_semispace_size_ = Min(initial_semispace_size_, max_semispace_size_);
  external_allocation_limit_ = 10 * max_semispace_size_;

  // The old generation is paged and needs at least one page for each space.
  int paged_space_count = LAST_PAGED_SPACE - FIRST_PAGED_SPACE + 1;
  max_old_generation_size_ =
      Max(static_cast<intptr_t>(paged_space_count * Page::kPageSize),
          RoundUp(max_old_generation_size_,
                  Page::kPageSize));

  configured_ = true;
  return true;
}


bool Heap::ConfigureHeapDefault() {
  return ConfigureHeap(static_cast<intptr_t>(FLAG_max_new_space_size / 2) * KB,
                       static_cast<intptr_t>(FLAG_max_old_space_size) * MB,
                       static_cast<intptr_t>(FLAG_max_executable_size) * MB);
}
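// ConfigureHeapDefault converts the command-line flags into bytes before
// delegating: --max_new_space_size is expressed in KB and covers *both*
// semispaces (hence the division by two), while the old-space and executable
// limits are in MB. For example, --max_new_space_size=2048 yields
// 2048 / 2 * KB = 1 MB per semispace.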
// From Heap::RecordStats: optionally walk the whole heap to break the
// statistics down by instance type.
  if (take_snapshot) {
    HeapIterator iterator;
    for (HeapObject* obj = iterator.next();
         obj != NULL;
         obj = iterator.next()) {
      // ... (per-instance-type object counts and sizes) ...
    }
  }
intptr_t Heap::PromotedExternalMemorySize() {
  if (amount_of_external_allocated_memory_
      <= amount_of_external_allocated_memory_at_last_global_gc_) return 0;
  return amount_of_external_allocated_memory_
      - amount_of_external_allocated_memory_at_last_global_gc_;
}
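// The difference is clamped at zero: if the embedder has freed external
// memory since the last global GC, the amount "promoted" since then is
// reported as none rather than as a negative quantity.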
#ifdef DEBUG

static const int kMarkTag = 2;


class HeapDebugUtils {
 public:
  explicit HeapDebugUtils(Heap* heap)
    : search_for_any_global_(false),
      search_target_(NULL),
      found_target_(false),
      object_stack_(20),
      heap_(heap) {
  }

  class MarkObjectVisitor : public ObjectVisitor {
   public:
    explicit MarkObjectVisitor(HeapDebugUtils* utils) : utils_(utils) { }

    void VisitPointers(Object** start, Object** end) {
      // Visit all HeapObject pointers in [start, end).
      for (Object** p = start; p < end; p++) {
        if ((*p)->IsHeapObject())
          utils_->MarkObjectRecursively(p);
      }
    }

    HeapDebugUtils* utils_;
  };

  void MarkObjectRecursively(Object** p) {
    if (!(*p)->IsHeapObject()) return;

    HeapObject* obj = HeapObject::cast(*p);

    Object* map = obj->map();

    if (!map->IsHeapObject()) return;  // visited before

    if (found_target_) return;  // stop if target found
    object_stack_.Add(obj);
    if ((search_for_any_global_ && obj->IsJSGlobalObject()) ||
        (!search_for_any_global_ && (obj == search_target_))) {
      found_target_ = true;
      return;
    }

    // Not visited yet: mark the object by tagging its map pointer.
    Map* map_p = reinterpret_cast<Map*>(HeapObject::cast(map));

    Address map_addr = map_p->address();

    obj->set_map_no_write_barrier(reinterpret_cast<Map*>(map_addr + kMarkTag));

    MarkObjectRecursively(&map);

    MarkObjectVisitor mark_visitor(this);

    obj->IterateBody(map_p->instance_type(), obj->SizeFromMap(map_p),
                     &mark_visitor);

    if (!found_target_)  // don't pop if found the target
      object_stack_.RemoveLast();
  }

  class UnmarkObjectVisitor : public ObjectVisitor {
   public:
    explicit UnmarkObjectVisitor(HeapDebugUtils* utils) : utils_(utils) { }

    void VisitPointers(Object** start, Object** end) {
      // Visit all HeapObject pointers in [start, end).
      for (Object** p = start; p < end; p++) {
        if ((*p)->IsHeapObject())
          utils_->UnmarkObjectRecursively(p);
      }
    }

    HeapDebugUtils* utils_;
  };

  void UnmarkObjectRecursively(Object** p) {
    if (!(*p)->IsHeapObject()) return;

    HeapObject* obj = HeapObject::cast(*p);

    Object* map = obj->map();

    if (map->IsHeapObject()) return;  // unmarked already

    Address map_addr = reinterpret_cast<Address>(map);

    map_addr -= kMarkTag;

    ASSERT_TAG_ALIGNED(map_addr);

    HeapObject* map_p = HeapObject::FromAddress(map_addr);

    obj->set_map_no_write_barrier(reinterpret_cast<Map*>(map_p));

    UnmarkObjectRecursively(reinterpret_cast<Object**>(&map_p));

    UnmarkObjectVisitor unmark_visitor(this);

    obj->IterateBody(Map::cast(map_p)->instance_type(),
                     obj->SizeFromMap(Map::cast(map_p)),
                     &unmark_visitor);
  }

  void MarkRootObjectRecursively(Object** root) {
    if (search_for_any_global_) {
      ASSERT(search_target_ == NULL);
    } else {
      ASSERT(search_target_->IsHeapObject());
    }
    found_target_ = false;
    object_stack_.Clear();

    MarkObjectRecursively(root);
    UnmarkObjectRecursively(root);

    if (found_target_) {
      PrintF("=====================================\n");
      PrintF("====        Path to object       ====\n");
      PrintF("=====================================\n\n");

      ASSERT(!object_stack_.is_empty());
      for (int i = 0; i < object_stack_.length(); i++) {
        if (i > 0) PrintF("\n     |\n     |\n     V\n\n");
        Object* obj = object_stack_[i];
        obj->Print();
      }
      PrintF("=====================================\n");
    }
  }

  // Helper class for visiting HeapObjects recursively.
  class MarkRootVisitor: public ObjectVisitor {
   public:
    explicit MarkRootVisitor(HeapDebugUtils* utils) : utils_(utils) { }

    void VisitPointers(Object** start, Object** end) {
      // Visit all HeapObject pointers in [start, end).
      for (Object** p = start; p < end; p++) {
        if ((*p)->IsHeapObject())
          utils_->MarkRootObjectRecursively(p);
      }
    }

    HeapDebugUtils* utils_;
  };

  bool search_for_any_global_;
  Object* search_target_;
  bool found_target_;
  List<Object*> object_stack_;
  Heap* heap_;

  friend class Heap;
};

#endif
static void InitializeGCOnce() {
  InitializeScavengingVisitorsTables();
  NewSpaceScavenger::Initialize();
  MarkCompactCollector::Initialize();
}


bool Heap::SetUp(bool create_heap_objects) {
#ifdef DEBUG
  allocation_timeout_ = FLAG_gc_interval;
  debug_utils_ = new HeapDebugUtils(this);
#endif

  // Initialize heap spaces and initial maps and objects. Whenever something
  // goes wrong, just return false. The caller should check the results and
  // call Heap::TearDown() to release allocated memory.
  //
  // If the heap is not yet configured (e.g. through the API), configure it
  // from the flags; otherwise keep the existing configuration.
  if (!configured_) {
    if (!ConfigureHeapDefault()) return false;
  }

  CallOnce(&initialize_gc_once, &InitializeGCOnce);

  MarkMapPointersAsEncoded(false);
  // Set up new space.
  if (!new_space_.SetUp(reserved_semispace_size_, max_semispace_size_)) {
    return false;
  }

  // Initialize old pointer space.
  old_pointer_space_ =
      new OldSpace(this,
                   max_old_generation_size_,
                   OLD_POINTER_SPACE,
                   NOT_EXECUTABLE);
  if (old_pointer_space_ == NULL) return false;
  if (!old_pointer_space_->SetUp()) return false;

  // Initialize old data space.
  old_data_space_ =
      new OldSpace(this,
                   max_old_generation_size_,
                   OLD_DATA_SPACE,
                   NOT_EXECUTABLE);
  if (old_data_space_ == NULL) return false;
  if (!old_data_space_->SetUp()) return false;

  // Initialize the code space; it needs executable memory. If a code range
  // was requested, reserve it first so code objects can reach each other
  // with near calls.
  if (code_range_size_ > 0) {
    if (!isolate_->code_range()->SetUp(code_range_size_)) {
      return false;
    }
  }

  code_space_ =
      new OldSpace(this, max_old_generation_size_, CODE_SPACE, EXECUTABLE);
  if (code_space_ == NULL) return false;
  if (!code_space_->SetUp()) return false;

  // Initialize map space.
  map_space_ = new MapSpace(this, max_old_generation_size_, MAP_SPACE);
  if (map_space_ == NULL) return false;
  if (!map_space_->SetUp()) return false;

  // Initialize global property cell space.
  cell_space_ = new CellSpace(this, max_old_generation_size_, CELL_SPACE);
  if (cell_space_ == NULL) return false;
  if (!cell_space_->SetUp()) return false;

  // The large object space may contain code or data; the memory is set to
  // be non-executable here for safety, and enabled explicitly when large
  // code objects are allocated.
  lo_space_ = new LargeObjectSpace(this, max_old_generation_size_, LO_SPACE);
  if (lo_space_ == NULL) return false;
  if (!lo_space_->SetUp()) return false;

  // Set up the seed that is used to randomize the string hash function.
  ASSERT(hash_seed() == 0);
  if (FLAG_randomize_hashes) {
    if (FLAG_hash_seed == 0) {
      set_hash_seed(
          Smi::FromInt(V8::RandomPrivate(isolate()) & 0x3fffffff));
    } else {
      set_hash_seed(Smi::FromInt(FLAG_hash_seed));
    }
  }

  if (create_heap_objects) {
    // Create initial maps.
    if (!CreateInitialMaps()) return false;
    CreateApiObjects();

    // Create initial objects.
    if (!CreateInitialObjects()) return false;

    global_contexts_list_ = undefined_value();
  }

  LOG(isolate_, IntPtrTEvent("heap-capacity", Capacity()));
  LOG(isolate_, IntPtrTEvent("heap-available", Available()));

  store_buffer()->SetUp();

  return true;
}


void Heap::SetStackLimits() {
  ASSERT(isolate_ != NULL);
  ASSERT(isolate_ == isolate());
  // On 64 bit machines, pointers are generally out of range of Smis.  We
  // write something that looks like an out of range Smi to the GC.

  // Set up the special root array entries containing the stack limits.
  // These are actually addresses, but the tag makes the GC ignore it.
  roots_[kStackLimitRootIndex] =
      reinterpret_cast<Object*>(
          (isolate_->stack_guard()->jslimit() & ~kSmiTagMask) | kSmiTag);
  roots_[kRealStackLimitRootIndex] =
      reinterpret_cast<Object*>(
          (isolate_->stack_guard()->real_jslimit() & ~kSmiTagMask) | kSmiTag);
}
void Heap::TearDown() {
#ifdef DEBUG
  if (FLAG_verify_heap) {
    Verify();
  }
#endif

  if (FLAG_print_cumulative_gc_stat) {
    PrintF("\n\n");
    PrintF("gc_count=%d ", gc_count_);
    PrintF("mark_sweep_count=%d ", ms_count_);
    // ... (max pause, min time in mutator, max alive after GC) ...
    PrintF("\n\n");
  }

  isolate_->global_handles()->TearDown();

  external_string_table_.TearDown();

  new_space_.TearDown();
  if (old_pointer_space_ != NULL) {
    old_pointer_space_->TearDown();
    delete old_pointer_space_;
    old_pointer_space_ = NULL;
  }

  if (old_data_space_ != NULL) {
    old_data_space_->TearDown();
    delete old_data_space_;
    old_data_space_ = NULL;
  }

  if (code_space_ != NULL) {
    code_space_->TearDown();
    delete code_space_;
    code_space_ = NULL;
  }

  if (map_space_ != NULL) {
    map_space_->TearDown();
    delete map_space_;
    map_space_ = NULL;
  }

  if (cell_space_ != NULL) {
    cell_space_->TearDown();
    delete cell_space_;
    cell_space_ = NULL;
  }

  if (lo_space_ != NULL) {
    lo_space_->TearDown();
    delete lo_space_;
    lo_space_ = NULL;
  }

  // ...

#ifdef DEBUG
  delete debug_utils_;
  debug_utils_ = NULL;
#endif
}
// Release unused pages in all paged spaces.
  PagedSpaces spaces;
  for (PagedSpace* space = spaces.next();
       space != NULL;
       space = spaces.next()) {
    space->ReleaseAllUnusedPages();
  }
void Heap::AddGCPrologueCallback(GCPrologueCallback callback, GCType gc_type) {
  ASSERT(callback != NULL);
  GCPrologueCallbackPair pair(callback, gc_type);
  ASSERT(!gc_prologue_callbacks_.Contains(pair));
  return gc_prologue_callbacks_.Add(pair);
}


void Heap::RemoveGCPrologueCallback(GCPrologueCallback callback) {
  ASSERT(callback != NULL);
  for (int i = 0; i < gc_prologue_callbacks_.length(); ++i) {
    if (gc_prologue_callbacks_[i].callback == callback) {
      gc_prologue_callbacks_.Remove(i);
      return;
    }
  }
  UNREACHABLE();
}


void Heap::AddGCEpilogueCallback(GCEpilogueCallback callback, GCType gc_type) {
  ASSERT(callback != NULL);
  GCEpilogueCallbackPair pair(callback, gc_type);
  ASSERT(!gc_epilogue_callbacks_.Contains(pair));
  return gc_epilogue_callbacks_.Add(pair);
}


void Heap::RemoveGCEpilogueCallback(GCEpilogueCallback callback) {
  ASSERT(callback != NULL);
  for (int i = 0; i < gc_epilogue_callbacks_.length(); ++i) {
    if (gc_epilogue_callbacks_[i].callback == callback) {
      gc_epilogue_callbacks_.Remove(i);
      return;
    }
  }
  UNREACHABLE();
}
#ifdef DEBUG

class PrintHandleVisitor: public ObjectVisitor {
 public:
  void VisitPointers(Object** start, Object** end) {
    for (Object** p = start; p < end; p++)
      PrintF("  handle %p to %p\n",
             reinterpret_cast<void*>(p),
             reinterpret_cast<void*>(*p));
  }
};

void Heap::PrintHandles() {
  PrintF("Handles:\n");
  PrintHandleVisitor v;
  isolate_->handle_scope_implementer()->Iterate(&v);
}

#endif
Space* AllSpaces::next() {
  switch (counter_++) {
    case NEW_SPACE:
      return HEAP->new_space();
    case OLD_POINTER_SPACE:
      return HEAP->old_pointer_space();
    case OLD_DATA_SPACE:
      return HEAP->old_data_space();
    case CODE_SPACE:
      return HEAP->code_space();
    case MAP_SPACE:
      return HEAP->map_space();
    case CELL_SPACE:
      return HEAP->cell_space();
    case LO_SPACE:
      return HEAP->lo_space();
    default:
      return NULL;
  }
}


PagedSpace* PagedSpaces::next() {
  switch (counter_++) {
    case OLD_POINTER_SPACE:
      return HEAP->old_pointer_space();
    case OLD_DATA_SPACE:
      return HEAP->old_data_space();
    case CODE_SPACE:
      return HEAP->code_space();
    case MAP_SPACE:
      return HEAP->map_space();
    case CELL_SPACE:
      return HEAP->cell_space();
    default:
      return NULL;
  }
}


OldSpace* OldSpaces::next() {
  switch (counter_++) {
    case OLD_POINTER_SPACE:
      return HEAP->old_pointer_space();
    case OLD_DATA_SPACE:
      return HEAP->old_data_space();
    case CODE_SPACE:
      return HEAP->code_space();
    default:
      return NULL;
  }
}
SpaceIterator::SpaceIterator(HeapObjectCallback size_func)
    : current_space_(FIRST_SPACE),
      iterator_(NULL),
      size_func_(size_func) {
}

// ...

ObjectIterator* SpaceIterator::next() {
  if (iterator_ != NULL) {
    delete iterator_;
    iterator_ = NULL;
    // Move to the next space.
    current_space_++;
    if (current_space_ > LAST_SPACE) {
      return NULL;
    }
  }

  // Return iterator for the new current space.
  return CreateIterator();
}


// Create an iterator for the space to iterate.
ObjectIterator* SpaceIterator::CreateIterator() {
  ASSERT(iterator_ == NULL);

  switch (current_space_) {
    // ... (one case per space, constructing the matching object iterator) ...
  }

  return iterator_;
}
class UnreachableObjectsFilter : public HeapObjectsFilter {
 public:
  UnreachableObjectsFilter() {
    MarkReachableObjects();
  }

  ~UnreachableObjectsFilter() {
    Isolate::Current()->heap()->mark_compact_collector()->ClearMarkbits();
  }

  bool SkipObject(HeapObject* object) {
    MarkBit mark_bit = Marking::MarkBitFrom(object);
    return !mark_bit.Get();
  }

 private:
  class MarkingVisitor : public ObjectVisitor {
   public:
    MarkingVisitor() : marking_stack_(10) {}

    void VisitPointers(Object** start, Object** end) {
      for (Object** p = start; p < end; p++) {
        if (!(*p)->IsHeapObject()) continue;
        HeapObject* obj = HeapObject::cast(*p);
        MarkBit mark_bit = Marking::MarkBitFrom(obj);
        if (!mark_bit.Get()) {
          mark_bit.Set();
          marking_stack_.Add(obj);
        }
      }
    }

    void TransitiveClosure() {
      while (!marking_stack_.is_empty()) {
        HeapObject* obj = marking_stack_.RemoveLast();
        obj->Iterate(this);
      }
    }

   private:
    List<HeapObject*> marking_stack_;
  };

  void MarkReachableObjects() {
    Heap* heap = Isolate::Current()->heap();
    MarkingVisitor visitor;
    heap->IterateRoots(&visitor, VISIT_ALL);
    visitor.TransitiveClosure();
  }

  AssertNoAllocation no_alloc;
};
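// The filter above is a classic worklist marking pass: mark everything
// reachable from the roots, then SkipObject() reports exactly the unmarked
// objects. The same idea over a generic graph (standalone illustrative
// sketch, not V8 types):
//
//   #include <unordered_set>
//   #include <vector>
//
//   template <typename Node, typename SuccFn>
//   std::unordered_set<Node*> Reachable(const std::vector<Node*>& roots,
//                                       SuccFn successors) {
//     std::unordered_set<Node*> marked(roots.begin(), roots.end());
//     std::vector<Node*> worklist(roots.begin(), roots.end());
//     while (!worklist.empty()) {
//       Node* n = worklist.back();
//       worklist.pop_back();
//       for (Node* m : successors(n)) {
//         if (marked.insert(m).second) worklist.push_back(m);  // newly seen
//       }
//     }
//     return marked;  // unreachable == !marked.count(obj)
//   }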
HeapIterator::HeapIterator()
    : filtering_(HeapIterator::kNoFiltering),
      filter_(NULL) {
  Init();
}


HeapIterator::HeapIterator(HeapIterator::HeapObjectsFiltering filtering)
    : filtering_(filtering),
      filter_(NULL) {
  Init();
}


HeapIterator::~HeapIterator() {
  Shutdown();
}


void HeapIterator::Init() {
  // Start the iteration.
  space_iterator_ = new SpaceIterator;
  switch (filtering_) {
    case kFilterUnreachable:
      filter_ = new UnreachableObjectsFilter;
      break;
    default:
      break;
  }
  object_iterator_ = space_iterator_->next();
}


void HeapIterator::Shutdown() {
#ifdef DEBUG
  // Assert that in filtering mode we have iterated through all
  // objects. Otherwise, heap will be left in an inconsistent state.
  if (filtering_ != kNoFiltering) {
    ASSERT(object_iterator_ == NULL);
  }
#endif
  // Make sure the last iterator is deallocated.
  delete space_iterator_;
  space_iterator_ = NULL;
  object_iterator_ = NULL;
  delete filter_;
  filter_ = NULL;
}
HeapObject* HeapIterator::next() {
  if (filter_ == NULL) return NextObject();

  HeapObject* obj = NextObject();
  while (obj != NULL && filter_->SkipObject(obj)) obj = NextObject();
  return obj;
}


HeapObject* HeapIterator::NextObject() {
  // No iterator means we are done.
  if (object_iterator_ == NULL) return NULL;

  if (HeapObject* obj = object_iterator_->next_object()) {
    // If the current iterator has more objects we are fine.
    return obj;
  } else {
    // Go through the spaces looking for one that has objects.
    while (space_iterator_->has_next()) {
      object_iterator_ = space_iterator_->next();
      if (HeapObject* obj = object_iterator_->next_object()) {
        return obj;
      }
    }
  }
  // Done with the last space.
  object_iterator_ = NULL;
  return NULL;
}


void HeapIterator::reset() {
  // Restart the iteration.
  Shutdown();
  Init();
}
#if defined(DEBUG) || defined(LIVE_OBJECT_LIST)

Object* const PathTracer::kAnyGlobalObject = reinterpret_cast<Object*>(NULL);


class PathTracer::MarkVisitor: public ObjectVisitor {
 public:
  explicit MarkVisitor(PathTracer* tracer) : tracer_(tracer) {}
  void VisitPointers(Object** start, Object** end) {
    // Scan all HeapObject pointers in [start, end).
    for (Object** p = start; !tracer_->found() && (p < end); p++) {
      if ((*p)->IsHeapObject())
        tracer_->MarkRecursively(p, this);
    }
  }

 private:
  PathTracer* tracer_;
};


class PathTracer::UnmarkVisitor: public ObjectVisitor {
 public:
  explicit UnmarkVisitor(PathTracer* tracer) : tracer_(tracer) {}
  void VisitPointers(Object** start, Object** end) {
    // Scan all HeapObject pointers in [start, end).
    for (Object** p = start; p < end; p++) {
      if ((*p)->IsHeapObject())
        tracer_->UnmarkRecursively(p, this);
    }
  }

 private:
  PathTracer* tracer_;
};
void PathTracer::VisitPointers(Object** start, Object** end) {
  bool done = ((what_to_find_ == FIND_FIRST) && found_target_);
  // Visit all HeapObject pointers in [start, end).
  for (Object** p = start; !done && (p < end); p++) {
    if ((*p)->IsHeapObject()) {
      TracePathFrom(p);
      done = ((what_to_find_ == FIND_FIRST) && found_target_);
    }
  }
}


void PathTracer::Reset() {
  found_target_ = false;
  object_stack_.Clear();
}


void PathTracer::TracePathFrom(Object** root) {
  ASSERT((search_target_ == kAnyGlobalObject) ||
         search_target_->IsHeapObject());
  found_target_in_trace_ = false;
  object_stack_.Clear();

  MarkVisitor mark_visitor(this);
  MarkRecursively(root, &mark_visitor);

  UnmarkVisitor unmark_visitor(this);
  UnmarkRecursively(root, &unmark_visitor);

  ProcessResults();
}
static bool SafeIsGlobalContext(HeapObject* obj) {
  return obj->map() == obj->GetHeap()->raw_unchecked_global_context_map();
}


void PathTracer::MarkRecursively(Object** p, MarkVisitor* mark_visitor) {
  if (!(*p)->IsHeapObject()) return;

  HeapObject* obj = HeapObject::cast(*p);

  Object* map = obj->map();

  if (!map->IsHeapObject()) return;  // visited before

  if (found_target_in_trace_) return;  // stop if target found
  object_stack_.Add(obj);
  if (((search_target_ == kAnyGlobalObject) && obj->IsJSGlobalObject()) ||
      (obj == search_target_)) {
    found_target_in_trace_ = true;
    found_target_ = true;
    return;
  }

  bool is_global_context = SafeIsGlobalContext(obj);

  // Not visited yet: mark the object by tagging its map pointer.
  Map* map_p = reinterpret_cast<Map*>(HeapObject::cast(map));

  Address map_addr = map_p->address();

  obj->set_map_no_write_barrier(reinterpret_cast<Map*>(map_addr + kMarkTag));

  // Scan the object body.
  if (is_global_context && (visit_mode_ == VISIT_ONLY_STRONG)) {
    // This is specialized to scan Contexts properly: only the strong slots.
    Object** start = reinterpret_cast<Object**>(obj->address() +
                                                Context::kHeaderSize);
    Object** end = reinterpret_cast<Object**>(obj->address() +
        Context::kHeaderSize + Context::FIRST_WEAK_SLOT * kPointerSize);
    mark_visitor->VisitPointers(start, end);
  } else {
    obj->IterateBody(map_p->instance_type(),
                     obj->SizeFromMap(map_p),
                     mark_visitor);
  }

  // Scan the map after the body because the body is a lot more interesting
  // when doing leak detection.
  MarkRecursively(&map, mark_visitor);

  if (!found_target_in_trace_)  // don't pop if found the target
    object_stack_.RemoveLast();
}
void PathTracer::UnmarkRecursively(Object** p, UnmarkVisitor* unmark_visitor) {
  if (!(*p)->IsHeapObject()) return;

  HeapObject* obj = HeapObject::cast(*p);

  Object* map = obj->map();

  if (map->IsHeapObject()) return;  // unmarked already

  Address map_addr = reinterpret_cast<Address>(map);

  map_addr -= kMarkTag;

  ASSERT_TAG_ALIGNED(map_addr);

  HeapObject* map_p = HeapObject::FromAddress(map_addr);

  obj->set_map_no_write_barrier(reinterpret_cast<Map*>(map_p));

  UnmarkRecursively(reinterpret_cast<Object**>(&map_p), unmark_visitor);

  obj->IterateBody(Map::cast(map_p)->instance_type(),
                   obj->SizeFromMap(Map::cast(map_p)),
                   unmark_visitor);
}
void PathTracer::ProcessResults() {
  if (found_target_) {
    PrintF("=====================================\n");
    PrintF("====        Path to object       ====\n");
    PrintF("=====================================\n\n");

    ASSERT(!object_stack_.is_empty());
    for (int i = 0; i < object_stack_.length(); i++) {
      if (i > 0) PrintF("\n     |\n     |\n     V\n\n");
      Object* obj = object_stack_[i];
      obj->Print();
    }
    PrintF("=====================================\n");
  }
}
#endif  // DEBUG || LIVE_OBJECT_LIST
#ifdef DEBUG
// Triggers a depth-first traversal of reachable objects from the roots and
// prints a path to a specific heap object, if found.
void Heap::TracePathToObject(Object* target) {
  PathTracer tracer(target, PathTracer::FIND_ALL, VISIT_ALL);
  IterateRoots(&tracer, VISIT_ONLY_STRONG);
}


// Triggers a depth-first traversal of reachable objects from the roots and
// prints a path to any global object. Useful for finding the source of
// leaked global objects.
void Heap::TracePathToGlobal() {
  PathTracer tracer(PathTracer::kAnyGlobalObject,
                    PathTracer::FIND_ALL,
                    VISIT_ALL);
  IterateRoots(&tracer, VISIT_ONLY_STRONG);
}
#endif
static intptr_t CountTotalHolesSize() {
  intptr_t holes_size = 0;
  OldSpaces spaces;
  for (OldSpace* space = spaces.next();
       space != NULL;
       space = spaces.next()) {
    holes_size += space->Waste() + space->Available();
  }
  return holes_size;
}
GCTracer::GCTracer(Heap* heap,
                   const char* gc_reason,
                   const char* collector_reason)
    : start_time_(0.0),
      start_object_size_(0),
      start_memory_size_(0),
      gc_count_(0),
      full_gc_count_(0),
      allocated_since_last_gc_(0),
      spent_in_mutator_(0),
      promoted_objects_size_(0),
      heap_(heap),
      gc_reason_(gc_reason),
      collector_reason_(collector_reason) {
  if (!FLAG_trace_gc && !FLAG_print_cumulative_gc_stat) return;
  start_time_ = OS::TimeCurrentMillis();
  start_object_size_ = heap_->SizeOfObjects();
  start_memory_size_ = heap_->isolate()->memory_allocator()->Size();

  for (int i = 0; i < Scope::kNumberOfScopes; i++) {
    scopes_[i] = 0;
  }

  in_free_list_or_wasted_before_gc_ = CountTotalHolesSize();

  allocated_since_last_gc_ =
      heap_->SizeOfObjects() - heap_->alive_after_last_gc_;

  if (heap_->last_gc_end_timestamp_ > 0) {
    spent_in_mutator_ = Max(start_time_ - heap_->last_gc_end_timestamp_, 0.0);
  }

  steps_count_ = heap_->incremental_marking()->steps_count();
  steps_took_ = heap_->incremental_marking()->steps_took();
  longest_step_ = heap_->incremental_marking()->longest_step();
  steps_count_since_last_gc_ =
      heap_->incremental_marking()->steps_count_since_last_gc();
  steps_took_since_last_gc_ =
      heap_->incremental_marking()->steps_took_since_last_gc();
}
GCTracer::~GCTracer() {
  // Print one trace line iff tracing is enabled.
  if (!FLAG_trace_gc && !FLAG_print_cumulative_gc_stat) return;

  bool first_gc = (heap_->last_gc_end_timestamp_ == 0);

  heap_->alive_after_last_gc_ = heap_->SizeOfObjects();
  heap_->last_gc_end_timestamp_ = OS::TimeCurrentMillis();

  int time = static_cast<int>(heap_->last_gc_end_timestamp_ - start_time_);

  // Update cumulative GC statistics if required.
  if (FLAG_print_cumulative_gc_stat) {
    heap_->max_gc_pause_ = Max(heap_->max_gc_pause_, time);
    heap_->max_alive_after_gc_ = Max(heap_->max_alive_after_gc_,
                                     heap_->alive_after_last_gc_);
    if (!first_gc) {
      heap_->min_in_mutator_ = Min(heap_->min_in_mutator_,
                                   static_cast<int>(spent_in_mutator_));
    }
  }

  PrintF("%8.0f ms: ", heap_->isolate()->time_millis_since_init());

  if (!FLAG_trace_gc_nvp) {
    int external_time = static_cast<int>(scopes_[Scope::EXTERNAL]);

    double end_memory_size_mb =
        static_cast<double>(heap_->isolate()->memory_allocator()->Size()) / MB;

    PrintF("%s %.1f (%.1f) -> %.1f (%.1f) MB, ",
           CollectorString(),
           static_cast<double>(start_object_size_) / MB,
           static_cast<double>(start_memory_size_) / MB,
           SizeOfHeapObjects(),
           end_memory_size_mb);

    if (external_time > 0) PrintF("%d / ", external_time);
    PrintF("%d ms", time);
    if (steps_count_ > 0) {
      if (collector_ == SCAVENGER) {
        PrintF(" (+ %d ms in %d steps since last GC)",
               static_cast<int>(steps_took_since_last_gc_),
               steps_count_since_last_gc_);
      } else {
        PrintF(" (+ %d ms in %d steps since start of marking, "
                   "biggest step %f ms)",
               static_cast<int>(steps_took_),
               steps_count_,
               longest_step_);
      }
    }

    if (gc_reason_ != NULL) {
      PrintF(" [%s]", gc_reason_);
    }

    if (collector_reason_ != NULL) {
      PrintF(" [%s]", collector_reason_);
    }

    PrintF(".\n");
  } else {
    PrintF("pause=%d ", time);
    PrintF("mutator=%d ", static_cast<int>(spent_in_mutator_));

    PrintF("gc=");
    switch (collector_) {
      // ... ("s" for scavenges, "ms" for mark-sweeps) ...
    }
    PrintF(" ");

    PrintF("external=%d ", static_cast<int>(scopes_[Scope::EXTERNAL]));
    PrintF("mark=%d ", static_cast<int>(scopes_[Scope::MC_MARK]));
    PrintF("sweep=%d ", static_cast<int>(scopes_[Scope::MC_SWEEP]));
    PrintF("sweepns=%d ", static_cast<int>(scopes_[Scope::MC_SWEEP_NEWSPACE]));
    PrintF("evacuate=%d ", static_cast<int>(scopes_[Scope::MC_EVACUATE_PAGES]));
    PrintF("new_new=%d ",
           static_cast<int>(scopes_[Scope::MC_UPDATE_NEW_TO_NEW_POINTERS]));
    PrintF("root_new=%d ",
           static_cast<int>(scopes_[Scope::MC_UPDATE_ROOT_TO_NEW_POINTERS]));
    PrintF("old_new=%d ",
           static_cast<int>(scopes_[Scope::MC_UPDATE_OLD_TO_NEW_POINTERS]));
    PrintF("compaction_ptrs=%d ",
           static_cast<int>(scopes_[Scope::MC_UPDATE_POINTERS_TO_EVACUATED]));
    PrintF("intracompaction_ptrs=%d ", static_cast<int>(scopes_[
        Scope::MC_UPDATE_POINTERS_BETWEEN_EVACUATED]));
    PrintF("misc_compaction=%d ",
           static_cast<int>(scopes_[Scope::MC_UPDATE_MISC_POINTERS]));

    // ... (total sizes before/after, allocated and promoted bytes) ...
    PrintF("holes_size_before=%" V8_PTR_PREFIX "d ",
           in_free_list_or_wasted_before_gc_);
    // ...

    if (collector_ == SCAVENGER) {
      PrintF("stepscount=%d ", steps_count_since_last_gc_);
      PrintF("stepstook=%d ", static_cast<int>(steps_took_since_last_gc_));
    } else {
      PrintF("stepscount=%d ", steps_count_);
      PrintF("stepstook=%d ", static_cast<int>(steps_took_));
    }

    PrintF("\n");
  }

  heap_->PrintShortHeapStatistics();
}
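// Two output formats are produced from the same data: the default
// human-readable line, e.g.
//
//   42 ms: Mark-sweep 12.3 (24.0) -> 8.1 (20.0) MB, 35 ms [idle notification]
//
// and, under --trace_gc_nvp, a machine-parsable name=value line
// (pause=35 mutator=120 gc=ms mark=20 sweep=10 ...). The exact fields vary
// with the collector; the numbers shown here are illustrative only.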
const char* GCTracer::CollectorString() {
  switch (collector_) {
    case SCAVENGER:
      return "Scavenge";
    case MARK_COMPACTOR:
      return "Mark-sweep";
  }
  return "Unknown GC";
}
int KeyedLookupCache::Hash(Map* map, String* name) {
  // Uses only lower 32 bits if pointers are larger.
  uintptr_t addr_hash =
      static_cast<uint32_t>(reinterpret_cast<uintptr_t>(map)) >> kMapHashShift;
  return static_cast<uint32_t>((addr_hash ^ name->Hash()) & kCapacityMask);
}
int KeyedLookupCache::Lookup(Map* map, String* name) {
  int index = (Hash(map, name) & kHashMask);
  for (int i = 0; i < kEntriesPerBucket; i++) {
    Key& key = keys_[index + i];
    if ((key.map == map) && key.name->Equals(name)) {
      return field_offsets_[index + i];
    }
  }
  return kNotFound;
}
void KeyedLookupCache::Update(Map* map, String* name, int field_offset) {
  String* symbol;
  if (HEAP->LookupSymbolIfExists(name, &symbol)) {
    int index = (Hash(map, symbol) & kHashMask);
    // After a GC there will be free slots, so we use them in order (this may
    // help to get the most frequently used one in position 0).
    for (int i = 0; i < kEntriesPerBucket; i++) {
      Key& key = keys_[index];
      Object* free_entry_indicator = NULL;
      if (key.map == free_entry_indicator) {
        key.map = map;
        key.name = symbol;
        field_offsets_[index + i] = field_offset;
        return;
      }
    }
    // No free entry found in this bucket, so we move them all down one and
    // put the new entry at position zero.
    for (int i = kEntriesPerBucket - 1; i > 0; i--) {
      Key& key = keys_[index + i];
      Key& key2 = keys_[index + i - 1];
      key = key2;
      field_offsets_[index + i] = field_offsets_[index + i - 1];
    }

    // Write the new first entry.
    Key& key = keys_[index];
    key.map = map;
    key.name = symbol;
    field_offsets_[index] = field_offset;
  }
}
void KeyedLookupCache::Clear() {
  for (int index = 0; index < kLength; index++) keys_[index].map = NULL;
}


void DescriptorLookupCache::Clear() {
  for (int index = 0; index < kLength; index++) keys_[index].array = NULL;
}
#ifdef DEBUG
void Heap::GarbageCollectionGreedyCheck() {
  ASSERT(FLAG_gc_greedy);
  if (isolate_->bootstrapper()->IsActive()) return;
  if (disallow_allocation_failure()) return;
  CollectGarbage(NEW_SPACE);
}
#endif
TranscendentalCache::SubCache::SubCache(Type t)
  : type_(t),
    isolate_(Isolate::Current()) {
  uint32_t in0 = 0xffffffffu;  // Bit-pattern for a NaN that isn't
  uint32_t in1 = 0xffffffffu;  // generated by real arithmetic.
  for (int i = 0; i < kCacheSize; i++) {
    elements_[i].in[0] = in0;
    elements_[i].in[1] = in1;
    elements_[i].output = NULL;
  }
}


void TranscendentalCache::Clear() {
  for (int i = 0; i < kNumberOfCaches; i++) {
    if (caches_[i] != NULL) {
      delete caches_[i];
      caches_[i] = NULL;
    }
  }
}
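// Why 0xffffffff works as the "empty" key: interpreted as the two 32-bit
// halves of a double, it forms a NaN bit pattern that real arithmetic never
// produces as a cache input, so an uninitialized slot can never collide
// with a genuine cached (input, output) pair.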
void ExternalStringTable::CleanUp() {
  int last = 0;
  for (int i = 0; i < new_space_strings_.length(); ++i) {
    if (new_space_strings_[i] == heap_->raw_unchecked_the_hole_value()) {
      continue;
    }
    if (heap_->InNewSpace(new_space_strings_[i])) {
      new_space_strings_[last++] = new_space_strings_[i];
    } else {
      old_space_strings_.Add(new_space_strings_[i]);
    }
  }
  new_space_strings_.Rewind(last);
  last = 0;
  for (int i = 0; i < old_space_strings_.length(); ++i) {
    if (old_space_strings_[i] == heap_->raw_unchecked_the_hole_value()) {
      continue;
    }
    ASSERT(!heap_->InNewSpace(old_space_strings_[i]));
    old_space_strings_[last++] = old_space_strings_[i];
  }
  old_space_strings_.Rewind(last);
  if (FLAG_verify_heap) {
    Verify();
  }
}


void ExternalStringTable::TearDown() {
  new_space_strings_.Free();
  old_space_strings_.Free();
}
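// CleanUp compacts both lists in place: `last` is the write cursor while `i`
// scans, cleared (hole) entries are dropped, strings that have left new
// space migrate to old_space_strings_, and Rewind(last) truncates each list
// to the surviving prefix -- the usual erase-remove pattern, with no
// reallocation.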
void Heap::QueueMemoryChunkForFree(MemoryChunk* chunk) {
  chunk->set_next_chunk(chunks_queued_for_free_);
  chunks_queued_for_free_ = chunk;
}


void Heap::FreeQueuedChunks() {
  if (chunks_queued_for_free_ == NULL) return;
  MemoryChunk* next;
  MemoryChunk* chunk;
  for (chunk = chunks_queued_for_free_; chunk != NULL; chunk = next) {
    next = chunk->next_chunk();
    chunk->SetFlag(MemoryChunk::ABOUT_TO_BE_FREED);

    if (chunk->owner()->identity() == LO_SPACE) {
      // Large chunks are split into kPageSize-aligned fake pieces so that
      // MemoryChunk::FromAnyPointerAddress keeps working while the chunk is
      // detached from the large-object space's page list.
      // ...
      while (inner <= inner_last) {
        // A large chunk's size is always a multiple of the allocation
        // alignment, so there is always room for a fake MemoryChunk header.
        // ...
        // Guard against overflow.
        if (area_end < inner->address()) area_end = chunk_end;
        // ... (initialize size, owner, and flags of each fake piece) ...
      }
    }
  }
  // ...
  for (chunk = chunks_queued_for_free_; chunk != NULL; chunk = next) {
    next = chunk->next_chunk();
    isolate_->memory_allocator()->Free(chunk);
  }
  chunks_queued_for_free_ = NULL;
}


void Heap::RememberUnmappedPage(Address page, bool compacted) {
  uintptr_t p = reinterpret_cast<uintptr_t>(page);
  // Tag the page pointer to make it findable in the dump file.
  if (compacted) {
    p ^= 0xc1ead & (Page::kPageSize - 1);  // Cleared.
  } else {
    p ^= 0x1d1ed & (Page::kPageSize - 1);  // I died.
  }
  remembered_unmapped_pages_[remembered_unmapped_pages_index_] =
      reinterpret_cast<Address>(p);
  remembered_unmapped_pages_index_++;
  remembered_unmapped_pages_index_ %= kRememberedUnmappedPages;
}
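// The XOR tags above fold a recognizable marker into each remembered page
// address (a ring buffer of kRememberedUnmappedPages entries), so that when
// a crash dump shows a stale pointer into an unmapped page, the tag reveals
// whether the page died during compaction ("c1ead", cleared) or ordinary
// unmapping ("1d1ed", died).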
static int SizeOfMarkedObject(HeapObject *object)
static bool IsBlack(MarkBit mark_bit)
void set_length(int value)
virtual bool ReserveSpace(int bytes)
intptr_t OldGenPromotionLimit(intptr_t old_gen_size)
ContextSlotCache * context_slot_cache()
const uint32_t kShortcutTypeTag
void GarbageCollectionEpilogue()
static const int kPointerFieldsEndOffset
MUST_USE_RESULT MaybeObject * CopyCode(Code *code)
void set_elements_kind(ElementsKind elements_kind)
static uchar TrailSurrogate(int char_code)
static const int kMaxLength
Code * builtin(Name name)
TranscendentalCache * transcendental_cache() const
static int NumberOfHandles()
#define SLOW_ASSERT(condition)
const intptr_t kSmiTagMask
static uchar LeadSurrogate(int char_code)
const intptr_t kDoubleAlignmentMask
static const int kCodeEntryOffset
MUST_USE_RESULT MaybeObject * AllocateFixedArray(int length, PretenureFlag pretenure)
bool has_instance_prototype()
static const int kMaxAsciiCharCode
bool Contains(const T &elm) const
bool NextGCIsLikelyToBeFull()
static const int kZeroHash
MUST_USE_RESULT MaybeObject * AllocateExternalStringFromAscii(const ExternalAsciiString::Resource *resource)
MUST_USE_RESULT MaybeObject * AllocateSymbol(Vector< const char > str, int chars, uint32_t hash_field)
int inobject_properties()
void Callback(MemoryChunk *page, StoreBufferEvent event)
#define STRUCT_TABLE_ELEMENT(NAME, Name, name)
MUST_USE_RESULT MaybeObject * CopyFixedDoubleArray(FixedDoubleArray *src)
intptr_t * old_pointer_space_size
MUST_USE_RESULT MaybeObject * AllocateFunctionPrototype(JSFunction *function)
void set(int index, Object *value)
CompilationCache * compilation_cache()
intptr_t * cell_space_size
static const int kMapHashShift
void PrintF(const char *format,...)
void PrintStack(StringStream *accumulator)
#define ASSERT_TAG_ALIGNED(address)
bool OldGenerationPromotionLimitReached()
void set_function_with_prototype(bool value)
bool InNewSpace(Object *object)
static const int kPadStart
static String * cast(Object *obj)
MUST_USE_RESULT MaybeObject * Add(Key key, Object *value, PropertyDetails details)
static const int kArgumentsObjectSize
void SortUnchecked(const WhitenessWitness &)
void IdentifyNewSpaceWeakIndependentHandles(WeakSlotCallbackWithHeap f)
MUST_USE_RESULT MaybeObject * AllocateFunctionContext(int length, JSFunction *function)
MUST_USE_RESULT MaybeObject * Allocate(Map *map, AllocationSpace space)
MUST_USE_RESULT MaybeObject * AllocateSubString(String *buffer, int start, int end, PretenureFlag pretenure=NOT_TENURED)
void(* ObjectSlotCallback)(HeapObject **from, HeapObject *to)
HandleScopeImplementer * handle_scope_implementer()
void set_opt_count(int opt_count)
static DescriptorArray * cast(Object *obj)
static Failure * InternalError()
void IterateWeakRoots(ObjectVisitor *v, VisitMode mode)
bool SkipObject(HeapObject *object)
MUST_USE_RESULT MaybeObject * AllocateRaw(int size_in_bytes)
static int SizeOf(Map *map, HeapObject *object)
void clear_instance_descriptors()
MUST_USE_RESULT MaybeObject * ReinitializeJSGlobalProxy(JSFunction *constructor, JSGlobalProxy *global)
int unused_property_fields()
void set_length(Smi *length)
bool SetUp(const size_t requested_size)
MUST_USE_RESULT MaybeObject * CopyFixedDoubleArrayWithMap(FixedDoubleArray *src, Map *map)
MUST_USE_RESULT MaybeObject * AllocateGlobalObject(JSFunction *constructor)
void Prepare(GCTracer *tracer)
void set_scan_on_scavenge(bool scan)
static Smi * FromInt(int value)
#define LOG(isolate, Call)
MUST_USE_RESULT MaybeObject * AllocateJSFunctionProxy(Object *handler, Object *call_trap, Object *construct_trap, Object *prototype)
void set_second(String *second, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
static Object * GetObjectFromEntryAddress(Address location_of_address)
MUST_USE_RESULT MaybeObject * AllocateJSObject(JSFunction *constructor, PretenureFlag pretenure=NOT_TENURED)
void CompletelyClearInstanceofCache()
V8_DECLARE_ONCE(initialize_gc_once)
static MemoryChunk * FromAddress(Address a)
static MUST_USE_RESULT MaybeObject * Allocate(int at_least_space_for, PretenureFlag pretenure=NOT_TENURED)
void set_ic_age(int count)
void CollectAllGarbage(int flags, const char *gc_reason=NULL)
value format" "after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false, "print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false, "print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false, "report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true, "garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true, "flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true, "use incremental marking") DEFINE_bool(incremental_marking_steps, true, "do incremental marking steps") DEFINE_bool(trace_incremental_marking, false, "trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true, "Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false, "Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true, "use inline caching") DEFINE_bool(native_code_counters, false, "generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false, "Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true, "Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false, "Never perform compaction on full GC-testing only") DEFINE_bool(compact_code_space, true, "Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true, "Flush inline caches prior to mark compact collection and" "flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0, "Default seed for initializing random generator" "(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true, "allows verbose printing") DEFINE_bool(allow_natives_syntax, false, "allow natives syntax") DEFINE_bool(trace_sim, false, "Trace simulator execution") DEFINE_bool(check_icache, false, "Check icache flushes in ARM and MIPS simulator") DEFINE_int(stop_sim_at, 0, "Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8, "Stack alingment in bytes in simulator(4 or 8, 8 is default)") DEFINE_bool(trace_exception, false, "print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false, "preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true, "randomize hashes to avoid predictable hash collisions" "(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0, "Fixed seed to use to hash property keys(0 means random)" "(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false, "activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true, "generate optimized regexp code") DEFINE_bool(testing_bool_flag, true, "testing_bool_flag") DEFINE_int(testing_int_flag, 13, "testing_int_flag") DEFINE_float(testing_float_flag, 2.5, "float-flag") DEFINE_string(testing_string_flag, "Hello, world!", "string-flag") DEFINE_int(testing_prng_seed, 42, "Seed used for threading test randomness") DEFINE_string(testing_serialization_file, "/tmp/serdes", "file in which to serialize heap") DEFINE_bool(help, false, "Print usage message, including flags, on console") DEFINE_bool(dump_counters, false, "Dump counters on exit") DEFINE_string(map_counters, "", "Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT, "Pass all remaining arguments to the script.Alias for\"--\".") DEFINE_bool(debug_compile_events, 
true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information 
(implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#43"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2#define FLAG_MODE_DEFINE_DEFAULTS#1"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flag-definitions.h"1#define FLAG_FULL(ftype, ctype, nam, def, cmt)#define FLAG_READONLY(ftype, ctype, nam, def, cmt)#define DEFINE_implication(whenflag, thenflag)#define DEFINE_bool(nam, def, cmt)#define DEFINE_int(nam, def, cmt)#define DEFINE_float(nam, def, cmt)#define DEFINE_string(nam, def, cmt)#define DEFINE_args(nam, def, cmt)#define FLAG DEFINE_bool(use_strict, false,"enforce strict mode") DEFINE_bool(es5_readonly, false,"activate correct semantics for inheriting readonliness") DEFINE_bool(es52_globals, false,"activate new semantics for global var declarations") DEFINE_bool(harmony_typeof, false,"enable harmony semantics for typeof") DEFINE_bool(harmony_scoping, false,"enable harmony block scoping") DEFINE_bool(harmony_modules, false,"enable harmony modules (implies block scoping)") DEFINE_bool(harmony_proxies, false,"enable harmony proxies") DEFINE_bool(harmony_collections, false,"enable harmony collections (sets, maps, and weak maps)") DEFINE_bool(harmony, false,"enable all harmony features (except typeof)") DEFINE_implication(harmony, harmony_scoping) DEFINE_implication(harmony, harmony_modules) DEFINE_implication(harmony, harmony_proxies) DEFINE_implication(harmony, harmony_collections) DEFINE_implication(harmony_modules, harmony_scoping) DEFINE_bool(packed_arrays, false,"optimizes arrays that have no holes") DEFINE_bool(smi_only_arrays, true,"tracks arrays with only smi values") DEFINE_bool(clever_optimizations, true,"Optimize object size, Array shift, DOM strings and string +") DEFINE_bool(unbox_double_arrays, true,"automatically unbox arrays of doubles") DEFINE_bool(string_slices, true,"use string slices") DEFINE_bool(crankshaft, true,"use crankshaft") DEFINE_string(hydrogen_filter,"","optimization filter") DEFINE_bool(use_range, true,"use hydrogen range analysis") DEFINE_bool(eliminate_dead_phis, true,"eliminate dead phis") DEFINE_bool(use_gvn, true,"use hydrogen global value numbering") DEFINE_bool(use_canonicalizing, true,"use hydrogen instruction canonicalizing") DEFINE_bool(use_inlining, true,"use function inlining") 
DEFINE_int(max_inlined_source_size, 600,"maximum source size in bytes considered for a single inlining") DEFINE_int(max_inlined_nodes, 196,"maximum number of AST nodes considered for a single inlining") DEFINE_int(max_inlined_nodes_cumulative, 196,"maximum cumulative number of AST nodes considered for inlining") DEFINE_bool(loop_invariant_code_motion, true,"loop invariant code motion") DEFINE_bool(collect_megamorphic_maps_from_stub_cache, true,"crankshaft harvests type feedback from stub cache") DEFINE_bool(hydrogen_stats, false,"print statistics for hydrogen") DEFINE_bool(trace_hydrogen, false,"trace generated hydrogen to file") DEFINE_string(trace_phase,"Z","trace generated IR for specified phases") DEFINE_bool(trace_inlining, false,"trace inlining decisions") DEFINE_bool(trace_alloc, false,"trace register allocator") DEFINE_bool(trace_all_uses, false,"trace all use positions") DEFINE_bool(trace_range, false,"trace range analysis") DEFINE_bool(trace_gvn, false,"trace global value numbering") DEFINE_bool(trace_representation, false,"trace representation types") DEFINE_bool(stress_pointer_maps, false,"pointer map for every instruction") DEFINE_bool(stress_environments, false,"environment for every instruction") DEFINE_int(deopt_every_n_times, 0,"deoptimize every n times a deopt point is passed") DEFINE_bool(trap_on_deopt, false,"put a break point before deoptimizing") DEFINE_bool(deoptimize_uncommon_cases, true,"deoptimize uncommon cases") DEFINE_bool(polymorphic_inlining, true,"polymorphic inlining") DEFINE_bool(use_osr, true,"use on-stack replacement") DEFINE_bool(array_bounds_checks_elimination, false,"perform array bounds checks elimination") DEFINE_bool(array_index_dehoisting, false,"perform array index dehoisting") DEFINE_bool(trace_osr, false,"trace on-stack replacement") DEFINE_int(stress_runs, 0,"number of stress runs") DEFINE_bool(optimize_closures, true,"optimize closures") DEFINE_bool(inline_construct, true,"inline constructor calls") DEFINE_bool(inline_arguments, true,"inline functions with arguments object") DEFINE_int(loop_weight, 1,"loop weight for representation inference") DEFINE_bool(optimize_for_in, true,"optimize functions containing for-in loops") DEFINE_bool(experimental_profiler, true,"enable all profiler experiments") DEFINE_bool(watch_ic_patching, false,"profiler considers IC stability") DEFINE_int(frame_count, 1,"number of stack frames inspected by the profiler") DEFINE_bool(self_optimization, false,"primitive functions trigger their own optimization") DEFINE_bool(direct_self_opt, false,"call recompile stub directly when self-optimizing") DEFINE_bool(retry_self_opt, false,"re-try self-optimization if it failed") DEFINE_bool(count_based_interrupts, false,"trigger profiler ticks based on counting instead of timing") DEFINE_bool(interrupt_at_exit, false,"insert an interrupt check at function exit") DEFINE_bool(weighted_back_edges, false,"weight back edges by jump distance for interrupt triggering") DEFINE_int(interrupt_budget, 5900,"execution budget before interrupt is triggered") DEFINE_int(type_info_threshold, 15,"percentage of ICs that must have type info to allow optimization") DEFINE_int(self_opt_count, 130,"call count before self-optimization") DEFINE_implication(experimental_profiler, watch_ic_patching) DEFINE_implication(experimental_profiler, self_optimization) DEFINE_implication(experimental_profiler, retry_self_opt) DEFINE_implication(experimental_profiler, count_based_interrupts) DEFINE_implication(experimental_profiler, interrupt_at_exit) 
DEFINE_implication(experimental_profiler, weighted_back_edges) DEFINE_bool(trace_opt_verbose, false,"extra verbose compilation tracing") DEFINE_implication(trace_opt_verbose, trace_opt) DEFINE_bool(debug_code, false,"generate extra code (assertions) for debugging") DEFINE_bool(code_comments, false,"emit comments in code disassembly") DEFINE_bool(enable_sse2, true,"enable use of SSE2 instructions if available") DEFINE_bool(enable_sse3, true,"enable use of SSE3 instructions if available") DEFINE_bool(enable_sse4_1, true,"enable use of SSE4.1 instructions if available") DEFINE_bool(enable_cmov, true,"enable use of CMOV instruction if available") DEFINE_bool(enable_rdtsc, true,"enable use of RDTSC instruction if available") DEFINE_bool(enable_sahf, true,"enable use of SAHF instruction if available (X64 only)") DEFINE_bool(enable_vfp3, true,"enable use of VFP3 instructions if available - this implies ""enabling ARMv7 instructions (ARM only)") DEFINE_bool(enable_armv7, true,"enable use of ARMv7 instructions if available (ARM only)") DEFINE_bool(enable_fpu, true,"enable use of MIPS FPU instructions if available (MIPS only)") DEFINE_string(expose_natives_as, NULL,"expose natives in global object") DEFINE_string(expose_debug_as, NULL,"expose debug in global object") DEFINE_bool(expose_gc, false,"expose gc extension") DEFINE_bool(expose_externalize_string, false,"expose externalize string extension") DEFINE_int(stack_trace_limit, 10,"number of stack frames to capture") DEFINE_bool(builtins_in_stack_traces, false,"show built-in functions in stack traces") DEFINE_bool(disable_native_files, false,"disable builtin natives files") DEFINE_bool(inline_new, true,"use fast inline allocation") DEFINE_bool(stack_trace_on_abort, true,"print a stack trace if an assertion failure occurs") DEFINE_bool(trace, false,"trace function calls") DEFINE_bool(mask_constants_with_cookie, true,"use random jit cookie to mask large constants") DEFINE_bool(lazy, true,"use lazy compilation") DEFINE_bool(trace_opt, false,"trace lazy optimization") DEFINE_bool(trace_opt_stats, false,"trace lazy optimization statistics") DEFINE_bool(opt, true,"use adaptive optimizations") DEFINE_bool(always_opt, false,"always try to optimize functions") DEFINE_bool(prepare_always_opt, false,"prepare for turning on always opt") DEFINE_bool(trace_deopt, false,"trace deoptimization") DEFINE_int(min_preparse_length, 1024,"minimum length for automatic enable preparsing") DEFINE_bool(always_full_compiler, false,"try to use the dedicated run-once backend for all code") DEFINE_bool(trace_bailout, false,"print reasons for falling back to using the classic V8 backend") DEFINE_bool(compilation_cache, true,"enable compilation cache") DEFINE_bool(cache_prototype_transitions, true,"cache prototype transitions") DEFINE_bool(trace_debug_json, false,"trace debugging JSON request/response") DEFINE_bool(debugger_auto_break, true,"automatically set the debug break flag when debugger commands are ""in the queue") DEFINE_bool(enable_liveedit, true,"enable liveedit experimental feature") DEFINE_bool(break_on_abort, true,"always cause a debug break before aborting") DEFINE_int(stack_size, kPointerSize *123,"default size of stack region v8 is allowed to use (in kBytes)") DEFINE_int(max_stack_trace_source_length, 300,"maximum length of function source code printed in a stack trace.") DEFINE_bool(always_inline_smi_code, false,"always inline smi code in non-opt code") DEFINE_int(max_new_space_size, 0,"max size of the new generation (in kBytes)") DEFINE_int(max_old_space_size, 
0,"max size of the old generation (in Mbytes)") DEFINE_int(max_executable_size, 0,"max size of executable memory (in Mbytes)") DEFINE_bool(gc_global, false,"always perform global GCs") DEFINE_int(gc_interval,-1,"garbage collect after <n> allocations") DEFINE_bool(trace_gc, false,"print one trace line following each garbage collection") DEFINE_bool(trace_gc_nvp, false,"print one detailed trace line in name=value format ""after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false,"print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false,"print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false,"report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true,"garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true,"flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true,"use incremental marking") DEFINE_bool(incremental_marking_steps, true,"do incremental marking steps") DEFINE_bool(trace_incremental_marking, false,"trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true,"Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false,"Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true,"use inline caching") DEFINE_bool(native_code_counters, false,"generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false,"Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true,"Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false,"Never perform compaction on full GC - testing only") DEFINE_bool(compact_code_space, true,"Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true,"Flush inline caches prior to mark compact collection and ""flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0,"Default seed for initializing random generator ""(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true,"allows verbose printing") DEFINE_bool(allow_natives_syntax, false,"allow natives syntax") DEFINE_bool(trace_sim, false,"Trace simulator execution") DEFINE_bool(check_icache, false,"Check icache flushes in ARM and MIPS simulator") DEFINE_int(stop_sim_at, 0,"Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8,"Stack alingment in bytes in simulator (4 or 8, 8 is default)") DEFINE_bool(trace_exception, false,"print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false,"preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true,"randomize hashes to avoid predictable hash collisions ""(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0,"Fixed seed to use to hash property keys (0 means random)""(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false,"activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true,"generate optimized regexp code") DEFINE_bool(testing_bool_flag, true,"testing_bool_flag") DEFINE_int(testing_int_flag, 13,"testing_int_flag") DEFINE_float(testing_float_flag, 2.5,"float-flag") DEFINE_string(testing_string_flag,"Hello, world!","string-flag") DEFINE_int(testing_prng_seed, 42,"Seed used for threading test randomness") DEFINE_string(testing_serialization_file,"/tmp/serdes","file 
in which to serialize heap") DEFINE_bool(help, false,"Print usage message, including flags, on console") DEFINE_bool(dump_counters, false,"Dump counters on exit") DEFINE_string(map_counters,"","Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT,"Pass all remaining arguments to the script. Alias for \"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") 
DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#47"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2 namespace{struct Flag{enum FlagType{TYPE_BOOL, TYPE_INT, TYPE_FLOAT, TYPE_STRING, TYPE_ARGS} name
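Each DEFINE_* line above expands to a mutable FLAG_<name> global plus a registry entry built from the Flag struct. A minimal sketch of that pattern, assuming only the standard library (the macro name and expansion here are illustrative, not V8's actual flags.h machinery):

#include <cstdio>

// Illustrative stand-in for V8's DEFINE_bool: declare a mutable global
// whose default value mirrors the table above.
#define DEFINE_BOOL(name, default_value, comment) \
  bool FLAG_##name = default_value;

DEFINE_BOOL(trace_gc, false, "print one trace line following each garbage collection")

int main() {
  FLAG_trace_gc = true;  // what parsing "--trace_gc" effectively does
  if (FLAG_trace_gc) std::printf("GC tracing enabled\n");
  return 0;
}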
void init_instance_descriptors()
static HeapObject * cast(Object *obj)
Map * MapForExternalArrayType(ExternalArrayType array_type)
void SetNumberStringCache(Object *number, String *str)
static const byte kArgumentMarker
MUST_USE_RESULT MaybeObject * AllocateCodeCache()
void set_pre_allocated_property_fields(int value)
void CallOnce(OnceType *once, NoArgFunction init_func)
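CallOnce(OnceType*, NoArgFunction) is V8's once-only initialization primitive. A hedged sketch of the same idea using the standard library's std::call_once in place of V8's own OnceType:

#include <cstdio>
#include <mutex>

static std::once_flag init_once;  // stands in for OnceType

static void InitTables() {        // stands in for NoArgFunction
  std::printf("initialized exactly once\n");
}

void EnsureInitialized() {
  // Equivalent in spirit to CallOnce(&init_once, InitTables): the first
  // caller runs InitTables, every other caller waits, then falls through.
  std::call_once(init_once, InitTables);
}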
static const byte kUndefined
static AccessorPair * cast(Object *obj)
const int kVariableSizeSentinel
static const int kAlignedSize
RootListIndex RootIndexForExternalArrayType(ExternalArrayType array_type)
MUST_USE_RESULT MaybeObject * LookupAsciiSymbol(Vector< const char > str)
static Failure * OutOfMemoryException()
static bool IsOutsideAllocatedSpace(void *pointer)
bool SetUp(intptr_t max_capacity, intptr_t capacity_executable)
bool IsAsciiRepresentation()
static ExternalTwoByteString * cast(Object *obj)
void VisitExternalResources(v8::ExternalResourceVisitor *visitor)
intptr_t OldGenAllocationLimit(intptr_t old_gen_size)
static Map * cast(Object *obj)
void set_start_position(int value)
static const int kEmptyHashField
void ResetAllocationInfo()
MUST_USE_RESULT MaybeObject * AllocateByteArray(int length, PretenureFlag pretenure)
static const byte kTheHole
static ByteArray * cast(Object *obj)
bool has_fast_object_elements()
static Object * Lookup(FixedArray *cache, String *string, String *pattern)
void set_end_position(int end_position)
void set_context(Object *context)
static FreeSpace * cast(Object *obj)
void mark_out_of_memory()
void RemoveGCPrologueCallback(GCPrologueCallback callback)
Bootstrapper * bootstrapper()
void Set(int descriptor_number, Descriptor *desc, const WhitenessWitness &)
bool InFromSpace(Object *object)
MUST_USE_RESULT MaybeObject * Uint32ToString(uint32_t value, bool check_number_string_cache=true)
void Relocate(intptr_t delta)
PromotionQueue * promotion_queue()
void SetTop(Object ***top)
static Foreign * cast(Object *obj)
Map * SymbolMapForString(String *str)
void set_ic_with_type_info_count(int count)
intptr_t inline_allocation_limit_step()
intptr_t * code_space_size
void AddGCEpilogueCallback(GCEpilogueCallback callback, GCType gc_type_filter)
MUST_USE_RESULT MaybeObject * AllocateRawAsciiString(int length, PretenureFlag pretenure=NOT_TENURED)
Context * global_context()
void IterateStrongRoots(ObjectVisitor *v, VisitMode mode)
#define ASSERT(condition)
bool InSpace(Address addr, AllocationSpace space)
void(* GCPrologueCallback)(GCType type, GCCallbackFlags flags)
v8::Handle< v8::Value > Print(const v8::Arguments &args)
static void IncrementLiveBytesFromGC(Address address, int by)
void Step(intptr_t allocated, CompletionAction action)
#define PROFILE(isolate, Call)
Object * instance_prototype()
KeyedLookupCache * keyed_lookup_cache()
static const int kReduceMemoryFootprintMask
void AddGCPrologueCallback(GCPrologueCallback callback, GCType gc_type_filter)
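These Heap-level Add/Remove prologue and epilogue hooks back the public v8.h callbacks. A sketch of registering one through the public API of this era (hedged: the exact public surface varies by V8 version):

#include <v8.h>

// Invoked before each collection whose type matches the filter; GC
// callbacks must not allocate on the V8 heap.
static void OnGCPrologue(v8::GCType type, v8::GCCallbackFlags flags) {
  // e.g. record timestamps or counters here.
}

void RegisterGCHooks() {
  v8::V8::AddGCPrologueCallback(OnGCPrologue, v8::kGCTypeAll);
}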
MUST_USE_RESULT MaybeObject * LookupTwoByteSymbol(Vector< const uc16 > str)
void IterateStrongRoots(ObjectVisitor *v)
virtual Object * RetainAs(Object *object)
static Context * cast(Object *context)
static const int kMaxLength
const intptr_t kCodeAlignment
MUST_USE_RESULT MaybeObject * LookupSymbol(Vector< const char > str)
bool SetUp(bool create_heap_objects)
ThreadManager * thread_manager()
MUST_USE_RESULT MaybeObject * AllocateBlockContext(JSFunction *function, Context *previous, ScopeInfo *info)
int SizeFromMap(Map *map)
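SizeFromMap(Map*), together with kVariableSizeSentinel and the many static SizeFor(int length) members in this list, reflects one pattern: a map records a fixed instance size, or a sentinel meaning "compute the size from the object's length". A self-contained model of that dispatch (the header size and types here are illustrative, not V8's layout):

#include <cassert>
#include <cstddef>

const int kVariableSizeSentinel = 0;  // mirrors the constant listed above
const int kPointerSize = static_cast<int>(sizeof(void*));
const int kHeaderSize = 2 * kPointerSize;  // hypothetical array header

// Hypothetical FixedArray-style sizing: header plus one slot per element.
static int SizeFor(int length) { return kHeaderSize + length * kPointerSize; }

// Fixed-size instances answer from the map; variable-size ones compute it.
int SizeFromMap(int map_instance_size, int length) {
  if (map_instance_size != kVariableSizeSentinel) return map_instance_size;
  return SizeFor(length);
}

int main() {
  assert(SizeFromMap(3 * kPointerSize, 0) == 3 * kPointerSize);
  assert(SizeFromMap(kVariableSizeSentinel, 4) == kHeaderSize + 4 * kPointerSize);
  return 0;
}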
static const int kMaxPreAllocatedPropertyFields
MUST_USE_RESULT MaybeObject * AllocateStringFromUtf8Slow(Vector< const char > str, PretenureFlag pretenure=NOT_TENURED)
intptr_t CommittedMemoryExecutable()
void set_is_undetectable()
static void Iterate(ObjectVisitor *visitor)
int GetInternalFieldCount()
void initialize_elements()
void VisitPointers(Object **start, Object **end)
#define STRING_TYPE_LIST(V)
MUST_USE_RESULT MaybeObject * CopyJSObject(JSObject *source)
void set_first(String *first, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
static ExternalAsciiString * cast(Object *obj)
static const int kMaxSize
static const int kPageSize
void set_foreign_address(Address value)
void ReserveSpace(int new_space_size, int pointer_space_size, int data_space_size, int code_space_size, int map_space_size, int cell_space_size, int large_object_size)
static Code * cast(Object *obj)
virtual const uint16_t * data() const =0
MUST_USE_RESULT MaybeObject * AllocateInternalSymbol(unibrow::CharacterStream *buffer, int chars, uint32_t hash_field)
static bool IsAtEnd(Address addr)
void IterateAndMarkPointersToFromSpace(Address start, Address end, ObjectSlotCallback callback)
static PolymorphicCodeCache * cast(Object *obj)
MUST_USE_RESULT MaybeObject * AllocateJSArrayWithElements(FixedArrayBase *array_base, ElementsKind elements_kind, PretenureFlag pretenure=NOT_TENURED)
ArrayStorageAllocationMode
virtual Object * RetainAs(Object *object)=0
static Object ** RawField(HeapObject *obj, int offset)
StoreBuffer * store_buffer()
MUST_USE_RESULT MaybeObject * AllocateHeapNumber(double value, PretenureFlag pretenure)
static Smi * cast(Object *object)
void set_function_token_position(int function_token_position)
void set_global(GlobalObject *global)
#define STRING_TYPE_ELEMENT(type, size, name, camel_name)
static bool IsAscii(const char *chars, int length)
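IsAscii(const char*, int) is a pure predicate; one plausible implementation is a high-bit scan over the buffer (a sketch, not necessarily V8's exact code):

static bool IsAscii(const char* chars, int length) {
  for (int i = 0; i < length; i++) {
    // Any byte with the top bit set is outside 7-bit ASCII.
    if (static_cast<unsigned char>(chars[i]) >= 0x80) return false;
  }
  return true;
}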
static MUST_USE_RESULT MaybeObject * InitializeIntrinsicFunctionNames(Heap *heap, Object *dictionary)
bool CollectGarbage(AllocationSpace space, GarbageCollector collector, const char *gc_reason, const char *collector_reason)
void set_closure(JSFunction *closure)
static MarkBit MarkBitFrom(Address addr)
StackGuard * stack_guard()
void set_context_exit_happened(bool context_exit_happened)
MUST_USE_RESULT MaybeObject * AllocateWithContext(JSFunction *function, Context *previous, JSObject *extension)
void Free(MemoryChunk *chunk)
static const int kStringSplitCacheSize
MUST_USE_RESULT MaybeObject * CopyFixedArrayWithMap(FixedArray *src, Map *map)
Object * InObjectPropertyAtPut(int index, Object *value, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
MUST_USE_RESULT MaybeObject * AllocateConsString(String *first, String *second)
static Struct * cast(Object *that)
static int GetBuiltinsCount()
void InitializeBody(int object_size, Object *value)
MUST_USE_RESULT MaybeObject * NumberToString(Object *number, bool check_number_string_cache=true)
static const int kMinLength
UnicodeCache * unicode_cache()
String *(* ExternalStringTableUpdaterCallback)(Heap *heap, Object **pointer)
String * GetKey(int descriptor_number)
void set_the_hole(int index)
static const int kEndMarker
bool IdleNotification(int hint)
MUST_USE_RESULT MaybeObject * AllocateStringFromAscii(Vector< const char > str, PretenureFlag pretenure=NOT_TENURED)
virtual size_t length() const =0
void EnsureHeapIsIterable()
static const int kArgumentsObjectSizeStrict
MUST_USE_RESULT MaybeObject * AllocateUninitializedFixedDoubleArray(int length, PretenureFlag pretenure=NOT_TENURED)
bool PostGarbageCollectionProcessing(GarbageCollector collector)
STATIC_ASSERT((FixedDoubleArray::kHeaderSize & kDoubleAlignmentMask) == 0)
int(* HeapObjectCallback)(HeapObject *obj)
void set_num_literals(int value)
static const int kMaxLength
const char * IntToCString(int n, Vector< char > buffer)
void set_ic_total_count(int count)
void set_unchecked(int index, Smi *value)
void Register(StaticVisitorBase::VisitorId id, Callback callback)
intptr_t CommittedMemory()
static bool IsMarked(HeapObject *object)
void IteratePointersToNewSpace(ObjectSlotCallback callback)
#define HEAP_PROFILE(heap, call)
void RemoveGCEpilogueCallback(GCEpilogueCallback callback)
static SlicedString * cast(Object *obj)
RuntimeProfiler * runtime_profiler()
intptr_t CommittedMemory()
int pre_allocated_property_fields()
void set_expected_nof_properties(int value)
void set_instruction_size(int value)
void InitializeBody(int object_size)
virtual intptr_t SizeOfObjects()
void LowerInlineAllocationLimit(intptr_t step)
static const int kStoreBufferSize
static const uchar kMaxNonSurrogateCharCode
static bool IsValid(intptr_t value)
void set_resource(const Resource *buffer)
#define MAKE_CASE(NAME, Name, name)
void CollectAllAvailableGarbage(const char *gc_reason=NULL)
bool ConfigureHeapDefault()
void set_aliased_context_slot(int count)
ElementsKind GetElementsKind()
static const int kNoGCFlags
MUST_USE_RESULT MaybeObject * AllocateFixedArrayWithHoles(int length, PretenureFlag pretenure=NOT_TENURED)
MemoryAllocator * memory_allocator()
MUST_USE_RESULT MaybeObject * AllocateInitialMap(JSFunction *fun)
static Oddball * cast(Object *obj)
static Address & Address_at(Address addr)
MUST_USE_RESULT MaybeObject * AllocateForeign(Address address, PretenureFlag pretenure=NOT_TENURED)
const char * DoubleToCString(double v, Vector< char > buffer)
MUST_USE_RESULT MaybeObject * AllocateModuleContext(Context *previous, ScopeInfo *scope_info)
static UnseededNumberDictionary * cast(Object *obj)
void QueueMemoryChunkForFree(MemoryChunk *chunk)
MUST_USE_RESULT MaybeObject * AllocateGlobalContext()
MUST_USE_RESULT MaybeObject * AllocateExternalArray(int length, ExternalArrayType array_type, void *external_pointer, PretenureFlag pretenure)
#define CONSTANT_SYMBOL_ELEMENT(name, contents)
intptr_t * cell_space_capacity
bool IsAligned(T value, U alignment)
intptr_t * memory_allocator_size
static SeqAsciiString * cast(Object *obj)
void set_inobject_properties(int value)
void set_hash_field(uint32_t value)
void MarkCompactPrologue()
void Iterate(ObjectVisitor *v)
GlobalHandles * global_handles()
virtual bool ReserveSpace(int bytes)
void IncrementYoungSurvivorsCounter(int survived)
~UnreachableObjectsFilter()
MUST_USE_RESULT MaybeObject * AllocatePolymorphicCodeCache()
intptr_t * code_space_capacity
void VisitPointer(Object **p)
void Update(Map *map, String *name, int field_offset)
const uint32_t kShortcutTypeMask
static Handle< Object > SetLocalPropertyIgnoreAttributes(Handle< JSObject > object, Handle< String > key, Handle< Object > value, PropertyAttributes attributes)
void set_end_position(int value)
OldSpace * old_pointer_space()
void UncommitMarkingDeque()
bool ConfigureHeap(int max_semispace_size, intptr_t max_old_gen_size, intptr_t max_executable_size)
T RoundUp(T x, intptr_t m)
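IsAligned(T, U) above and RoundUp(T, intptr_t) here are the usual power-of-two alignment helpers. Self-contained versions follow (a sketch; V8's definitions live in utils.h and may differ in detail):

#include <cassert>
#include <cstdint>

// Both helpers assume `alignment`/`m` is a power of two.
template <typename T, typename U>
inline bool IsAligned(T value, U alignment) {
  return (value & (alignment - 1)) == 0;
}

template <typename T>
inline T RoundUp(T x, intptr_t m) {
  // Add m-1 so any remainder carries into the next multiple, then mask.
  return static_cast<T>((x + m - 1) & -m);
}

int main() {
  assert(IsAligned(16, 8));
  assert(!IsAligned(13, 8));
  assert(RoundUp(13, 8) == 16);
  return 0;
}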
intptr_t * map_space_size
static double TimeCurrentMillis()
static FixedDoubleArray * cast(Object *obj)
MUST_USE_RESULT MaybeObject * AllocateTypeFeedbackInfo()
bool IsTwoByteRepresentation()
void set_age_mark(Address mark)
void IterateAllRoots(ObjectVisitor *v)
static const int kMaxNonCodeHeapObjectSize
bool contains(Address address)
static const int kMinLength
void set_length(int value)
static const int kMakeHeapIterableMask
MUST_USE_RESULT MaybeObject * AllocateJSArrayAndStorage(ElementsKind elements_kind, int length, int capacity, ArrayStorageAllocationMode mode=DONT_INITIALIZE_ARRAY_ELEMENTS, PretenureFlag pretenure=NOT_TENURED)
void EnsureSpace(intptr_t space_needed)
void Iterate(ObjectVisitor *v)
static const int kNextFunctionLinkOffset
bool InToSpace(Object *object)
void CopyFrom(const CodeDesc &desc)
static int SizeFor(int length)
static int IterateBody(Map *map, HeapObject *obj)
void set_start_position_and_type(int value)
WriteBarrierMode GetWriteBarrierMode(const AssertNoAllocation &)
void set_resource(const Resource *buffer)
PropertyDetails GetDetails(int descriptor_number)
void GarbageCollectionPrologue()
static void Clear(FixedArray *cache)
void Iterate(ObjectVisitor *v)
void Iterate(ObjectVisitor *v)
byte * relocation_start()
LargeObjectSpace * lo_space()
static ScopeInfo * Empty()
const Address kFromSpaceZapValue
bool ToSpaceContains(Address address)
MUST_USE_RESULT MaybeObject * AllocateJSMessageObject(String *type, JSArray *arguments, int start_position, int end_position, Object *script, Object *stack_trace, Object *stack_frames)
DeoptimizerData * deoptimizer_data()
static MUST_USE_RESULT MaybeObject * Allocate(int at_least_space_for)
Callback GetVisitorById(StaticVisitorBase::VisitorId id)
MUST_USE_RESULT MaybeObject * AllocateExternalStringFromTwoByte(const ExternalTwoByteString::Resource *resource)
MUST_USE_RESULT MaybeObject * AllocatePartialMap(InstanceType instance_type, int instance_size)
MUST_USE_RESULT MaybeObject * CreateCode(const CodeDesc &desc, Code::Flags flags, Handle< Object > self_reference, bool immovable=false)
virtual bool SkipObject(HeapObject *object)=0
DescriptorLookupCache * descriptor_lookup_cache()
void set_map_no_write_barrier(Map *value)
void set_check_type(CheckType value)
static JSMessageObject * cast(Object *obj)
static const int kAbortIncrementalMarkingMask
static const int kNonWeakFieldsEndOffset
Vector< const char > CStrVector(const char *data)
static Local< Context > ToLocal(v8::internal::Handle< v8::internal::Context > obj)
intptr_t CommittedMemory()
Object * GetNumberStringCache(Object *number)
MUST_USE_RESULT MaybeObject * AllocateRawFixedDoubleArray(int length, PretenureFlag pretenure)
static int SizeFor(int length)
void SetArea(Address area_start, Address area_end)
static const int kMaxSize
void IterateNewSpaceStrongAndDependentRoots(ObjectVisitor *v)
bool IsFastSmiOrObjectElementsKind(ElementsKind kind)
void RecordWrites(Address address, int start, int len)
void UpdateMarkingDequeAfterScavenge()
static SeqTwoByteString * cast(Object *obj)
static JSFunctionResultCache * cast(Object *obj)
void Iterate(ObjectVisitor *v)
void(* GCEpilogueCallback)(GCType type, GCCallbackFlags flags)
intptr_t get_max_alive_after_gc()
void UpdateReferencesInExternalStringTable(ExternalStringTableUpdaterCallback updater_func)
MUST_USE_RESULT MaybeObject * AllocateJSProxy(Object *handler, Object *prototype)
void ProcessWeakReferences(WeakObjectRetainer *retainer)
void ClearNormalizedMapCaches()
static const int kHeaderSize
static void VisitPointer(Heap *heap, Object **p)
MUST_USE_RESULT MaybeObject * NumberFromDouble(double value, PretenureFlag pretenure=NOT_TENURED)
bool SlowContains(Address addr)
intptr_t * old_data_space_capacity
static int SizeFor(int length)
bool is_compacting() const
MUST_USE_RESULT MaybeObject * AllocateArgumentsObject(Object *callee, int length)
intptr_t SizeExecutable()
int Lookup(Map *map, String *name)
InnerPointerToCodeCache * inner_pointer_to_code_cache()
void set_instance_type(InstanceType value)
static HeapNumber * cast(Object *obj)
static void WriteToFlat(String *source, sinkchar *sink, int from, int to)
static StringDictionary * cast(Object *obj)
void set_value(double value)
MUST_USE_RESULT MaybeObject * CopyFixedArray(FixedArray *src)
virtual size_t length() const =0
void IterateRoots(ObjectVisitor *v, VisitMode mode)
static const int kLengthOffset
static double nan_value()
MUST_USE_RESULT MaybeObject * ReinitializeJSReceiver(JSReceiver *object, InstanceType type, int size)
MUST_USE_RESULT MaybeObject * AllocateAccessorPair()
void set_counters(int value)
MUST_USE_RESULT MaybeObject * AllocateCatchContext(JSFunction *function, Context *previous, String *name, Object *thrown_object)
const uint32_t kFreeListZapValue
static uint32_t RandomPrivate(Isolate *isolate)
static const int kArgumentsLengthIndex
static int SizeFor(int length)
void CheckNewSpaceExpansionCriteria()
const intptr_t kObjectAlignment
INLINE(static HeapObject *EnsureDoubleAligned(Heap *heap, HeapObject *object, int size))
static NewSpacePage * FromLimit(Address address_limit)
static void Enter(Heap *heap, FixedArray *cache, String *string, String *pattern, FixedArray *array)
void RecordStats(HeapStats *stats)
MUST_USE_RESULT MaybeObject * AllocateScopeInfo(int length)
bool LookupSymbolIfExists(String *str, String **symbol)
static JSGlobalPropertyCell * cast(Object *obj)
MUST_USE_RESULT MaybeObject * NumberFromUint32(uint32_t value, PretenureFlag pretenure=NOT_TENURED)
IncrementalMarking * incremental_marking()
bool Contains(Address addr)
MUST_USE_RESULT MaybeObject * AllocateUninitializedFixedArray(int length)
void set_extension(Object *object)
static const int kStartMarker
void set_bit_field(byte value)
static TypeFeedbackCells * cast(Object *obj)
static int SizeFor(int length)
virtual const char * data() const =0
MUST_USE_RESULT MaybeObject * Initialize(const char *to_string, Object *to_number, byte kind)
virtual bool ReserveSpace(int bytes)
void Iterate(v8::internal::ObjectVisitor *v)
NewSpacePage * next_page() const
int number_of_descriptors()
void MemsetPointer(T **dest, U *value, int counter)
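MemsetPointer(T**, U*, int) fills a run of pointer slots with one value, the pointer analogue of memset. A minimal sketch (assumes U* converts to T*, e.g. identical or derived types):

// Write `value` into each of `counter` consecutive pointer slots.
template <typename T, typename U>
inline void MemsetPointer(T** dest, U* value, int counter) {
  for (int i = 0; i < counter; i++) dest[i] = value;
}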
void set_owner(Space *space)
MUST_USE_RESULT MaybeObject * AllocateJSObjectFromMap(Map *map, PretenureFlag pretenure=NOT_TENURED)
ScavengeWeakObjectRetainer(Heap *heap)
void RememberUnmappedPage(Address page, bool compacted)
void NotifyOfHighPromotionRate()
static void UpdateReferencesForScavengeGC()
void Set(int index, uint16_t value)
static const int kNotFound
#define ASSERT_EQ(v1, v2)
static const int kBodyOffset
MUST_USE_RESULT MaybeObject * LookupSingleCharacterStringFromCode(uint16_t code)
InstanceType instance_type()
static void CopyBlock(Address dst, Address src, int byte_size)
MUST_USE_RESULT MaybeObject * AllocateJSGlobalPropertyCell(Object *value)
static HeapObject * FromAddress(Address address)
void set_size(size_t size)
MUST_USE_RESULT MaybeObject * AllocateFixedDoubleArrayWithHoles(int length, PretenureFlag pretenure=NOT_TENURED)
MUST_USE_RESULT MaybeObject * AllocateRawFixedArray(int length)
ScavengeVisitor(Heap *heap)
MUST_USE_RESULT MaybeObject * CopyDropTransitions(DescriptorArray::SharedMode shared_mode)
static const unsigned kMaxAsciiCharCodeU
static const int kArgumentsCalleeIndex
MUST_USE_RESULT MaybeObject * AllocateHashTable(int length, PretenureFlag pretenure=NOT_TENURED)
static FixedArray * cast(Object *obj)
static const unsigned kMaxOneByteChar
static const int kHeaderSize
void SeqAsciiStringSet(int index, uint16_t value)
void set_parent(String *parent)
UnreachableObjectsFilter()
Object * FindCodeObject(Address a)
void set_previous(Context *context)
intptr_t PromotedSpaceSizeOfObjects()
void IterateNewSpaceWeakIndependentRoots(ObjectVisitor *v)
intptr_t * old_pointer_space_capacity
void Add(const T &element, AllocationPolicy allocator=AllocationPolicy())
StaticResource< Utf8Decoder > * utf8_decoder()
Object * GetCallbacksObject(int descriptor_number)
void set_instance_size(int value)
static VisitorId GetVisitorId(int instance_type, int instance_size)
void ClearJSFunctionResultCaches()
void set_compiler_hints(int value)
void RecordStats(HeapStats *stats, bool take_snapshot=false)
void set_formal_parameter_count(int value)
bool HasFastDoubleElements()
static const int kMaxLength
String * TryFlattenGetString(PretenureFlag pretenure=NOT_TENURED)
virtual ~HeapObjectsFilter()
void set_bit_field2(byte value)
void CopyFrom(VisitorDispatchTable *other)
void CreateFillerObjectAt(Address addr, int size)
static int GetLastError()
MUST_USE_RESULT MaybeObject * AllocateSharedFunctionInfo(Object *name)
bool AdvanceSweepers(int step_size)
void RegisterSpecializations()
static NormalizedMapCache * cast(Object *obj)
static const int kMaxLength
intptr_t * map_space_capacity
static int SizeFor(int body_size)
void set_stress_deopt_counter(int counter)
static intptr_t MaxVirtualMemory()
static const intptr_t kAllocatedThreshold
static const int kCapacityMask
static void ScavengeObject(HeapObject **p, HeapObject *object)
bool is_keyed_call_stub()
void set_visitor_id(int visitor_id)
bool IsSweepingComplete()
void set_length(int value)
void set_this_property_assignments_count(int value)
bool SetUp(int reserved_semispace_size_, int max_semispace_size)
void IterateBuiltins(ObjectVisitor *v)
static VisitorDispatchTable< ScavengingCallback > * GetTable()
void set_ast_node_count(int count)
intptr_t * memory_allocator_capacity
static ConsString * cast(Object *obj)
virtual intptr_t SizeOfObjects()
void set_offset(int offset)
static FixedArrayBase * cast(Object *object)
void set_flags(Flags flags)
MUST_USE_RESULT MaybeObject * AllocateStruct(InstanceType type)
void EnterDirectlyIntoStoreBuffer(Address addr)
intptr_t * old_data_space_size
MUST_USE_RESULT MaybeObject * AllocateRaw(int object_size, Executability executable)
MUST_USE_RESULT MaybeObject * AllocateJSModule()
static void FatalProcessOutOfMemory(const char *location, bool take_snapshot=false)
MUST_USE_RESULT MaybeObject * AllocateMap(InstanceType instance_type, int instance_size, ElementsKind elements_kind=TERMINAL_FAST_ELEMENTS_KIND)
const intptr_t kDoubleAlignment
ElementsKind GetHoleyElementsKind(ElementsKind packed_kind)
intptr_t MaxExecutableSize()
static const int kMaxLength
MUST_USE_RESULT MaybeObject * AllocateRaw(int size_in_bytes, AllocationSpace space, AllocationSpace retry_space)
static const int kValueOffset
void SetNextEnumerationIndex(int value)
void set_next_chunk(MemoryChunk *next)
void PrintShortHeapStatistics()
static JSObject * cast(Object *obj)
static const int kHashMask
AllocationSpace TargetSpaceId(InstanceType type)
uint32_t RoundUpToPowerOf2(uint32_t x)
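RoundUpToPowerOf2(uint32_t) is the classic bit-smearing trick: propagate the highest set bit into every lower position, then add one. A sketch, valid for 0 < x <= 2^31:

#include <cassert>
#include <cstdint>

uint32_t RoundUpToPowerOf2(uint32_t x) {
  x -= 1;        // so exact powers of two map to themselves
  x |= x >> 1;   // smear the top set bit downward...
  x |= x >> 2;
  x |= x >> 4;
  x |= x >> 8;
  x |= x >> 16;  // ...until all lower bits are set
  return x + 1;
}

int main() {
  assert(RoundUpToPowerOf2(3) == 4);
  assert(RoundUpToPowerOf2(16) == 16);
  assert(RoundUpToPowerOf2(17) == 32);
  return 0;
}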
OldSpace * old_data_space()
MUST_USE_RESULT MaybeObject * AllocateRawTwoByteString(int length, PretenureFlag pretenure=NOT_TENURED)
static void AssertValidRange(Address from, Address to)
MarkCompactCollector * mark_compact_collector()
static MUST_USE_RESULT MaybeObject * Allocate(int number_of_descriptors, SharedMode shared_mode)
MUST_USE_RESULT MaybeObject * AllocateFunction(Map *function_map, SharedFunctionInfo *shared, Object *prototype, PretenureFlag pretenure=TENURED)
void UpdateNewSpaceReferencesInExternalStringTable(ExternalStringTableUpdaterCallback updater_func)
void set_initial_map(Map *value)
static const int kAlignedSize
bool CommitFromSpaceIfNeeded()
AllocationSpace identity()
void set_unused_property_fields(int value)
void UpdateSamplesAfterScavenge()
void init_prototype_transitions(Object *undefined)
static const int kIsExtensible
MUST_USE_RESULT MaybeObject * AllocateStringFromTwoByte(Vector< const uc16 > str, PretenureFlag pretenure=NOT_TENURED)
static const int kNonCodeObjectAreaSize
static const int kEntriesPerBucket
void PrepareForScavenge()
static const int kPointerFieldsBeginOffset
void EnsureFromSpaceIsCommitted()
void InitializeBody(Map *map, Object *pre_allocated_value, Object *filler_value)
MUST_USE_RESULT MaybeObject * AllocateAliasedArgumentsEntry(int slot)
MemoryChunk * next_chunk() const
static JSFunction * cast(Object *obj)