53 #if V8_TARGET_ARCH_ARM && !V8_INTERPRETED_REGEXP
57 #if V8_TARGET_ARCH_MIPS && !V8_INTERPRETED_REGEXP
70 #if defined(V8_TARGET_ARCH_X64)
72 code_range_size_(512*
MB),
80 initial_semispace_size_(Page::kPageSize),
81 max_old_generation_size_(192*
MB),
82 max_executable_size_(max_old_generation_size_),
86 initial_semispace_size_(Page::kPageSize),
95 survived_since_last_expansion_(0),
97 always_allocate_scope_depth_(0),
98 linear_allocation_scope_depth_(0),
99 contexts_disposed_(0),
101 flush_monomorphic_ics_(
false),
102 scan_on_scavenge_pages_(0),
104 old_pointer_space_(
NULL),
105 old_data_space_(
NULL),
110 gc_state_(NOT_IN_GC),
111 gc_post_processing_depth_(0),
114 remembered_unmapped_pages_index_(0),
115 unflattened_strings_length_(0),
117 allocation_allowed_(
true),
118 allocation_timeout_(0),
119 disallow_allocation_failure_(
false),
122 new_space_high_promotion_mode_active_(
false),
123 old_gen_promotion_limit_(kMinimumPromotionLimit),
124 old_gen_allocation_limit_(kMinimumAllocationLimit),
125 old_gen_limit_factor_(1),
126 size_of_old_gen_at_last_old_space_gc_(0),
127 external_allocation_limit_(0),
128 amount_of_external_allocated_memory_(0),
129 amount_of_external_allocated_memory_at_last_global_gc_(0),
130 old_gen_exhausted_(
false),
131 store_buffer_rebuilder_(store_buffer()),
132 hidden_symbol_(
NULL),
133 global_gc_prologue_callback_(
NULL),
134 global_gc_epilogue_callback_(
NULL),
135 gc_safe_size_of_old_object_(
NULL),
136 total_regexp_code_generated_(0),
138 young_survivors_after_last_gc_(0),
139 high_survival_rate_period_length_(0),
141 previous_survival_rate_trend_(Heap::STABLE),
142 survival_rate_trend_(Heap::STABLE),
144 total_gc_time_ms_(0),
145 max_alive_after_gc_(0),
147 alive_after_last_gc_(0),
148 last_gc_end_timestamp_(0.0),
151 incremental_marking_(this),
152 number_idle_notifications_(0),
153 last_idle_notification_gc_count_(0),
154 last_idle_notification_gc_count_init_(
false),
155 mark_sweeps_since_idle_round_started_(0),
156 ms_count_at_last_idle_notification_(0),
157 gc_count_at_last_idle_gc_(0),
158 scavenges_since_last_idle_round_(kIdleScavengeThreshold),
159 promotion_queue_(this),
161 chunks_queued_for_free_(
NULL),
162 relocation_mutex_(
NULL) {
166 #if defined(V8_MAX_SEMISPACE_SIZE)
167 max_semispace_size_ = reserved_semispace_size_ = V8_MAX_SEMISPACE_SIZE;
172 if (max_virtual > 0) {
173 if (code_range_size_ > 0) {
175 code_range_size_ =
Min(code_range_size_, max_virtual >> 3);
179 memset(roots_, 0,
sizeof(roots_[0]) * kRootListLength);
180 native_contexts_list_ =
NULL;
181 mark_compact_collector_.heap_ =
this;
182 external_string_table_.heap_ =
this;
185 RememberUnmappedPage(
NULL,
false);
187 ClearObjectStats(
true);
235 return old_pointer_space_ !=
NULL &&
236 old_data_space_ !=
NULL &&
237 code_space_ !=
NULL &&
238 map_space_ !=
NULL &&
239 cell_space_ !=
NULL &&
244 int Heap::GcSafeSizeOfOldObject(
HeapObject*
object) {
248 return object->SizeFromMap(object->
map());
253 const char** reason) {
256 isolate_->
counters()->gc_compactor_caused_by_request()->Increment();
257 *reason =
"GC in old space requested";
261 if (FLAG_gc_global || (FLAG_stress_compaction && (gc_count_ & 1) != 0)) {
262 *reason =
"GC in old space forced by flags";
268 isolate_->
counters()->gc_compactor_caused_by_promoted_data()->Increment();
269 *reason =
"promotion limit reached";
274 if (old_gen_exhausted_) {
276 gc_compactor_caused_by_oldspace_exhaustion()->Increment();
277 *reason =
"old generations exhausted";
292 gc_compactor_caused_by_oldspace_exhaustion()->Increment();
293 *reason =
"scavenge might not succeed";
305 void Heap::ReportStatisticsBeforeGC() {
311 if (FLAG_heap_stats) {
312 ReportHeapStatistics(
"Before GC");
313 }
else if (FLAG_log_gc) {
328 if (!FLAG_trace_gc_verbose)
return;
333 PrintPID(
"New space, used: %6" V8_PTR_PREFIX
"d KB"
334 ", available: %6" V8_PTR_PREFIX
"d KB"
335 ", committed: %6" V8_PTR_PREFIX
"d KB\n",
339 PrintPID(
"Old pointers, used: %6" V8_PTR_PREFIX
"d KB"
340 ", available: %6" V8_PTR_PREFIX
"d KB"
341 ", committed: %6" V8_PTR_PREFIX
"d KB\n",
345 PrintPID(
"Old data space, used: %6" V8_PTR_PREFIX
"d KB"
346 ", available: %6" V8_PTR_PREFIX
"d KB"
347 ", committed: %6" V8_PTR_PREFIX
"d KB\n",
351 PrintPID(
"Code space, used: %6" V8_PTR_PREFIX
"d KB"
352 ", available: %6" V8_PTR_PREFIX
"d KB"
353 ", committed: %6" V8_PTR_PREFIX
"d KB\n",
357 PrintPID(
"Map space, used: %6" V8_PTR_PREFIX
"d KB"
358 ", available: %6" V8_PTR_PREFIX
"d KB"
359 ", committed: %6" V8_PTR_PREFIX
"d KB\n",
363 PrintPID(
"Cell space, used: %6" V8_PTR_PREFIX
"d KB"
364 ", available: %6" V8_PTR_PREFIX
"d KB"
365 ", committed: %6" V8_PTR_PREFIX
"d KB\n",
369 PrintPID(
"Large object space, used: %6" V8_PTR_PREFIX
"d KB"
370 ", available: %6" V8_PTR_PREFIX
"d KB"
371 ", committed: %6" V8_PTR_PREFIX
"d KB\n",
375 PrintPID(
"All spaces, used: %6" V8_PTR_PREFIX
"d KB"
376 ", available: %6" V8_PTR_PREFIX
"d KB"
377 ", committed: %6" V8_PTR_PREFIX
"d KB\n",
381 PrintPID(
"Total time spent in GC : %d ms\n", total_gc_time_ms_);
387 void Heap::ReportStatisticsAfterGC() {
391 if (FLAG_heap_stats) {
393 ReportHeapStatistics(
"After GC");
394 }
else if (FLAG_log_gc) {
407 unflattened_strings_length_ = 0;
410 if (FLAG_verify_heap) {
417 allow_allocation(
false);
419 if (FLAG_gc_verbose)
Print();
421 ReportStatisticsBeforeGC();
432 for (
Space* space = spaces.next(); space !=
NULL; space = spaces.next()) {
433 total += space->SizeOfObjects();
443 space = spaces.next()) {
444 space->RepairFreeListsAfterBoot();
459 if (FLAG_verify_heap) {
465 allow_allocation(
true);
466 if (FLAG_print_global_handles) isolate_->
global_handles()->Print();
467 if (FLAG_print_handles) PrintHandles();
468 if (FLAG_gc_verbose)
Print();
469 if (FLAG_code_stats) ReportCodeStatistics(
"After GC");
472 isolate_->
counters()->alive_after_last_gc()->Set(
475 isolate_->
counters()->symbol_table_capacity()->Set(
477 isolate_->
counters()->number_of_symbols()->Set(
478 symbol_table()->NumberOfElements());
481 isolate_->
counters()->external_fragmentation_total()->AddSample(
484 isolate_->
counters()->heap_fraction_map_space()->AddSample(
487 isolate_->
counters()->heap_fraction_cell_space()->AddSample(
491 isolate_->
counters()->heap_sample_total_committed()->AddSample(
493 isolate_->
counters()->heap_sample_total_used()->AddSample(
495 isolate_->
counters()->heap_sample_map_space_committed()->AddSample(
497 isolate_->
counters()->heap_sample_cell_space_committed()->AddSample(
501 #define UPDATE_COUNTERS_FOR_SPACE(space) \
502 isolate_->counters()->space##_bytes_available()->Set( \
503 static_cast<int>(space()->Available())); \
504 isolate_->counters()->space##_bytes_committed()->Set( \
505 static_cast<int>(space()->CommittedMemory())); \
506 isolate_->counters()->space##_bytes_used()->Set( \
507 static_cast<int>(space()->SizeOfObjects()));
508 #define UPDATE_FRAGMENTATION_FOR_SPACE(space) \
509 if (space()->CommittedMemory() > 0) { \
510 isolate_->counters()->external_fragmentation_##space()->AddSample( \
511 static_cast<int>(100 - \
512 (space()->SizeOfObjects() * 100.0) / space()->CommittedMemory())); \
514 #define UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(space) \
515 UPDATE_COUNTERS_FOR_SPACE(space) \
516 UPDATE_FRAGMENTATION_FOR_SPACE(space)
525 #undef UPDATE_COUNTERS_FOR_SPACE
526 #undef UPDATE_FRAGMENTATION_FOR_SPACE
527 #undef UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE
530 ReportStatisticsAfterGC();
532 #ifdef ENABLE_DEBUGGER_SUPPORT
533 isolate_->debug()->AfterGarbageCollection();
534 #endif // ENABLE_DEBUGGER_SUPPORT
542 mark_compact_collector_.
SetFlags(flags);
563 const int kMaxNumberOfAttempts = 7;
564 for (
int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) {
579 const char* gc_reason,
580 const char* collector_reason) {
582 VMState state(isolate_, GC);
590 allocation_timeout_ =
Max(6, FLAG_gc_interval);
594 if (FLAG_trace_incremental_marking) {
595 PrintF(
"[IncrementalMarking] Scavenge during marking.\n");
603 FLAG_incremental_marking_steps) {
605 const intptr_t kStepSizeWhenDelayedByScavenge = 1 *
MB;
609 if (FLAG_trace_incremental_marking) {
610 PrintF(
"[IncrementalMarking] Delaying MarkSweep.\n");
613 collector_reason =
"incremental marking delaying mark-sweep";
617 bool next_gc_likely_to_collect_more =
false;
623 tracer.set_gc_count(gc_count_);
626 tracer.set_collector(collector);
629 ? isolate_->
counters()->gc_scavenger()
630 : isolate_->
counters()->gc_compactor();
632 next_gc_likely_to_collect_more =
633 PerformGarbageCollection(collector, &tracer);
648 return next_gc_likely_to_collect_more;
655 PerformGarbageCollection(
SCAVENGER, &tracer);
664 class SymbolTableVerifier :
public ObjectVisitor {
668 for (
Object** p = start; p < end; p++) {
669 if ((*p)->IsHeapObject()) {
671 CHECK((*p)->IsTheHole() || (*p)->IsUndefined() || (*p)->IsSymbol());
678 static void VerifySymbolTable() {
679 SymbolTableVerifier verifier;
680 HEAP->symbol_table()->IterateElements(&verifier);
682 #endif // VERIFY_HEAP
685 static bool AbortIncrementalMarkingAndCollectGarbage(
688 const char* gc_reason =
NULL) {
690 bool result = heap->CollectGarbage(space, gc_reason);
699 bool gc_performed =
true;
701 static const int kThreshold = 20;
702 while (gc_performed && counter++ < kThreshold) {
703 gc_performed =
false;
706 if (sizes[space] != 0) {
707 MaybeObject* allocation;
709 allocation =
new_space()->AllocateRaw(sizes[space]);
717 "failed to reserve space in the new space");
719 AbortIncrementalMarkingAndCollectGarbage(
721 static_cast<AllocationSpace>(space),
722 "failed to reserve space in paged space");
730 locations_out[space] = node->
address();
760 Object* context = native_contexts_list_;
761 while (!context->IsUndefined()) {
764 Object* caches_or_undefined =
766 if (!caches_or_undefined->IsUndefined()) {
769 int length = caches->
length();
770 for (
int i = 0; i < length; i++) {
787 Object* context = native_contexts_list_;
788 while (!context->IsUndefined()) {
793 if (!cache->IsUndefined()) {
801 void Heap::UpdateSurvivalRateTrend(
int start_new_space_size) {
802 double survival_rate =
803 (
static_cast<double>(young_survivors_after_last_gc_) * 100) /
804 start_new_space_size;
806 if (survival_rate > kYoungSurvivalRateHighThreshold) {
807 high_survival_rate_period_length_++;
809 high_survival_rate_period_length_ = 0;
812 if (survival_rate < kYoungSurvivalRateLowThreshold) {
813 low_survival_rate_period_length_++;
815 low_survival_rate_period_length_ = 0;
818 double survival_rate_diff = survival_rate_ - survival_rate;
820 if (survival_rate_diff > kYoungSurvivalRateAllowedDeviation) {
821 set_survival_rate_trend(DECREASING);
822 }
else if (survival_rate_diff < -kYoungSurvivalRateAllowedDeviation) {
823 set_survival_rate_trend(INCREASING);
825 set_survival_rate_trend(STABLE);
828 survival_rate_ = survival_rate;
833 bool next_gc_likely_to_collect_more =
false;
836 PROFILE(isolate_, CodeMovingGCEvent());
840 if (FLAG_verify_heap) {
845 if (collector ==
MARK_COMPACTOR && global_gc_prologue_callback_) {
846 ASSERT(!allocation_allowed_);
847 GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
848 global_gc_prologue_callback_();
854 for (
int i = 0; i < gc_prologue_callbacks_.length(); ++i) {
855 if (gc_type & gc_prologue_callbacks_[i].gc_type) {
864 if (IsHighSurvivalRate()) {
875 bool high_survival_rate_during_scavenges = IsHighSurvivalRate() &&
876 IsStableOrIncreasingSurvivalTrend();
878 UpdateSurvivalRateTrend(start_new_space_size);
882 if (high_survival_rate_during_scavenges &&
883 IsStableOrIncreasingSurvivalTrend()) {
890 old_gen_limit_factor_ = 2;
892 old_gen_limit_factor_ = 1;
895 old_gen_promotion_limit_ =
897 old_gen_allocation_limit_ =
900 old_gen_exhausted_ =
false;
906 UpdateSurvivalRateTrend(start_new_space_size);
909 if (!new_space_high_promotion_mode_active_ &&
911 IsStableOrIncreasingSurvivalTrend() &&
912 IsHighSurvivalRate()) {
917 new_space_high_promotion_mode_active_ =
true;
919 PrintPID(
"Limited new space size due to high promotion rate: %d MB\n",
922 }
else if (new_space_high_promotion_mode_active_ &&
923 IsStableOrDecreasingSurvivalTrend() &&
924 IsLowSurvivalRate()) {
928 new_space_high_promotion_mode_active_ =
false;
930 PrintPID(
"Unlimited new space size due to low promotion rate: %d MB\n",
935 if (new_space_high_promotion_mode_active_ &&
940 isolate_->
counters()->objs_since_last_young()->Set(0);
942 gc_post_processing_depth_++;
943 { DisableAssertNoAllocation allow_allocation;
944 GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
945 next_gc_likely_to_collect_more =
948 gc_post_processing_depth_--;
951 Relocatable::PostGarbageCollectionProcessing();
955 amount_of_external_allocated_memory_at_last_global_gc_ =
956 amount_of_external_allocated_memory_;
960 for (
int i = 0; i < gc_epilogue_callbacks_.length(); ++i) {
961 if (gc_type & gc_epilogue_callbacks_[i].gc_type) {
962 gc_epilogue_callbacks_[i].callback(gc_type, callback_flags);
966 if (collector ==
MARK_COMPACTOR && global_gc_epilogue_callback_) {
967 ASSERT(!allocation_allowed_);
968 GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
969 global_gc_epilogue_callback_();
973 if (FLAG_verify_heap) {
978 return next_gc_likely_to_collect_more;
982 void Heap::MarkCompact(GCTracer* tracer) {
984 LOG(isolate_, ResourceEvent(
"markcompact",
"begin"));
986 mark_compact_collector_.
Prepare(tracer);
989 tracer->set_full_gc_count(ms_count_);
991 MarkCompactPrologue();
995 LOG(isolate_, ResourceEvent(
"markcompact",
"end"));
999 isolate_->
counters()->objs_since_last_full()->Set(0);
1001 contexts_disposed_ = 0;
1003 flush_monomorphic_ics_ =
false;
1007 void Heap::MarkCompactPrologue() {
1020 FlushNumberStringCache();
1021 if (FLAG_cleanup_code_caches_at_gc) {
1022 polymorphic_code_cache()->set_cache(undefined_value());
1031 GcSafeFindCodeForInnerPointer(a);
1044 for (
Object** p = start; p < end; p++) ScavengePointer(p);
1048 void ScavengePointer(
Object** p) {
1052 reinterpret_cast<HeapObject*>(
object));
1062 class VerifyNonPointerSpacePointersVisitor:
public ObjectVisitor {
1065 for (
Object** current = start; current < end; current++) {
1066 if ((*current)->IsHeapObject()) {
1074 static void VerifyNonPointerSpacePointers() {
1077 VerifyNonPointerSpacePointersVisitor v;
1078 HeapObjectIterator code_it(
HEAP->code_space());
1079 for (HeapObject*
object = code_it.Next();
1080 object !=
NULL;
object = code_it.Next())
1081 object->Iterate(&v);
1085 if (!
HEAP->old_data_space()->was_swept_conservatively()) {
1086 HeapObjectIterator data_it(
HEAP->old_data_space());
1087 for (HeapObject*
object = data_it.Next();
1088 object !=
NULL;
object = data_it.Next())
1089 object->Iterate(&v);
1092 #endif // VERIFY_HEAP
1097 survived_since_last_expansion_ > new_space_.
Capacity() &&
1098 !new_space_high_promotion_mode_active_) {
1103 survived_since_last_expansion_ = 0;
1108 static bool IsUnscavengedHeapObject(
Heap* heap,
Object** p) {
1114 void Heap::ScavengeStoreBufferCallback(
1118 heap->store_buffer_rebuilder_.Callback(page, event);
1124 start_of_current_page_ =
NULL;
1125 current_page_ =
NULL;
1127 if (current_page_ !=
NULL) {
1131 store_buffer_->
SetTop(start_of_current_page_);
1132 }
else if (store_buffer_->
Top() - start_of_current_page_ >=
1133 (store_buffer_->
Limit() - store_buffer_->
Top()) >> 2) {
1138 store_buffer_->
SetTop(start_of_current_page_);
1146 start_of_current_page_ = store_buffer_->
Top();
1147 current_page_ = page;
1152 if (current_page_ ==
NULL) {
1160 ASSERT(current_page_ == page);
1163 ASSERT(start_of_current_page_ != store_buffer_->
Top());
1164 store_buffer_->
SetTop(start_of_current_page_);
1181 emergency_stack_ =
NULL;
1186 void PromotionQueue::RelocateQueueHead() {
1189 Page* p = Page::FromAllocationTop(reinterpret_cast<Address>(rear_));
1190 intptr_t* head_start = rear_;
1191 intptr_t* head_end =
1192 Min(front_, reinterpret_cast<intptr_t*>(p->
area_end()));
1195 static_cast<int>(head_end - head_start) / kEntrySizeInWords;
1197 emergency_stack_ =
new List<Entry>(2 * entries_count);
1199 while (head_start != head_end) {
1200 int size =
static_cast<int>(*(head_start++));
1202 emergency_stack_->
Add(Entry(obj, size));
1218 if (map_word.IsForwardingAddress()) {
1219 return map_word.ToForwardingAddress();
1229 void Heap::Scavenge() {
1230 RelocationLock relocation_lock(
this);
1233 if (FLAG_verify_heap) VerifyNonPointerSpacePointers();
1239 LOG(isolate_, ResourceEvent(
"scavenge",
"begin"));
1249 SelectScavengingVisitorsTable();
1284 ScavengeVisitor scavenge_visitor(
this);
1290 StoreBufferRebuildScope scope(
this,
1292 &ScavengeStoreBufferCallback);
1297 HeapObjectIterator cell_iterator(cell_space_);
1298 for (HeapObject* heap_object = cell_iterator.Next();
1299 heap_object !=
NULL;
1300 heap_object = cell_iterator.Next()) {
1301 if (heap_object->IsJSGlobalPropertyCell()) {
1303 Address value_address = cell->ValueAddress();
1304 scavenge_visitor.VisitPointer(reinterpret_cast<Object**>(value_address));
1309 scavenge_visitor.VisitPointer(BitCast<Object**>(&native_contexts_list_));
1311 new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
1313 &IsUnscavengedHeapObject);
1316 new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
1319 &UpdateNewSpaceReferenceInExternalStringTableEntry);
1324 if (!FLAG_watch_ic_patching) {
1329 ScavengeWeakObjectRetainer weak_object_retainer(
this);
1332 ASSERT(new_space_front == new_space_.
top());
1344 LOG(isolate_, ResourceEvent(
"scavenge",
"end"));
1348 scavenges_since_last_idle_round_++;
1352 String* Heap::UpdateNewSpaceReferenceInExternalStringTableEntry(Heap* heap,
1356 if (!first_word.IsForwardingAddress()) {
1370 if (FLAG_verify_heap) {
1371 external_string_table_.Verify();
1375 if (external_string_table_.new_space_strings_.is_empty())
return;
1377 Object** start = &external_string_table_.new_space_strings_[0];
1378 Object** end = start + external_string_table_.new_space_strings_.length();
1381 for (
Object** p = start; p < end; ++p) {
1383 String* target = updater_func(
this, p);
1385 if (target ==
NULL)
continue;
1387 ASSERT(target->IsExternalString());
1395 external_string_table_.AddOldString(target);
1400 external_string_table_.ShrinkNewStrings(static_cast<int>(last - start));
1408 if (external_string_table_.old_space_strings_.length() > 0) {
1409 Object** start = &external_string_table_.old_space_strings_[0];
1410 Object** end = start + external_string_table_.old_space_strings_.length();
1411 for (
Object** p = start; p < end; ++p) *p = updater_func(
this, p);
1418 static Object* ProcessFunctionWeakReferences(
Heap* heap,
1421 bool record_slots) {
1422 Object* undefined = heap->undefined_value();
1423 Object* head = undefined;
1425 Object* candidate =
function;
1426 while (candidate != undefined) {
1430 if (retain !=
NULL) {
1431 if (head == undefined) {
1437 tail->set_next_function_link(retain);
1442 next_function, next_function, retain);
1446 candidate_function =
reinterpret_cast<JSFunction*
>(retain);
1447 tail = candidate_function;
1449 ASSERT(retain->IsUndefined() || retain->IsJSFunction());
1451 if (retain == undefined)
break;
1455 candidate = candidate_function->next_function_link();
1460 tail->set_next_function_link(undefined);
1468 Object* undefined = undefined_value();
1469 Object* head = undefined;
1471 Object* candidate = native_contexts_list_;
1481 while (candidate != undefined) {
1483 Context* candidate_context =
reinterpret_cast<Context*
>(candidate);
1485 if (retain !=
NULL) {
1486 if (head == undefined) {
1502 next_context, next_context, retain);
1506 candidate_context =
reinterpret_cast<Context*
>(retain);
1507 tail = candidate_context;
1509 if (retain == undefined)
break;
1512 Object* function_list_head =
1513 ProcessFunctionWeakReferences(
1523 Object** optimized_functions =
1527 optimized_functions, optimized_functions, function_list_head);
1539 Heap::undefined_value(),
1544 native_contexts_list_ = head;
1551 class VisitorAdapter :
public ObjectVisitor {
1554 : visitor_(visitor) {}
1555 virtual void VisitPointers(
Object** start,
Object** end) {
1556 for (
Object** p = start; p < end; p++) {
1557 if ((*p)->IsExternalString()) {
1565 } visitor_adapter(visitor);
1566 external_string_table_.
Iterate(&visitor_adapter);
1576 reinterpret_cast<HeapObject*>(
object));
1581 Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
1588 while (new_space_front != new_space_.
top()) {
1601 StoreBufferRebuildScope scope(
this,
1603 &ScavengeStoreBufferCallback);
1613 ASSERT(!target->IsMap());
1615 target->address() + size,
1622 }
while (new_space_front != new_space_.
top());
1624 return new_space_front;
1631 INLINE(
static HeapObject* EnsureDoubleAligned(Heap* heap,
1635 static HeapObject* EnsureDoubleAligned(Heap* heap,
1639 heap->CreateFillerObjectAt(object->address(),
kPointerSize);
1642 heap->CreateFillerObjectAt(object->address() + size -
kPointerSize,
1663 table_.
Register(kVisitSeqAsciiString, &EvacuateSeqAsciiString);
1664 table_.
Register(kVisitSeqTwoByteString, &EvacuateSeqTwoByteString);
1665 table_.
Register(kVisitShortcutCandidate, &EvacuateShortcutCandidate);
1666 table_.
Register(kVisitByteArray, &EvacuateByteArray);
1667 table_.
Register(kVisitFixedArray, &EvacuateFixedArray);
1668 table_.
Register(kVisitFixedDoubleArray, &EvacuateFixedDoubleArray);
1670 table_.
Register(kVisitNativeContext,
1671 &ObjectEvacuationStrategy<POINTER_OBJECT>::
1672 template VisitSpecialized<Context::kSize>);
1675 &ObjectEvacuationStrategy<POINTER_OBJECT>::
1676 template VisitSpecialized<ConsString::kSize>);
1678 table_.
Register(kVisitSlicedString,
1679 &ObjectEvacuationStrategy<POINTER_OBJECT>::
1680 template VisitSpecialized<SlicedString::kSize>);
1682 table_.
Register(kVisitSharedFunctionInfo,
1683 &ObjectEvacuationStrategy<POINTER_OBJECT>::
1684 template VisitSpecialized<SharedFunctionInfo::kSize>);
1687 &ObjectEvacuationStrategy<POINTER_OBJECT>::
1691 &ObjectEvacuationStrategy<POINTER_OBJECT>::
1696 &ObjectEvacuationStrategy<POINTER_OBJECT>::
1697 template VisitSpecialized<JSFunction::kSize>);
1699 table_.
Register(kVisitJSFunction, &EvacuateJSFunction);
1704 kVisitDataObjectGeneric>();
1708 kVisitJSObjectGeneric>();
1712 kVisitStructGeneric>();
1720 enum ObjectContents { DATA_OBJECT, POINTER_OBJECT };
1721 enum SizeRestriction { SMALL, UNKNOWN_SIZE };
1723 static void RecordCopiedObject(Heap* heap, HeapObject* obj) {
1724 bool should_record =
false;
1726 should_record = FLAG_heap_stats;
1728 should_record = should_record || FLAG_log_gc;
1729 if (should_record) {
1730 if (heap->new_space()->Contains(obj)) {
1731 heap->new_space()->RecordAllocation(obj);
1733 heap->new_space()->RecordPromotion(obj);
1741 INLINE(
static void MigrateObject(Heap* heap,
1746 heap->CopyBlock(target->address(), source->address(), size);
1749 source->set_map_word(MapWord::FromForwardingAddress(target));
1753 RecordCopiedObject(heap, target);
1754 HEAP_PROFILE(heap, ObjectMoveEvent(source->address(), target->address()));
1755 Isolate* isolate = heap->isolate();
1756 if (isolate->logger()->is_logging_code_events() ||
1757 CpuProfiler::is_profiling(isolate)) {
1758 if (target->IsSharedFunctionInfo()) {
1759 PROFILE(isolate, SharedFunctionInfoMoveEvent(
1760 source->address(), target->address()));
1766 if (Marking::TransferColor(source, target)) {
1773 template<ObjectContents object_contents,
1774 SizeRestriction size_restriction,
1776 static inline void EvacuateObject(Map* map,
1784 int allocation_size = object_size;
1790 Heap* heap = map->GetHeap();
1791 if (heap->ShouldBePromoted(object->address(), object_size)) {
1792 MaybeObject* maybe_result;
1794 if ((size_restriction != SMALL) &&
1796 maybe_result = heap->lo_space()->AllocateRaw(allocation_size,
1799 if (object_contents == DATA_OBJECT) {
1800 maybe_result = heap->old_data_space()->AllocateRaw(allocation_size);
1803 heap->old_pointer_space()->AllocateRaw(allocation_size);
1808 if (maybe_result->ToObject(&result)) {
1812 target = EnsureDoubleAligned(heap, target, allocation_size);
1819 MigrateObject(heap,
object, target, object_size);
1821 if (object_contents == POINTER_OBJECT) {
1823 heap->promotion_queue()->insert(
1826 heap->promotion_queue()->insert(target, object_size);
1830 heap->tracer()->increment_promoted_objects_size(object_size);
1834 MaybeObject* allocation = heap->new_space()->AllocateRaw(allocation_size);
1835 heap->promotion_queue()->SetNewLimit(heap->new_space()->top());
1836 Object* result = allocation->ToObjectUnchecked();
1840 target = EnsureDoubleAligned(heap, target, allocation_size);
1847 MigrateObject(heap,
object, target, object_size);
1852 static inline void EvacuateJSFunction(Map* map,
1854 HeapObject*
object) {
1855 ObjectEvacuationStrategy<POINTER_OBJECT>::
1856 template VisitSpecialized<JSFunction::kSize>(map, slot, object);
1858 HeapObject* target = *slot;
1868 map->GetHeap()->mark_compact_collector()->
1869 RecordCodeEntrySlot(code_entry_slot, code);
1874 static inline void EvacuateFixedArray(Map* map,
1876 HeapObject*
object) {
1878 EvacuateObject<POINTER_OBJECT, UNKNOWN_SIZE, kObjectAlignment>(map,
1885 static inline void EvacuateFixedDoubleArray(Map* map,
1887 HeapObject*
object) {
1888 int length =
reinterpret_cast<FixedDoubleArray*
>(object)->length();
1890 EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kDoubleAlignment>(
1898 static inline void EvacuateByteArray(Map* map,
1900 HeapObject*
object) {
1901 int object_size =
reinterpret_cast<ByteArray*
>(object)->ByteArraySize();
1902 EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kObjectAlignment>(
1903 map, slot, object, object_size);
1907 static inline void EvacuateSeqAsciiString(Map* map,
1909 HeapObject*
object) {
1911 SeqAsciiStringSize(map->instance_type());
1912 EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kObjectAlignment>(
1913 map, slot, object, object_size);
1917 static inline void EvacuateSeqTwoByteString(Map* map,
1919 HeapObject*
object) {
1921 SeqTwoByteStringSize(map->instance_type());
1922 EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kObjectAlignment>(
1923 map, slot, object, object_size);
1927 static inline bool IsShortcutCandidate(
int type) {
1931 static inline void EvacuateShortcutCandidate(Map* map,
1933 HeapObject*
object) {
1934 ASSERT(IsShortcutCandidate(map->instance_type()));
1936 Heap* heap = map->GetHeap();
1940 heap->empty_string()) {
1946 if (!heap->InNewSpace(first)) {
1947 object->set_map_word(MapWord::FromForwardingAddress(first));
1951 MapWord first_word = first->map_word();
1952 if (first_word.IsForwardingAddress()) {
1953 HeapObject* target = first_word.ToForwardingAddress();
1956 object->set_map_word(MapWord::FromForwardingAddress(target));
1960 heap->DoScavengeObject(first->map(), slot, first);
1961 object->set_map_word(MapWord::FromForwardingAddress(*slot));
1966 EvacuateObject<POINTER_OBJECT, SMALL, kObjectAlignment>(
1967 map, slot, object, object_size);
1970 template<ObjectContents
object_contents>
1971 class ObjectEvacuationStrategy {
1973 template<
int object_size>
1974 static inline void VisitSpecialized(Map* map,
1976 HeapObject*
object) {
1977 EvacuateObject<object_contents, SMALL, kObjectAlignment>(
1978 map, slot, object, object_size);
1981 static inline void Visit(Map* map,
1983 HeapObject*
object) {
1984 int object_size = map->instance_size();
1985 EvacuateObject<object_contents, SMALL, kObjectAlignment>(
1986 map, slot, object, object_size);
1990 static VisitorDispatchTable<ScavengingCallback> table_;
1996 VisitorDispatchTable<ScavengingCallback>
1997 ScavengingVisitor<marks_handling, logging_and_profiling_mode>::table_;
2000 static void InitializeScavengingVisitorsTables() {
2010 void Heap::SelectScavengingVisitorsTable() {
2011 bool logging_and_profiling =
2013 CpuProfiler::is_profiling(
isolate()) ||
2015 isolate()->heap_profiler()->is_profiling());
2018 if (!logging_and_profiling) {
2019 scavenging_visitors_table_.
CopyFrom(
2023 scavenging_visitors_table_.
CopyFrom(
2028 if (!logging_and_profiling) {
2029 scavenging_visitors_table_.
CopyFrom(
2033 scavenging_visitors_table_.
CopyFrom(
2043 scavenging_visitors_table_.
Register(
2044 StaticVisitorBase::kVisitShortcutCandidate,
2046 StaticVisitorBase::kVisitConsString));
2052 void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject*
object) {
2054 MapWord first_word =
object->map_word();
2056 Map* map = first_word.ToMap();
2057 map->GetHeap()->DoScavengeObject(map, p,
object);
2062 int instance_size) {
2064 MaybeObject* maybe_result = AllocateRawMap();
2065 if (!maybe_result->ToObject(&result))
return maybe_result;
2068 reinterpret_cast<Map*
>(result)->set_map(raw_unchecked_meta_map());
2069 reinterpret_cast<Map*
>(result)->set_instance_type(instance_type);
2070 reinterpret_cast<Map*
>(result)->set_instance_size(instance_size);
2071 reinterpret_cast<Map*
>(result)->set_visitor_id(
2073 reinterpret_cast<Map*
>(result)->set_inobject_properties(0);
2074 reinterpret_cast<Map*
>(result)->set_pre_allocated_property_fields(0);
2075 reinterpret_cast<Map*
>(result)->set_unused_property_fields(0);
2076 reinterpret_cast<Map*
>(result)->set_bit_field(0);
2077 reinterpret_cast<Map*
>(result)->set_bit_field2(0);
2080 reinterpret_cast<Map*
>(result)->set_bit_field3(bit_field3);
2089 MaybeObject* maybe_result = AllocateRawMap();
2090 if (!maybe_result->To(&result))
return maybe_result;
2092 Map* map =
reinterpret_cast<Map*
>(result);
2105 map->set_instance_descriptors(empty_descriptor_array());
2120 if (!maybe_code_cache->To(&code_cache))
return maybe_code_cache;
2136 if (!maybe_accessors->To(&accessors))
return maybe_accessors;
2147 if (!maybe_info->To(&info))
return maybe_info;
2159 if (!maybe_entry->To(&entry))
return maybe_entry;
2166 const Heap::StringTypeTable Heap::string_type_table[] = {
2167 #define STRING_TYPE_ELEMENT(type, size, name, camel_name) \
2168 {type, size, k##camel_name##MapRootIndex},
2170 #undef STRING_TYPE_ELEMENT
2174 const Heap::ConstantSymbolTable Heap::constant_symbol_table[] = {
2175 #define CONSTANT_SYMBOL_ELEMENT(name, contents) \
2176 {contents, k##name##RootIndex},
2178 #undef CONSTANT_SYMBOL_ELEMENT
2182 const Heap::StructTable Heap::struct_table[] = {
2183 #define STRUCT_TABLE_ELEMENT(NAME, Name, name) \
2184 { NAME##_TYPE, Name::kSize, k##Name##MapRootIndex },
2186 #undef STRUCT_TABLE_ELEMENT
2190 bool Heap::CreateInitialMaps() {
2193 if (!maybe_obj->ToObject(&obj))
return false;
2196 Map* new_meta_map =
reinterpret_cast<Map*
>(obj);
2197 set_meta_map(new_meta_map);
2198 new_meta_map->set_map(new_meta_map);
2200 { MaybeObject* maybe_obj =
2202 if (!maybe_obj->ToObject(&obj))
return false;
2207 if (!maybe_obj->ToObject(&obj))
return false;
2212 { MaybeObject* maybe_obj = AllocateEmptyFixedArray();
2213 if (!maybe_obj->ToObject(&obj))
return false;
2218 if (!maybe_obj->ToObject(&obj))
return false;
2224 if (!maybe_obj->ToObject(&obj))
return false;
2231 { MaybeObject* maybe_obj = AllocateEmptyFixedArray();
2232 if (!maybe_obj->ToObject(&obj))
return false;
2237 meta_map()->set_code_cache(empty_fixed_array());
2238 meta_map()->init_back_pointer(undefined_value());
2239 meta_map()->set_instance_descriptors(empty_descriptor_array());
2241 fixed_array_map()->set_code_cache(empty_fixed_array());
2242 fixed_array_map()->init_back_pointer(undefined_value());
2243 fixed_array_map()->set_instance_descriptors(empty_descriptor_array());
2245 oddball_map()->set_code_cache(empty_fixed_array());
2246 oddball_map()->init_back_pointer(undefined_value());
2247 oddball_map()->set_instance_descriptors(empty_descriptor_array());
2250 meta_map()->set_prototype(null_value());
2251 meta_map()->set_constructor(null_value());
2253 fixed_array_map()->set_prototype(null_value());
2254 fixed_array_map()->set_constructor(null_value());
2256 oddball_map()->set_prototype(null_value());
2257 oddball_map()->set_constructor(null_value());
2259 { MaybeObject* maybe_obj =
2261 if (!maybe_obj->ToObject(&obj))
return false;
2263 set_fixed_cow_array_map(
Map::cast(obj));
2264 ASSERT(fixed_array_map() != fixed_cow_array_map());
2266 { MaybeObject* maybe_obj =
2268 if (!maybe_obj->ToObject(&obj))
return false;
2273 if (!maybe_obj->ToObject(&obj))
return false;
2278 if (!maybe_obj->ToObject(&obj))
return false;
2282 for (
unsigned i = 0; i <
ARRAY_SIZE(string_type_table); i++) {
2283 const StringTypeTable& entry = string_type_table[i];
2284 { MaybeObject* maybe_obj =
AllocateMap(entry.type, entry.size);
2285 if (!maybe_obj->ToObject(&obj))
return false;
2291 if (!maybe_obj->ToObject(&obj))
return false;
2293 set_undetectable_string_map(
Map::cast(obj));
2296 { MaybeObject* maybe_obj =
2298 if (!maybe_obj->ToObject(&obj))
return false;
2300 set_undetectable_ascii_string_map(
Map::cast(obj));
2303 { MaybeObject* maybe_obj =
2305 if (!maybe_obj->ToObject(&obj))
return false;
2307 set_fixed_double_array_map(
Map::cast(obj));
2309 { MaybeObject* maybe_obj =
2311 if (!maybe_obj->ToObject(&obj))
return false;
2315 { MaybeObject* maybe_obj =
2317 if (!maybe_obj->ToObject(&obj))
return false;
2322 if (!maybe_obj->ToObject(&obj))
return false;
2326 { MaybeObject* maybe_obj =
2328 if (!maybe_obj->ToObject(&obj))
return false;
2330 set_external_pixel_array_map(
Map::cast(obj));
2334 if (!maybe_obj->ToObject(&obj))
return false;
2336 set_external_byte_array_map(
Map::cast(obj));
2340 if (!maybe_obj->ToObject(&obj))
return false;
2342 set_external_unsigned_byte_array_map(
Map::cast(obj));
2346 if (!maybe_obj->ToObject(&obj))
return false;
2348 set_external_short_array_map(
Map::cast(obj));
2352 if (!maybe_obj->ToObject(&obj))
return false;
2354 set_external_unsigned_short_array_map(
Map::cast(obj));
2358 if (!maybe_obj->ToObject(&obj))
return false;
2360 set_external_int_array_map(
Map::cast(obj));
2364 if (!maybe_obj->ToObject(&obj))
return false;
2366 set_external_unsigned_int_array_map(
Map::cast(obj));
2370 if (!maybe_obj->ToObject(&obj))
return false;
2372 set_external_float_array_map(
Map::cast(obj));
2374 { MaybeObject* maybe_obj =
2376 if (!maybe_obj->ToObject(&obj))
return false;
2378 set_non_strict_arguments_elements_map(
Map::cast(obj));
2382 if (!maybe_obj->ToObject(&obj))
return false;
2384 set_external_double_array_map(
Map::cast(obj));
2387 if (!maybe_obj->ToObject(&obj))
return false;
2393 if (!maybe_obj->ToObject(&obj))
return false;
2395 set_global_property_cell_map(
Map::cast(obj));
2398 if (!maybe_obj->ToObject(&obj))
return false;
2400 set_one_pointer_filler_map(
Map::cast(obj));
2403 if (!maybe_obj->ToObject(&obj))
return false;
2405 set_two_pointer_filler_map(
Map::cast(obj));
2407 for (
unsigned i = 0; i <
ARRAY_SIZE(struct_table); i++) {
2408 const StructTable& entry = struct_table[i];
2409 { MaybeObject* maybe_obj =
AllocateMap(entry.type, entry.size);
2410 if (!maybe_obj->ToObject(&obj))
return false;
2415 { MaybeObject* maybe_obj =
2417 if (!maybe_obj->ToObject(&obj))
return false;
2421 { MaybeObject* maybe_obj =
2423 if (!maybe_obj->ToObject(&obj))
return false;
2425 set_function_context_map(
Map::cast(obj));
2427 { MaybeObject* maybe_obj =
2429 if (!maybe_obj->ToObject(&obj))
return false;
2433 { MaybeObject* maybe_obj =
2435 if (!maybe_obj->ToObject(&obj))
return false;
2439 { MaybeObject* maybe_obj =
2441 if (!maybe_obj->ToObject(&obj))
return false;
2445 { MaybeObject* maybe_obj =
2447 if (!maybe_obj->ToObject(&obj))
return false;
2451 { MaybeObject* maybe_obj =
2453 if (!maybe_obj->ToObject(&obj))
return false;
2457 { MaybeObject* maybe_obj =
2459 if (!maybe_obj->ToObject(&obj))
return false;
2461 Map* native_context_map =
Map::cast(obj);
2462 native_context_map->set_dictionary_map(
true);
2463 native_context_map->set_visitor_id(StaticVisitorBase::kVisitNativeContext);
2464 set_native_context_map(native_context_map);
2468 if (!maybe_obj->ToObject(&obj))
return false;
2470 set_shared_function_info_map(
Map::cast(obj));
2474 if (!maybe_obj->ToObject(&obj))
return false;
2490 { MaybeObject* maybe_result =
2492 if (!maybe_result->ToObject(&result))
return maybe_result;
2511 if (!maybe_result->ToObject(&result))
return maybe_result;
2521 { MaybeObject* maybe_result = AllocateRawCell();
2522 if (!maybe_result->ToObject(&result))
return maybe_result;
2525 global_property_cell_map());
2531 MaybeObject* Heap::CreateOddball(
const char* to_string,
2536 if (!maybe_result->ToObject(&result))
return maybe_result;
2546 if (!maybe_obj->ToObject(&obj))
return false;
2554 set_neander_map(new_neander_map);
2557 if (!maybe_obj->ToObject(&obj))
return false;
2561 if (!maybe_elements->ToObject(&elements))
return false;
2571 void Heap::CreateJSEntryStub() {
2573 set_js_entry_code(*stub.GetCode());
2577 void Heap::CreateJSConstructEntryStub() {
2578 JSConstructEntryStub stub;
2579 set_js_construct_entry_code(*stub.GetCode());
2583 void Heap::CreateFixedStubs() {
2597 Heap::CreateJSEntryStub();
2598 Heap::CreateJSConstructEntryStub();
2604 CodeStub::GenerateStubsAheadOfTime();
2608 bool Heap::CreateInitialObjects() {
2613 if (!maybe_obj->ToObject(&obj))
return false;
2619 if (!maybe_obj->ToObject(&obj))
return false;
2624 if (!maybe_obj->ToObject(&obj))
return false;
2630 set_the_hole_value(reinterpret_cast<Oddball*>(
Smi::FromInt(0)));
2634 if (!maybe_obj->ToObject(&obj))
return false;
2640 { MaybeObject* maybe_obj =
2641 undefined_value()->Initialize(
"undefined",
2644 if (!maybe_obj->ToObject(&obj))
return false;
2648 { MaybeObject* maybe_obj =
2650 if (!maybe_obj->ToObject(&obj))
return false;
2653 { MaybeObject* maybe_obj = CreateOddball(
"true",
2656 if (!maybe_obj->ToObject(&obj))
return false;
2660 { MaybeObject* maybe_obj = CreateOddball(
"false",
2663 if (!maybe_obj->ToObject(&obj))
return false;
2667 { MaybeObject* maybe_obj = CreateOddball(
"hole",
2670 if (!maybe_obj->ToObject(&obj))
return false;
2674 { MaybeObject* maybe_obj = CreateOddball(
"arguments_marker",
2677 if (!maybe_obj->ToObject(&obj))
return false;
2681 { MaybeObject* maybe_obj = CreateOddball(
"no_interceptor_result_sentinel",
2684 if (!maybe_obj->ToObject(&obj))
return false;
2686 set_no_interceptor_result_sentinel(obj);
2688 { MaybeObject* maybe_obj = CreateOddball(
"termination_exception",
2691 if (!maybe_obj->ToObject(&obj))
return false;
2693 set_termination_exception(obj);
2697 if (!maybe_obj->ToObject(&obj))
return false;
2701 for (
unsigned i = 0; i <
ARRAY_SIZE(constant_symbol_table); i++) {
2702 { MaybeObject* maybe_obj =
2704 if (!maybe_obj->ToObject(&obj))
return false;
2706 roots_[constant_symbol_table[i].index] =
String::cast(obj);
2715 { MaybeObject* maybe_obj =
2717 if (!maybe_obj->ToObject(&obj))
return false;
2722 { MaybeObject* maybe_obj =
2724 if (!maybe_obj->ToObject(&obj))
return false;
2731 if (!maybe_obj->ToObject(&obj))
return false;
2739 if (!maybe_obj->ToObject(&obj))
return false;
2744 if (!maybe_obj->ToObject(&obj))
return false;
2756 if (!maybe_obj->ToObject(&obj))
return false;
2760 if (!maybe_obj->ToObject(&obj))
return false;
2764 { MaybeObject* maybe_obj = AllocateInitialNumberStringCache();
2765 if (!maybe_obj->ToObject(&obj))
return false;
2770 { MaybeObject* maybe_obj =
2772 if (!maybe_obj->ToObject(&obj))
return false;
2779 if (!maybe_obj->ToObject(&obj))
return false;
2785 if (!maybe_obj->ToObject(&obj))
return false;
2791 if (!maybe_obj->ToObject(&obj))
return false;
2796 set_last_script_id(undefined_value());
2821 ASSERT(key_pattern->IsString());
2823 cache = heap->string_split_cache();
2826 ASSERT(key_pattern->IsFixedArray());
2827 cache = heap->regexp_multiple_cache();
2830 uint32_t hash = key_string->
Hash();
2832 ~(kArrayEntriesPerCacheEntry - 1));
2833 if (cache->
get(index + kStringOffset) == key_string &&
2834 cache->
get(index + kPatternOffset) == key_pattern) {
2835 return cache->
get(index + kArrayOffset);
2839 if (cache->
get(index + kStringOffset) == key_string &&
2840 cache->
get(index + kPatternOffset) == key_pattern) {
2841 return cache->
get(index + kArrayOffset);
2853 if (!key_string->IsSymbol())
return;
2855 ASSERT(key_pattern->IsString());
2856 if (!key_pattern->IsSymbol())
return;
2857 cache = heap->string_split_cache();
2860 ASSERT(key_pattern->IsFixedArray());
2861 cache = heap->regexp_multiple_cache();
2864 uint32_t hash = key_string->
Hash();
2866 ~(kArrayEntriesPerCacheEntry - 1));
2868 cache->
set(index + kStringOffset, key_string);
2869 cache->
set(index + kPatternOffset, key_pattern);
2870 cache->
set(index + kArrayOffset, value_array);
2875 cache->
set(index2 + kStringOffset, key_string);
2876 cache->
set(index2 + kPatternOffset, key_pattern);
2877 cache->
set(index2 + kArrayOffset, value_array);
2882 cache->
set(index + kStringOffset, key_string);
2883 cache->
set(index + kPatternOffset, key_pattern);
2884 cache->
set(index + kArrayOffset, value_array);
2890 for (
int i = 0; i < value_array->
length(); i++) {
2894 if (maybe_symbol->ToObject(&symbol)) {
2895 value_array->
set(i, symbol);
2911 MaybeObject* Heap::AllocateInitialNumberStringCache() {
2912 MaybeObject* maybe_obj =
2918 int Heap::FullSizeNumberStringCacheLength() {
2922 int number_string_cache_size = max_semispace_size_ / 512;
2923 number_string_cache_size = Max(kInitialNumberStringCacheSize * 2,
2924 Min(0x4000, number_string_cache_size));
2927 return number_string_cache_size * 2;
2931 void Heap::AllocateFullSizeNumberStringCache() {
2936 MaybeObject* maybe_obj =
2939 if (maybe_obj->ToObject(&new_cache)) {
2949 void Heap::FlushNumberStringCache() {
2951 int len = number_string_cache()->length();
2952 for (
int i = 0; i < len; i++) {
2953 number_string_cache()->set_undefined(
this, i);
2958 static inline int double_get_hash(
double d) {
2959 DoubleRepresentation rep(d);
2960 return static_cast<int>(rep.bits) ^ static_cast<int>(rep.bits >> 32);
2964 static inline int smi_get_hash(Smi* smi) {
2965 return smi->value();
2971 int mask = (number_string_cache()->length() >> 1) - 1;
2972 if (number->IsSmi()) {
2973 hash = smi_get_hash(
Smi::cast(number)) & mask;
2975 hash = double_get_hash(number->
Number()) & mask;
2977 Object* key = number_string_cache()->get(hash * 2);
2978 if (key == number) {
2979 return String::cast(number_string_cache()->
get(hash * 2 + 1));
2980 }
else if (key->IsHeapNumber() &&
2981 number->IsHeapNumber() &&
2983 return String::cast(number_string_cache()->
get(hash * 2 + 1));
2985 return undefined_value();
2991 int mask = (number_string_cache()->length() >> 1) - 1;
2992 if (number->IsSmi()) {
2993 hash = smi_get_hash(
Smi::cast(number)) & mask;
2995 hash = double_get_hash(number->
Number()) & mask;
2997 if (number_string_cache()->get(hash * 2) != undefined_value() &&
2998 number_string_cache()->length() != FullSizeNumberStringCacheLength()) {
3001 AllocateFullSizeNumberStringCache();
3004 number_string_cache()->set(hash * 2, number);
3005 number_string_cache()->set(hash * 2 + 1,
string);
3010 bool check_number_string_cache) {
3011 isolate_->
counters()->number_to_string_runtime()->Increment();
3012 if (check_number_string_cache) {
3014 if (cached != undefined_value()) {
3022 if (number->IsSmi()) {
3032 if (maybe_js_string->ToObject(&js_string)) {
3035 return maybe_js_string;
3040 bool check_number_string_cache) {
3043 if (!maybe->To<
Object>(&number))
return maybe;
3055 switch (array_type) {
3057 return kExternalByteArrayMapRootIndex;
3059 return kExternalUnsignedByteArrayMapRootIndex;
3061 return kExternalShortArrayMapRootIndex;
3063 return kExternalUnsignedShortArrayMapRootIndex;
3065 return kExternalIntArrayMapRootIndex;
3067 return kExternalUnsignedIntArrayMapRootIndex;
3069 return kExternalFloatArrayMapRootIndex;
3071 return kExternalDoubleArrayMapRootIndex;
3073 return kExternalPixelArrayMapRootIndex;
3076 return kUndefinedValueRootIndex;
3092 int int_value =
FastD2I(value);
3107 MaybeObject* maybe_result =
Allocate(foreign_map(), space);
3108 if (!maybe_result->To(&result))
return maybe_result;
3120 share->set_name(name);
3122 share->set_code(illegal);
3125 Code* construct_stub =
3127 share->set_construct_stub(construct_stub);
3128 share->set_instance_class_name(Object_symbol());
3165 if (!maybe_result->ToObject(&result))
return maybe_result;
3171 message->set_type(type);
3172 message->set_arguments(arguments);
3175 message->set_script(script);
3176 message->set_stack_trace(stack_trace);
3177 message->set_stack_frames(stack_frames);
3184 static inline bool Between(uint32_t character, uint32_t from, uint32_t to) {
3186 return character - from <= to - from;
3190 MUST_USE_RESULT static inline MaybeObject* MakeOrFindTwoCharacterString(
3197 if ((!Between(c1,
'0',
'9') || !Between(c2,
'0',
'9')) &&
3198 heap->symbol_table()->LookupTwoCharsSymbolIfExists(c1, c2, &symbol)) {
3205 { MaybeObject* maybe_result = heap->AllocateRawAsciiString(2);
3206 if (!maybe_result->ToObject(&result))
return maybe_result;
3214 { MaybeObject* maybe_result = heap->AllocateRawTwoByteString(2);
3215 if (!maybe_result->ToObject(&result))
return maybe_result;
3226 int first_length = first->
length();
3227 if (first_length == 0) {
3231 int second_length = second->
length();
3232 if (second_length == 0) {
3236 int length = first_length + second_length;
3242 unsigned c1 = first->Get(0);
3243 unsigned c2 = second->Get(0);
3244 return MakeOrFindTwoCharacterString(
this, c1, c2);
3249 bool is_ascii = first_is_ascii && second_is_ascii;
3258 bool is_ascii_data_in_two_byte_string =
false;
3263 is_ascii_data_in_two_byte_string =
3265 if (is_ascii_data_in_two_byte_string) {
3266 isolate_->
counters()->string_add_runtime_ext_to_ascii()->Increment();
3279 if (!maybe_result->ToObject(&result))
return maybe_result;
3285 if (first->IsExternalString()) {
3290 for (
int i = 0; i < first_length; i++) *dest++ = src[i];
3292 if (second->IsExternalString()) {
3297 for (
int i = 0; i < second_length; i++) *dest++ = src[i];
3300 if (is_ascii_data_in_two_byte_string) {
3303 if (!maybe_result->ToObject(&result))
return maybe_result;
3309 isolate_->
counters()->string_add_runtime_ext_to_ascii()->Increment();
3315 if (!maybe_result->ToObject(&result))
return maybe_result;
3325 Map* map = (is_ascii || is_ascii_data_in_two_byte_string) ?
3326 cons_ascii_string_map() : cons_string_map();
3330 if (!maybe_result->ToObject(&result))
return maybe_result;
3348 int length = end - start;
3350 return empty_string();
3351 }
else if (length == 1) {
3353 }
else if (length == 2) {
3357 unsigned c1 = buffer->Get(start);
3358 unsigned c2 = buffer->Get(start + 1);
3359 return MakeOrFindTwoCharacterString(
this, c1, c2);
3365 if (!FLAG_string_slices ||
3374 { MaybeObject* maybe_result = is_ascii
3377 if (!maybe_result->ToObject(&result))
return maybe_result;
3395 if (FLAG_verify_heap) {
3396 buffer->StringVerify();
3408 ? sliced_ascii_string_map()
3409 : sliced_string_map();
3411 if (!maybe_result->ToObject(&result))
return maybe_result;
3418 if (buffer->IsConsString()) {
3423 }
else if (buffer->IsSlicedString()) {
3433 sliced_string->
parent()->IsExternalString());
3440 size_t length = resource->
length();
3448 Map* map = external_ascii_string_map();
3451 if (!maybe_result->ToObject(&result))
return maybe_result;
3455 external_string->
set_length(static_cast<int>(length));
3465 size_t length = resource->
length();
3473 static const size_t kAsciiCheckLengthLimit = 32;
3474 bool is_ascii = length <= kAsciiCheckLengthLimit &&
3476 Map* map = is_ascii ?
3477 external_string_with_ascii_data_map() : external_string_map();
3480 if (!maybe_result->ToObject(&result))
return maybe_result;
3484 external_string->
set_length(static_cast<int>(length));
3494 Object* value = single_character_string_cache()->get(code);
3495 if (value != undefined_value())
return value;
3498 buffer[0] =
static_cast<char>(
code);
3502 if (!maybe_result->ToObject(&result))
return maybe_result;
3503 single_character_string_cache()->set(code, result);
3509 if (!maybe_result->ToObject(&result))
return maybe_result;
3512 answer->
Set(0, code);
3529 if (!maybe_result->ToObject(&result))
return maybe_result;
3532 reinterpret_cast<ByteArray*
>(result)->set_map_no_write_barrier(
3534 reinterpret_cast<ByteArray*
>(result)->set_length(length);
3548 if (!maybe_result->ToObject(&result))
return maybe_result;
3551 reinterpret_cast<ByteArray*
>(result)->set_map_no_write_barrier(
3553 reinterpret_cast<ByteArray*
>(result)->set_length(length);
3559 if (size == 0)
return;
3574 void* external_pointer,
3581 if (!maybe_result->ToObject(&result))
return maybe_result;
3584 reinterpret_cast<ExternalArray*
>(result)->set_map_no_write_barrier(
3586 reinterpret_cast<ExternalArray*
>(result)->set_length(length);
3587 reinterpret_cast<ExternalArray*
>(result)->set_external_pointer(
3602 if (!maybe_reloc_info->To(&reloc_info))
return maybe_reloc_info;
3608 MaybeObject* maybe_result;
3613 if (force_lo_space) {
3616 maybe_result = code_space_->
AllocateRaw(obj_size);
3618 if (!maybe_result->To<
HeapObject>(&result))
return maybe_result;
3620 if (immovable && !force_lo_space &&
3627 if (!maybe_result->To<
HeapObject>(&result))
return maybe_result;
3636 code->set_relocation_info(reloc_info);
3648 if (!self_reference.
is_null()) {
3649 *(self_reference.
location()) = code;
3659 if (FLAG_verify_heap) {
3669 int obj_size = code->
Size();
3670 MaybeObject* maybe_result;
3674 maybe_result = code_space_->
AllocateRaw(obj_size);
3678 if (!maybe_result->ToObject(&result))
return maybe_result;
3683 CopyBlock(new_addr, old_addr, obj_size);
3688 new_code->
Relocate(new_addr - old_addr);
3696 Object* reloc_info_array;
3697 { MaybeObject* maybe_reloc_info_array =
3699 if (!maybe_reloc_info_array->ToObject(&reloc_info_array)) {
3700 return maybe_reloc_info_array;
3710 size_t relocation_offset =
3713 MaybeObject* maybe_result;
3717 maybe_result = code_space_->
AllocateRaw(new_obj_size);
3721 if (!maybe_result->ToObject(&result))
return maybe_result;
3727 memcpy(new_addr, old_addr, relocation_offset);
3738 new_code->
Relocate(new_addr - old_addr);
3741 if (FLAG_verify_heap) {
3757 { MaybeObject* maybe_result =
3759 if (!maybe_result->ToObject(&result))
return maybe_result;
3767 void Heap::InitializeFunction(
JSFunction*
function,
3770 ASSERT(!prototype->IsMap());
3771 function->initialize_properties();
3772 function->initialize_elements();
3773 function->set_shared(shared);
3774 function->set_code(shared->code());
3775 function->set_prototype_or_initial_map(prototype);
3776 function->set_context(undefined_value());
3777 function->set_literals_or_bindings(empty_fixed_array());
3778 function->set_next_function_link(undefined_value());
3795 if (!maybe_map->To(&new_map))
return maybe_map;
3799 if (!maybe_prototype->ToObject(&prototype))
return maybe_prototype;
3803 MaybeObject* maybe_failure =
3805 constructor_symbol(),
function,
DONT_ENUM);
3806 if (maybe_failure->IsFailure())
return maybe_failure;
3819 { MaybeObject* maybe_result =
Allocate(function_map, space);
3820 if (!maybe_result->ToObject(&result))
return maybe_result;
3832 int arguments_object_size;
3833 bool strict_mode_callee = callee->IsJSFunction() &&
3835 if (strict_mode_callee) {
3838 strict_mode_arguments_boilerplate();
3857 { MaybeObject* maybe_result =
3859 if (!maybe_result->ToObject(&result))
return maybe_result;
3874 if (!strict_mode_callee) {
3891 for (
int i = 1; i != count; i++) {
3893 if (prev_key == current_key)
return true;
3894 prev_key = current_key;
3906 int instance_size = fun->shared()->CalculateInstanceSize();
3907 int in_object_properties = fun->shared()->CalculateInObjectProperties();
3910 if (!maybe_map->To(&map))
return maybe_map;
3918 if (!maybe_prototype->To(&prototype))
return maybe_prototype;
3922 map->set_prototype(prototype);
3931 if (fun->shared()->CanGenerateInlineConstructor(prototype)) {
3932 int count = fun->shared()->this_property_assignments_count();
3933 if (count > in_object_properties) {
3935 fun->shared()->ForbidInlineConstructor();
3939 if (!maybe_descriptors->To(&descriptors))
return maybe_descriptors;
3942 for (
int i = 0; i < count; i++) {
3943 String* name = fun->shared()->GetThisPropertyAssignmentName(i);
3944 ASSERT(name->IsSymbol());
3946 descriptors->
Set(i, &field, witness);
3948 descriptors->
Sort();
3954 if (HasDuplicates(descriptors)) {
3955 fun->shared()->ForbidInlineConstructor();
3964 fun->shared()->StartInobjectSlackTracking(map);
3970 void Heap::InitializeJSObjectFromMap(
JSObject* obj,
3973 obj->set_properties(properties);
3989 if (map->constructor()->IsJSFunction() &&
3991 IsInobjectSlackTrackingInProgress()) {
3994 filler = Heap::one_pointer_filler_map();
3996 filler = Heap::undefined_value();
4020 if (!maybe_properties->ToObject(&properties))
return maybe_properties;
4028 { MaybeObject* maybe_obj =
Allocate(map, space);
4029 if (!maybe_obj->ToObject(&obj))
return maybe_obj;
4047 if (!maybe_initial_map->ToObject(&initial_map))
return maybe_initial_map;
4050 Map::cast(initial_map)->set_constructor(constructor);
4058 ASSERT(!result->ToObject(&non_failure) || !non_failure->IsGlobalObject());
4068 if (!maybe_map->To(&map))
return maybe_map;
4072 if (!maybe_module->To(&module))
return maybe_module;
4073 module->set_context(context);
4074 module->set_scope_info(scope_info);
4085 ASSERT(capacity >= length);
4089 MaybeObject* maybe_array = AllocateJSArray(elements_kind, pretenure);
4091 if (!maybe_array->To(&array))
return maybe_array;
4093 if (capacity == 0) {
4095 array->set_elements(empty_fixed_array());
4100 MaybeObject* maybe_elms =
NULL;
4117 if (!maybe_elms->To(&elms))
return maybe_elms;
4119 array->set_elements(elms);
4129 MaybeObject* maybe_array = AllocateJSArray(elements_kind, pretenure);
4131 if (!maybe_array->To(&array))
return maybe_array;
4133 array->set_elements(elements);
4146 if (!maybe_map_obj->To<
Map>(&map))
return maybe_map_obj;
4147 map->set_prototype(prototype);
4152 if (!maybe_result->To<
JSProxy>(&result))
return maybe_result;
4154 result->set_handler(handler);
4168 MaybeObject* maybe_map_obj =
4170 if (!maybe_map_obj->To<
Map>(&map))
return maybe_map_obj;
4171 map->set_prototype(prototype);
4178 result->set_handler(handler);
4180 result->set_call_trap(call_trap);
4181 result->set_construct_trap(construct_trap);
4189 ASSERT(map->is_dictionary_map());
4194 ASSERT(map->NextFreePropertyIndex() == 0);
4198 ASSERT(map->unused_property_fields() == 0);
4199 ASSERT(map->inobject_properties() == 0);
4208 MaybeObject* maybe_dictionary =
4210 map->NumberOfOwnDescriptors() * 2 + initial_size);
4211 if (!maybe_dictionary->To(&dictionary))
return maybe_dictionary;
4217 PropertyDetails details = descs->
GetDetails(i);
4219 PropertyDetails d = PropertyDetails(details.attributes(),
4221 details.descriptor_index());
4224 if (!maybe_value->ToObject(&value))
return maybe_value;
4226 MaybeObject* maybe_added = dictionary->
Add(descs->
GetKey(i), value, d);
4227 if (!maybe_added->To(&dictionary))
return maybe_added;
4233 if (!maybe_global->To(&global))
return maybe_global;
4235 InitializeJSObjectFromMap(global, dictionary, map);
4240 if (!maybe_map->To(&new_map))
return maybe_map;
4245 global->set_properties(dictionary);
4248 ASSERT(global->IsGlobalObject());
4260 Map* map = source->
map();
4269 { MaybeObject* maybe_clone =
4271 if (!maybe_clone->ToObject(&clone))
return maybe_clone;
4283 { MaybeObject* maybe_clone = new_space_.AllocateRaw(object_size);
4284 if (!maybe_clone->ToObject(&clone))
return maybe_clone;
4299 if (elements->length() > 0) {
4301 { MaybeObject* maybe_elem;
4302 if (elements->map() == fixed_cow_array_map()) {
4309 if (!maybe_elem->ToObject(&elem))
return maybe_elem;
4314 if (properties->length() > 0) {
4317 if (!maybe_prop->ToObject(&prop))
return maybe_prop;
4334 if (!maybe->To<
Map>(&map))
return maybe;
4337 int size_difference =
object->map()->instance_size() - map->
instance_size();
4338 ASSERT(size_difference >= 0);
4340 map->set_prototype(object->
map()->prototype());
4346 if (!maybe->ToObject(&properties))
return maybe;
4353 if (!maybe->To<
String>(&name))
return maybe;
4362 object->set_map(map);
4373 isolate()->context()->native_context());
4377 if (size_difference > 0) {
4393 ASSERT(map->instance_size() ==
object->map()->instance_size());
4394 ASSERT(map->instance_type() ==
object->map()->instance_type());
4397 int prop_size = map->unused_property_fields() - map->inobject_properties();
4400 if (!maybe_properties->ToObject(&properties))
return maybe_properties;
4414 int length = string.length();
4419 { MaybeObject* maybe_result =
4421 if (!maybe_result->ToObject(&result)) return maybe_result;
4431 int non_ascii_start,
4435 int chars = non_ascii_start;
4438 decoder->Reset(string.start() + non_ascii_start, string.length() - chars);
4439 while (decoder->has_more()) {
4440 uint32_t r = decoder->GetNext();
4450 if (!maybe_result->ToObject(&result)) return maybe_result;
4455 decoder->Reset(string.start(), string.length());
4458 uint32_t r = decoder->GetNext();
4474 int length = string.length();
4475 const uc16* start = string.start();
4479 if (!maybe_result->ToObject(&result)) return maybe_result;
4483 if (!maybe_result->ToObject(&result)) return maybe_result;
4503 return external_symbol_with_ascii_data_map();
4506 return short_external_ascii_symbol_map();
4508 return short_external_symbol_with_ascii_data_map();
4509 default: return NULL;
4516 uint32_t hash_field) {
4521 bool is_ascii = true;
4538 map = ascii_symbol_map();
4553 if (!maybe_result->ToObject(&result)) return maybe_result;
4556 reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier(map);
4567 uint32_t character = buffer->GetNext();
4572 answer->Set(i++, character);
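// Illustrative sketch (not part of heap.cc): the two-pass pattern used by the
// UTF-8 string allocation above -- first walk the input to count the decoded
// characters, then allocate a buffer of exactly that length and decode again
// into it. Plain C++ with a trivial 1-to-3-byte decoder standing in for
// unibrow::Utf8Decoder; the real decoder also handles surrogate pairs and
// invalid sequences.
#include <string>
#include <vector>
#include <cstdint>

static std::vector<uint16_t> DecodeUtf8TwoPassSketch(const std::string& in) {
  // Pass 1: count code units without materializing them.
  size_t chars = 0;
  for (size_t i = 0; i < in.size(); ) {
    unsigned char c = static_cast<unsigned char>(in[i]);
    i += (c < 0x80) ? 1 : (c < 0xE0) ? 2 : 3;   // assumes well-formed input
    ++chars;
  }
  // Pass 2: allocate once, then fill, mirroring answer->Set(i++, character).
  std::vector<uint16_t> out;
  out.reserve(chars);
  for (size_t i = 0; i < in.size(); ) {
    unsigned char c = static_cast<unsigned char>(in[i]);
    if (c < 0x80) { out.push_back(c); i += 1; }
    else if (c < 0xE0) {
      out.push_back(static_cast<uint16_t>(((c & 0x1F) << 6) | (in[i + 1] & 0x3F)));
      i += 2;
    } else {
      out.push_back(static_cast<uint16_t>(((c & 0x0F) << 12) |
                    ((in[i + 1] & 0x3F) << 6) | (in[i + 2] & 0x3F)));
      i += 3;
    }
  }
  return out;
}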
4591 if (size > kMaxObjectSizeInNewSpace) {
4603 { MaybeObject* maybe_result = AllocateRaw(size, space, retry_space);
4604 if (!maybe_result->ToObject(&result)) return maybe_result;
4614 if (FLAG_verify_heap) {
4637 if (size > kMaxObjectSizeInNewSpace) {
4649 { MaybeObject* maybe_result = AllocateRaw(size, space, retry_space);
4650 if (!maybe_result->ToObject(&result)) return maybe_result;
4662 MaybeObject* Heap::AllocateJSArray(
4666 JSFunction* array_function = native_context->array_function();
4668 Object* maybe_map_array = native_context->js_array_maps();
4669 if (!maybe_map_array->IsUndefined()) {
4670 Object* maybe_transitioned_map =
4672 if (!maybe_transitioned_map->IsUndefined()) {
4673 map = Map::cast(maybe_transitioned_map);
4681 MaybeObject* Heap::AllocateEmptyFixedArray() {
4684 { MaybeObject* maybe_result =
4686 if (!maybe_result->ToObject(&result)) return maybe_result;
4689 reinterpret_cast<FixedArray*>(result)->set_map_no_write_barrier(
4691 reinterpret_cast<FixedArray*>(result)->set_length(0);
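// Illustrative sketch (not part of heap.cc): the space-selection rule visible in
// the string and array allocators above -- objects larger than the new-space
// object limit (or explicitly pretenured ones) go straight to an old space,
// everything else goes to new space with an old-space retry. The enum and the
// size limit are hypothetical stand-ins, not the real constants.
enum SketchSpace { SKETCH_NEW_SPACE, SKETCH_OLD_DATA_SPACE };

static const int kSketchMaxNewSpaceObjectSize = 64 * 1024;  // stand-in limit

static SketchSpace PickAllocationSpaceSketch(int size, bool pretenure) {
  if (pretenure) return SKETCH_OLD_DATA_SPACE;
  if (size > kSketchMaxNewSpaceObjectSize) return SKETCH_OLD_DATA_SPACE;
  return SKETCH_NEW_SPACE;  // retry_space would still be an old space
}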
4705 return size <= kMaxObjectSizeInNewSpace
4706 ? new_space_.AllocateRaw(size)
4715 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
4732 for (int i = 0; i < len; i++) result->set(i, src->get(i), mode);
4742 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
4756 if (length == 0) return empty_fixed_array();
4759 if (!maybe_result->ToObject(&result)) return maybe_result;
4780 if (space == NEW_SPACE && size > kMaxObjectSizeInNewSpace) {
4802 ASSERT(heap->empty_fixed_array()->IsFixedArray());
4803 if (length == 0) return heap->empty_fixed_array();
4808 if (!maybe_result->ToObject(&result)) return maybe_result;
4813 array->set_length(length);
4820 return AllocateFixedArrayWithFiller(this,
4829 return AllocateFixedArrayWithFiller(this,
4837 if (length == 0) return empty_fixed_array();
4841 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
4844 reinterpret_cast<FixedArray*>(obj)->set_map_no_write_barrier(
4851 MaybeObject* Heap::AllocateEmptyFixedDoubleArray() {
4854 { MaybeObject* maybe_result =
4856 if (!maybe_result->ToObject(&result)) return maybe_result;
4859 reinterpret_cast<FixedDoubleArray*>(result)->set_map_no_write_barrier(
4860 fixed_double_array_map());
4861 reinterpret_cast<FixedDoubleArray*>(result)->set_length(0);
4869 if (length == 0) return empty_fixed_array();
4873 if (!maybe_obj->ToObject(&elements_object)) return maybe_obj;
4886 if (length == 0) return empty_fixed_array();
4890 if (!maybe_obj->ToObject(&elements_object)) return maybe_obj;
4894 for (int i = 0; i < length; ++i) {
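// Illustrative sketch (not part of heap.cc): what the "fixed array with filler"
// allocation above amounts to -- allocate the backing store once, set the length,
// then write the same filler value (undefined or the-hole) into every slot.
// std::vector stands in for the raw FixedArray storage.
#include <vector>
#include <cstdint>

static std::vector<intptr_t> AllocateFixedArrayWithFillerSketch(int length,
                                                                intptr_t filler) {
  std::vector<intptr_t> array(static_cast<size_t>(length));
  for (int i = 0; i < length; ++i) {
    array[static_cast<size_t>(i)] = filler;   // MemsetPointer in the real code
  }
  return array;
}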
4914 #ifndef V8_HOST_ARCH_64_BIT
4918 if (space == NEW_SPACE && size > kMaxObjectSizeInNewSpace) {
4931 { MaybeObject* maybe_object = AllocateRaw(size, space, retry_space);
4932 if (!maybe_object->To<HeapObject>(&object)) return maybe_object;
4935 return EnsureDoubleAligned(this, object, size);
4942 if (!maybe_result->ToObject(&result)) return maybe_result;
4944 reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier(
4946 ASSERT(result->IsHashTable());
4953 { MaybeObject* maybe_result =
4955 if (!maybe_result->ToObject(&result)) return maybe_result;
4959 context->set_js_array_maps(undefined_value());
4960 ASSERT(context->IsNativeContext());
4961 ASSERT(result->IsContext());
4969 { MaybeObject* maybe_result =
4971 if (!maybe_result->ToObject(&result)) return maybe_result;
4980 ASSERT(result->IsContext());
4987 { MaybeObject* maybe_result =
4989 if (!maybe_result->ToObject(&result)) return maybe_result;
5003 if (!maybe_result->ToObject(&result)) return maybe_result;
5021 { MaybeObject* maybe_result =
5023 if (!maybe_result->ToObject(&result)) return maybe_result;
5041 if (!maybe_result->ToObject(&result)) return maybe_result;
5057 { MaybeObject* maybe_result =
5059 if (!maybe_result->ToObject(&result)) return maybe_result;
5074 if (!maybe_scope_info->To(&scope_info)) return maybe_scope_info;
5083 #define MAKE_CASE(NAME, Name, name) \
5084 case NAME##_TYPE: map = name##_map(); break;
5095 { MaybeObject* maybe_result = Allocate(map, space);
5096 if (!maybe_result->ToObject(&result)) return maybe_result;
5110 ASSERT(IsAllocationAllowed());
5118 void Heap::AdvanceIdleIncrementalMarking(intptr_t step_size) {
5123 bool uncommit = false;
5124 if (gc_count_at_last_idle_gc_ == gc_count_) {
5130 gc_count_at_last_idle_gc_ = gc_count_;
5142 const int kMaxHint = 1000;
5144 const int kMinHintForFullGC = 100;
5145 intptr_t size_factor = Min(Max(hint, 20), kMaxHint) / 4;
5149 intptr_t step_size =
5152 if (contexts_disposed_ > 0) {
5153 if (hint >= kMaxHint) {
5159 int mark_sweep_time = Min(TimeMarkSweepWouldTakeInMs(), 1000);
5160 if (hint >= mark_sweep_time && !FLAG_expose_gc &&
5162 HistogramTimerScope scope(isolate_->counters()->gc_context());
5164 "idle notification: contexts disposed");
5166 AdvanceIdleIncrementalMarking(step_size);
5167 contexts_disposed_ = 0;
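// Illustrative sketch (not part of heap.cc): how the idle-notification code above
// turns the embedder's time hint into an incremental-marking step size. The hint
// is clamped to [20, 1000], divided by 4, and scaled by a per-step marking
// constant; kSketchStepSizePerIdleUnit is a hypothetical stand-in for the real
// IncrementalMarking constant.
#include <algorithm>
#include <cstdint>

static intptr_t IdleMarkingStepSizeSketch(int hint_ms) {
  const int kMaxHint = 1000;
  const intptr_t kSketchStepSizePerIdleUnit = 32 * 1024;  // stand-in value
  intptr_t size_factor = std::min(std::max(hint_ms, 20), kMaxHint) / 4;
  return size_factor * kSketchStepSizePerIdleUnit;
}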
5181 return IdleGlobalGC();
5201 if (mark_sweeps_since_idle_round_started_ >= kMaxMarkSweepsInIdleRound) {
5202 if (EnoughGarbageSinceLastIdleRound()) {
5209 int new_mark_sweeps = ms_count_ - ms_count_at_last_idle_notification_;
5210 mark_sweeps_since_idle_round_started_ += new_mark_sweeps;
5211 ms_count_at_last_idle_notification_ = ms_count_;
5213 int remaining_mark_sweeps = kMaxMarkSweepsInIdleRound -
5214 mark_sweeps_since_idle_round_started_;
5216 if (remaining_mark_sweeps <= 0) {
5227 if (remaining_mark_sweeps <= 2 && hint >= kMinHintForFullGC) {
5229 "idle notification: finalize idle round");
5235 AdvanceIdleIncrementalMarking(step_size);
5241 bool Heap::IdleGlobalGC() {
5242 static const int kIdlesBeforeScavenge = 4;
5243 static const int kIdlesBeforeMarkSweep = 7;
5244 static const int kIdlesBeforeMarkCompact = 8;
5245 static const int kMaxIdleCount = kIdlesBeforeMarkCompact + 1;
5246 static const unsigned int kGCsBetweenCleanup = 4;
5248 if (!last_idle_notification_gc_count_init_) {
5249 last_idle_notification_gc_count_ = gc_count_;
5250 last_idle_notification_gc_count_init_ = true;
5253 bool uncommit = true;
5254 bool finished = false;
5260 if (gc_count_ - last_idle_notification_gc_count_ < kGCsBetweenCleanup) {
5261 number_idle_notifications_ =
5262 Min(number_idle_notifications_ + 1, kMaxIdleCount);
5264 number_idle_notifications_ = 0;
5265 last_idle_notification_gc_count_ = gc_count_;
5268 if (number_idle_notifications_ == kIdlesBeforeScavenge) {
5271 last_idle_notification_gc_count_ = gc_count_;
5272 } else if (number_idle_notifications_ == kIdlesBeforeMarkSweep) {
5280 last_idle_notification_gc_count_ = gc_count_;
5282 } else if (number_idle_notifications_ == kIdlesBeforeMarkCompact) {
5285 last_idle_notification_gc_count_ = gc_count_;
5286 number_idle_notifications_ = 0;
5288 } else if (number_idle_notifications_ > kIdlesBeforeMarkCompact) {
5307 for (Space* space = spaces.next(); space != NULL; space = spaces.next())
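// Illustrative sketch (not part of heap.cc): the escalation ladder in IdleGlobalGC
// above -- repeated idle notifications without intervening GC activity trigger
// progressively heavier collections: a scavenge after 4 idles, a mark-sweep after
// 7, a mark-compact after 8, after which the counter resets.
enum IdleActionSketch {
  IDLE_NOTHING,
  IDLE_SCAVENGE,
  IDLE_MARK_SWEEP,
  IDLE_MARK_COMPACT
};

static IdleActionSketch NextIdleActionSketch(int idle_notifications) {
  const int kIdlesBeforeScavenge = 4;
  const int kIdlesBeforeMarkSweep = 7;
  const int kIdlesBeforeMarkCompact = 8;
  if (idle_notifications == kIdlesBeforeScavenge) return IDLE_SCAVENGE;
  if (idle_notifications == kIdlesBeforeMarkSweep) return IDLE_MARK_SWEEP;
  if (idle_notifications >= kIdlesBeforeMarkCompact) return IDLE_MARK_COMPACT;
  return IDLE_NOTHING;
}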
5312 void Heap::ReportCodeStatistics(const char* title) {
5313 PrintF(">>>>>> Code Stats (%s) >>>>>>\n", title);
5314 PagedSpace::ResetCodeStatistics();
5317 code_space_->CollectCodeStatistics();
5318 lo_space_->CollectCodeStatistics();
5319 PagedSpace::ReportCodeStatistics();
5326 void Heap::ReportHeapStatistics(const char* title) {
5328 PrintF(">>>>>> =============== %s (%d) =============== >>>>>>\n",
5331 old_gen_promotion_limit_);
5333 old_gen_allocation_limit_);
5334 PrintF("old_gen_limit_factor_ %d\n", old_gen_limit_factor_);
5341 PrintF("Heap statistics : ");
5345 PrintF("Old pointer space : ");
5346 old_pointer_space_->ReportStatistics();
5347 PrintF("Old data space : ");
5348 old_data_space_->ReportStatistics();
5350 code_space_->ReportStatistics();
5352 map_space_->ReportStatistics();
5354 cell_space_->ReportStatistics();
5355 PrintF("Large object space : ");
5356 lo_space_->ReportStatistics();
5357 PrintF(">>>>>> ========================================= >>>>>>\n");
5371 old_pointer_space_->Contains(addr) ||
5393 return old_pointer_space_->Contains(addr);
5395 return old_data_space_->Contains(addr);
5397 return code_space_->Contains(addr);
5401 return cell_space_->Contains(addr);
5411 void Heap::Verify() {
5419 new_space_.Verify();
5421 old_pointer_space_->Verify(&visitor);
5422 map_space_->Verify(&visitor);
5425 old_data_space_->Verify(&no_dirty_regions_visitor);
5426 code_space_->Verify(&no_dirty_regions_visitor);
5427 cell_space_->Verify(&no_dirty_regions_visitor);
5429 lo_space_->Verify();
5437 { MaybeObject* maybe_new_table =
5438 symbol_table()->LookupSymbol(string, &symbol);
5439 if (!maybe_new_table->ToObject(&new_table)) return maybe_new_table;
5452 { MaybeObject* maybe_new_table =
5453 symbol_table()->LookupAsciiSymbol(string, &symbol);
5454 if (!maybe_new_table->ToObject(&new_table)) return maybe_new_table;
5469 { MaybeObject* maybe_new_table =
5470 symbol_table()->LookupSubStringAsciiSymbol(string,
5474 if (!maybe_new_table->ToObject(&new_table)) return maybe_new_table;
5487 { MaybeObject* maybe_new_table =
5488 symbol_table()->LookupTwoByteSymbol(string, &symbol);
5489 if (!maybe_new_table->ToObject(&new_table)) return maybe_new_table;
5500 if (string->IsSymbol()) return string;
5503 { MaybeObject* maybe_new_table =
5504 symbol_table()->LookupString(string, &symbol);
5505 if (!maybe_new_table->ToObject(&new_table)) return maybe_new_table;
5516 if (string->IsSymbol()) {
5520 return symbol_table()->LookupSymbolIfExists(string, symbol);
5526 while (it.has_next()) {
5548 bool record_slots = false;
5554 while (slot_address < end) {
5555 Object** slot = reinterpret_cast<Object**>(slot_address);
5561 if (object->IsHeapObject()) {
5563 callback(reinterpret_cast<HeapObject**>(slot),
5565 Object* new_object = *slot;
5570 reinterpret_cast<Address>(slot));
5572 SLOW_ASSERT(!MarkCompactCollector::IsOnEvacuationCandidate(new_object));
5573 } else if (record_slots &&
5574 MarkCompactCollector::IsOnEvacuationCandidate(object)) {
5584 typedef bool (*CheckStoreBufferFilter)(Object** addr);
5587 bool IsAMapPointerAddress(Object** addr) {
5588 uintptr_t a = reinterpret_cast<uintptr_t>(addr);
5595 bool EverythingsAPointer(Object** addr) {
5600 static void CheckStoreBuffer(Heap* heap,
5603 Object**** store_buffer_position,
5604 Object*** store_buffer_top,
5605 CheckStoreBufferFilter filter,
5606 Address special_garbage_start,
5607 Address special_garbage_end) {
5608 Map* free_space_map = heap->free_space_map();
5609 for ( ; current < limit; current++) {
5613 if (o == free_space_map) {
5615 FreeSpace* free_space =
5617 int skip = free_space->Size();
5618 ASSERT(current_address + skip <= reinterpret_cast<Address>(limit));
5621 current = reinterpret_cast<Object**>(current_address);
5626 if (current_address == special_garbage_start &&
5627 special_garbage_end != special_garbage_start) {
5629 current = reinterpret_cast<Object**>(current_address);
5632 if (!(*filter)(current)) continue;
5633 ASSERT(current_address < special_garbage_start ||
5634 current_address >= special_garbage_end);
5640 if (!heap->InNewSpace(o)) continue;
5641 while (**store_buffer_position < current &&
5642 *store_buffer_position < store_buffer_top) {
5643 (*store_buffer_position)++;
5645 if (**store_buffer_position != current ||
5646 *store_buffer_position == store_buffer_top) {
5647 Object** obj_start = current;
5648 while (!(*obj_start)->IsMap()) obj_start--;
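// Illustrative sketch (not part of heap.cc): the invariant CheckStoreBuffer
// verifies above. Both the candidate slots (old-space addresses that hold
// new-space pointers) and the store buffer are walked in ascending address order;
// every such slot must be present in the buffer, otherwise a write-barrier update
// was missed. std::vector stands in for the real page walk and buffer.
#include <vector>
#include <cstdint>

static bool AllOldToNewSlotsRecordedSketch(
    const std::vector<uintptr_t>& old_to_new_slots,   // sorted slot addresses
    const std::vector<uintptr_t>& store_buffer) {     // sorted recorded addresses
  size_t pos = 0;  // mirrors store_buffer_position
  for (uintptr_t slot : old_to_new_slots) {
    while (pos < store_buffer.size() && store_buffer[pos] < slot) ++pos;
    if (pos == store_buffer.size() || store_buffer[pos] != slot) return false;
  }
  return true;
}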
5658 void Heap::OldPointerSpaceCheckStoreBuffer() {
5660 PageIterator pages(space);
5664 while (pages.has_next()) {
5665 Page* page = pages.next();
5666 Object** current = reinterpret_cast<Object**>(page->area_start());
5668 Address end = page->area_end();
5674 CheckStoreBuffer(this,
5677 &store_buffer_position,
5679 &EverythingsAPointer,
5686 void Heap::MapSpaceCheckStoreBuffer() {
5688 PageIterator pages(space);
5692 while (pages.has_next()) {
5693 Page* page = pages.next();
5694 Object** current = reinterpret_cast<Object**>(page->area_start());
5696 Address end = page->area_end();
5702 CheckStoreBuffer(this,
5705 &store_buffer_position,
5707 &IsAMapPointerAddress,
5714 void Heap::LargeObjectSpaceCheckStoreBuffer() {
5715 LargeObjectIterator it(lo_space());
5716 for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) {
5720 if (object->IsFixedArray()) {
5723 Object** current = reinterpret_cast<Object**>(object->address());
5725 reinterpret_cast<Object**>(object->address() + object->Size());
5726 CheckStoreBuffer(this,
5729 &store_buffer_position,
5731 &EverythingsAPointer,
5748 v->Synchronize(VisitorSynchronization::kSymbolTable);
5752 external_string_table_.Iterate(v);
5754 v->Synchronize(VisitorSynchronization::kExternalStringsTable);
5760 v->Synchronize(VisitorSynchronization::kStrongRootList);
5762 v->VisitPointer(BitCast<Object**>(&hidden_symbol_));
5763 v->Synchronize(VisitorSynchronization::kSymbol);
5766 v->Synchronize(VisitorSynchronization::kBootstrapper);
5768 v->Synchronize(VisitorSynchronization::kTop);
5769 Relocatable::Iterate(v);
5770 v->Synchronize(VisitorSynchronization::kRelocatable);
5772 #ifdef ENABLE_DEBUGGER_SUPPORT
5773 isolate_->debug()->Iterate(v);
5778 v->Synchronize(VisitorSynchronization::kDebug);
5780 v->Synchronize(VisitorSynchronization::kCompilationCache);
5785 v->Synchronize(VisitorSynchronization::kHandleScope);
5793 v->Synchronize(VisitorSynchronization::kBuiltins);
5808 v->Synchronize(VisitorSynchronization::kGlobalHandles);
5812 v->Synchronize(VisitorSynchronization::kThreadManager);
5834 intptr_t max_old_gen_size,
5835 intptr_t max_executable_size) {
5838 if (FLAG_stress_compaction) {
5843 if (max_semispace_size > 0) {
5846 if (FLAG_trace_gc) {
5847 PrintPID("Max semispace size cannot be less than %dkbytes\n",
5851 max_semispace_size_ = max_semispace_size;
5860 if (max_semispace_size_ > reserved_semispace_size_) {
5861 max_semispace_size_ = reserved_semispace_size_;
5862 if (FLAG_trace_gc) {
5863 PrintPID("Max semispace size cannot be more than %dkbytes\n",
5864 reserved_semispace_size_ >> 10);
5870 reserved_semispace_size_ = max_semispace_size_;
5873 if (max_old_gen_size > 0) max_old_generation_size_ = max_old_gen_size;
5874 if (max_executable_size > 0) {
5880 if (max_executable_size_ > max_old_generation_size_) {
5881 max_executable_size_ = max_old_generation_size_;
5888 initial_semispace_size_ = Min(initial_semispace_size_, max_semispace_size_);
5889 external_allocation_limit_ = 16 * max_semispace_size_;
5893 max_old_generation_size_ = Max(static_cast<intptr_t>(paged_space_count *
5895 RoundUp(max_old_generation_size_,
5904 return ConfigureHeap(static_cast<intptr_t>(FLAG_max_new_space_size / 2) * KB,
5905 static_cast<intptr_t>(FLAG_max_old_space_size) * MB,
5906 static_cast<intptr_t>(FLAG_max_executable_size) * MB);
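// Illustrative sketch (not part of heap.cc): the clamping performed by
// ConfigureHeap above -- the requested semispace size is kept inside the
// [minimum, reserved] range and the executable budget can never exceed the old
// generation budget. The limit values here are hypothetical examples.
#include <algorithm>
#include <cstdint>

struct HeapLimitsSketch {
  intptr_t semispace_size;
  intptr_t old_generation_size;
  intptr_t executable_size;
};

static HeapLimitsSketch ClampHeapLimitsSketch(intptr_t requested_semispace,
                                              intptr_t requested_old_gen,
                                              intptr_t requested_executable) {
  const intptr_t kMinSemispace = 512 * 1024;            // stand-in minimum
  const intptr_t kReservedSemispace = 8 * 1024 * 1024;  // stand-in reserved maximum
  HeapLimitsSketch limits;
  limits.semispace_size =
      std::min(std::max(requested_semispace, kMinSemispace), kReservedSemispace);
  limits.old_generation_size = requested_old_gen;
  limits.executable_size = std::min(requested_executable, requested_old_gen);
  return limits;
}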
5933 if (take_snapshot) {
5934 HeapIterator iterator;
5937 obj = iterator.next()) {
5957 intptr_t Heap::PromotedExternalMemorySize() {
5958 if (amount_of_external_allocated_memory_
5959 <= amount_of_external_allocated_memory_at_last_global_gc_) return 0;
5960 return amount_of_external_allocated_memory_
5961 - amount_of_external_allocated_memory_at_last_global_gc_;
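// Illustrative sketch (not part of heap.cc): PromotedExternalMemorySize above
// reports only the growth in embedder-allocated external memory since the last
// full GC; if the current amount has shrunk below that baseline it reports zero.
#include <cstdint>

static intptr_t PromotedExternalMemorySketch(intptr_t current,
                                             intptr_t at_last_global_gc) {
  if (current <= at_last_global_gc) return 0;
  return current - at_last_global_gc;
}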
5967 static const int kMarkTag = 2;
5970 class HeapDebugUtils {
5972 explicit HeapDebugUtils(Heap* heap)
5973 : search_for_any_global_(false),
5974 search_target_(NULL),
5975 found_target_(false),
5980 class MarkObjectVisitor : public ObjectVisitor {
5982 explicit MarkObjectVisitor(HeapDebugUtils* utils) : utils_(utils) { }
5986 for (Object** p = start; p < end; p++) {
5987 if ((*p)->IsHeapObject())
5988 utils_->MarkObjectRecursively(p);
5992 HeapDebugUtils* utils_;
5995 void MarkObjectRecursively(Object** p) {
5996 if (!(*p)->IsHeapObject()) return;
6002 if (!map->IsHeapObject()) return;
6004 if (found_target_) return;
6005 object_stack_.Add(obj);
6006 if ((search_for_any_global_ && obj->IsJSGlobalObject()) ||
6007 (!search_for_any_global_ && (obj == search_target_))) {
6008 found_target_ = true;
6015 Address map_addr = map_p->address();
6017 obj->set_map_no_write_barrier(reinterpret_cast<Map*>(map_addr + kMarkTag));
6019 MarkObjectRecursively(&map);
6021 MarkObjectVisitor mark_visitor(this);
6023 obj->IterateBody(map_p->instance_type(), obj->SizeFromMap(map_p),
6027 object_stack_.RemoveLast();
6031 class UnmarkObjectVisitor : public ObjectVisitor {
6033 explicit UnmarkObjectVisitor(HeapDebugUtils* utils) : utils_(utils) { }
6037 for (Object** p = start; p < end; p++) {
6038 if ((*p)->IsHeapObject())
6039 utils_->UnmarkObjectRecursively(p);
6043 HeapDebugUtils* utils_;
6047 void UnmarkObjectRecursively(Object** p) {
6048 if (!(*p)->IsHeapObject()) return;
6052 Object* map = obj->map();
6054 if (map->IsHeapObject()) return;
6058 map_addr -= kMarkTag;
6064 obj->set_map_no_write_barrier(reinterpret_cast<Map*>(map_p));
6066 UnmarkObjectRecursively(reinterpret_cast<Object**>(&map_p));
6068 UnmarkObjectVisitor unmark_visitor(this);
6070 obj->IterateBody(Map::cast(map_p)->instance_type(),
6076 void MarkRootObjectRecursively(Object** root) {
6077 if (search_for_any_global_) {
6080 ASSERT(search_target_->IsHeapObject());
6082 found_target_ = false;
6083 object_stack_.Clear();
6085 MarkObjectRecursively(root);
6086 UnmarkObjectRecursively(root);
6088 if (found_target_) {
6089 PrintF("=====================================\n");
6090 PrintF("==== Path to object ====\n");
6091 PrintF("=====================================\n\n");
6093 ASSERT(!object_stack_.is_empty());
6094 for (int i = 0; i < object_stack_.length(); i++) {
6095 if (i > 0) PrintF("\n |\n |\n V\n\n");
6096 Object* obj = object_stack_[i];
6099 PrintF("=====================================\n");
6104 class MarkRootVisitor: public ObjectVisitor {
6106 explicit MarkRootVisitor(HeapDebugUtils* utils) : utils_(utils) { }
6110 for (Object** p = start; p < end; p++) {
6111 if ((*p)->IsHeapObject())
6112 utils_->MarkRootObjectRecursively(p);
6116 HeapDebugUtils* utils_;
6119 bool search_for_any_global_;
6122 List<Object*> object_stack_;
6133 static void InitializeGCOnce() {
6134 InitializeScavengingVisitorsTables();
6141 allocation_timeout_ = FLAG_gc_interval;
6142 debug_utils_ = new HeapDebugUtils(this);
6157 CallOnce(&initialize_gc_once, &InitializeGCOnce);
6159 MarkMapPointersAsEncoded(false);
6166 if (!new_space_.SetUp(reserved_semispace_size_, max_semispace_size_)) {
6171 old_pointer_space_ =
6173 max_old_generation_size_,
6176 if (old_pointer_space_ == NULL) return false;
6177 if (!old_pointer_space_->SetUp()) return false;
6182 max_old_generation_size_,
6185 if (old_data_space_ == NULL) return false;
6186 if (!old_data_space_->SetUp()) return false;
6192 if (code_range_size_ > 0) {
6200 if (code_space_ == NULL) return false;
6201 if (!code_space_->SetUp()) return false;
6205 if (map_space_ == NULL) return false;
6206 if (!map_space_->SetUp()) return false;
6210 if (cell_space_ == NULL) return false;
6211 if (!cell_space_->SetUp()) return false;
6217 if (lo_space_ == NULL) return false;
6218 if (!lo_space_->SetUp()) return false;
6221 ASSERT(hash_seed() == 0);
6222 if (FLAG_randomize_hashes) {
6223 if (FLAG_hash_seed == 0) {
6231 if (create_heap_objects) {
6233 if (!CreateInitialMaps()) return false;
6237 if (!CreateInitialObjects()) return false;
6239 native_contexts_list_ = undefined_value();
6242 LOG(isolate_, IntPtrTEvent("heap-capacity", Capacity()));
6243 LOG(isolate_, IntPtrTEvent("heap-available", Available()));
6247 if (FLAG_parallel_recompilation) relocation_mutex_ = OS::CreateMutex();
6261 roots_[kStackLimitRootIndex] =
6262 reinterpret_cast<Object*>(
6264 roots_[kRealStackLimitRootIndex] =
6265 reinterpret_cast<Object*>(
6272 if (FLAG_verify_heap) {
6277 if (FLAG_print_cumulative_gc_stat) {
6279 PrintF("gc_count=%d ", gc_count_);
6280 PrintF("mark_sweep_count=%d ", ms_count_);
6282 PrintF("total_gc_time=%d ", total_gc_time_ms_);
6295 if (old_pointer_space_ != NULL) {
6297 delete old_pointer_space_;
6298 old_pointer_space_ = NULL;
6301 if (old_data_space_ != NULL) {
6303 delete old_data_space_;
6304 old_data_space_ = NULL;
6307 if (code_space_ != NULL) {
6313 if (map_space_ != NULL) {
6319 if (cell_space_ != NULL) {
6325 if (lo_space_ != NULL) {
6336 delete relocation_mutex_;
6339 delete debug_utils_;
6340 debug_utils_ = NULL;
6350 space = spaces.next()) {
6351 space->ReleaseAllUnusedPages();
6358 GCPrologueCallbackPair pair(callback, gc_type);
6360 return gc_prologue_callbacks_.Add(pair);
6366 for (int i = 0; i < gc_prologue_callbacks_.length(); ++i) {
6367 if (gc_prologue_callbacks_[i].callback == callback) {
6368 gc_prologue_callbacks_.Remove(i);
6378 GCEpilogueCallbackPair pair(callback, gc_type);
6380 return gc_epilogue_callbacks_.Add(pair);
6386 for (int i = 0; i < gc_epilogue_callbacks_.length(); ++i) {
6387 if (gc_epilogue_callbacks_[i].callback == callback) {
6388 gc_epilogue_callbacks_.Remove(i);
6398 class PrintHandleVisitor: public ObjectVisitor {
6401 for (Object** p = start; p < end; p++)
6402 PrintF(" handle %p to %p\n",
6403 reinterpret_cast<void*>(p),
6404 reinterpret_cast<void*>(*p));
6408 void Heap::PrintHandles() {
6410 PrintHandleVisitor v;
6417 Space* AllSpaces::next() {
6418 switch (counter_++) {
6420 return HEAP->new_space();
6422 return HEAP->old_pointer_space();
6424 return HEAP->old_data_space();
6426 return HEAP->code_space();
6428 return HEAP->map_space();
6430 return HEAP->cell_space();
6432 return HEAP->lo_space();
6439 PagedSpace* PagedSpaces::next() {
6440 switch (counter_++) {
6442 return HEAP->old_pointer_space();
6444 return HEAP->old_data_space();
6446 return HEAP->code_space();
6448 return HEAP->map_space();
6450 return HEAP->cell_space();
6458 OldSpace* OldSpaces::next() {
6459 switch (counter_++) {
6461 return HEAP->old_pointer_space();
6463 return HEAP->old_data_space();
6465 return HEAP->code_space();
6482 size_func_(size_func) {
6499 if (iterator_ != NULL) {
6510 return CreateIterator();
6518 switch (current_space_) {
6558 MarkReachableObjects();
6562 Isolate::Current()->heap()->mark_compact_collector()->ClearMarkbits();
6567 return !mark_bit.Get();
6576 for (Object** p = start; p < end; p++) {
6577 if (!(*p)->IsHeapObject()) continue;
6580 if (!mark_bit.Get()) {
6582 marking_stack_.Add(obj);
6587 void TransitiveClosure() {
6588 while (!marking_stack_.is_empty()) {
6589 HeapObject* obj = marking_stack_.RemoveLast();
6595 List<HeapObject*> marking_stack_;
6598 void MarkReachableObjects() {
6599 Heap* heap = Isolate::Current()->heap();
6600 MarkingVisitor visitor;
6601 heap->IterateRoots(&visitor, VISIT_ALL);
6602 visitor.TransitiveClosure();
6605 AssertNoAllocation no_alloc;
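// Illustrative sketch (not part of heap.cc): the idea behind
// UnreachableObjectsFilter above -- mark everything reachable from the roots
// using an explicit work list, then have SkipObject() report objects whose mark
// was never set. An unordered_set stands in for the real per-object mark bits.
#include <unordered_set>
#include <vector>
#include <functional>

struct ReachabilityFilterSketch {
  std::unordered_set<const void*> marked;

  // roots: starting objects; edges: callback enumerating an object's references.
  void MarkReachable(const std::vector<const void*>& roots,
                     const std::function<std::vector<const void*>(const void*)>& edges) {
    std::vector<const void*> marking_stack(roots.begin(), roots.end());
    marked.insert(roots.begin(), roots.end());
    while (!marking_stack.empty()) {            // TransitiveClosure()
      const void* obj = marking_stack.back();
      marking_stack.pop_back();
      for (const void* child : edges(obj)) {
        if (marked.insert(child).second) marking_stack.push_back(child);
      }
    }
  }

  bool SkipObject(const void* obj) const { return marked.count(obj) == 0; }
};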
6609 HeapIterator::HeapIterator()
6610 : filtering_(HeapIterator::kNoFiltering),
6616 HeapIterator::HeapIterator(HeapIterator::HeapObjectsFiltering filtering)
6617 : filtering_(filtering),
6623 HeapIterator::~HeapIterator() {
6628 void HeapIterator::Init() {
6630 space_iterator_ = new SpaceIterator;
6631 switch (filtering_) {
6632 case kFilterUnreachable:
6633 filter_ = new UnreachableObjectsFilter;
6638 object_iterator_ = space_iterator_->next();
6642 void HeapIterator::Shutdown() {
6646 if (filtering_ != kNoFiltering) {
6651 delete space_iterator_;
6652 space_iterator_ = NULL;
6653 object_iterator_ = NULL;
6659 HeapObject* HeapIterator::next() {
6660 if (filter_ == NULL) return NextObject();
6662 HeapObject* obj = NextObject();
6663 while (obj != NULL && filter_->SkipObject(obj)) obj = NextObject();
6668 HeapObject* HeapIterator::NextObject() {
6670 if (object_iterator_ == NULL) return NULL;
6672 if (HeapObject* obj = object_iterator_->next_object()) {
6677 while (space_iterator_->has_next()) {
6678 object_iterator_ = space_iterator_->next();
6679 if (HeapObject* obj = object_iterator_->next_object()) {
6685 object_iterator_ = NULL;
6690 void HeapIterator::reset() {
6697 #if defined(DEBUG) || defined(LIVE_OBJECT_LIST)
6699 Object* const PathTracer::kAnyGlobalObject = reinterpret_cast<Object*>(NULL);
6701 class PathTracer::MarkVisitor: public ObjectVisitor {
6703 explicit MarkVisitor(PathTracer* tracer) : tracer_(tracer) {}
6706 for (Object** p = start; !tracer_->found() && (p < end); p++) {
6707 if ((*p)->IsHeapObject())
6708 tracer_->MarkRecursively(p, this);
6713 PathTracer* tracer_;
6717 class PathTracer::UnmarkVisitor: public ObjectVisitor {
6719 explicit UnmarkVisitor(PathTracer* tracer) : tracer_(tracer) {}
6722 for (Object** p = start; p < end; p++) {
6723 if ((*p)->IsHeapObject())
6724 tracer_->UnmarkRecursively(p, this);
6729 PathTracer* tracer_;
6733 void PathTracer::VisitPointers(Object** start, Object** end) {
6734 bool done = ((what_to_find_ == FIND_FIRST) && found_target_);
6736 for (Object** p = start; !done && (p < end); p++) {
6737 if ((*p)->IsHeapObject()) {
6739 done = ((what_to_find_ == FIND_FIRST) && found_target_);
6745 void PathTracer::Reset() {
6746 found_target_ = false;
6747 object_stack_.Clear();
6751 void PathTracer::TracePathFrom(Object** root) {
6752 ASSERT((search_target_ == kAnyGlobalObject) ||
6753 search_target_->IsHeapObject());
6754 found_target_in_trace_ = false;
6757 MarkVisitor mark_visitor(this);
6758 MarkRecursively(root, &mark_visitor);
6760 UnmarkVisitor unmark_visitor(this);
6761 UnmarkRecursively(root, &unmark_visitor);
6767 static bool SafeIsNativeContext(HeapObject* obj) {
6768 return obj->map() == obj->GetHeap()->raw_unchecked_native_context_map();
6772 void PathTracer::MarkRecursively(Object** p, MarkVisitor* mark_visitor) {
6773 if (!(*p)->IsHeapObject()) return;
6777 Object* map = obj->map();
6779 if (!map->IsHeapObject()) return;
6781 if (found_target_in_trace_) return;
6782 object_stack_.Add(obj);
6783 if (((search_target_ == kAnyGlobalObject) && obj->IsJSGlobalObject()) ||
6784 (obj == search_target_)) {
6785 found_target_in_trace_ = true;
6786 found_target_ = true;
6790 bool is_native_context = SafeIsNativeContext(obj);
6795 Address map_addr = map_p->address();
6797 obj->set_map_no_write_barrier(reinterpret_cast<Map*>(map_addr + kMarkTag));
6802 Object** start = reinterpret_cast<Object**>(obj->address() +
6804 Object** end = reinterpret_cast<Object**>(obj->address() +
6806 mark_visitor->VisitPointers(start, end);
6808 obj->IterateBody(map_p->instance_type(),
6809 obj->SizeFromMap(map_p),
6815 MarkRecursively(&map, mark_visitor);
6817 if (!found_target_in_trace_)
6818 object_stack_.RemoveLast();
6822 void PathTracer::UnmarkRecursively(Object** p, UnmarkVisitor* unmark_visitor) {
6823 if (!(*p)->IsHeapObject()) return;
6827 Object* map = obj->map();
6829 if (map->IsHeapObject()) return;
6833 map_addr -= kMarkTag;
6839 obj->set_map_no_write_barrier(reinterpret_cast<Map*>(map_p));
6841 UnmarkRecursively(reinterpret_cast<Object**>(&map_p), unmark_visitor);
6843 obj->IterateBody(Map::cast(map_p)->instance_type(),
6849 void PathTracer::ProcessResults() {
6850 if (found_target_) {
6851 PrintF("=====================================\n");
6852 PrintF("==== Path to object ====\n");
6853 PrintF("=====================================\n\n");
6855 ASSERT(!object_stack_.is_empty());
6856 for (int i = 0; i < object_stack_.length(); i++) {
6857 if (i > 0) PrintF("\n |\n |\n V\n\n");
6858 Object* obj = object_stack_[i];
6861 PrintF("=====================================\n");
6864 #endif // DEBUG || LIVE_OBJECT_LIST
6871 void Heap::TracePathToObjectFrom(Object* target, Object* root) {
6873 tracer.VisitPointer(&root);
6879 void Heap::TracePathToObject(Object* target) {
6888 void Heap::TracePathToGlobal() {
6889 PathTracer tracer(PathTracer::kAnyGlobalObject,
6890 PathTracer::FIND_ALL,
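// Illustrative sketch (not part of heap.cc): the retained-path search PathTracer
// performs above -- a depth-first walk that keeps the current chain of objects on
// a stack and stops trimming the stack once the target is found on the active
// branch, so the stack then holds a root-to-target path. A visited set stands in
// for the temporary mark applied to object maps in the real code.
#include <unordered_set>
#include <vector>
#include <functional>

struct PathTracerSketch {
  const void* target;
  bool found = false;
  std::vector<const void*> path;               // mirrors object_stack_
  std::unordered_set<const void*> visited;

  void Trace(const void* obj,
             const std::function<std::vector<const void*>(const void*)>& edges) {
    if (found || obj == nullptr || !visited.insert(obj).second) return;
    path.push_back(obj);
    if (obj == target) { found = true; return; }
    for (const void* child : edges(obj)) {
      Trace(child, edges);
      if (found) return;                       // keep the successful branch on the stack
    }
    path.pop_back();                           // dead end: drop it, like RemoveLast()
  }
};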
6897 static intptr_t CountTotalHolesSize() {
6898 intptr_t holes_size = 0;
6900 for (OldSpace* space = spaces.next();
6902 space = spaces.next()) {
6903 holes_size += space->Waste() + space->Available();
6909 GCTracer::GCTracer(Heap* heap,
6910 const char* gc_reason,
6911 const char* collector_reason)
6913 start_object_size_(0),
6914 start_memory_size_(0),
6917 allocated_since_last_gc_(0),
6918 spent_in_mutator_(0),
6919 promoted_objects_size_(0),
6921 gc_reason_(gc_reason),
6922 collector_reason_(collector_reason) {
6923 if (!FLAG_trace_gc && !FLAG_print_cumulative_gc_stat) return;
6925 start_object_size_ = heap_->SizeOfObjects();
6926 start_memory_size_ = heap_->isolate()->memory_allocator()->Size();
6928 for (int i = 0; i < Scope::kNumberOfScopes; i++) {
6932 in_free_list_or_wasted_before_gc_ = CountTotalHolesSize();
6934 allocated_since_last_gc_ =
6935 heap_->SizeOfObjects() - heap_->alive_after_last_gc_;
6937 if (heap_->last_gc_end_timestamp_ > 0) {
6938 spent_in_mutator_ = Max(start_time_ - heap_->last_gc_end_timestamp_, 0.0);
6941 steps_count_ = heap_->incremental_marking()->steps_count();
6942 steps_took_ = heap_->incremental_marking()->steps_took();
6943 longest_step_ = heap_->incremental_marking()->longest_step();
6944 steps_count_since_last_gc_ =
6945 heap_->incremental_marking()->steps_count_since_last_gc();
6946 steps_took_since_last_gc_ =
6947 heap_->incremental_marking()->steps_took_since_last_gc();
6951 GCTracer::~GCTracer() {
6953 if (!FLAG_trace_gc && !FLAG_print_cumulative_gc_stat) return;
6955 bool first_gc = (heap_->last_gc_end_timestamp_ == 0);
6957 heap_->alive_after_last_gc_ = heap_->SizeOfObjects();
6960 int time = static_cast<int>(heap_->last_gc_end_timestamp_ - start_time_);
6963 if (FLAG_print_cumulative_gc_stat) {
6964 heap_->total_gc_time_ms_ += time;
6965 heap_->max_gc_pause_ = Max(heap_->max_gc_pause_, time);
6966 heap_->max_alive_after_gc_ = Max(heap_->max_alive_after_gc_,
6967 heap_->alive_after_last_gc_);
6969 heap_->min_in_mutator_ = Min(heap_->min_in_mutator_,
6970 static_cast<int>(spent_in_mutator_));
6972 } else if (FLAG_trace_gc_verbose) {
6973 heap_->total_gc_time_ms_ += time;
6976 if (collector_ == SCAVENGER && FLAG_trace_gc_ignore_scavenger) return;
6978 PrintPID("%8.0f ms: ", heap_->isolate()->time_millis_since_init());
6980 if (!FLAG_trace_gc_nvp) {
6981 int external_time = static_cast<int>(scopes_[Scope::EXTERNAL]);
6983 double end_memory_size_mb =
6984 static_cast<double>(heap_->isolate()->memory_allocator()->Size()) / MB;
6986 PrintF("%s %.1f (%.1f) -> %.1f (%.1f) MB, ",
6988 static_cast<double>(start_object_size_) / MB,
6989 static_cast<double>(start_memory_size_) / MB,
6990 SizeOfHeapObjects(),
6991 end_memory_size_mb);
6993 if (external_time > 0) PrintF("%d / ", external_time);
6995 if (steps_count_ > 0) {
6997 PrintF(" (+ %d ms in %d steps since last GC)",
6998 static_cast<int>(steps_took_since_last_gc_),
6999 steps_count_since_last_gc_);
7001 PrintF(" (+ %d ms in %d steps since start of marking, "
7002 "biggest step %f ms)",
7003 static_cast<int>(steps_took_),
7009 if (gc_reason_ != NULL) {
7010 PrintF(" [%s]", gc_reason_);
7013 if (collector_reason_ != NULL) {
7014 PrintF(" [%s]", collector_reason_);
7019 PrintF("pause=%d ", time);
7020 PrintF("mutator=%d ", static_cast<int>(spent_in_mutator_));
7022 switch (collector_) {
7034 PrintF("external=%d ", static_cast<int>(scopes_[Scope::EXTERNAL]));
7035 PrintF("mark=%d ", static_cast<int>(scopes_[Scope::MC_MARK]));
7036 PrintF("sweep=%d ", static_cast<int>(scopes_[Scope::MC_SWEEP]));
7037 PrintF("sweepns=%d ", static_cast<int>(scopes_[Scope::MC_SWEEP_NEWSPACE]));
7038 PrintF("evacuate=%d ", static_cast<int>(scopes_[Scope::MC_EVACUATE_PAGES]));
7040 static_cast<int>(scopes_[Scope::MC_UPDATE_NEW_TO_NEW_POINTERS]));
7042 static_cast<int>(scopes_[Scope::MC_UPDATE_ROOT_TO_NEW_POINTERS]));
7044 static_cast<int>(scopes_[Scope::MC_UPDATE_OLD_TO_NEW_POINTERS]));
7045 PrintF("compaction_ptrs=%d ",
7046 static_cast<int>(scopes_[Scope::MC_UPDATE_POINTERS_TO_EVACUATED]));
7047 PrintF("intracompaction_ptrs=%d ", static_cast<int>(scopes_[
7048 Scope::MC_UPDATE_POINTERS_BETWEEN_EVACUATED]));
7049 PrintF("misc_compaction=%d ",
7050 static_cast<int>(scopes_[Scope::MC_UPDATE_MISC_POINTERS]));
7055 in_free_list_or_wasted_before_gc_);
7062 PrintF("stepscount=%d ", steps_count_since_last_gc_);
7063 PrintF("stepstook=%d ", static_cast<int>(steps_took_since_last_gc_));
7065 PrintF("stepscount=%d ", steps_count_);
7066 PrintF("stepstook=%d ", static_cast<int>(steps_took_));
7072 heap_->PrintShortHeapStatistics();
7076 const char* GCTracer::CollectorString() {
7077 switch (collector_) {
7081 return "Mark-sweep";
7083 return "Unknown GC";
7087 int KeyedLookupCache::Hash(Map* map, String* name) {
7089 uintptr_t addr_hash =
7090 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(map)) >> kMapHashShift;
7091 return static_cast<uint32_t>((addr_hash ^ name->Hash()) & kCapacityMask);
7096 int index = (Hash(map, name) & kHashMask);
7098 Key& key = keys_[index + i];
7099 if ((key.map == map) && key.name->Equals(name)) {
7100 return field_offsets_[index + i];
7109 if (HEAP->LookupSymbolIfExists(name, &symbol)) {
7110 int index = (Hash(map, symbol) & kHashMask);
7114 Key& key = keys_[index];
7116 if (key.map == free_entry_indicator) {
7119 field_offsets_[index + i] = field_offset;
7125 for (int i = kEntriesPerBucket - 1; i > 0; i--) {
7126 Key& key = keys_[index + i];
7127 Key& key2 = keys_[index + i - 1];
7129 field_offsets_[index + i] = field_offsets_[index + i - 1];
7133 Key& key = keys_[index];
7136 field_offsets_[index] = field_offset;
7142 for (int index = 0; index < kLength; index++) keys_[index].map = NULL;
7147 for (int index = 0; index < kLength; index++) keys_[index].source = NULL;
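// Illustrative sketch (not part of heap.cc): the KeyedLookupCache hashing scheme
// above -- the map pointer is shifted right so that its low, always-identical
// bits do not dominate, XOR-ed with the property name's hash, and masked down to
// the cache capacity; each bucket holds a handful of (map, name) -> field-offset
// entries. The shift and sizes below are example values, not the real constants.
#include <cstdint>
#include <cstddef>

static const int kSketchMapHashShift = 5;
static const size_t kSketchCacheCapacity = 64;        // power of two
static const size_t kSketchEntriesPerBucket = 4;      // power of two

static size_t KeyedLookupHashSketch(const void* map, uint32_t name_hash) {
  uintptr_t addr_hash = reinterpret_cast<uintptr_t>(map) >> kSketchMapHashShift;
  // The mask keeps the index inside the table and aligned to a bucket boundary.
  return static_cast<size_t>((addr_hash ^ name_hash) &
                             (kSketchCacheCapacity - kSketchEntriesPerBucket));
}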
7152 void Heap::GarbageCollectionGreedyCheck() {
7155 if (disallow_allocation_failure()) return;
7161 TranscendentalCache::SubCache::SubCache(Type t)
7163 isolate_(Isolate::Current()) {
7164 uint32_t in0 = 0xffffffffu;
7165 uint32_t in1 = 0xffffffffu;
7166 for (int i = 0; i < kCacheSize; i++) {
7167 elements_[i].in[0] = in0;
7168 elements_[i].in[1] = in1;
7169 elements_[i].output = NULL;
7176 if (caches_[i] != NULL) {
7186 for (int i = 0; i < new_space_strings_.length(); ++i) {
7187 if (new_space_strings_[i] == heap_->raw_unchecked_the_hole_value()) {
7190 if (heap_->InNewSpace(new_space_strings_[i])) {
7191 new_space_strings_[last++] = new_space_strings_[i];
7193 old_space_strings_.Add(new_space_strings_[i]);
7196 new_space_strings_.Rewind(last);
7198 for (int i = 0; i < old_space_strings_.length(); ++i) {
7199 if (old_space_strings_[i] == heap_->raw_unchecked_the_hole_value()) {
7203 old_space_strings_[last++] = old_space_strings_[i];
7205 old_space_strings_.Rewind(last);
7207 if (FLAG_verify_heap) {
7215 new_space_strings_.Free();
7216 old_space_strings_.Free();
7222 chunks_queued_for_free_ = chunk;
7227 if (chunks_queued_for_free_ == NULL) return;
7230 for (chunk = chunks_queued_for_free_; chunk != NULL; chunk = next) {
7248 while (inner <= inner_last) {
7254 if (area_end < inner->address()) area_end = chunk_end;
7266 for (chunk = chunks_queued_for_free_; chunk != NULL; chunk = next) {
7270 chunks_queued_for_free_ = NULL;
7275 uintptr_t p = reinterpret_cast<uintptr_t>(page);
7282 remembered_unmapped_pages_[remembered_unmapped_pages_index_] =
7284 remembered_unmapped_pages_index_++;
7285 remembered_unmapped_pages_index_ %= kRememberedUnmappedPages;
7289 void Heap::ClearObjectStats(bool clear_last_time_stats) {
7290 memset(object_counts_, 0, sizeof(object_counts_));
7291 memset(object_sizes_, 0, sizeof(object_sizes_));
7292 if (clear_last_time_stats) {
7293 memset(object_counts_last_time_, 0, sizeof(object_counts_last_time_));
7294 memset(object_sizes_last_time_, 0, sizeof(object_sizes_last_time_));
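// Illustrative sketch (not part of heap.cc): the checkpointing scheme used by the
// object-stats counters above -- per-type counts are accumulated during a GC, the
// delta against the previous checkpoint is pushed to the external counters, and
// the current arrays are then copied over the "last time" arrays. The type count
// and the Sink callback are hypothetical stand-ins.
#include <cstring>
#include <cstdint>

static const int kSketchTypeCount = 8;

struct ObjectStatsSketch {
  int counts[kSketchTypeCount] = {0};
  int counts_last_time[kSketchTypeCount] = {0};

  // counter_sink receives the per-type change since the previous checkpoint,
  // mirroring the Increment/Decrement pairs in ADJUST_LAST_TIME_OBJECT_COUNT.
  template <typename Sink>
  void Checkpoint(Sink counter_sink) {
    for (int i = 0; i < kSketchTypeCount; ++i) {
      counter_sink(i, counts[i] - counts_last_time[i]);
    }
    std::memcpy(counts_last_time, counts, sizeof(counts));
  }
};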
7305 #define ADJUST_LAST_TIME_OBJECT_COUNT(name) \
7306 counters->count_of_##name()->Increment( \
7307 static_cast<int>(object_counts_[name])); \
7308 counters->count_of_##name()->Decrement( \
7309 static_cast<int>(object_counts_last_time_[name])); \
7310 counters->size_of_##name()->Increment( \
7311 static_cast<int>(object_sizes_[name])); \
7312 counters->size_of_##name()->Decrement( \
7313 static_cast<int>(object_sizes_last_time_[name]));
7315 #undef ADJUST_LAST_TIME_OBJECT_COUNT
7317 #define ADJUST_LAST_TIME_OBJECT_COUNT(name) \
7318 index = FIRST_CODE_KIND_SUB_TYPE + Code::name; \
7319 counters->count_of_CODE_TYPE_##name()->Increment( \
7320 static_cast<int>(object_counts_[index])); \
7321 counters->count_of_CODE_TYPE_##name()->Decrement( \
7322 static_cast<int>(object_counts_last_time_[index])); \
7323 counters->size_of_CODE_TYPE_##name()->Increment( \
7324 static_cast<int>(object_sizes_[index])); \
7325 counters->size_of_CODE_TYPE_##name()->Decrement( \
7326 static_cast<int>(object_sizes_last_time_[index]));
7328 #undef ADJUST_LAST_TIME_OBJECT_COUNT
7329 #define ADJUST_LAST_TIME_OBJECT_COUNT(name) \
7330 index = FIRST_FIXED_ARRAY_SUB_TYPE + name; \
7331 counters->count_of_FIXED_ARRAY_##name()->Increment( \
7332 static_cast<int>(object_counts_[index])); \
7333 counters->count_of_FIXED_ARRAY_##name()->Decrement( \
7334 static_cast<int>(object_counts_last_time_[index])); \
7335 counters->size_of_FIXED_ARRAY_##name()->Increment( \
7336 static_cast<int>(object_sizes_[index])); \
7337 counters->size_of_FIXED_ARRAY_##name()->Decrement( \
7338 static_cast<int>(object_sizes_last_time_[index]));
7340 #undef ADJUST_LAST_TIME_OBJECT_COUNT
7342 memcpy(object_counts_last_time_, object_counts_, sizeof(object_counts_));
7343 memcpy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_));
static int SizeOfMarkedObject(HeapObject *object)
static bool IsBlack(MarkBit mark_bit)
void set_length(int value)
MUST_USE_RESULT MaybeObject * AllocateJSModule(Context *context, ScopeInfo *scope_info)
intptr_t OldGenPromotionLimit(intptr_t old_gen_size)
ContextSlotCache * context_slot_cache()
const uint32_t kShortcutTypeTag
void GarbageCollectionEpilogue()
static const int kEmptyStringHash
static const int kPointerFieldsEndOffset
MUST_USE_RESULT MaybeObject * CopyCode(Code *code)
void set_elements_kind(ElementsKind elements_kind)
static void Clear(FixedArray *cache)
static uchar TrailSurrogate(int char_code)
static const int kMaxLength
Code * builtin(Name name)
TranscendentalCache * transcendental_cache() const
static int NumberOfHandles()
#define SLOW_ASSERT(condition)
const intptr_t kSmiTagMask
MUST_USE_RESULT MaybeObject * AllocateStringFromUtf8Slow(Vector< const char > str, int non_ascii_start, PretenureFlag pretenure=NOT_TENURED)
static uchar LeadSurrogate(int char_code)
const intptr_t kDoubleAlignmentMask
static const int kCodeEntryOffset
MUST_USE_RESULT MaybeObject * AllocateFixedArray(int length, PretenureFlag pretenure)
bool has_instance_prototype()
static const int kMaxAsciiCharCode
bool Contains(const T &elm) const
bool NextGCIsLikelyToBeFull()
MUST_USE_RESULT MaybeObject * AllocateExternalStringFromAscii(const ExternalAsciiString::Resource *resource)
MUST_USE_RESULT MaybeObject * AllocateSymbol(Vector< const char > str, int chars, uint32_t hash_field)
int inobject_properties()
void Callback(MemoryChunk *page, StoreBufferEvent event)
#define INSTANCE_TYPE_LIST(V)
void set_size(Heap *heap, int size_in_bytes)
MUST_USE_RESULT MaybeObject * CopyDropDescriptors()
void ClearOptimizedCodeMap()
#define STRUCT_TABLE_ELEMENT(NAME, Name, name)
MUST_USE_RESULT MaybeObject * CopyFixedDoubleArray(FixedDoubleArray *src)
intptr_t * old_pointer_space_size
MUST_USE_RESULT MaybeObject * AllocateFunctionPrototype(JSFunction *function)
bool Contains(Address addr)
void set(int index, Object *value)
CompilationCache * compilation_cache()
intptr_t * cell_space_size
static const int kMapHashShift
void PrintF(const char *format,...)
void PrintStack(StringStream *accumulator)
#define ASSERT_TAG_ALIGNED(address)
bool OldGenerationPromotionLimitReached()
void set_function_with_prototype(bool value)
bool InNewSpace(Object *object)
static String * cast(Object *obj)
MUST_USE_RESULT MaybeObject * Add(Key key, Object *value, PropertyDetails details)
static const int kArgumentsObjectSize
void IdentifyNewSpaceWeakIndependentHandles(WeakSlotCallbackWithHeap f)
MUST_USE_RESULT MaybeObject * AllocateFunctionContext(int length, JSFunction *function)
MUST_USE_RESULT MaybeObject * Allocate(Map *map, AllocationSpace space)
MUST_USE_RESULT MaybeObject * AllocateSubString(String *buffer, int start, int end, PretenureFlag pretenure=NOT_TENURED)
void(* ObjectSlotCallback)(HeapObject **from, HeapObject *to)
HandleScopeImplementer * handle_scope_implementer()
void set_opt_count(int opt_count)
static DescriptorArray * cast(Object *obj)
static Failure * InternalError()
void IterateWeakRoots(ObjectVisitor *v, VisitMode mode)
bool SkipObject(HeapObject *object)
static int SizeOf(Map *map, HeapObject *object)
MUST_USE_RESULT MaybeObject * AllocateRaw(int size_in_bytes)
MUST_USE_RESULT MaybeObject * ReinitializeJSGlobalProxy(JSFunction *constructor, JSGlobalProxy *global)
int unused_property_fields()
void set_length(Smi *length)
bool SetUp(const size_t requested_size)
MUST_USE_RESULT MaybeObject * CopyFixedDoubleArrayWithMap(FixedDoubleArray *src, Map *map)
MUST_USE_RESULT MaybeObject * AllocateGlobalObject(JSFunction *constructor)
static uint32_t encode(intvalue)
void Prepare(GCTracer *tracer)
void set_scan_on_scavenge(bool scan)
static Smi * FromInt(int value)
#define LOG(isolate, Call)
MUST_USE_RESULT MaybeObject * AllocateJSFunctionProxy(Object *handler, Object *call_trap, Object *construct_trap, Object *prototype)
void set_second(String *second, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
static Object * GetObjectFromEntryAddress(Address location_of_address)
MUST_USE_RESULT MaybeObject * AllocateJSObject(JSFunction *constructor, PretenureFlag pretenure=NOT_TENURED)
void CompletelyClearInstanceofCache()
V8_DECLARE_ONCE(initialize_gc_once)
static MemoryChunk * FromAddress(Address a)
void set_ic_age(int count)
void CollectAllGarbage(int flags, const char *gc_reason=NULL)
static HeapObject * cast(Object *obj)
Map * MapForExternalArrayType(ExternalArrayType array_type)
void SetNumberStringCache(Object *number, String *str)
static const byte kArgumentMarker
MUST_USE_RESULT MaybeObject * AllocateModuleContext(ScopeInfo *scope_info)
MUST_USE_RESULT MaybeObject * AllocateCodeCache()
void set_pre_allocated_property_fields(int value)
void CallOnce(OnceType *once, NoArgFunction init_func)
static const byte kUndefined
MUST_USE_RESULT MaybeObject * AllocateNativeContext()
void AddGCPrologueCallback(GCPrologueCallback callback, GCType gc_type_filter)
const int kVariableSizeSentinel
static const int kAlignedSize
RootListIndex RootIndexForExternalArrayType(ExternalArrayType array_type)
MUST_USE_RESULT MaybeObject * LookupAsciiSymbol(Vector< const char > str)
static Failure * OutOfMemoryException()
static bool IsOutsideAllocatedSpace(void *pointer)
bool SetUp(intptr_t max_capacity, intptr_t capacity_executable)
bool IsAsciiRepresentation()
static ExternalTwoByteString * cast(Object *obj)
void VisitExternalResources(v8::ExternalResourceVisitor *visitor)
void IterateDeferredHandles(ObjectVisitor *visitor)
intptr_t OldGenAllocationLimit(intptr_t old_gen_size)
static Map * cast(Object *obj)
void set_start_position(int value)
static const int kEmptyHashField
void ResetAllocationInfo()
MUST_USE_RESULT MaybeObject * AllocateByteArray(int length, PretenureFlag pretenure)
static const byte kTheHole
static ByteArray * cast(Object *obj)
bool has_fast_object_elements()
void set_end_position(int end_position)
void set_context(Object *context)
static FreeSpace * cast(Object *obj)
void mark_out_of_memory()
Bootstrapper * bootstrapper()
void Set(int descriptor_number, Descriptor *desc, const WhitenessWitness &)
bool InFromSpace(Object *object)
MUST_USE_RESULT MaybeObject * Uint32ToString(uint32_t value, bool check_number_string_cache=true)
void Relocate(intptr_t delta)
PromotionQueue * promotion_queue()
void SetTop(Object ***top)
static Foreign * cast(Object *obj)
Map * SymbolMapForString(String *str)
intptr_t inline_allocation_limit_step()
intptr_t * code_space_size
void AddGCEpilogueCallback(GCEpilogueCallback callback, GCType gc_type_filter)
MUST_USE_RESULT MaybeObject * AllocateRawAsciiString(int length, PretenureFlag pretenure=NOT_TENURED)
void IterateStrongRoots(ObjectVisitor *v, VisitMode mode)
#define ASSERT(condition)
bool InSpace(Address addr, AllocationSpace space)
void(* GCPrologueCallback)(GCType type, GCCallbackFlags flags)
v8::Handle< v8::Value > Print(const v8::Arguments &args)
MUST_USE_RESULT MaybeObject * AllocateGlobalContext(JSFunction *function, ScopeInfo *scope_info)
static void IncrementLiveBytesFromGC(Address address, int by)
void Step(intptr_t allocated, CompletionAction action)
#define PROFILE(isolate, Call)
Object * instance_prototype()
KeyedLookupCache * keyed_lookup_cache()
static const int kReduceMemoryFootprintMask
MUST_USE_RESULT MaybeObject * LookupTwoByteSymbol(Vector< const uc16 > str)
void IterateStrongRoots(ObjectVisitor *v)
virtual Object * RetainAs(Object *object)
static Context * cast(Object *context)
static const int kMaxLength
const intptr_t kCodeAlignment
MUST_USE_RESULT MaybeObject * LookupSymbol(Vector< const char > str)
bool SetUp(bool create_heap_objects)
ThreadManager * thread_manager()
#define ADJUST_LAST_TIME_OBJECT_COUNT(name)
MUST_USE_RESULT MaybeObject * AllocateBlockContext(JSFunction *function, Context *previous, ScopeInfo *info)
int SizeFromMap(Map *map)
static const int kMaxPreAllocatedPropertyFields
intptr_t CommittedMemoryExecutable()
void set_is_undetectable()
static void Iterate(ObjectVisitor *visitor)
int GetInternalFieldCount()
void initialize_elements()
void VisitPointers(Object **start, Object **end)
#define STRING_TYPE_LIST(V)
MUST_USE_RESULT MaybeObject * CopyJSObject(JSObject *source)
void set_first(String *first, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
MUST_USE_RESULT MaybeObject * Copy()
static ExternalAsciiString * cast(Object *obj)
static const int kMaxSize
static const int kPageSize
void init_back_pointer(Object *undefined)
void set_foreign_address(Address value)
void SeqTwoByteStringSet(int index, uint16_t value)
static Code * cast(Object *obj)
virtual const uint16_t * data() const =0
MUST_USE_RESULT MaybeObject * AllocateInternalSymbol(unibrow::CharacterStream *buffer, int chars, uint32_t hash_field)
static bool IsAtEnd(Address addr)
void IterateAndMarkPointersToFromSpace(Address start, Address end, ObjectSlotCallback callback)
static PolymorphicCodeCache * cast(Object *obj)
MUST_USE_RESULT MaybeObject * AllocateJSArrayWithElements(FixedArrayBase *array_base, ElementsKind elements_kind, PretenureFlag pretenure=NOT_TENURED)
ArrayStorageAllocationMode
virtual Object * RetainAs(Object *object)=0
static Object ** RawField(HeapObject *obj, int offset)
StoreBuffer * store_buffer()
MUST_USE_RESULT MaybeObject * AllocateHeapNumber(double value, PretenureFlag pretenure)
static Smi * cast(Object *object)
void set_function_token_position(int function_token_position)
static const int kInvalidEnumCache
#define STRING_TYPE_ELEMENT(type, size, name, camel_name)
static bool IsAscii(const char *chars, int length)
static MUST_USE_RESULT MaybeObject * InitializeIntrinsicFunctionNames(Heap *heap, Object *dictionary)
bool CollectGarbage(AllocationSpace space, GarbageCollector collector, const char *gc_reason, const char *collector_reason)
void set_closure(JSFunction *closure)
static MarkBit MarkBitFrom(Address addr)
StackGuard * stack_guard()
MUST_USE_RESULT MaybeObject * AllocateWithContext(JSFunction *function, Context *previous, JSObject *extension)
void set_dictionary_map(bool value)
void Free(MemoryChunk *chunk)
MUST_USE_RESULT MaybeObject * CopyFixedArrayWithMap(FixedArray *src, Map *map)
#define UPDATE_COUNTERS_FOR_SPACE(space)
GlobalObject * global_object()
Object * InObjectPropertyAtPut(int index, Object *value, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
MUST_USE_RESULT MaybeObject * AllocateConsString(String *first, String *second)
static Struct * cast(Object *that)
static int GetBuiltinsCount()
void InitializeBody(int object_size, Object *value)
void RepairFreeListsAfterBoot()
MUST_USE_RESULT MaybeObject * NumberToString(Object *number, bool check_number_string_cache=true)
static const int kMinLength
UnicodeCache * unicode_cache()
String *(* ExternalStringTableUpdaterCallback)(Heap *heap, Object **pointer)
String * GetKey(int descriptor_number)
void set_the_hole(int index)
static const int kEndMarker
bool IdleNotification(int hint)
MUST_USE_RESULT MaybeObject * AllocateStringFromAscii(Vector< const char > str, PretenureFlag pretenure=NOT_TENURED)
virtual size_t length() const =0
void EnsureHeapIsIterable()
static const int kArgumentsObjectSizeStrict
MUST_USE_RESULT MaybeObject * AllocateUninitializedFixedDoubleArray(int length, PretenureFlag pretenure=NOT_TENURED)
bool PostGarbageCollectionProcessing(GarbageCollector collector)
STATIC_ASSERT((FixedDoubleArray::kHeaderSize &kDoubleAlignmentMask)==0)
int(* HeapObjectCallback)(HeapObject *obj)
void set_global_object(GlobalObject *object)
void set_num_literals(int value)
static const int kMaxLength
const char * IntToCString(int n, Vector< char > buffer)
#define UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(space)
void set_unchecked(int index, Smi *value)
void Register(StaticVisitorBase::VisitorId id, Callback callback)
intptr_t CommittedMemory()
static bool IsMarked(HeapObject *object)
void IteratePointersToNewSpace(ObjectSlotCallback callback)
#define HEAP_PROFILE(heap, call)
void RemoveGCEpilogueCallback(GCEpilogueCallback callback)
static SlicedString * cast(Object *obj)
RuntimeProfiler * runtime_profiler()
intptr_t CommittedMemory()
int pre_allocated_property_fields()
void set_expected_nof_properties(int value)
void set_instruction_size(int value)
Context * native_context()
void InitializeBody(int object_size)
virtual intptr_t SizeOfObjects()
void LowerInlineAllocationLimit(intptr_t step)
static const int kStoreBufferSize
static const uchar kMaxNonSurrogateCharCode
static bool IsValid(intptr_t value)
void set_resource(const Resource *buffer)
#define MAKE_CASE(NAME, Name, name)
void CollectAllAvailableGarbage(const char *gc_reason=NULL)
bool ConfigureHeapDefault()
PagedSpace * paged_space(int idx)
void set_aliased_context_slot(int count)
ElementsKind GetElementsKind()
static const int kNoGCFlags
MUST_USE_RESULT MaybeObject * AllocateFixedArrayWithHoles(int length, PretenureFlag pretenure=NOT_TENURED)
MemoryAllocator * memory_allocator()
MUST_USE_RESULT MaybeObject * AllocateInitialMap(JSFunction *fun)
static Oddball * cast(Object *obj)
static Address & Address_at(Address addr)
MUST_USE_RESULT MaybeObject * AllocateForeign(Address address, PretenureFlag pretenure=NOT_TENURED)
const char * DoubleToCString(double v, Vector< char > buffer)
static UnseededNumberDictionary * cast(Object *obj)
void QueueMemoryChunkForFree(MemoryChunk *chunk)
void CheckpointObjectStats()
MUST_USE_RESULT MaybeObject * AllocateExternalArray(int length, ExternalArrayType array_type, void *external_pointer, PretenureFlag pretenure)
#define CONSTANT_SYMBOL_ELEMENT(name, contents)
intptr_t * cell_space_capacity
bool IsAligned(T value, U alignment)
intptr_t * memory_allocator_size
static SeqAsciiString * cast(Object *obj)
void set_inobject_properties(int value)
void set_hash_field(uint32_t value)
void MarkCompactPrologue()
void Iterate(ObjectVisitor *v)
GlobalHandles * global_handles()
void IncrementYoungSurvivorsCounter(int survived)
~UnreachableObjectsFilter()
MUST_USE_RESULT MaybeObject * AllocatePolymorphicCodeCache()
intptr_t * code_space_capacity
void VisitPointer(Object **p)
static void Enter(Heap *heap, String *key_string, Object *key_pattern, FixedArray *value_array, ResultsCacheType type)
void Update(Map *map, String *name, int field_offset)
const uint32_t kShortcutTypeMask
void ReserveSpace(int *sizes, Address *addresses)
static Handle< Object > SetLocalPropertyIgnoreAttributes(Handle< JSObject > object, Handle< String > key, Handle< Object > value, PropertyAttributes attributes)
void set_end_position(int value)
OldSpace * old_pointer_space()
void UncommitMarkingDeque()
bool ConfigureHeap(int max_semispace_size, intptr_t max_old_gen_size, intptr_t max_executable_size)
T RoundUp(T x, intptr_t m)
static Mutex * CreateMutex()
intptr_t * map_space_size
static double TimeCurrentMillis()
static FixedDoubleArray * cast(Object *obj)
MUST_USE_RESULT MaybeObject * AllocateTypeFeedbackInfo()
bool IsTwoByteRepresentation()
void set_age_mark(Address mark)
void IterateAllRoots(ObjectVisitor *v)
static const int kMaxNonCodeHeapObjectSize
bool contains(Address address)
static const int kMinLength
void set_length(int value)
static const int kMakeHeapIterableMask
MUST_USE_RESULT MaybeObject * AllocateJSArrayAndStorage(ElementsKind elements_kind, int length, int capacity, ArrayStorageAllocationMode mode=DONT_INITIALIZE_ARRAY_ELEMENTS, PretenureFlag pretenure=NOT_TENURED)
void EnsureSpace(intptr_t space_needed)
void Iterate(ObjectVisitor *v)
static const int kNextFunctionLinkOffset
bool InToSpace(Object *object)
void CopyFrom(const CodeDesc &desc)
static int SizeFor(int length)
static int IterateBody(Map *map, HeapObject *obj)
void set_start_position_and_type(int value)
WriteBarrierMode GetWriteBarrierMode(const AssertNoAllocation &)
void set_resource(const Resource *buffer)
PropertyDetails GetDetails(int descriptor_number)
void GarbageCollectionPrologue()
void Iterate(ObjectVisitor *v)
void Iterate(ObjectVisitor *v)
byte * relocation_start()
LargeObjectSpace * lo_space()
static ScopeInfo * Empty()
const Address kFromSpaceZapValue
bool ToSpaceContains(Address address)
MUST_USE_RESULT MaybeObject * AllocateJSMessageObject(String *type, JSArray *arguments, int start_position, int end_position, Object *script, Object *stack_trace, Object *stack_frames)
DeoptimizerData * deoptimizer_data()
static MUST_USE_RESULT MaybeObject * Allocate(int at_least_space_for)
Callback GetVisitorById(StaticVisitorBase::VisitorId id)
MUST_USE_RESULT MaybeObject * AllocateExternalStringFromTwoByte(const ExternalTwoByteString::Resource *resource)
MUST_USE_RESULT MaybeObject * AllocatePartialMap(InstanceType instance_type, int instance_size)
MUST_USE_RESULT MaybeObject * CreateCode(const CodeDesc &desc, Code::Flags flags, Handle< Object > self_reference, bool immovable=false)
static Object * Lookup(Heap *heap, String *key_string, Object *key_pattern, ResultsCacheType type)
virtual bool SkipObject(HeapObject *object)=0
activate correct semantics for inheriting readonliness false
DescriptorLookupCache * descriptor_lookup_cache()
void set_map_no_write_barrier(Map *value)
void set_check_type(CheckType value)
static JSMessageObject * cast(Object *obj)
void initialize_storage()
static const int kAbortIncrementalMarkingMask
static const int kNonWeakFieldsEndOffset
void RemoveGCPrologueCallback(GCPrologueCallback callback)
Vector< const char > CStrVector(const char *data)
LazyDynamicInstance< Mutex, CreateMutexTrait, ThreadSafeInitOnceTrait >::type LazyMutex
static Local< Context > ToLocal(v8::internal::Handle< v8::internal::Context > obj)
intptr_t CommittedMemory()
Object * GetNumberStringCache(Object *number)
MUST_USE_RESULT MaybeObject * AllocateRawFixedDoubleArray(int length, PretenureFlag pretenure)
static int SizeFor(int length)
void SetArea(Address area_start, Address area_end)
static const int kMaxSize
void IterateNewSpaceStrongAndDependentRoots(ObjectVisitor *v)
bool IsFastSmiOrObjectElementsKind(ElementsKind kind)
void RecordWrites(Address address, int start, int len)
void UpdateMarkingDequeAfterScavenge()
static SeqTwoByteString * cast(Object *obj)
static JSFunctionResultCache * cast(Object *obj)
void Iterate(ObjectVisitor *v)
void(* GCEpilogueCallback)(GCType type, GCCallbackFlags flags)
intptr_t get_max_alive_after_gc()
void UpdateReferencesInExternalStringTable(ExternalStringTableUpdaterCallback updater_func)
MUST_USE_RESULT MaybeObject * AllocateJSProxy(Object *handler, Object *prototype)
void ProcessWeakReferences(WeakObjectRetainer *retainer)
void ClearNormalizedMapCaches()
static const int kHeaderSize
static void VisitPointer(Heap *heap, Object **p)
MUST_USE_RESULT MaybeObject * NumberFromDouble(double value, PretenureFlag pretenure=NOT_TENURED)
bool SlowContains(Address addr)
intptr_t * old_data_space_capacity
static int SizeFor(int length)
bool is_compacting() const
MUST_USE_RESULT MaybeObject * AllocateArgumentsObject(Object *callee, int length)
intptr_t SizeExecutable()
int Lookup(Map *map, String *name)
InnerPointerToCodeCache * inner_pointer_to_code_cache()
void set_instance_type(InstanceType value)
#define FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(V)
static HeapNumber * cast(Object *obj)
static void WriteToFlat(String *source, sinkchar *sink, int from, int to)
static StringDictionary * cast(Object *obj)
void set_value(double value)
MUST_USE_RESULT MaybeObject * CopyFixedArray(FixedArray *src)
virtual size_t length() const =0
void IterateRoots(ObjectVisitor *v, VisitMode mode)
static const int kLengthOffset
static double nan_value()
MUST_USE_RESULT MaybeObject * ReinitializeJSReceiver(JSReceiver *object, InstanceType type, int size)
MUST_USE_RESULT MaybeObject * AllocateAccessorPair()
void set_counters(int value)
MUST_USE_RESULT MaybeObject * AllocateCatchContext(JSFunction *function, Context *previous, String *name, Object *thrown_object)
const uint32_t kFreeListZapValue
static uint32_t RandomPrivate(Isolate *isolate)
static const int kArgumentsLengthIndex
#define CODE_KIND_LIST(V)
static int SizeFor(int length)
void CheckNewSpaceExpansionCriteria()
const intptr_t kObjectAlignment
INLINE(static HeapObject *EnsureDoubleAligned(Heap *heap, HeapObject *object, int size))
static NewSpacePage * FromLimit(Address address_limit)
void RecordStats(HeapStats *stats)
MUST_USE_RESULT MaybeObject * AllocateScopeInfo(int length)
bool LookupSymbolIfExists(String *str, String **symbol)
static JSGlobalPropertyCell * cast(Object *obj)
MUST_USE_RESULT MaybeObject * NumberFromUint32(uint32_t value, PretenureFlag pretenure=NOT_TENURED)
IncrementalMarking * incremental_marking()
bool Contains(Address addr)
MUST_USE_RESULT MaybeObject * AllocateUninitializedFixedArray(int length)
void set_extension(Object *object)
intptr_t CommittedMemory()
static const int kStartMarker
void set_bit_field(byte value)
static TypeFeedbackCells * cast(Object *obj)
static int SizeFor(int length)
virtual const char * data() const =0
MUST_USE_RESULT MaybeObject * Initialize(const char *to_string, Object *to_number, byte kind)
void Iterate(v8::internal::ObjectVisitor *v)
NewSpacePage * next_page() const
int number_of_descriptors()
void MemsetPointer(T **dest, U *value, int counter)
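MemsetPointer is a small templated fill helper. A minimal sketch under the assumption that U* converts implicitly to T*; the in-tree helper may enforce or check this differently:

// Sketch: write the same pointer value into `counter` consecutive slots.
template <typename T, typename U>
static void MemsetPointerSketch(T** dest, U* value, int counter) {
  for (int i = 0; i < counter; i++) {
    dest[i] = value;  // assumes U* is implicitly convertible to T*
  }
}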
void set_owner(Space *space)
MUST_USE_RESULT MaybeObject * AllocateJSObjectFromMap(Map *map, PretenureFlag pretenure=NOT_TENURED)
ScavengeWeakObjectRetainer(Heap *heap)
void RememberUnmappedPage(Address page, bool compacted)
void NotifyOfHighPromotionRate()
static MUST_USE_RESULT MaybeObject * Allocate(int number_of_descriptors, int slack=0)
static void UpdateReferencesForScavengeGC()
void Set(int index, uint16_t value)
static const int kNotFound
static const int kRegExpResultsCacheSize
void PrintPID(const char *format,...)
#define ASSERT_EQ(v1, v2)
static const int kBodyOffset
MUST_USE_RESULT MaybeObject * LookupSingleCharacterStringFromCode(uint16_t code)
InstanceType instance_type()
static void CopyBlock(Address dst, Address src, int byte_size)
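CopyBlock copies a raw byte range between two heap addresses. A simplified stand-in that defers to memcpy; V8's version additionally expects pointer-aligned sizes and may copy word by word:

#include <cstring>
#include <cstdint>

typedef uint8_t* SketchAddress;  // assumption: Address is a raw byte pointer

static void CopyBlockSketch(SketchAddress dst, SketchAddress src, int byte_size) {
  // byte_size is expected to be a multiple of the pointer size.
  memcpy(dst, src, static_cast<size_t>(byte_size));
}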
MUST_USE_RESULT MaybeObject * AllocateJSGlobalPropertyCell(Object *value)
static bool ShouldZapGarbage()
static HeapObject * FromAddress(Address address)
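FromAddress reflects the tagged-pointer convention: a HeapObject pointer is the raw start address with a low tag bit set. A self-contained sketch; the tag value of 1 matches common V8 builds but is treated as an assumption here:

#include <cstdint>

struct SketchHeapObject {};                  // stand-in for v8::internal::HeapObject
static const intptr_t kSketchHeapObjectTag = 1;

static SketchHeapObject* FromAddressSketch(uint8_t* address) {
  // Tag the raw address so the result is recognizable as a heap object pointer.
  return reinterpret_cast<SketchHeapObject*>(address + kSketchHeapObjectTag);
}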
void set_size(size_t size)
MUST_USE_RESULT MaybeObject * AllocateFixedDoubleArrayWithHoles(int length, PretenureFlag pretenure=NOT_TENURED)
MUST_USE_RESULT MaybeObject * AllocateRawFixedArray(int length)
ScavengeVisitor(Heap *heap)
static const unsigned kMaxAsciiCharCodeU
static const int kArgumentsCalleeIndex
MUST_USE_RESULT MaybeObject * AllocateHashTable(int length, PretenureFlag pretenure=NOT_TENURED)
static FixedArray * cast(Object *obj)
static const unsigned kMaxOneByteChar
static const int kHeaderSize
UnreachableObjectsFilter()
Object * FindCodeObject(Address a)
void set_previous(Context *context)
intptr_t PromotedSpaceSizeOfObjects()
void IterateNewSpaceWeakIndependentRoots(ObjectVisitor *v)
intptr_t * old_pointer_space_capacity
void Add(const T &element, AllocationPolicy allocator=AllocationPolicy())
StaticResource< Utf8Decoder > * utf8_decoder()
Object * GetCallbacksObject(int descriptor_number)
void set_instance_size(int value)
static VisitorId GetVisitorId(int instance_type, int instance_size)
void ClearJSFunctionResultCaches()
void set_compiler_hints(int value)
void RecordStats(HeapStats *stats, bool take_snapshot=false)
void set_formal_parameter_count(int value)
bool HasFastDoubleElements()
static const int kMaxLength
String * TryFlattenGetString(PretenureFlag pretenure=NOT_TENURED)
virtual ~HeapObjectsFilter()
void set_bit_field2(byte value)
void CopyFrom(VisitorDispatchTable *other)
void CreateFillerObjectAt(Address addr, int size)
static int GetLastError()
MUST_USE_RESULT MaybeObject * AllocateSharedFunctionInfo(Object *name)
bool AdvanceSweepers(int step_size)
void RegisterSpecializations()
static NormalizedMapCache * cast(Object *obj)
static const int kMaxLength
intptr_t * map_space_capacity
static int SizeFor(int body_size)
void set_stress_deopt_counter(int counter)
static intptr_t MaxVirtualMemory()
static const intptr_t kAllocatedThreshold
static const int kCapacityMask
static void ScavengeObject(HeapObject **p, HeapObject *object)
bool is_keyed_call_stub()
void set_visitor_id(int visitor_id)
bool IsSweepingComplete()
void set_length(int value)
void set_this_property_assignments_count(int value)
bool SetUp(int reserved_semispace_size_, int max_semispace_size)
void IterateBuiltins(ObjectVisitor *v)
void CopyChars(sinkchar *dest, const sourcechar *src, int chars)
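CopyChars is a width-converting character copy used when moving string contents between one- and two-byte representations. A minimal sketch that simply lets the assignment widen or narrow each code unit; the real helper adds fast paths:

template <typename sinkchar, typename sourcechar>
static void CopyCharsSketch(sinkchar* dest, const sourcechar* src, int chars) {
  for (int i = 0; i < chars; i++) {
    dest[i] = static_cast<sinkchar>(src[i]);  // e.g. uint8_t -> uint16_t widening
  }
}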
static VisitorDispatchTable< ScavengingCallback > * GetTable()
void set_ast_node_count(int count)
intptr_t * memory_allocator_capacity
static ConsString * cast(Object *obj)
virtual intptr_t SizeOfObjects()
void set_offset(int offset)
static FixedArrayBase * cast(Object *object)
void set_flags(Flags flags)
MUST_USE_RESULT MaybeObject * AllocateStruct(InstanceType type)
void EnterDirectlyIntoStoreBuffer(Address addr)
intptr_t * old_data_space_size
MUST_USE_RESULT MaybeObject * AllocateRaw(int object_size, Executability executable)
static void FatalProcessOutOfMemory(const char *location, bool take_snapshot=false)
MUST_USE_RESULT MaybeObject * AllocateMap(InstanceType instance_type, int instance_size, ElementsKind elements_kind=TERMINAL_FAST_ELEMENTS_KIND)
const intptr_t kDoubleAlignment
ElementsKind GetHoleyElementsKind(ElementsKind packed_kind)
intptr_t MaxExecutableSize()
static const int kMaxLength
MUST_USE_RESULT MaybeObject * AllocateRaw(int size_in_bytes, AllocationSpace space, AllocationSpace retry_space)
static MUST_USE_RESULT MaybeObject * Allocate(int at_least_space_for, MinimumCapacity capacity_option=USE_DEFAULT_MINIMUM_CAPACITY, PretenureFlag pretenure=NOT_TENURED)
void InitializeDescriptors(DescriptorArray *descriptors)
void set_next_chunk(MemoryChunk *next)
void PrintShortHeapStatistics()
static JSObject * cast(Object *obj)
static const int kHashMask
AllocationSpace TargetSpaceId(InstanceType type)
uint32_t RoundUpToPowerOf2(uint32_t x)
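RoundUpToPowerOf2 is the usual bit-smearing trick. A standalone sketch assuming 0 < x <= 2^31; the in-tree utility may also assert this precondition:

#include <cstdint>

static inline uint32_t RoundUpToPowerOf2Sketch(uint32_t x) {
  x -= 1;          // handle the case where x is already a power of two
  x |= x >> 1;     // smear the highest set bit into every lower position
  x |= x >> 2;
  x |= x >> 4;
  x |= x >> 8;
  x |= x >> 16;
  return x + 1;    // next power of two
}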
OldSpace * old_data_space()
MUST_USE_RESULT MaybeObject * AllocateRawTwoByteString(int length, PretenureFlag pretenure=NOT_TENURED)
static void AssertValidRange(Address from, Address to)
MarkCompactCollector * mark_compact_collector()
MUST_USE_RESULT MaybeObject * AllocateFunction(Map *function_map, SharedFunctionInfo *shared, Object *prototype, PretenureFlag pretenure=TENURED)
void UpdateNewSpaceReferencesInExternalStringTable(ExternalStringTableUpdaterCallback updater_func)
void set_initial_map(Map *value)
static const int kAlignedSize
bool CommitFromSpaceIfNeeded()
AllocationSpace identity()
void set_unused_property_fields(int value)
void UpdateSamplesAfterScavenge()
static const int kIsExtensible
MUST_USE_RESULT MaybeObject * AllocateStringFromTwoByte(Vector< const uc16 > str, PretenureFlag pretenure=NOT_TENURED)
static const int kNonCodeObjectAreaSize
static const int kEntriesPerBucket
void PrepareForScavenge()
static const int kPointerFieldsBeginOffset
void set_bit_field3(int value)
void EnsureFromSpaceIsCommitted()
void InitializeBody(Map *map, Object *pre_allocated_value, Object *filler_value)
MUST_USE_RESULT MaybeObject * AllocateAliasedArgumentsEntry(int slot)
MemoryChunk * next_chunk() const
void set_parent(String *parent, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
static JSFunction * cast(Object *obj)