#if V8_TARGET_ARCH_ARM && !V8_INTERPRETED_REGEXP
#if V8_TARGET_ARCH_MIPS && !V8_INTERPRETED_REGEXP
      initial_semispace_size_(Page::kPageSize),
      maximum_committed_(0),
      survived_since_last_expansion_(0),
      always_allocate_scope_depth_(0),
      linear_allocation_scope_depth_(0),
      contexts_disposed_(0),
      flush_monomorphic_ics_(false),
      scan_on_scavenge_pages_(0),
      old_pointer_space_(NULL),
      old_data_space_(NULL),
      property_cell_space_(NULL),
      gc_state_(NOT_IN_GC),
      gc_post_processing_depth_(0),
      remembered_unmapped_pages_index_(0),
      unflattened_strings_length_(0),
      allocation_timeout_(0),
      new_space_high_promotion_mode_active_(false),
      old_generation_allocation_limit_(kMinimumOldGenerationAllocationLimit),
      size_of_old_gen_at_last_old_space_gc_(0),
      external_allocation_limit_(0),
      amount_of_external_allocated_memory_(0),
      amount_of_external_allocated_memory_at_last_global_gc_(0),
      old_gen_exhausted_(false),
      inline_allocation_disabled_(false),
      store_buffer_rebuilder_(store_buffer()),
      hidden_string_(NULL),
      gc_safe_size_of_old_object_(NULL),
      total_regexp_code_generated_(0),
      young_survivors_after_last_gc_(0),
      high_survival_rate_period_length_(0),
      low_survival_rate_period_length_(0),
      previous_survival_rate_trend_(Heap::STABLE),
      survival_rate_trend_(Heap::STABLE),
      total_gc_time_ms_(0.0),
      max_alive_after_gc_(0),
      alive_after_last_gc_(0),
      last_gc_end_timestamp_(0.0),
      mark_compact_collector_(this),
      incremental_marking_(this),
      number_idle_notifications_(0),
      last_idle_notification_gc_count_(0),
      last_idle_notification_gc_count_init_(false),
      mark_sweeps_since_idle_round_started_(0),
      gc_count_at_last_idle_gc_(0),
      scavenges_since_last_idle_round_(kIdleScavengeThreshold),
      full_codegen_bytes_generated_(0),
      crankshaft_codegen_bytes_generated_(0),
      gcs_since_last_deopt_(0),
      no_weak_object_verification_scope_depth_(0),
      allocation_sites_scratchpad_length_(0),
      promotion_queue_(this),
      external_string_table_(this),
      chunks_queued_for_free_(NULL),
      gc_callbacks_depth_(0) {
#if defined(V8_MAX_SEMISPACE_SIZE)
  max_semispace_size_ = reserved_semispace_size_ = V8_MAX_SEMISPACE_SIZE;
  if (max_virtual > 0) {
    if (code_range_size_ > 0) {
      code_range_size_ = Min(code_range_size_, max_virtual >> 3);
  memset(roots_, 0, sizeof(roots_[0]) * kRootListLength);
  native_contexts_list_ = NULL;
  RememberUnmappedPage(NULL, false);
  ClearObjectStats(true);
  if (current_committed_memory > maximum_committed_) {
    maximum_committed_ = current_committed_memory;

  return old_pointer_space_ != NULL &&
         old_data_space_ != NULL &&
         code_space_ != NULL &&
         map_space_ != NULL &&
         cell_space_ != NULL &&
         property_cell_space_ != NULL &&
int Heap::GcSafeSizeOfOldObject(HeapObject* object) {
  return object->SizeFromMap(object->map());

                                              const char** reason) {
    isolate_->counters()->gc_compactor_caused_by_request()->Increment();
    *reason = "GC in old space requested";
  if (FLAG_gc_global || (FLAG_stress_compaction && (gc_count_ & 1) != 0)) {
    *reason = "GC in old space forced by flags";
    isolate_->counters()->gc_compactor_caused_by_promoted_data()->Increment();
    *reason = "promotion limit reached";
  if (old_gen_exhausted_) {
        gc_compactor_caused_by_oldspace_exhaustion()->Increment();
    *reason = "old generations exhausted";
        gc_compactor_caused_by_oldspace_exhaustion()->Increment();
    *reason = "scavenge might not succeed";
void Heap::ReportStatisticsBeforeGC() {
  if (FLAG_heap_stats) {
    ReportHeapStatistics("Before GC");
  } else if (FLAG_log_gc) {

  if (!FLAG_trace_gc_verbose) return;
  PrintPID("New space, used: %6" V8_PTR_PREFIX "d KB"
           ", available: %6" V8_PTR_PREFIX "d KB"
           ", committed: %6" V8_PTR_PREFIX "d KB\n",
  PrintPID("Old pointers, used: %6" V8_PTR_PREFIX "d KB"
           ", available: %6" V8_PTR_PREFIX "d KB"
           ", committed: %6" V8_PTR_PREFIX "d KB\n",
  PrintPID("Old data space, used: %6" V8_PTR_PREFIX "d KB"
           ", available: %6" V8_PTR_PREFIX "d KB"
           ", committed: %6" V8_PTR_PREFIX "d KB\n",
  PrintPID("Code space, used: %6" V8_PTR_PREFIX "d KB"
           ", available: %6" V8_PTR_PREFIX "d KB"
           ", committed: %6" V8_PTR_PREFIX "d KB\n",
  PrintPID("Map space, used: %6" V8_PTR_PREFIX "d KB"
           ", available: %6" V8_PTR_PREFIX "d KB"
           ", committed: %6" V8_PTR_PREFIX "d KB\n",
  PrintPID("Cell space, used: %6" V8_PTR_PREFIX "d KB"
           ", available: %6" V8_PTR_PREFIX "d KB"
           ", committed: %6" V8_PTR_PREFIX "d KB\n",
  PrintPID("PropertyCell space, used: %6" V8_PTR_PREFIX "d KB"
           ", available: %6" V8_PTR_PREFIX "d KB"
           ", committed: %6" V8_PTR_PREFIX "d KB\n",
  PrintPID("Large object space, used: %6" V8_PTR_PREFIX "d KB"
           ", available: %6" V8_PTR_PREFIX "d KB"
           ", committed: %6" V8_PTR_PREFIX "d KB\n",
  PrintPID("All spaces, used: %6" V8_PTR_PREFIX "d KB"
           ", available: %6" V8_PTR_PREFIX "d KB"
           ", committed: %6" V8_PTR_PREFIX "d KB\n",
  PrintPID("External memory reported: %6" V8_PTR_PREFIX "d KB\n",
           static_cast<intptr_t>(amount_of_external_allocated_memory_ / KB));
  PrintPID("Total time spent in GC : %.1f ms\n", total_gc_time_ms_);
void Heap::ReportStatisticsAfterGC() {
  if (FLAG_heap_stats) {
    ReportHeapStatistics("After GC");
  } else if (FLAG_log_gc) {

void Heap::GarbageCollectionPrologue() {
  unflattened_strings_length_ = 0;
  if (FLAG_flush_code && FLAG_flush_code_incrementally) {
  if (FLAG_verify_heap) {
  if (FLAG_gc_verbose) Print();
  ReportStatisticsBeforeGC();
  if (isolate()->concurrent_osr_enabled()) {
  AllSpaces spaces(this);
  for (Space* space = spaces.next(); space != NULL; space = spaces.next()) {
    total += space->SizeOfObjects();
  if (current_kind == Code::FUNCTION ||
      current_kind == Code::OPTIMIZED_FUNCTION) {
  PagedSpaces spaces(this);
       space = spaces.next()) {
    space->RepairFreeListsAfterBoot();
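// Pretenuring feedback: walk the collected allocation sites (either the
// bounded scratchpad or the full weak list), tally found mementos and
// tenure/don't-tenure decisions, and request deoptimization of dependent
// code when a site's decision changes.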
void Heap::ProcessPretenuringFeedback() {
  if (FLAG_allocation_site_pretenuring) {
    int tenure_decisions = 0;
    int dont_tenure_decisions = 0;
    int allocation_mementos_found = 0;
    int allocation_sites = 0;
    int active_allocation_sites = 0;
    bool use_scratchpad =
        allocation_sites_scratchpad_length_ < kAllocationSiteScratchpadSize;
    bool trigger_deoptimization = false;
    while (use_scratchpad ?
        i < allocation_sites_scratchpad_length_ :
        list_element->IsAllocationSite()) {
        active_allocation_sites++;
          dont_tenure_decisions++;
      if (use_scratchpad) {
        list_element = site->weak_next();
    if (trigger_deoptimization) {
    FlushAllocationSitesScratchpad();
    if (FLAG_trace_pretenuring_statistics &&
        (allocation_mementos_found > 0 ||
         tenure_decisions > 0 ||
         dont_tenure_decisions > 0)) {
      PrintF("GC: (mode, #visited allocation sites, #active allocation sites, "
             "#mementos, #tenure decisions, #donttenure decisions) "
             "(%s, %d, %d, %d, %d, %d)\n",
             use_scratchpad ? "use scratchpad" : "use list",
             active_allocation_sites,
             allocation_mementos_found,
             dont_tenure_decisions);

  while (list_element->IsAllocationSite()) {
      site->dependent_code()->MarkCodeForDeoptimization(
    list_element = site->weak_next();
void Heap::GarbageCollectionEpilogue() {
  ProcessPretenuringFeedback();
  if (FLAG_verify_heap) {
  if (FLAG_print_global_handles) isolate_->global_handles()->Print();
  if (FLAG_print_handles) PrintHandles();
  if (FLAG_gc_verbose) Print();
  if (FLAG_code_stats) ReportCodeStatistics("After GC");
  if (FLAG_deopt_every_n_garbage_collections > 0) {
    if (++gcs_since_last_deopt_ == FLAG_deopt_every_n_garbage_collections) {
      gcs_since_last_deopt_ = 0;
  isolate_->counters()->alive_after_last_gc()->Set(
  isolate_->counters()->string_table_capacity()->Set(
  isolate_->counters()->number_of_symbols()->Set(
      string_table()->NumberOfElements());
  if (full_codegen_bytes_generated_ + crankshaft_codegen_bytes_generated_ > 0) {
    isolate_->counters()->codegen_fraction_crankshaft()->AddSample(
        static_cast<int>((crankshaft_codegen_bytes_generated_ * 100.0) /
                         (crankshaft_codegen_bytes_generated_
                          + full_codegen_bytes_generated_)));
  isolate_->counters()->external_fragmentation_total()->AddSample(
  isolate_->counters()->heap_fraction_new_space()->
      AddSample(static_cast<int>(
  isolate_->counters()->heap_fraction_old_pointer_space()->AddSample(
  isolate_->counters()->heap_fraction_old_data_space()->AddSample(
  isolate_->counters()->heap_fraction_code_space()->
      AddSample(static_cast<int>(
  isolate_->counters()->heap_fraction_map_space()->AddSample(
  isolate_->counters()->heap_fraction_cell_space()->AddSample(
  isolate_->counters()->heap_fraction_property_cell_space()->
      AddSample(static_cast<int>(
  isolate_->counters()->heap_fraction_lo_space()->
      AddSample(static_cast<int>(
  isolate_->counters()->heap_sample_total_committed()->AddSample(
  isolate_->counters()->heap_sample_total_used()->AddSample(
  isolate_->counters()->heap_sample_map_space_committed()->AddSample(
  isolate_->counters()->heap_sample_cell_space_committed()->AddSample(
      heap_sample_property_cell_space_committed()->
          AddSample(static_cast<int>(
  isolate_->counters()->heap_sample_code_space_committed()->AddSample(
  isolate_->counters()->heap_sample_maximum_committed()->AddSample(

#define UPDATE_COUNTERS_FOR_SPACE(space)                     \
  isolate_->counters()->space##_bytes_available()->Set(      \
      static_cast<int>(space()->Available()));               \
  isolate_->counters()->space##_bytes_committed()->Set(      \
      static_cast<int>(space()->CommittedMemory()));         \
  isolate_->counters()->space##_bytes_used()->Set(           \
      static_cast<int>(space()->SizeOfObjects()));
#define UPDATE_FRAGMENTATION_FOR_SPACE(space)                                 \
  if (space()->CommittedMemory() > 0) {                                       \
    isolate_->counters()->external_fragmentation_##space()->AddSample(        \
        static_cast<int>(100 -                                                \
            (space()->SizeOfObjects() * 100.0) / space()->CommittedMemory()));
#define UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(space)   \
  UPDATE_COUNTERS_FOR_SPACE(space)                           \
  UPDATE_FRAGMENTATION_FOR_SPACE(space)
#undef UPDATE_COUNTERS_FOR_SPACE
#undef UPDATE_FRAGMENTATION_FOR_SPACE
#undef UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE

  ReportStatisticsAfterGC();
#ifdef ENABLE_DEBUGGER_SUPPORT
  isolate_->debug()->AfterGarbageCollection();
#endif  // ENABLE_DEBUGGER_SUPPORT
                              const char* gc_reason,
  mark_compact_collector_.SetFlags(flags);
  if (isolate()->concurrent_recompilation_enabled()) {
  const int kMaxNumberOfAttempts = 7;
  const int kMinNumberOfAttempts = 2;
  for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) {
        attempt + 1 >= kMinNumberOfAttempts) {

void Heap::EnsureFillerObjectAtTop() {
  if (from_top < from_limit) {
    int remaining_in_page = static_cast<int>(from_limit - from_top);

                          const char* gc_reason,
                          const char* collector_reason,
  VMState<GC> state(isolate_);
  allocation_timeout_ = Max(6, FLAG_gc_interval);
  EnsureFillerObjectAtTop();
    if (FLAG_trace_incremental_marking) {
      PrintF("[IncrementalMarking] Scavenge during marking.\n");
      FLAG_incremental_marking_steps) {
    const intptr_t kStepSizeWhenDelayedByScavenge = 1 * MB;
      if (FLAG_trace_incremental_marking) {
        PrintF("[IncrementalMarking] Delaying MarkSweep.\n");
      collector_reason = "incremental marking delaying mark-sweep";
  bool next_gc_likely_to_collect_more = false;
  ASSERT(AllowHeapAllocation::IsAllowed());
  GarbageCollectionPrologue();
    tracer.set_gc_count(gc_count_);
    tracer.set_collector(collector);
    HistogramTimerScope histogram_timer_scope(
        : isolate_->counters()->gc_compactor());
    next_gc_likely_to_collect_more =
        PerformGarbageCollection(collector, &tracer, gc_callback_flags);
  GarbageCollectionEpilogue();
  return next_gc_likely_to_collect_more;

  if (isolate()->concurrent_recompilation_enabled()) {
  flush_monomorphic_ics_ = true;
  return ++contexts_disposed_;

  if (len == 0) return;
  ASSERT(array->map() != fixed_cow_array_map());
  for (int i = 0; i < len; i++) {

class StringTableVerifier : public ObjectVisitor {
    for (Object** p = start; p < end; p++) {
      if ((*p)->IsHeapObject()) {
        CHECK((*p)->IsTheHole() || (*p)->IsUndefined() ||
              (*p)->IsInternalizedString());

static void VerifyStringTable(Heap* heap) {
  StringTableVerifier verifier;
  heap->string_table()->IterateElements(&verifier);
#endif  // VERIFY_HEAP

static bool AbortIncrementalMarkingAndCollectGarbage(
    const char* gc_reason = NULL) {
  bool result = heap->CollectGarbage(space, gc_reason);

  bool gc_performed = true;
  static const int kThreshold = 20;
  while (gc_performed && counter++ < kThreshold) {
    gc_performed = false;
      if (sizes[space] != 0) {
        MaybeObject* allocation;
          allocation = new_space()->AllocateRaw(sizes[space]);
                "failed to reserve space in the new space");
          AbortIncrementalMarkingAndCollectGarbage(
              static_cast<AllocationSpace>(space),
              "failed to reserve space in paged space");
          locations_out[space] = node->address();
void Heap::EnsureFromSpaceIsCommitted() {

  Object* context = native_contexts_list_;
  while (!context->IsUndefined()) {
    Object* caches_or_undefined =
    if (!caches_or_undefined->IsUndefined()) {
      int length = caches->length();
      for (int i = 0; i < length; i++) {

  Object* context = native_contexts_list_;
  while (!context->IsUndefined()) {
    if (!cache->IsUndefined()) {
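// Survival rate for the last scavenge, in percent:
//   survival_rate = young_survivors_after_last_gc_ * 100 / start_new_space_size
// The high/low streak counters and the trend (INCREASING/STABLE/DECREASING)
// feed the promotion-mode heuristics used in PerformGarbageCollection.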
void Heap::UpdateSurvivalRateTrend(int start_new_space_size) {
  if (start_new_space_size == 0) return;

  double survival_rate =
      (static_cast<double>(young_survivors_after_last_gc_) * 100) /
      start_new_space_size;

  if (survival_rate > kYoungSurvivalRateHighThreshold) {
    high_survival_rate_period_length_++;
    high_survival_rate_period_length_ = 0;

  if (survival_rate < kYoungSurvivalRateLowThreshold) {
    low_survival_rate_period_length_++;
    low_survival_rate_period_length_ = 0;

  double survival_rate_diff = survival_rate_ - survival_rate;

  if (survival_rate_diff > kYoungSurvivalRateAllowedDeviation) {
    set_survival_rate_trend(DECREASING);
  } else if (survival_rate_diff < -kYoungSurvivalRateAllowedDeviation) {
    set_survival_rate_trend(INCREASING);
    set_survival_rate_trend(STABLE);

  survival_rate_ = survival_rate;
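// Runs one collection cycle: GC prologue callbacks, the actual scavenge or
// mark-compact, survival-rate bookkeeping and promotion-mode switching,
// weak-handle post-processing, and the matching epilogue callbacks.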
bool Heap::PerformGarbageCollection(
  bool next_gc_likely_to_collect_more = false;
    PROFILE(isolate_, CodeMovingGCEvent());
  if (FLAG_verify_heap) {
    VerifyStringTable(this);
    if (scope.CheckReenter()) {
      VMState<EXTERNAL> state(isolate_);
      HandleScope handle_scope(isolate_);
  EnsureFromSpaceIsCommitted();
  if (IsHighSurvivalRate()) {
    MarkCompact(tracer);
    sweep_generation_++;
    UpdateSurvivalRateTrend(start_new_space_size);
    old_generation_allocation_limit_ =
    old_gen_exhausted_ = false;
    UpdateSurvivalRateTrend(start_new_space_size);
  if (!new_space_high_promotion_mode_active_ &&
      IsStableOrIncreasingSurvivalTrend() &&
      IsHighSurvivalRate()) {
    if (FLAG_trace_gc) {
      PrintPID("Limited new space size due to high promotion rate: %d MB\n",
    if (FLAG_pretenuring) {
      if (!FLAG_allocation_site_pretenuring) {
  } else if (new_space_high_promotion_mode_active_ &&
      IsStableOrDecreasingSurvivalTrend() &&
      IsLowSurvivalRate()) {
    if (FLAG_trace_gc) {
      PrintPID("Unlimited new space size due to low promotion rate: %d MB\n",
    if (FLAG_pretenuring && !FLAG_allocation_site_pretenuring) {
  if (new_space_high_promotion_mode_active_ &&
  isolate_->counters()->objs_since_last_young()->Set(0);
  gc_post_processing_depth_++;
    next_gc_likely_to_collect_more =
  gc_post_processing_depth_--;
  Relocatable::PostGarbageCollectionProcessing(isolate_);
  amount_of_external_allocated_memory_at_last_global_gc_ =
      amount_of_external_allocated_memory_;
    if (scope.CheckReenter()) {
      VMState<EXTERNAL> state(isolate_);
      HandleScope handle_scope(isolate_);
  if (FLAG_verify_heap) {
    VerifyStringTable(this);
  return next_gc_likely_to_collect_more;

  for (int i = 0; i < gc_prologue_callbacks_.length(); ++i) {
    if (gc_type & gc_prologue_callbacks_[i].gc_type) {
      if (!gc_prologue_callbacks_[i].pass_isolate_) {
            gc_prologue_callbacks_[i].callback);
        callback(gc_type, flags);
        gc_prologue_callbacks_[i].callback(isolate, gc_type, flags);

  for (int i = 0; i < gc_epilogue_callbacks_.length(); ++i) {
    if (gc_type & gc_epilogue_callbacks_[i].gc_type) {
      if (!gc_epilogue_callbacks_[i].pass_isolate_) {
            gc_epilogue_callbacks_[i].callback);
        callback(gc_type, gc_callback_flags);
        gc_epilogue_callbacks_[i].callback(
            isolate, gc_type, gc_callback_flags);
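// Full mark-compact collection; afterwards monomorphic-IC flushing is reset
// and, with allocation-site pretenuring enabled, old-space pretenuring
// decisions are re-evaluated against the pre-GC object size.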
void Heap::MarkCompact(GCTracer* tracer) {
  LOG(isolate_, ResourceEvent("markcompact", "begin"));
  mark_compact_collector_.Prepare(tracer);
  tracer->set_full_gc_count(ms_count_);
  MarkCompactPrologue();
  LOG(isolate_, ResourceEvent("markcompact", "end"));
  isolate_->counters()->objs_since_last_full()->Set(0);
  flush_monomorphic_ics_ = false;
  if (FLAG_allocation_site_pretenuring) {
    EvaluateOldSpaceLocalPretenuring(size_of_objects_before_gc);

void Heap::MarkCompactPrologue() {
  FlushNumberStringCache();
  if (FLAG_cleanup_code_caches_at_gc) {
    polymorphic_code_cache()->set_cache(undefined_value());

    for (Object** p = start; p < end; p++) ScavengePointer(p);

  void ScavengePointer(Object** p) {
        reinterpret_cast<HeapObject*>(object));

class VerifyNonPointerSpacePointersVisitor: public ObjectVisitor {
  explicit VerifyNonPointerSpacePointersVisitor(Heap* heap) : heap_(heap) {}
    for (Object** current = start; current < end; current++) {
      if ((*current)->IsHeapObject()) {

static void VerifyNonPointerSpacePointers(Heap* heap) {
  VerifyNonPointerSpacePointersVisitor v(heap);
  HeapObjectIterator code_it(heap->code_space());
  for (HeapObject* object = code_it.Next();
       object != NULL; object = code_it.Next())
    object->Iterate(&v);

  if (!heap->old_data_space()->was_swept_conservatively()) {
    HeapObjectIterator data_it(heap->old_data_space());
    for (HeapObject* object = data_it.Next();
         object != NULL; object = data_it.Next())
      object->Iterate(&v);
#endif  // VERIFY_HEAP

      survived_since_last_expansion_ > new_space_.Capacity() &&
      !new_space_high_promotion_mode_active_) {
    survived_since_last_expansion_ = 0;

static bool IsUnscavengedHeapObject(Heap* heap, Object** p) {

void Heap::ScavengeStoreBufferCallback(
  heap->store_buffer_rebuilder_.Callback(page, event);

    start_of_current_page_ = NULL;
    current_page_ = NULL;
    if (current_page_ != NULL) {
      store_buffer_->SetTop(start_of_current_page_);
    } else if (store_buffer_->Top() - start_of_current_page_ >=
               (store_buffer_->Limit() - store_buffer_->Top()) >> 2) {
      store_buffer_->SetTop(start_of_current_page_);
    start_of_current_page_ = store_buffer_->Top();
    current_page_ = page;
    if (current_page_ == NULL) {
    ASSERT(current_page_ == page);
      ASSERT(start_of_current_page_ != store_buffer_->Top());
      store_buffer_->SetTop(start_of_current_page_);

  emergency_stack_ = NULL;

void PromotionQueue::RelocateQueueHead() {
  Page* p = Page::FromAllocationTop(reinterpret_cast<Address>(rear_));
  intptr_t* head_start = rear_;
  intptr_t* head_end =
      Min(front_, reinterpret_cast<intptr_t*>(p->area_end()));
      static_cast<int>(head_end - head_start) / kEntrySizeInWords;
  emergency_stack_ = new List<Entry>(2 * entries_count);
  while (head_start != head_end) {
    int size = static_cast<int>(*(head_start++));
    emergency_stack_->Add(Entry(obj, size));

  if (map_word.IsForwardingAddress()) {
    return map_word.ToForwardingAddress();
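// Copying collection of the new space: the strong roots, store-buffer
// entries and cell/property-cell values form the initial gray set, then
// DoScavenge drains the to-space scan pointer and the promotion queue.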
void Heap::Scavenge() {
  RelocationLock relocation_lock(this);
  if (FLAG_verify_heap) VerifyNonPointerSpacePointers(this);
  LOG(isolate_, ResourceEvent("scavenge", "begin"));
  SelectScavengingVisitorsTable();
  ScavengeVisitor scavenge_visitor(this);
    StoreBufferRebuildScope scope(this,
                                  &ScavengeStoreBufferCallback);
    HeapObjectIterator cell_iterator(cell_space_);
    for (HeapObject* heap_object = cell_iterator.Next();
         heap_object != NULL;
         heap_object = cell_iterator.Next()) {
      if (heap_object->IsCell()) {
        Address value_address = cell->ValueAddress();
        scavenge_visitor.VisitPointer(reinterpret_cast<Object**>(value_address));
    HeapObjectIterator js_global_property_cell_iterator(property_cell_space_);
    for (HeapObject* heap_object = js_global_property_cell_iterator.Next();
         heap_object != NULL;
         heap_object = js_global_property_cell_iterator.Next()) {
      if (heap_object->IsPropertyCell()) {
        Address value_address = cell->ValueAddress();
        scavenge_visitor.VisitPointer(reinterpret_cast<Object**>(value_address));
        Address type_address = cell->TypeAddress();
        scavenge_visitor.VisitPointer(reinterpret_cast<Object**>(type_address));
    if (collector->is_code_flushing_enabled()) {
      collector->code_flusher()->IteratePointersToFromSpace(&scavenge_visitor);
  scavenge_visitor.VisitPointer(BitCast<Object**>(&native_contexts_list_));
  new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
  while (isolate()->global_handles()->IterateObjectGroups(
      &scavenge_visitor, &IsUnscavengedHeapObject)) {
    new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
      &IsUnscavengedHeapObject);
  new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
      &UpdateNewSpaceReferenceInExternalStringTableEntry);
  ScavengeWeakObjectRetainer weak_object_retainer(this);
  ASSERT(new_space_front == new_space_.top());
  LOG(isolate_, ResourceEvent("scavenge", "end"));
  scavenges_since_last_idle_round_++;

String* Heap::UpdateNewSpaceReferenceInExternalStringTableEntry(Heap* heap,
  if (!first_word.IsForwardingAddress()) {

  if (FLAG_verify_heap) {
    external_string_table_.Verify();
  if (external_string_table_.new_space_strings_.is_empty()) return;
  Object** start = &external_string_table_.new_space_strings_[0];
  Object** end = start + external_string_table_.new_space_strings_.length();
  for (Object** p = start; p < end; ++p) {
    String* target = updater_func(this, p);
    if (target == NULL) continue;
    ASSERT(target->IsExternalString());
      external_string_table_.AddOldString(target);
  external_string_table_.ShrinkNewStrings(static_cast<int>(last - start));
  if (external_string_table_.old_space_strings_.length() > 0) {
    Object** start = &external_string_table_.old_space_strings_[0];
    Object** end = start + external_string_table_.old_space_strings_.length();
    for (Object** p = start; p < end; ++p) *p = updater_func(this, p);
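// Generic weak-list traversal: WeakListVisitor<T> supplies the weak-next
// accessors, objects kept alive by the retainer are relinked into a new
// list, dead ones are dropped (VisitPhantomObject), and record_slots keeps
// the mark-compact collector's slot information up to date for survivors.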
                          bool record_slots) {
  Object* undefined = heap->undefined_value();
  Object* head = undefined;
  while (list != undefined) {
    T* candidate = reinterpret_cast<T*>(list);
    if (retained != NULL) {
      if (head == undefined) {
          collector->RecordSlot(next_slot, next_slot, retained);
      ASSERT(!retained->IsUndefined());
      candidate = reinterpret_cast<T*>(retained);
      WeakListVisitor<T>::VisitLiveObject(
          heap, tail, retainer, record_slots);
      WeakListVisitor<T>::VisitPhantomObject(heap, candidate);
    list = WeakListVisitor<T>::WeakNext(candidate);
    WeakListVisitor<T>::SetWeakNext(tail, undefined);

static void ClearWeakList(Heap* heap,
  Object* undefined = heap->undefined_value();
  while (list != undefined) {
    T* candidate = reinterpret_cast<T*>(list);
    list = WeakListVisitor<T>::WeakNext(candidate);
    WeakListVisitor<T>::SetWeakNext(candidate, undefined);

    function->set_next_function_link(next);
    return function->next_function_link();
    code->set_next_code_link(next);
    return code->next_code_link();

                              bool record_slots) {
    DoWeakList<JSFunction>(heap, context, retainer, record_slots,
    DoWeakList<Code>(heap, context, retainer, record_slots,
    DoWeakList<Code>(heap, context, retainer, record_slots,
    Object* list_head = VisitWeakList<T>(heap, context->get(index), retainer,
      heap->mark_compact_collector()->RecordSlot(
          head_slot, head_slot, list_head);
    ClearWeakList<JSFunction>(heap,

  ProcessArrayBuffers(retainer, record_slots);
  ProcessNativeContexts(retainer, record_slots);
  ProcessAllocationSites(retainer, record_slots);

                                   bool record_slots) {
      VisitWeakList<Context>(
  native_contexts_list_ = head;

    obj->set_weak_next(next);
    return obj->weak_next();
                              bool record_slots) {}
    obj->set_weak_next(next);
    return obj->weak_next();
                              bool record_slots) {
    Object* typed_array_obj =
        VisitWeakList<JSArrayBufferView>(
            array_buffer->weak_first_view(),
            retainer, record_slots);
    array_buffer->set_weak_first_view(typed_array_obj);
    if (typed_array_obj != heap->undefined_value() && record_slots) {
      heap->mark_compact_collector()->RecordSlot(slot, slot, typed_array_obj);

void Heap::ProcessArrayBuffers(WeakObjectRetainer* retainer,
                               bool record_slots) {
  Object* array_buffer_obj =
      VisitWeakList<JSArrayBuffer>(this,
                                   retainer, record_slots);

void Heap::TearDownArrayBuffers() {
  Object* undefined = undefined_value();
    o = buffer->weak_next();
  array_buffers_list_ = undefined;

    obj->set_weak_next(next);
    return obj->weak_next();
                              bool record_slots) {}

void Heap::ProcessAllocationSites(WeakObjectRetainer* retainer,
                                  bool record_slots) {
  Object* allocation_site_obj =
      VisitWeakList<AllocationSite>(this,
                                    retainer, record_slots);

  bool marked = false;
  while (cur->IsAllocationSite()) {
    if (casted->GetPretenureMode() == flag) {
      casted->ResetPretenureDecision();
      casted->set_deopt_dependent_code(true);
    cur = casted->weak_next();

void Heap::EvaluateOldSpaceLocalPretenuring(
    uint64_t size_of_objects_before_gc) {
  double old_generation_survival_rate =
      (static_cast<double>(size_of_objects_after_gc) * 100) /
      static_cast<double>(size_of_objects_before_gc);
  if (old_generation_survival_rate < kOldSurvivalRateLowThreshold) {
    ResetAllAllocationSitesDependentCode(TENURED);
    if (FLAG_trace_pretenuring) {
      PrintF("Deopt all allocation sites dependent code due to low survival "
             "rate in the old generation %f\n", old_generation_survival_rate);

  class ExternalStringTableVisitorAdapter : public ObjectVisitor {
    explicit ExternalStringTableVisitorAdapter(
    virtual void VisitPointers(Object** start, Object** end) {
      for (Object** p = start; p < end; p++) {
        ASSERT((*p)->IsExternalString());
  } external_string_table_visitor(visitor);
  external_string_table_.Iterate(&external_string_table_visitor);

      reinterpret_cast<HeapObject*>(object));
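// Cheney-style scan: advance new_space_front over freshly copied objects and
// drain the promotion queue until both are exhausted, scavenging the pointers
// found in each visited object.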
Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
    while (new_space_front != new_space_.top()) {
        NewSpaceScavenger::IterateBody(object->map(), object);
      StoreBufferRebuildScope scope(this,
                                    &ScavengeStoreBufferCallback);
        ASSERT(!target->IsMap());
            target->address() + size,
  } while (new_space_front != new_space_.top());
  return new_space_front;

INLINE(static HeapObject* EnsureDoubleAligned(Heap* heap,

static HeapObject* EnsureDoubleAligned(Heap* heap,
    heap->CreateFillerObjectAt(object->address(), kPointerSize);
    heap->CreateFillerObjectAt(object->address() + size - kPointerSize,

    table_.Register(kVisitSeqOneByteString, &EvacuateSeqOneByteString);
    table_.Register(kVisitSeqTwoByteString, &EvacuateSeqTwoByteString);
    table_.Register(kVisitShortcutCandidate, &EvacuateShortcutCandidate);
    table_.Register(kVisitByteArray, &EvacuateByteArray);
    table_.Register(kVisitFixedArray, &EvacuateFixedArray);
    table_.Register(kVisitFixedDoubleArray, &EvacuateFixedDoubleArray);
    table_.Register(kVisitFixedTypedArray, &EvacuateFixedTypedArray);
    table_.Register(kVisitFixedFloat64Array, &EvacuateFixedFloat64Array);
    table_.Register(kVisitNativeContext,
                    &ObjectEvacuationStrategy<POINTER_OBJECT>::
                        template VisitSpecialized<Context::kSize>);
                    &ObjectEvacuationStrategy<POINTER_OBJECT>::
                        template VisitSpecialized<ConsString::kSize>);
    table_.Register(kVisitSlicedString,
                    &ObjectEvacuationStrategy<POINTER_OBJECT>::
                        template VisitSpecialized<SlicedString::kSize>);
                    &ObjectEvacuationStrategy<POINTER_OBJECT>::
                        template VisitSpecialized<Symbol::kSize>);
    table_.Register(kVisitSharedFunctionInfo,
                    &ObjectEvacuationStrategy<POINTER_OBJECT>::
                        template VisitSpecialized<SharedFunctionInfo::kSize>);
                    &ObjectEvacuationStrategy<POINTER_OBJECT>::
                    &ObjectEvacuationStrategy<POINTER_OBJECT>::
    table_.Register(kVisitJSArrayBuffer,
                    &ObjectEvacuationStrategy<POINTER_OBJECT>::
    table_.Register(kVisitJSTypedArray,
                    &ObjectEvacuationStrategy<POINTER_OBJECT>::
                    &ObjectEvacuationStrategy<POINTER_OBJECT>::
                    &ObjectEvacuationStrategy<POINTER_OBJECT>::
                    &ObjectEvacuationStrategy<POINTER_OBJECT>::
                        template VisitSpecialized<JSFunction::kSize>);
    table_.Register(kVisitJSFunction, &EvacuateJSFunction);
                                   kVisitDataObjectGeneric>();
                                   kVisitJSObjectGeneric>();
                                   kVisitStructGeneric>();

  enum ObjectContents { DATA_OBJECT, POINTER_OBJECT };

  static void RecordCopiedObject(Heap* heap, HeapObject* obj) {
    bool should_record = false;
    should_record = FLAG_heap_stats;
    should_record = should_record || FLAG_log_gc;
    if (should_record) {
      if (heap->new_space()->Contains(obj)) {
        heap->new_space()->RecordAllocation(obj);
        heap->new_space()->RecordPromotion(obj);

  INLINE(static void MigrateObject(Heap* heap,
    heap->CopyBlock(target->address(), source->address(), size);
    source->set_map_word(MapWord::FromForwardingAddress(target));
      RecordCopiedObject(heap, target);
      Isolate* isolate = heap->isolate();
      HeapProfiler* heap_profiler = isolate->heap_profiler();
      if (heap_profiler->is_tracking_object_moves()) {
        heap_profiler->ObjectMoveEvent(source->address(), target->address(),
      if (isolate->logger()->is_logging_code_events() ||
          isolate->cpu_profiler()->is_profiling()) {
        if (target->IsSharedFunctionInfo()) {
          PROFILE(isolate, SharedFunctionInfoMoveEvent(
              source->address(), target->address()));
      if (Marking::TransferColor(source, target)) {
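  // Core copy routine: promote the object into old data/pointer space when
  // it has survived long enough, otherwise copy it within new space; the
  // alignment template parameter pads double-aligned objects via
  // EnsureDoubleAligned.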
  template<ObjectContents object_contents, int alignment>
  static inline void EvacuateObject(Map* map,
    int allocation_size = object_size;
    Heap* heap = map->GetHeap();
    if (heap->ShouldBePromoted(object->address(), object_size)) {
      MaybeObject* maybe_result;
      if (object_contents == DATA_OBJECT) {
        maybe_result = heap->old_data_space()->AllocateRaw(allocation_size);
        maybe_result = heap->old_pointer_space()->AllocateRaw(allocation_size);
      if (maybe_result->ToObject(&result)) {
          target = EnsureDoubleAligned(heap, target, allocation_size);
        MigrateObject(heap, object, target, object_size);
        if (object_contents == POINTER_OBJECT) {
            heap->promotion_queue()->insert(
            heap->promotion_queue()->insert(target, object_size);
        heap->tracer()->increment_promoted_objects_size(object_size);
    MaybeObject* allocation = heap->new_space()->AllocateRaw(allocation_size);
    heap->promotion_queue()->SetNewLimit(heap->new_space()->top());
    Object* result = allocation->ToObjectUnchecked();
      target = EnsureDoubleAligned(heap, target, allocation_size);
    MigrateObject(heap, object, target, object_size);

  static inline void EvacuateJSFunction(Map* map,
                                        HeapObject* object) {
    ObjectEvacuationStrategy<POINTER_OBJECT>::
        template VisitSpecialized<JSFunction::kSize>(map, slot, object);
    HeapObject* target = *slot;
    MarkBit mark_bit = Marking::MarkBitFrom(target);
    if (Marking::IsBlack(mark_bit)) {
      map->GetHeap()->mark_compact_collector()->
          RecordCodeEntrySlot(code_entry_slot, code);

  static inline void EvacuateFixedArray(Map* map,
                                        HeapObject* object) {
    EvacuateObject<POINTER_OBJECT, kObjectAlignment>(
        map, slot, object, object_size);

  static inline void EvacuateFixedDoubleArray(Map* map,
                                              HeapObject* object) {
    int length = reinterpret_cast<FixedDoubleArray*>(object)->length();
    EvacuateObject<DATA_OBJECT, kDoubleAlignment>(
        map, slot, object, object_size);

  static inline void EvacuateFixedTypedArray(Map* map,
                                             HeapObject* object) {
    int object_size = reinterpret_cast<FixedTypedArrayBase*>(object)->size();
    EvacuateObject<DATA_OBJECT, kObjectAlignment>(
        map, slot, object, object_size);

  static inline void EvacuateFixedFloat64Array(Map* map,
                                               HeapObject* object) {
    int object_size = reinterpret_cast<FixedFloat64Array*>(object)->size();
    EvacuateObject<DATA_OBJECT, kDoubleAlignment>(
        map, slot, object, object_size);

  static inline void EvacuateByteArray(Map* map,
                                       HeapObject* object) {
    int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize();
    EvacuateObject<DATA_OBJECT, kObjectAlignment>(
        map, slot, object, object_size);

  static inline void EvacuateSeqOneByteString(Map* map,
                                              HeapObject* object) {
        SeqOneByteStringSize(map->instance_type());
    EvacuateObject<DATA_OBJECT, kObjectAlignment>(
        map, slot, object, object_size);

  static inline void EvacuateSeqTwoByteString(Map* map,
                                              HeapObject* object) {
        SeqTwoByteStringSize(map->instance_type());
    EvacuateObject<DATA_OBJECT, kObjectAlignment>(
        map, slot, object, object_size);

  static inline bool IsShortcutCandidate(int type) {
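  // Cons strings whose second part is the empty string are "shortcut
  // candidates": the scavenger forwards the cons cell directly to its first
  // part instead of copying the wrapper.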
  static inline void EvacuateShortcutCandidate(Map* map,
                                               HeapObject* object) {
    ASSERT(IsShortcutCandidate(map->instance_type()));
    Heap* heap = map->GetHeap();
        heap->empty_string()) {
      if (!heap->InNewSpace(first)) {
        object->set_map_word(MapWord::FromForwardingAddress(first));
      MapWord first_word = first->map_word();
      if (first_word.IsForwardingAddress()) {
        HeapObject* target = first_word.ToForwardingAddress();
        object->set_map_word(MapWord::FromForwardingAddress(target));
      heap->DoScavengeObject(first->map(), slot, first);
      object->set_map_word(MapWord::FromForwardingAddress(*slot));
    EvacuateObject<POINTER_OBJECT, kObjectAlignment>(
        map, slot, object, object_size);

  template<ObjectContents object_contents>
  class ObjectEvacuationStrategy {
    template<int object_size>
    static inline void VisitSpecialized(Map* map,
                                        HeapObject* object) {
      EvacuateObject<object_contents, kObjectAlignment>(
          map, slot, object, object_size);
    static inline void Visit(Map* map,
                             HeapObject* object) {
      int object_size = map->instance_size();
      EvacuateObject<object_contents, kObjectAlignment>(
          map, slot, object, object_size);

  static VisitorDispatchTable<ScavengingCallback> table_;

VisitorDispatchTable<ScavengingCallback>
    ScavengingVisitor<marks_handling, logging_and_profiling_mode>::table_;

static void InitializeScavengingVisitorsTables() {
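// Picks the dispatch table variant for this cycle, depending on whether
// logging/profiling is active and whether incremental marking requires
// transferring mark bits while copying.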
void Heap::SelectScavengingVisitorsTable() {
  bool logging_and_profiling =
    if (!logging_and_profiling) {
      scavenging_visitors_table_.CopyFrom(
      scavenging_visitors_table_.CopyFrom(
    if (!logging_and_profiling) {
      scavenging_visitors_table_.CopyFrom(
      scavenging_visitors_table_.CopyFrom(
      scavenging_visitors_table_.Register(
          StaticVisitorBase::kVisitShortcutCandidate,
              StaticVisitorBase::kVisitConsString));

void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
  SLOW_ASSERT(object->GetIsolate()->heap()->InFromSpace(object));
  MapWord first_word = object->map_word();
  Map* map = first_word.ToMap();
  map->GetHeap()->DoScavengeObject(map, p, object);
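// Bootstrapping helper: fills in just enough of a Map (via raw casts) before
// the meta map and the rest of the root set exist; AllocateMap creates fully
// initialized maps once bootstrapping is further along.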
                                     int instance_size) {
  if (!maybe_result->ToObject(&result)) return maybe_result;

  reinterpret_cast<Map*>(result)->set_map(raw_unchecked_meta_map());
  reinterpret_cast<Map*>(result)->set_instance_type(instance_type);
  reinterpret_cast<Map*>(result)->set_instance_size(instance_size);
  reinterpret_cast<Map*>(result)->set_visitor_id(
  reinterpret_cast<Map*>(result)->set_inobject_properties(0);
  reinterpret_cast<Map*>(result)->set_pre_allocated_property_fields(0);
  reinterpret_cast<Map*>(result)->set_unused_property_fields(0);
  reinterpret_cast<Map*>(result)->set_bit_field(0);
  reinterpret_cast<Map*>(result)->set_bit_field2(0);
  reinterpret_cast<Map*>(result)->set_bit_field3(bit_field3);

  if (!maybe_result->To(&result)) return maybe_result;
  Map* map = reinterpret_cast<Map*>(result);
  map->set_instance_descriptors(empty_descriptor_array());
  if (!maybe_code_cache->To(&code_cache)) return maybe_code_cache;
  if (!maybe_accessors->To(&accessors)) return maybe_accessors;
  if (!maybe_info->To(&info)) return maybe_info;
  if (!maybe_entry->To(&entry)) return maybe_entry;

const Heap::StringTypeTable Heap::string_type_table[] = {
#define STRING_TYPE_ELEMENT(type, size, name, camel_name) \
  {type, size, k##camel_name##MapRootIndex},
#undef STRING_TYPE_ELEMENT

const Heap::ConstantStringTable Heap::constant_string_table[] = {
#define CONSTANT_STRING_ELEMENT(name, contents) \
  {contents, k##name##RootIndex},
#undef CONSTANT_STRING_ELEMENT

const Heap::StructTable Heap::struct_table[] = {
#define STRUCT_TABLE_ELEMENT(NAME, Name, name) \
  { NAME##_TYPE, Name::kSize, k##Name##MapRootIndex },
#undef STRUCT_TABLE_ELEMENT
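// Heap bootstrap, part 1: create the meta map, the maps for every fixed root
// type (strings, arrays, structs, typed arrays), and the canonical empty
// arrays they refer to.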
bool Heap::CreateInitialMaps() {
  if (!maybe_obj->ToObject(&obj)) return false;
  Map* new_meta_map = reinterpret_cast<Map*>(obj);
  set_meta_map(new_meta_map);
  new_meta_map->set_map(new_meta_map);

  { MaybeObject* maybe_obj =
    if (!maybe_obj->ToObject(&obj)) return false;
    if (!maybe_obj->ToObject(&obj)) return false;
  { MaybeObject* maybe_obj =
    if (!maybe_obj->ToObject(&obj)) return false;
  set_constant_pool_array_map(Map::cast(obj));

  { MaybeObject* maybe_obj = AllocateEmptyFixedArray();
    if (!maybe_obj->ToObject(&obj)) return false;
    if (!maybe_obj->ToObject(&obj)) return false;
    if (!maybe_obj->ToObject(&obj)) return false;
  { MaybeObject* maybe_obj = AllocateEmptyFixedArray();
    if (!maybe_obj->ToObject(&obj)) return false;
  { MaybeObject* maybe_obj = AllocateEmptyConstantPoolArray();
    if (!maybe_obj->ToObject(&obj)) return false;

  meta_map()->set_code_cache(empty_fixed_array());
  meta_map()->init_back_pointer(undefined_value());
  meta_map()->set_instance_descriptors(empty_descriptor_array());
  fixed_array_map()->set_code_cache(empty_fixed_array());
  fixed_array_map()->set_dependent_code(
  fixed_array_map()->init_back_pointer(undefined_value());
  fixed_array_map()->set_instance_descriptors(empty_descriptor_array());
  oddball_map()->set_code_cache(empty_fixed_array());
  oddball_map()->init_back_pointer(undefined_value());
  oddball_map()->set_instance_descriptors(empty_descriptor_array());
  constant_pool_array_map()->set_code_cache(empty_fixed_array());
  constant_pool_array_map()->set_dependent_code(
  constant_pool_array_map()->init_back_pointer(undefined_value());
  constant_pool_array_map()->set_instance_descriptors(empty_descriptor_array());

  meta_map()->set_prototype(null_value());
  meta_map()->set_constructor(null_value());
  fixed_array_map()->set_prototype(null_value());
  fixed_array_map()->set_constructor(null_value());
  oddball_map()->set_prototype(null_value());
  oddball_map()->set_constructor(null_value());
  constant_pool_array_map()->set_prototype(null_value());
  constant_pool_array_map()->set_constructor(null_value());

#define ALLOCATE_MAP(instance_type, size, field_name)                    \
    if (!AllocateMap((instance_type), size)->To(&map)) return false;     \
    set_##field_name##_map(map);                                         \

#define ALLOCATE_VARSIZE_MAP(instance_type, field_name)                  \
    ALLOCATE_MAP(instance_type, kVariableSizeSentinel, field_name)

  ASSERT(fixed_array_map() != fixed_cow_array_map());

  for (unsigned i = 0; i < ARRAY_SIZE(string_type_table); i++) {
    const StringTypeTable& entry = string_type_table[i];
    { MaybeObject* maybe_obj = AllocateMap(entry.type, entry.size);
      if (!maybe_obj->ToObject(&obj)) return false;

  undetectable_string_map()->set_is_undetectable();
  undetectable_ascii_string_map()->set_is_undetectable();

#define ALLOCATE_EXTERNAL_ARRAY_MAP(Type, type, TYPE, ctype, size)       \
    ALLOCATE_MAP(EXTERNAL_##TYPE##_ARRAY_TYPE, ExternalArray::kAlignedSize, \
        external_##type##_array)
#undef ALLOCATE_EXTERNAL_ARRAY_MAP

#define ALLOCATE_FIXED_TYPED_ARRAY_MAP(Type, type, TYPE, ctype, size)    \
    ALLOCATE_VARSIZE_MAP(FIXED_##TYPE##_ARRAY_TYPE,                      \
        fixed_##type##_array)
#undef ALLOCATE_FIXED_TYPED_ARRAY_MAP

  ALLOCATE_MAP(FILLER_TYPE, 2 * kPointerSize, two_pointer_filler)

  for (unsigned i = 0; i < ARRAY_SIZE(struct_table); i++) {
    const StructTable& entry = struct_table[i];
    if (!AllocateMap(entry.type, entry.size)->To(&map))
    roots_[entry.index] = map;

  native_context_map()->set_dictionary_map(true);
  native_context_map()->set_visitor_id(
      StaticVisitorBase::kVisitNativeContext);
               shared_function_info)
  external_map()->set_is_extensible(false);
#undef ALLOCATE_VARSIZE_MAP

  set_empty_byte_array(byte_array);

#define ALLOCATE_EMPTY_EXTERNAL_ARRAY(Type, type, TYPE, ctype, size)     \
  { ExternalArray* obj;                                                  \
    if (!AllocateEmptyExternalArray(kExternal##Type##Array)->To(&obj))   \
    set_empty_external_##type##_array(obj);                              \
#undef ALLOCATE_EMPTY_EXTERNAL_ARRAY

#define ALLOCATE_EMPTY_FIXED_TYPED_ARRAY(Type, type, TYPE, ctype, size)  \
  { FixedTypedArrayBase* obj;                                            \
    if (!AllocateEmptyFixedTypedArray(kExternal##Type##Array)->To(&obj)) \
    set_empty_fixed_##type##_array(obj);                                 \
#undef ALLOCATE_EMPTY_FIXED_TYPED_ARRAY

  if (!maybe_result->ToObject(&result)) return maybe_result;

MaybeObject* Heap::AllocateCell(Object* value) {
  if (!maybe_result->ToObject(&result)) return maybe_result;

MaybeObject* Heap::AllocatePropertyCell() {
  MaybeObject* maybe_result =
  if (!maybe_result->ToObject(&result)) return maybe_result;
      global_property_cell_map());
  cell->set_value(the_hole_value());

  if (!maybe_result->To(&result)) return maybe_result;
  result->set_value(value);

  MaybeObject* maybe_result = Allocate(allocation_site_map(),
  if (!maybe_result->To(&site)) return maybe_result;

MaybeObject* Heap::CreateOddball(const char* to_string,
  if (!maybe_result->ToObject(&result)) return maybe_result;

  if (!maybe_obj->ToObject(&obj)) return false;
  set_neander_map(new_neander_map);
  if (!maybe_obj->ToObject(&obj)) return false;
    if (!maybe_elements->ToObject(&elements)) return false;

void Heap::CreateJSEntryStub() {
  set_js_entry_code(*stub.GetCode(isolate()));

void Heap::CreateJSConstructEntryStub() {
  JSConstructEntryStub stub;
  set_js_construct_entry_code(*stub.GetCode(isolate()));

void Heap::CreateFixedStubs() {
  CodeStub::GenerateStubsAheadOfTime(isolate());
  Heap::CreateJSEntryStub();
  Heap::CreateJSConstructEntryStub();
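// Heap bootstrap, part 2: allocate the oddballs (undefined, null, true,
// false, the hole, ...), the constant strings, and the various caches and
// scratchpads rooted in roots_.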
bool Heap::CreateInitialObjects() {
  if (!maybe_obj->ToObject(&obj)) return false;
  if (!maybe_obj->ToObject(&obj)) return false;
  if (!maybe_obj->ToObject(&obj)) return false;

  set_the_hole_value(reinterpret_cast<Oddball*>(Smi::FromInt(0)));

  { MaybeObject* maybe_obj =
    if (!maybe_obj->ToObject(&obj)) return false;
  { MaybeObject* maybe_obj =
        undefined_value()->Initialize(this,
    if (!maybe_obj->ToObject(&obj)) return false;
  { MaybeObject* maybe_obj = null_value()->Initialize(
    if (!maybe_obj->ToObject(&obj)) return false;
  { MaybeObject* maybe_obj = CreateOddball("true",
    if (!maybe_obj->ToObject(&obj)) return false;
  { MaybeObject* maybe_obj = CreateOddball("false",
    if (!maybe_obj->ToObject(&obj)) return false;
  { MaybeObject* maybe_obj = CreateOddball("hole",
    if (!maybe_obj->ToObject(&obj)) return false;
  { MaybeObject* maybe_obj = CreateOddball("uninitialized",
    if (!maybe_obj->ToObject(&obj)) return false;
  { MaybeObject* maybe_obj = CreateOddball("arguments_marker",
    if (!maybe_obj->ToObject(&obj)) return false;
  { MaybeObject* maybe_obj = CreateOddball("no_interceptor_result_sentinel",
    if (!maybe_obj->ToObject(&obj)) return false;
  set_no_interceptor_result_sentinel(obj);
  { MaybeObject* maybe_obj = CreateOddball("termination_exception",
    if (!maybe_obj->ToObject(&obj)) return false;
  set_termination_exception(obj);

  for (unsigned i = 0; i < ARRAY_SIZE(constant_string_table); i++) {
    { MaybeObject* maybe_obj =
      if (!maybe_obj->ToObject(&obj)) return false;
    roots_[constant_string_table[i].index] = String::cast(obj);

  if (!maybe_obj->ToObject(&obj)) return false;
  if (!maybe_obj->ToObject(&obj)) return false;
  if (!maybe_obj->ToObject(&obj)) return false;
  if (!maybe_obj->ToObject(&obj)) return false;
  { MaybeObject* maybe_obj =
    if (!maybe_obj->ToObject(&obj)) return false;
  if (!maybe_obj->ToObject(&obj)) return false;
  { MaybeObject* maybe_obj = AllocateInitialNumberStringCache();
    if (!maybe_obj->ToObject(&obj)) return false;
  { MaybeObject* maybe_obj =
    if (!maybe_obj->ToObject(&obj)) return false;
  if (!maybe_obj->ToObject(&obj)) return false;
  if (!maybe_obj->ToObject(&obj)) return false;
  if (!maybe_obj->ToObject(&obj)) return false;
  { MaybeObject* maybe_obj = AllocateCell(undefined_value());
    if (!maybe_obj->ToObject(&obj)) return false;

  set_symbol_registry(undefined_value());

  if (!maybe_obj->ToObject(&obj)) return false;
  if (!maybe_obj->ToObject(&obj)) return false;
  if (!maybe_obj->ToObject(&obj)) return false;
  if (!maybe_obj->ToObject(&obj)) return false;
  if (!maybe_obj->ToObject(&obj)) return false;
  if (!maybe_obj->ToObject(&obj)) return false;
  if (!maybe_obj->ToObject(&obj)) return false;
  if (!maybe_obj->ToObject(&obj)) return false;
  if (!maybe_obj->ToObject(&obj)) return false;
  if (!maybe_obj->ToObject(&obj)) return false;
  if (!maybe_obj->ToObject(&obj)) return false;
  if (!maybe_obj->ToObject(&obj)) return false;
  { MaybeObject* maybe_obj = AllocateAllocationSitesScratchpad();
    if (!maybe_obj->ToObject(&obj)) return false;
  InitializeAllocationSitesScratchpad();

    kStoreBufferTopRootIndex,
    kStackLimitRootIndex,
    kNumberStringCacheRootIndex,
    kInstanceofCacheFunctionRootIndex,
    kInstanceofCacheMapRootIndex,
    kInstanceofCacheAnswerRootIndex,
    kCodeStubsRootIndex,
    kNonMonomorphicCacheRootIndex,
    kPolymorphicCodeCacheRootIndex,
    kLastScriptIdRootIndex,
    kEmptyScriptRootIndex,
    kRealStackLimitRootIndex,
    kArgumentsAdaptorDeoptPCOffsetRootIndex,
    kConstructStubDeoptPCOffsetRootIndex,
    kGetterStubDeoptPCOffsetRootIndex,
    kSetterStubDeoptPCOffsetRootIndex,
  for (unsigned int i = 0; i < ARRAY_SIZE(writable_roots); i++) {
    if (root_index == writable_roots[i])
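// RegExpResultsCache lookup/insert: entries are (string, pattern, results)
// triples hashed by the subject string; a second probe offset by one entry
// gives each key two candidate slots before an existing entry is overwritten.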
  if (!key_string->IsInternalizedString()) return Smi::FromInt(0);
    ASSERT(key_pattern->IsString());
    if (!key_pattern->IsInternalizedString()) return Smi::FromInt(0);
    cache = heap->string_split_cache();
    ASSERT(key_pattern->IsFixedArray());
    cache = heap->regexp_multiple_cache();
  uint32_t hash = key_string->Hash();
      ~(kArrayEntriesPerCacheEntry - 1));
  if (cache->get(index + kStringOffset) == key_string &&
      cache->get(index + kPatternOffset) == key_pattern) {
    return cache->get(index + kArrayOffset);
    if (cache->get(index + kStringOffset) == key_string &&
        cache->get(index + kPatternOffset) == key_pattern) {
      return cache->get(index + kArrayOffset);

  if (!key_string->IsInternalizedString()) return;
    ASSERT(key_pattern->IsString());
    if (!key_pattern->IsInternalizedString()) return;
    cache = heap->string_split_cache();
    ASSERT(key_pattern->IsFixedArray());
    cache = heap->regexp_multiple_cache();
  uint32_t hash = key_string->Hash();
      ~(kArrayEntriesPerCacheEntry - 1));
    cache->set(index + kStringOffset, key_string);
    cache->set(index + kPatternOffset, key_pattern);
    cache->set(index + kArrayOffset, value_array);
      cache->set(index2 + kStringOffset, key_string);
      cache->set(index2 + kPatternOffset, key_pattern);
      cache->set(index2 + kArrayOffset, value_array);
      cache->set(index + kStringOffset, key_string);
      cache->set(index + kPatternOffset, key_pattern);
      cache->set(index + kArrayOffset, value_array);

  for (int i = 0; i < value_array->length(); i++) {
    Object* internalized_str;
    if (maybe_string->ToObject(&internalized_str)) {
      value_array->set(i, internalized_str);

MaybeObject* Heap::AllocateInitialNumberStringCache() {
  MaybeObject* maybe_obj =
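// The full number-string cache is sized relative to the semispace size
// (max_semispace_size_ / 512 entries, clamped to [2 * initial, 0x4000]) and
// doubled because each entry occupies two slots: the number and its string.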
int Heap::FullSizeNumberStringCacheLength() {
  int number_string_cache_size = max_semispace_size_ / 512;
  number_string_cache_size = Max(kInitialNumberStringCacheSize * 2,
                                 Min(0x4000, number_string_cache_size));
  return number_string_cache_size * 2;

void Heap::AllocateFullSizeNumberStringCache() {
  MaybeObject* maybe_obj =
  if (maybe_obj->ToObject(&new_cache)) {

void Heap::FlushNumberStringCache() {
  int len = number_string_cache()->length();
  for (int i = 0; i < len; i++) {
    number_string_cache()->set_undefined(i);

static inline int double_get_hash(double d) {
  DoubleRepresentation rep(d);
  return static_cast<int>(rep.bits) ^ static_cast<int>(rep.bits >> 32);

static inline int smi_get_hash(Smi* smi) {
  return smi->value();
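// Cache probing: Smis hash by value, doubles by XOR of the two 32-bit halves
// of their bit pattern; the hash is masked to half the cache length because
// entries are stored as (number, string) pairs at slots 2*hash and 2*hash+1.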
  int mask = (number_string_cache()->length() >> 1) - 1;
  if (number->IsSmi()) {
    hash = smi_get_hash(Smi::cast(number)) & mask;
    hash = double_get_hash(number->Number()) & mask;
  Object* key = number_string_cache()->get(hash * 2);
  if (key == number) {
    return String::cast(number_string_cache()->get(hash * 2 + 1));
  } else if (key->IsHeapNumber() &&
             number->IsHeapNumber() &&
    return String::cast(number_string_cache()->get(hash * 2 + 1));
  return undefined_value();

  int mask = (number_string_cache()->length() >> 1) - 1;
  if (number->IsSmi()) {
    hash = smi_get_hash(Smi::cast(number)) & mask;
    hash = double_get_hash(number->Number()) & mask;
  if (number_string_cache()->get(hash * 2) != undefined_value() &&
      number_string_cache()->length() != FullSizeNumberStringCacheLength()) {
    AllocateFullSizeNumberStringCache();
  number_string_cache()->set(hash * 2, number);
  number_string_cache()->set(hash * 2 + 1, string);

                                   bool check_number_string_cache) {
  isolate_->counters()->number_to_string_runtime()->Increment();
  if (check_number_string_cache) {
    if (cached != undefined_value()) {
  if (number->IsSmi()) {
  MaybeObject* maybe_js_string =
  if (maybe_js_string->ToObject(&js_string)) {
  return maybe_js_string;

                                   bool check_number_string_cache) {
  if (!maybe->To<Object>(&number)) return maybe;

MaybeObject* Heap::AllocateAllocationSitesScratchpad() {
  MaybeObject* maybe_obj =

void Heap::FlushAllocationSitesScratchpad() {
  for (int i = 0; i < allocation_sites_scratchpad_length_; i++) {
    allocation_sites_scratchpad()->set_undefined(i);
  allocation_sites_scratchpad_length_ = 0;

void Heap::InitializeAllocationSitesScratchpad() {
  ASSERT(allocation_sites_scratchpad()->length() ==
         kAllocationSiteScratchpadSize);
  for (int i = 0; i < kAllocationSiteScratchpadSize; i++) {
    allocation_sites_scratchpad()->set_undefined(i);

void Heap::AddAllocationSiteToScratchpad(AllocationSite* site,
                                         ScratchpadSlotMode mode) {
  if (allocation_sites_scratchpad_length_ < kAllocationSiteScratchpadSize) {
    allocation_sites_scratchpad()->set(
    Object** slot = allocation_sites_scratchpad()->RawFieldOfElementAt(
        allocation_sites_scratchpad_length_);
    allocation_sites_scratchpad_length_++;
  switch (array_type) {
#define ARRAY_TYPE_TO_ROOT_INDEX(Type, type, TYPE, ctype, size) \
    case kExternal##Type##Array:                                \
      return kExternal##Type##ArrayMapRootIndex;
#undef ARRAY_TYPE_TO_ROOT_INDEX
      return kUndefinedValueRootIndex;

  switch (array_type) {
#define ARRAY_TYPE_TO_ROOT_INDEX(Type, type, TYPE, ctype, size) \
    case kExternal##Type##Array:                                \
      return kFixed##Type##ArrayMapRootIndex;
#undef ARRAY_TYPE_TO_ROOT_INDEX
      return kUndefinedValueRootIndex;

  switch (elementsKind) {
#define ELEMENT_KIND_TO_ROOT_INDEX(Type, type, TYPE, ctype, size) \
    case EXTERNAL_##TYPE##_ELEMENTS:                              \
      return kEmptyExternal##Type##ArrayRootIndex;
#undef ELEMENT_KIND_TO_ROOT_INDEX
      return kUndefinedValueRootIndex;

  switch (elementsKind) {
#define ELEMENT_KIND_TO_ROOT_INDEX(Type, type, TYPE, ctype, size) \
    case TYPE##_ELEMENTS:                                         \
      return kEmptyFixed##Type##ArrayRootIndex;
#undef ELEMENT_KIND_TO_ROOT_INDEX
      return kUndefinedValueRootIndex;

  if (IsMinusZero(value)) {
  int int_value = FastD2I(value);

  MaybeObject* maybe_result = Allocate(foreign_map(), space);
  if (!maybe_result->To(&result)) return maybe_result;

  share->set_name(name);
  share->set_code(illegal);
  Code* construct_stub =
  share->set_construct_stub(construct_stub);
  share->set_instance_class_name(Object_string());

  if (!maybe_result->ToObject(&result)) return maybe_result;
  message->set_type(type);
  message->set_arguments(arguments);
  message->set_script(script);
  message->set_stack_frames(stack_frames);

  size_t length = resource->length();
  Map* map = external_ascii_string_map();
  if (!maybe_result->ToObject(&result)) return maybe_result;
  external_string->set_length(static_cast<int>(length));

  size_t length = resource->length();
  static const size_t kOneByteCheckLengthLimit = 32;
  bool is_one_byte = length <= kOneByteCheckLengthLimit &&
  Map* map = is_one_byte ?
      external_string_with_one_byte_data_map() : external_string_map();
  if (!maybe_result->ToObject(&result)) return maybe_result;
  external_string->set_length(static_cast<int>(length));

  Object* value = single_character_string_cache()->get(code);
  if (value != undefined_value()) return value;
  buffer[0] = static_cast<uint8_t>(code);
  if (!maybe_result->ToObject(&result)) return maybe_result;
  single_character_string_cache()->set(code, result);

  if (!maybe_result->ToObject(&result)) return maybe_result;
  reinterpret_cast<ByteArray*>(result)->set_map_no_write_barrier(
  reinterpret_cast<ByteArray*>(result)->set_length(length);

  if (size == 0) return;
  if (size == kPointerSize) {
  } else if (size == 2 * kPointerSize) {

  Address address = object->address();
  return (!is_in_old_pointer_space && !is_in_old_data_space) ||
      Marking::IsBlack(Marking::MarkBitFrom(address))) {

                                         void* external_pointer,
  if (!maybe_result->ToObject(&result)) return maybe_result;
  reinterpret_cast<ExternalArray*>(result)->set_map_no_write_barrier(
  reinterpret_cast<ExternalArray*>(result)->set_length(length);
  reinterpret_cast<ExternalArray*>(result)->set_external_pointer(

  switch (array_type) {
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
    case kExternal##Type##Array:                        \
      *element_size = size;                             \
      *element_kind = TYPE##_ELEMENTS;                  \
#undef TYPED_ARRAY_CASE

  ForFixedTypedArray(array_type, &element_size, &elements_kind);
4095 #ifndef V8_HOST_ARCH_64_BIT
4104 if (!maybe_object->To(&object)) return maybe_object;
4107 object = EnsureDoubleAligned(this, object, size);
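// Editorial note: on 32-bit hosts (no V8_HOST_ARCH_64_BIT) the raw allocation
// is padded by an extra pointer and then passed through EnsureDoubleAligned()
// so the double-typed payload starts on an 8-byte boundary; on 64-bit hosts
// pointer alignment already implies double alignment, so this step compiles
// away.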
4124 int prologue_offset) {
4129 if (!maybe_reloc_info->To(&reloc_info)) return maybe_reloc_info;
4132 if (FLAG_enable_ool_constant_pool) {
4134 if (!maybe_constant_pool->To(&constant_pool)) return maybe_constant_pool;
4136 constant_pool = empty_constant_pool_array();
4143 MaybeObject* maybe_result;
4148 if (force_lo_space) {
4153 if (!maybe_result->To<HeapObject>(&result)) return maybe_result;
4155 if (immovable && !force_lo_space &&
4162 if (!maybe_result->To<HeapObject>(&result)) return maybe_result;
4171 code->set_relocation_info(reloc_info);
4177 code->set_raw_type_feedback_info(undefined_value());
4178 code->set_next_code_link(undefined_value());
4183 if (code->kind() == Code::OPTIMIZED_FUNCTION) {
4187 if (FLAG_enable_ool_constant_pool) {
4192 #ifdef ENABLE_DEBUGGER_SUPPORT
4193 if (code->kind() == Code::FUNCTION) {
4201 if (!self_reference.is_null()) {
4202 *(self_reference.location()) = code;
4212 if (FLAG_verify_heap) {
4221 MaybeObject* maybe_result;
4222 Object* new_constant_pool;
4223 if (FLAG_enable_ool_constant_pool &&
4228 if (!maybe_result->ToObject(&new_constant_pool)) return maybe_result;
4230 new_constant_pool = empty_constant_pool_array();
4234 int obj_size = code->Size();
4242 if (!maybe_result->ToObject(&result)) return maybe_result;
4247 CopyBlock(new_addr, old_addr, obj_size);
4256 new_code->Relocate(new_addr - old_addr);
4264 Object* reloc_info_array;
4265 { MaybeObject* maybe_reloc_info_array =
4267 if (!maybe_reloc_info_array->ToObject(&reloc_info_array)) {
4268 return maybe_reloc_info_array;
4271 Object* new_constant_pool;
4272 if (FLAG_enable_ool_constant_pool &&
4276 MaybeObject* maybe_constant_pool =
4278 if (!maybe_constant_pool->ToObject(&new_constant_pool))
4279 return maybe_constant_pool;
4281 new_constant_pool = empty_constant_pool_array();
4290 size_t relocation_offset =
4293 MaybeObject* maybe_result;
4301 if (!maybe_result->ToObject(&result)) return maybe_result;
4307 CopyBytes(new_addr, old_addr, relocation_offset);
4318 static_cast<size_t>(reloc_info.length()));
4323 new_code->Relocate(new_addr - old_addr);
4326 if (FLAG_verify_heap) {
4337 ASSERT(allocation_site->map() == allocation_site_map());
4339 if (FLAG_allocation_site_pretenuring) {
4354 if (allocation_site != NULL) {
4358 MaybeObject* maybe_result = AllocateRaw(size, space, retry_space);
4359 if (!maybe_result->ToObject(&result)) return maybe_result;
4362 if (allocation_site != NULL) {
4365 InitializeAllocationMemento(alloc_memento, allocation_site);
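// Editorial note: when an AllocationSite is supplied, the requested size
// includes room for an AllocationMemento placed directly behind the new
// object and pointing back at the site; the pretenuring machinery later reads
// these mementos to decide whether future allocations from that site should
// go straight to old space. Rough layout (a sketch, not to scale):
//
//   | object fields ...  | AllocationMemento { map, allocation_site } |
//   ^ object start       ^ object start + object_size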
4371 void Heap::InitializeFunction(JSFunction* function,
4374 ASSERT(!prototype->IsMap());
4375 function->initialize_properties();
4376 function->initialize_elements();
4377 function->set_shared(shared);
4378 function->set_code(shared->code());
4379 function->set_prototype_or_initial_map(prototype);
4380 function->set_context(undefined_value());
4381 function->set_literals_or_bindings(empty_fixed_array());
4382 function->set_next_function_link(undefined_value());
4393 { MaybeObject* maybe_result = Allocate(function_map, space);
4394 if (!maybe_result->ToObject(&result)) return maybe_result;
4406 int arguments_object_size;
4407 bool strict_mode_callee = callee->IsJSFunction() &&
4409 if (strict_mode_callee) {
4426 { MaybeObject* maybe_result =
4428 if (!maybe_result->ToObject(&result)) return maybe_result;
4443 if (!strict_mode_callee) {
4456 void Heap::InitializeJSObjectFromMap(JSObject* obj,
4459 obj->set_properties(properties);
4475 if (map->constructor()->IsJSFunction() &&
4477 IsInobjectSlackTrackingInProgress()) {
4480 filler = Heap::one_pointer_filler_map();
4482 filler = Heap::undefined_value();
4491 bool allocate_properties,
4504 if (allocate_properties) {
4508 if (!maybe_properties->To(&properties)) return maybe_properties;
4511 properties = empty_fixed_array();
4518 MaybeObject* maybe_obj = Allocate(map, space, allocation_site);
4519 if (!maybe_obj->To(&obj)) return maybe_obj;
4543 ASSERT(!result->ToObject(&non_failure) || !non_failure->IsGlobalObject());
4553 if (!maybe_map->To(&map)) return maybe_map;
4557 if (!maybe_module->To(&module)) return maybe_module;
4558 module->set_context(context);
4559 module->set_scope_info(scope_info);
4570 MaybeObject* maybe_array = AllocateJSArray(elements_kind, pretenure);
4572 if (!maybe_array->To(&array)) return maybe_array;
4576 ASSERT(capacity >= length);
4578 if (capacity == 0) {
4580 array->set_elements(empty_fixed_array());
4585 MaybeObject* maybe_elms = NULL;
4602 if (!maybe_elms->To(&elms)) return maybe_elms;
4604 array->set_elements(elms);
4615 ASSERT(capacity >= length);
4617 if (capacity == 0) {
4619 array->set_elements(empty_fixed_array());
4624 MaybeObject* maybe_elms = NULL;
4642 if (!maybe_elms->To(&elms)) return maybe_elms;
4644 array->set_elements(elms);
4655 MaybeObject* maybe_array = AllocateJSArray(elements_kind, pretenure);
4657 if (!maybe_array->To(&array)) return maybe_array;
4659 array->set_elements(elements);
4672 if (!maybe_map_obj->To<Map>(&map)) return maybe_map_obj;
4673 map->set_prototype(prototype);
4678 if (!maybe_result->To<JSProxy>(&result)) return maybe_result;
4680 result->set_handler(handler);
4694 MaybeObject* maybe_map_obj =
4696 if (!maybe_map_obj->To<Map>(&map)) return maybe_map_obj;
4697 map->set_prototype(prototype);
4704 result->set_handler(handler);
4706 result->set_call_trap(call_trap);
4707 result->set_construct_trap(construct_trap);
4718 Map* map = source->map();
4729 { MaybeObject* maybe_clone =
4731 if (!maybe_clone->ToObject(&clone)) return maybe_clone;
4738 RecordWrites(clone_address,
4744 { int adjusted_object_size = site != NULL
4747 MaybeObject* maybe_clone =
4749 if (!maybe_clone->ToObject(&clone)) return maybe_clone;
4760 reinterpret_cast<Address>(clone) + object_size);
4761 InitializeAllocationMemento(alloc_memento, site);
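// Editorial note: Heap::CopyJSObject produces a shallow clone. The object body
// is block-copied (with write-barrier bookkeeping in the old-space branch),
// the elements and properties backing stores are then duplicated below, and
// copy-on-write element arrays are reused rather than copied (see the
// fixed_cow_array_map() check that follows). When a site is given, the clone
// is allocated with extra room for the AllocationMemento handled here.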
4770 if (elements->length() > 0) {
4772 { MaybeObject* maybe_elem;
4773 if (elements->map() == fixed_cow_array_map()) {
4780 if (!maybe_elem->ToObject(&elem)) return maybe_elem;
4785 if (properties->length() > 0) {
4788 if (!maybe_prop->ToObject(&prop)) return maybe_prop;
4805 if (!maybe->To<Map>(&map)) return maybe;
4808 int size_difference = object->map()->instance_size() - map->instance_size();
4809 ASSERT(size_difference >= 0);
4811 map->set_prototype(object->map()->prototype());
4817 if (!maybe->ToObject(&properties)) return maybe;
4826 if (!maybe->To<String>(&name)) return maybe;
4835 object->set_map(map);
4846 isolate()->context()->native_context());
4850 if (size_difference > 0) {
4866 ASSERT(map->instance_size() == object->map()->instance_size());
4867 ASSERT(map->instance_type() == object->map()->instance_type());
4870 int prop_size = map->unused_property_fields() - map->inobject_properties();
4873 if (!maybe_properties->ToObject(&properties)) return maybe_properties;
4887 int length = string.length();
4892 { MaybeObject* maybe_result =
4894 if (!maybe_result->ToObject(&result)) return maybe_result;
4906 int non_ascii_start,
4912 decoder->Reset(string.start() + non_ascii_start,
4913 string.length() - non_ascii_start);
4914 int utf16_length = decoder->Utf16Length();
4915 ASSERT(utf16_length > 0);
4919 int chars = non_ascii_start + utf16_length;
4921 if (!maybe_result->ToObject(&result)) return maybe_result;
4927 if (non_ascii_start != 0) {
4928 const char* ascii_data = string.start();
4929 for (int i = 0; i < non_ascii_start; i++) {
4930 *data++ = *ascii_data++;
4934 decoder->WriteUtf16(data, utf16_length);
4943 int length = string.length();
4944 const uc16* start = string.start();
4948 if (!maybe_result->ToObject(&result)) return maybe_result;
4952 if (!maybe_result->ToObject(&result)) return maybe_result;
4965 case STRING_TYPE: return internalized_string_map();
4966 case ASCII_STRING_TYPE: return ascii_internalized_string_map();
4971 return external_ascii_internalized_string_map();
4973 return external_internalized_string_with_one_byte_data_map();
4975 return short_external_internalized_string_map();
4977 return short_external_ascii_internalized_string_map();
4979 return short_external_internalized_string_with_one_byte_data_map();
4980 default: return NULL;
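// Editorial note: for UTF-8 input the ASCII prefix is copied byte for byte and
// the remainder is run through the UTF-8 decoder twice (once for
// Utf16Length(), once for WriteUtf16()), while the switch above selects the
// internalized-string map matching an existing string's instance type and
// returns NULL for types that cannot be internalized in place.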
4993 static inline void WriteTwoByteData(Vector<const char> vector,
4996 const uint8_t* stream = reinterpret_cast<const uint8_t*>(vector.start());
4997 unsigned stream_length = vector.length();
4998 while (stream_length != 0) {
4999 unsigned consumed = 0;
5002 ASSERT(consumed <= stream_length);
5003 stream_length -= consumed;
5016 ASSERT(stream_length == 0);
5021 static inline void WriteOneByteData(String* s, uint8_t* chars, int len) {
5022 ASSERT(s->length() == len);
5027 static inline void WriteTwoByteData(String* s, uint16_t* chars, int len) {
5028 ASSERT(s->length() == len);
5033 template<bool is_one_byte, typename T>
5035 T t, int chars, uint32_t hash_field) {
5045 map = ascii_internalized_string_map();
5048 map = internalized_string_map();
5056 if (!maybe_result->ToObject(&result)) return maybe_result;
5059 reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier(map);
5078 MaybeObject* Heap::AllocateInternalizedStringImpl<true>(String*, int, uint32_t);
5080 MaybeObject* Heap::AllocateInternalizedStringImpl<false>(
5083 MaybeObject* Heap::AllocateInternalizedStringImpl<false>(
5098 if (!maybe_result->ToObject(&result)) return maybe_result;
5122 if (!maybe_result->ToObject(&result)) return maybe_result;
5134 MaybeObject* Heap::AllocateJSArray(
5138 JSFunction* array_function = native_context->array_function();
5141 if (transition_map != NULL) map = transition_map;
5146 MaybeObject* Heap::AllocateEmptyFixedArray() {
5149 { MaybeObject* maybe_result =
5151 if (!maybe_result->ToObject(&result)) return maybe_result;
5154 reinterpret_cast<FixedArray*>(result)->set_map_no_write_barrier(
5156 reinterpret_cast<FixedArray*>(result)->set_length(0);
5173 { MaybeObject* maybe_obj = AllocateRawFixedArray(len, TENURED);
5174 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
5183 for (int i = 0; i < len; i++) result->set(i, src->get(i), mode);
5201 { MaybeObject* maybe_obj = AllocateRawFixedArray(len, NOT_TENURED);
5202 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
5219 for (int i = 0; i < len; i++) result->set(i, src->get(i), mode);
5228 { MaybeObject* maybe_obj = AllocateRawFixedDoubleArray(len, NOT_TENURED);
5229 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
5248 { MaybeObject* maybe_obj =
5250 heap_ptr_entries, int32_entries);
5251 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
5256 int64_entries, code_ptr_entries, heap_ptr_entries, int32_entries);
5265 MaybeObject* Heap::AllocateRawFixedArray(int length, PretenureFlag pretenure) {
5276 MaybeObject* Heap::AllocateFixedArrayWithFiller(int length,
5280 ASSERT(empty_fixed_array()->IsFixedArray());
5281 if (length == 0) return empty_fixed_array();
5285 { MaybeObject* maybe_result = AllocateRawFixedArray(length, pretenure);
5286 if (!maybe_result->ToObject(&result)) return maybe_result;
5291 array->set_length(length);
5298 return AllocateFixedArrayWithFiller(length, pretenure, undefined_value());
5304 return AllocateFixedArrayWithFiller(length, pretenure, the_hole_value());
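// Editorial note: AllocateFixedArrayWithFiller() is the shared helper behind
// both AllocateFixedArray() (undefined filler) and the with-holes variant
// (the-hole filler): it allocates the raw backing store, installs map and
// length, and initializes every slot with the requested filler so the array
// is in a GC-safe state before it becomes reachable.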
5309 if (length == 0) return empty_fixed_array();
5312 { MaybeObject* maybe_obj = AllocateRawFixedArray(length, NOT_TENURED);
5313 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
5316 reinterpret_cast<FixedArray*>(obj)->set_map_no_write_barrier(
5323 MaybeObject* Heap::AllocateEmptyFixedDoubleArray() {
5326 { MaybeObject* maybe_result =
5328 if (!maybe_result->ToObject(&result)) return maybe_result;
5331 reinterpret_cast<FixedDoubleArray*>(result)->set_map_no_write_barrier(
5332 fixed_double_array_map());
5333 reinterpret_cast<FixedDoubleArray*>(result)->set_length(0);
5341 if (length == 0) return empty_fixed_array();
5344 MaybeObject* maybe_obj = AllocateRawFixedDoubleArray(length, pretenure);
5345 if (!maybe_obj->ToObject(&elements_object)) return maybe_obj;
5358 if (length == 0) return empty_fixed_array();
5361 MaybeObject* maybe_obj = AllocateRawFixedDoubleArray(length, pretenure);
5362 if (!maybe_obj->ToObject(&elements_object)) return maybe_obj;
5366 for (int i = 0; i < length; ++i) {
5376 MaybeObject* Heap::AllocateRawFixedDoubleArray(int length,
5382 #ifndef V8_HOST_ARCH_64_BIT
5389 if (!maybe_object->To<HeapObject>(&object)) return maybe_object;
5392 return EnsureDoubleAligned(this, object, size);
5397 int number_of_code_ptr_entries,
5398 int number_of_heap_ptr_entries,
5399 int number_of_int32_entries) {
5400 ASSERT(number_of_int64_entries > 0 || number_of_code_ptr_entries > 0 ||
5401 number_of_heap_ptr_entries > 0 || number_of_int32_entries > 0);
5403 number_of_code_ptr_entries,
5404 number_of_heap_ptr_entries,
5405 number_of_int32_entries);
5406 #ifndef V8_HOST_ARCH_64_BIT
5413 if (!maybe_object->To<HeapObject>(&object)) return maybe_object;
5415 object = EnsureDoubleAligned(this, object, size);
5421 number_of_code_ptr_entries,
5422 number_of_heap_ptr_entries,
5423 number_of_int32_entries);
5424 if (number_of_code_ptr_entries > 0) {
5429 isolate()->builtins()->builtin(Builtins::kIllegal)->entry(),
5430 number_of_code_ptr_entries);
5432 if (number_of_heap_ptr_entries > 0) {
5438 number_of_heap_ptr_entries);
5440 return constant_pool;
5444 MaybeObject* Heap::AllocateEmptyConstantPoolArray() {
5447 { MaybeObject* maybe_result =
5449 if (!maybe_result->ToObject(&result)) return maybe_result;
5460 if (!maybe_result->ToObject(&result)) return maybe_result;
5462 reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier(
5464 ASSERT(result->IsHashTable());
5474 MaybeObject* maybe =
5476 if (!maybe->ToObject(&result)) return maybe;
5486 } while (hash == 0 && attempts < 30);
5487 if (hash == 0) hash = 1;
5502 if (!maybe->To(&symbol)) return maybe;
5503 symbol->set_is_private(true);
5510 { MaybeObject* maybe_result =
5512 if (!maybe_result->ToObject(&result)) return maybe_result;
5516 context->set_js_array_maps(undefined_value());
5518 ASSERT(result->IsContext());
5526 { MaybeObject* maybe_result =
5528 if (!maybe_result->ToObject(&result)) return maybe_result;
5537 ASSERT(result->IsContext());
5544 { MaybeObject* maybe_result =
5546 if (!maybe_result->ToObject(&result)) return maybe_result;
5560 if (!maybe_result->ToObject(&result)) return maybe_result;
5578 { MaybeObject* maybe_result =
5580 if (!maybe_result->ToObject(&result)) return maybe_result;
5598 if (!maybe_result->ToObject(&result)) return maybe_result;
5614 { MaybeObject* maybe_result =
5616 if (!maybe_result->ToObject(&result)) return maybe_result;
5631 if (!maybe_scope_info->To(&scope_info)) return maybe_scope_info;
5639 { MaybeObject* maybe_result = AllocateForeign(static_cast<Address>(value));
5640 if (!maybe_result->To(&foreign)) return maybe_result;
5644 if (!maybe_result->To(&external)) return maybe_result;
5654 #define MAKE_CASE(NAME, Name, name) \
5655 case NAME##_TYPE: map = name##_map(); break;
5665 { MaybeObject* maybe_result = Allocate(map, space);
5666 if (!maybe_result->ToObject(&result)) return maybe_result;
5680 ASSERT(AllowHeapAllocation::IsAllowed());
5688 void Heap::AdvanceIdleIncrementalMarking(intptr_t step_size) {
5693 bool uncommit = false;
5694 if (gc_count_at_last_idle_gc_ == gc_count_) {
5700 mark_sweeps_since_idle_round_started_++;
5701 gc_count_at_last_idle_gc_ = gc_count_;
5704 UncommitFromSpace();
5713 const int kMaxHint = 1000;
5714 const int kMinHintForIncrementalMarking = 10;
5716 const int kMinHintForFullGC = 100;
5717 intptr_t size_factor = Min(Max(hint, 20), kMaxHint) / 4;
5721 intptr_t step_size =
5724 if (contexts_disposed_ > 0) {
5725 contexts_disposed_ = 0;
5726 int mark_sweep_time = Min(TimeMarkSweepWouldTakeInMs(), 1000);
5727 if (hint >= mark_sweep_time && !FLAG_expose_gc &&
5729 HistogramTimerScope scope(isolate_->counters()->gc_context());
5731 "idle notification: contexts disposed");
5733 AdvanceIdleIncrementalMarking(step_size);
5744 return IdleGlobalGC();
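// Editorial note: the idle-notification heuristic of this vintage works off an
// embedder-supplied "hint" roughly proportional to available idle time. Small
// hints only step the incremental marker, large hints may finalize an idle
// round with a full collection, and a pending contexts_disposed_ count
// short-circuits into a dedicated collection above. A hedged usage sketch from
// the embedder side (illustrative only, API of this era assumed):
//
//   // inside an embedder's message-loop idle handler
//   while (!v8::V8::IdleNotification(100 /* hint */)) {
//     // keep granting idle time until V8 reports it is done
//   }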
5764 if (mark_sweeps_since_idle_round_started_ >= kMaxMarkSweepsInIdleRound) {
5765 if (EnoughGarbageSinceLastIdleRound()) {
5772 int remaining_mark_sweeps = kMaxMarkSweepsInIdleRound -
5773 mark_sweeps_since_idle_round_started_;
5781 if (remaining_mark_sweeps <= 2 && hint >= kMinHintForFullGC) {
5783 "idle notification: finalize idle round");
5784 mark_sweeps_since_idle_round_started_++;
5785 } else if (hint > kMinHintForIncrementalMarking) {
5790 hint > kMinHintForIncrementalMarking) {
5791 AdvanceIdleIncrementalMarking(step_size);
5794 if (mark_sweeps_since_idle_round_started_ >= kMaxMarkSweepsInIdleRound) {
5803 bool Heap::IdleGlobalGC() {
5804 static const int kIdlesBeforeScavenge = 4;
5805 static const int kIdlesBeforeMarkSweep = 7;
5806 static const int kIdlesBeforeMarkCompact = 8;
5807 static const int kMaxIdleCount = kIdlesBeforeMarkCompact + 1;
5808 static const unsigned int kGCsBetweenCleanup = 4;
5810 if (!last_idle_notification_gc_count_init_) {
5811 last_idle_notification_gc_count_ = gc_count_;
5812 last_idle_notification_gc_count_init_ = true;
5815 bool uncommit = true;
5816 bool finished = false;
5822 if (gc_count_ - last_idle_notification_gc_count_ < kGCsBetweenCleanup) {
5823 number_idle_notifications_ =
5824 Min(number_idle_notifications_ + 1, kMaxIdleCount);
5826 number_idle_notifications_ = 0;
5827 last_idle_notification_gc_count_ = gc_count_;
5830 if (number_idle_notifications_ == kIdlesBeforeScavenge) {
5833 last_idle_notification_gc_count_ = gc_count_;
5834 } else if (number_idle_notifications_ == kIdlesBeforeMarkSweep) {
5842 last_idle_notification_gc_count_ = gc_count_;
5844 } else if (number_idle_notifications_ == kIdlesBeforeMarkCompact) {
5847 last_idle_notification_gc_count_ = gc_count_;
5848 number_idle_notifications_ = 0;
5850 } else if (number_idle_notifications_ > kIdlesBeforeMarkCompact) {
5857 if (uncommit) UncommitFromSpace();
5868 AllSpaces spaces(this);
5869 for (Space* space = spaces.next(); space != NULL; space = spaces.next()) {
5875 void Heap::ReportCodeStatistics(const char* title) {
5876 PrintF(">>>>>> Code Stats (%s) >>>>>>\n", title);
5877 PagedSpace::ResetCodeStatistics(isolate());
5880 code_space_->CollectCodeStatistics();
5881 lo_space_->CollectCodeStatistics();
5882 PagedSpace::ReportCodeStatistics(isolate());
5889 void Heap::ReportHeapStatistics(const char* title) {
5891 PrintF(">>>>>> =============== %s (%d) =============== >>>>>>\n",
5894 old_generation_allocation_limit_);
5901 PrintF("Heap statistics : ");
5905 PrintF("Old pointer space : ");
5906 old_pointer_space_->ReportStatistics();
5907 PrintF("Old data space : ");
5908 old_data_space_->ReportStatistics();
5910 code_space_->ReportStatistics();
5912 map_space_->ReportStatistics();
5914 cell_space_->ReportStatistics();
5915 PrintF("PropertyCell space : ");
5916 property_cell_space_->ReportStatistics();
5917 PrintF("Large object space : ");
5918 lo_space_->ReportStatistics();
5919 PrintF(">>>>>> ========================================= >>>>>>\n");
5933 old_pointer_space_->Contains(addr) ||
5938 property_cell_space_->Contains(addr) ||
5956 return old_pointer_space_->Contains(addr);
5958 return old_data_space_->Contains(addr);
5960 return code_space_->Contains(addr);
5964 return cell_space_->Contains(addr);
5966 return property_cell_space_->Contains(addr);
5976 void Heap::Verify() {
5987 new_space_.Verify();
5989 old_pointer_space_->Verify(&visitor);
5990 map_space_->Verify(&visitor);
5993 old_data_space_->Verify(&no_dirty_regions_visitor);
5994 code_space_->Verify(&no_dirty_regions_visitor);
5995 cell_space_->Verify(&no_dirty_regions_visitor);
5996 property_cell_space_->Verify(&no_dirty_regions_visitor);
5998 lo_space_->Verify();
6010 if (string->IsInternalizedString()) return string;
6013 { MaybeObject* maybe_new_table =
6014 string_table()->LookupString(string, &result);
6015 if (!maybe_new_table->ToObject(&new_table)) return maybe_new_table;
6026 if (string->IsInternalizedString()) {
6030 return string_table()->LookupStringIfExists(string, result);
6037 { MaybeObject* maybe_new_table =
6038 string_table()->LookupKey(key, &result);
6039 if (!maybe_new_table->ToObject(&new_table)) return maybe_new_table;
6049 void Heap::ZapFromSpace() {
6052 while (it.has_next()) {
6074 bool record_slots = false;
6077 record_slots = Marking::IsBlack(mark_bit);
6080 while (slot_address < end) {
6081 Object** slot = reinterpret_cast<Object**>(slot_address);
6087 if (object->IsHeapObject()) {
6089 callback(reinterpret_cast<HeapObject**>(slot),
6091 Object* new_object = *slot;
6096 reinterpret_cast<Address>(slot));
6098 SLOW_ASSERT(!MarkCompactCollector::IsOnEvacuationCandidate(new_object));
6099 } else if (record_slots &&
6100 MarkCompactCollector::IsOnEvacuationCandidate(object)) {
6110 typedef bool (*CheckStoreBufferFilter)(Object** addr);
6113 bool IsAMapPointerAddress(Object** addr) {
6114 uintptr_t a = reinterpret_cast<uintptr_t>(addr);
6121 bool EverythingsAPointer(Object** addr) {
6126 static void CheckStoreBuffer(Heap* heap,
6129 Object**** store_buffer_position,
6130 Object*** store_buffer_top,
6131 CheckStoreBufferFilter filter,
6132 Address special_garbage_start,
6133 Address special_garbage_end) {
6134 Map* free_space_map = heap->free_space_map();
6135 for ( ; current < limit; current++) {
6139 if (o == free_space_map) {
6141 FreeSpace* free_space =
6143 int skip = free_space->Size();
6144 ASSERT(current_address + skip <= reinterpret_cast<Address>(limit));
6147 current = reinterpret_cast<Object**>(current_address);
6152 if (current_address == special_garbage_start &&
6153 special_garbage_end != special_garbage_start) {
6155 current = reinterpret_cast<Object**>(current_address);
6158 if (!(*filter)(current)) continue;
6159 ASSERT(current_address < special_garbage_start ||
6160 current_address >= special_garbage_end);
6166 if (!heap->InNewSpace(o)) continue;
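// Editorial note: CheckStoreBuffer() is a debug-only consistency walk. It
// scans a region of old space word by word, skips free space and the current
// "special garbage" window, and for every slot that points into new space it
// requires that slot's address to be present in the sorted store buffer; if
// the entry is missing, the code below backs up to the containing object's
// map word so the offending object can be reported.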
6167 while (**store_buffer_position < current &&
6168 *store_buffer_position < store_buffer_top) {
6169 (*store_buffer_position)++;
6171 if (**store_buffer_position != current ||
6172 *store_buffer_position == store_buffer_top) {
6173 Object** obj_start = current;
6174 while (!(*obj_start)->IsMap()) obj_start--;
6184 void Heap::OldPointerSpaceCheckStoreBuffer() {
6186 PageIterator pages(space);
6190 while (pages.has_next()) {
6191 Page* page = pages.next();
6192 Object** current = reinterpret_cast<Object**>(page->area_start());
6194 Address end = page->area_end();
6200 CheckStoreBuffer(this,
6203 &store_buffer_position,
6205 &EverythingsAPointer,
6212 void Heap::MapSpaceCheckStoreBuffer() {
6214 PageIterator pages(space);
6218 while (pages.has_next()) {
6219 Page* page = pages.next();
6220 Object** current = reinterpret_cast<Object**>(page->area_start());
6222 Address end = page->area_end();
6228 CheckStoreBuffer(this,
6231 &store_buffer_position,
6233 &IsAMapPointerAddress,
6240 void Heap::LargeObjectSpaceCheckStoreBuffer() {
6241 LargeObjectIterator it(lo_space());
6242 for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) {
6246 if (object->IsFixedArray()) {
6249 Object** current = reinterpret_cast<Object**>(object->address());
6251 reinterpret_cast<Object**>(object->address() + object->Size());
6252 CheckStoreBuffer(this,
6255 &store_buffer_position,
6257 &EverythingsAPointer,
6274 v->Synchronize(VisitorSynchronization::kStringTable);
6278 external_string_table_.Iterate(v);
6280 v->Synchronize(VisitorSynchronization::kExternalStringsTable);
6286 ExecutionAccess access(isolate());
6288 v->Synchronize(VisitorSynchronization::kSmiRootList);
6294 v->Synchronize(VisitorSynchronization::kStrongRootList);
6296 v->VisitPointer(BitCast<Object**>(&hidden_string_));
6297 v->Synchronize(VisitorSynchronization::kInternalizedString);
6300 v->Synchronize(VisitorSynchronization::kBootstrapper);
6302 v->Synchronize(VisitorSynchronization::kTop);
6303 Relocatable::Iterate(isolate_, v);
6304 v->Synchronize(VisitorSynchronization::kRelocatable);
6306 #ifdef ENABLE_DEBUGGER_SUPPORT
6307 isolate_->debug()->Iterate(v);
6312 v->Synchronize(VisitorSynchronization::kDebug);
6314 v->Synchronize(VisitorSynchronization::kCompilationCache);
6319 v->Synchronize(VisitorSynchronization::kHandleScope);
6327 v->Synchronize(VisitorSynchronization::kBuiltins);
6342 v->Synchronize(VisitorSynchronization::kGlobalHandles);
6350 v->Synchronize(VisitorSynchronization::kEternalHandles);
6354 v->Synchronize(VisitorSynchronization::kThreadManager);
6376 intptr_t max_old_gen_size,
6377 intptr_t max_executable_size) {
6380 if (FLAG_stress_compaction) {
6385 if (max_semispace_size > 0) {
6388 if (FLAG_trace_gc) {
6389 PrintPID("Max semispace size cannot be less than %dkbytes\n",
6393 max_semispace_size_ = max_semispace_size;
6402 if (max_semispace_size_ > reserved_semispace_size_) {
6403 max_semispace_size_ = reserved_semispace_size_;
6404 if (FLAG_trace_gc) {
6405 PrintPID("Max semispace size cannot be more than %dkbytes\n",
6406 reserved_semispace_size_ >> 10);
6412 reserved_semispace_size_ = max_semispace_size_;
6415 if (max_old_gen_size > 0) max_old_generation_size_ = max_old_gen_size;
6416 if (max_executable_size > 0) {
6422 if (max_executable_size_ > max_old_generation_size_) {
6423 max_executable_size_ = max_old_generation_size_;
6430 initial_semispace_size_ = Min(initial_semispace_size_, max_semispace_size_);
6435 external_allocation_limit_ = 12 * max_semispace_size_;
6436 ASSERT(external_allocation_limit_ <= 256 * MB);
6440 max_old_generation_size_ = Max(static_cast<intptr_t>(paged_space_count *
6442 RoundUp(max_old_generation_size_,
6457 return ConfigureHeap(static_cast<intptr_t>(FLAG_max_new_space_size / 2) * KB,
6458 static_cast<intptr_t>(FLAG_max_old_space_size) * MB,
6459 static_cast<intptr_t>(FLAG_max_executable_size) * MB);
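// Editorial note: ConfigureHeapDefault() forwards the command-line limits,
// converting units on the way in: the new-space flag value is halved to get a
// single semispace size and scaled by KB, while the old-space and executable
// limits are scaled by MB, matching the expressions above.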
6488 if (take_snapshot) {
6489 HeapIterator iterator(this);
6492 obj = iterator.next()) {
6517 return sweeping_complete;
6521 int64_t Heap::PromotedExternalMemorySize() {
6522 if (amount_of_external_allocated_memory_
6523 <= amount_of_external_allocated_memory_at_last_global_gc_) return 0;
6524 return amount_of_external_allocated_memory_
6525 - amount_of_external_allocated_memory_at_last_global_gc_;
6530 if (!inline_allocation_disabled_) return;
6531 inline_allocation_disabled_ = false;
6539 if (inline_allocation_disabled_) return;
6540 inline_allocation_disabled_ = true;
6546 PagedSpaces spaces(this);
6549 space = spaces.next()) {
6550 space->EmptyAllocationInfo();
6557 static void InitializeGCOnce() {
6558 InitializeScavengingVisitorsTables();
6566 allocation_timeout_ = FLAG_gc_interval;
6581 CallOnce(&initialize_gc_once, &InitializeGCOnce);
6583 MarkMapPointersAsEncoded(false);
6590 if (!new_space_.SetUp(reserved_semispace_size_, max_semispace_size_)) {
6595 old_pointer_space_ =
6597 max_old_generation_size_,
6600 if (old_pointer_space_ == NULL) return false;
6601 if (!old_pointer_space_->SetUp()) return false;
6606 max_old_generation_size_,
6609 if (old_data_space_ == NULL) return false;
6610 if (!old_data_space_->SetUp()) return false;
6616 if (code_range_size_ > 0) {
6624 if (code_space_ == NULL) return false;
6625 if (!code_space_->SetUp()) return false;
6629 if (map_space_ == NULL) return false;
6630 if (!map_space_->SetUp()) return false;
6634 if (cell_space_ == NULL) return false;
6635 if (!cell_space_->SetUp()) return false;
6640 if (property_cell_space_ == NULL) return false;
6641 if (!property_cell_space_->SetUp()) return false;
6647 if (lo_space_ == NULL) return false;
6648 if (!lo_space_->SetUp()) return false;
6651 ASSERT(hash_seed() == 0);
6652 if (FLAG_randomize_hashes) {
6653 if (FLAG_hash_seed == 0) {
6661 LOG(isolate_, IntPtrTEvent("heap-capacity", Capacity()));
6662 LOG(isolate_, IntPtrTEvent("heap-available", Available()));
6674 if (!CreateInitialMaps()) return false;
6678 if (!CreateInitialObjects()) return false;
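// Editorial note: Heap::SetUp() order matters. The scavenging visitor tables
// are initialized once per process, then new space and each paged space (old
// pointer, old data, code, map, cell, property cell) plus the large object
// space are created and committed in turn, and only once every space exists
// are the initial maps and objects created. Any NULL allocation or failed
// SetUp() aborts the whole sequence with false so the embedder can report an
// out-of-memory condition.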
6680 native_contexts_list_ = undefined_value();
6681 array_buffers_list_ = undefined_value();
6682 allocation_sites_list_ = undefined_value();
6683 weak_object_to_code_table_ = undefined_value();
6696 roots_[kStackLimitRootIndex] =
6697 reinterpret_cast<Object*>(
6699 roots_[kRealStackLimitRootIndex] =
6700 reinterpret_cast<Object*>(
6707 if (FLAG_verify_heap) {
6714 if (FLAG_print_cumulative_gc_stat) {
6716 PrintF("gc_count=%d ", gc_count_);
6717 PrintF("mark_sweep_count=%d ", ms_count_);
6719 PrintF("total_gc_time=%.1f ", total_gc_time_ms_);
6728 if (FLAG_print_max_heap_committed) {
6753 TearDownArrayBuffers();
6763 if (old_pointer_space_ != NULL) {
6765 delete old_pointer_space_;
6766 old_pointer_space_ = NULL;
6769 if (old_data_space_ != NULL) {
6771 delete old_data_space_;
6772 old_data_space_ = NULL;
6775 if (code_space_ != NULL) {
6781 if (map_space_ != NULL) {
6787 if (cell_space_ != NULL) {
6793 if (property_cell_space_ != NULL) {
6795 delete property_cell_space_;
6796 property_cell_space_ = NULL;
6799 if (lo_space_ != NULL) {
6814 bool pass_isolate) {
6816 GCPrologueCallbackPair pair(callback, gc_type, pass_isolate);
6818 return gc_prologue_callbacks_.Add(pair);
6824 for (int i = 0; i < gc_prologue_callbacks_.length(); ++i) {
6825 if (gc_prologue_callbacks_[i].callback == callback) {
6826 gc_prologue_callbacks_.Remove(i);
6836 bool pass_isolate) {
6838 GCEpilogueCallbackPair pair(callback, gc_type, pass_isolate);
6840 return gc_epilogue_callbacks_.Add(pair);
6846 for (int i = 0; i < gc_epilogue_callbacks_.length(); ++i) {
6847 if (gc_epilogue_callbacks_[i].callback == callback) {
6848 gc_epilogue_callbacks_.Remove(i);
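// Editorial note: GC prologue/epilogue callbacks are stored as
// (callback, gc_type filter, pass_isolate) triples and removal is by pointer
// identity, so an embedder must unregister with exactly the function it
// registered. A hedged usage sketch against the public API of this era
// (illustrative only):
//
//   static void OnGCEpilogue(v8::GCType type, v8::GCCallbackFlags flags) {
//     // inspect heap statistics after each full collection of interest
//   }
//   // v8::V8::AddGCEpilogueCallback(OnGCEpilogue, v8::kGCTypeMarkSweepCompact);
//   // ... later ...
//   // v8::V8::RemoveGCEpilogueCallback(OnGCEpilogue);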
6860 MaybeObject* maybe_obj =
6863 if (!maybe_obj->To(&table)) return maybe_obj;
6867 set_weak_object_to_code_table(table);
6869 return weak_object_to_code_table_;
6882 set_weak_object_to_code_table(*isolate()->factory()->NewWeakHashTable(16));
6893 class PrintHandleVisitor: public ObjectVisitor {
6896 for (Object** p = start; p < end; p++)
6897 PrintF(" handle %p to %p\n",
6898 reinterpret_cast<void*>(p),
6899 reinterpret_cast<void*>(*p));
6904 void Heap::PrintHandles() {
6906 PrintHandleVisitor v;
6913 Space* AllSpaces::next() {
6914 switch (counter_++) {
6916 return heap_->new_space();
6918 return heap_->old_pointer_space();
6920 return heap_->old_data_space();
6922 return heap_->code_space();
6924 return heap_->map_space();
6926 return heap_->cell_space();
6928 return heap_->property_cell_space();
6930 return heap_->lo_space();
6937 PagedSpace* PagedSpaces::next() {
6938 switch (counter_++) {
6940 return heap_->old_pointer_space();
6942 return heap_->old_data_space();
6944 return heap_->code_space();
6946 return heap_->map_space();
6948 return heap_->cell_space();
6950 return heap_->property_cell_space();
6958 OldSpace* OldSpaces::next() {
6959 switch (counter_++) {
6961 return heap_->old_pointer_space();
6963 return heap_->old_data_space();
6965 return heap_->code_space();
6984 size_func_(size_func) {
7001 if (iterator_ != NULL) {
7012 return CreateIterator();
7020 switch (current_space_) {
7058 virtual bool SkipObject(HeapObject* object) = 0;
7065 MarkReachableObjects();
7069 heap_->mark_compact_collector()->ClearMarkbits();
7073 MarkBit mark_bit = Marking::MarkBitFrom(object);
7074 return !mark_bit.Get();
7078 class MarkingVisitor : public ObjectVisitor {
7080 MarkingVisitor() : marking_stack_(10) {}
7083 for (Object** p = start; p < end; p++) {
7084 if (!(*p)->IsHeapObject()) continue;
7086 MarkBit mark_bit = Marking::MarkBitFrom(obj);
7087 if (!mark_bit.Get()) {
7089 marking_stack_.Add(obj);
7094 void TransitiveClosure() {
7095 while (!marking_stack_.is_empty()) {
7096 HeapObject* obj = marking_stack_.RemoveLast();
7102 List<HeapObject*> marking_stack_;
7105 void MarkReachableObjects() {
7106 MarkingVisitor visitor;
7107 heap_->IterateRoots(&visitor, VISIT_ALL);
7108 visitor.TransitiveClosure();
7116 HeapIterator::HeapIterator(Heap* heap)
7118 filtering_(HeapIterator::kNoFiltering),
7124 HeapIterator::HeapIterator(Heap* heap,
7125 HeapIterator::HeapObjectsFiltering filtering)
7127 filtering_(filtering),
7133 HeapIterator::~HeapIterator() {
7138 void HeapIterator::Init() {
7140 space_iterator_ = new SpaceIterator(heap_);
7141 switch (filtering_) {
7142 case kFilterUnreachable:
7143 filter_ = new UnreachableObjectsFilter(heap_);
7148 object_iterator_ = space_iterator_->next();
7152 void HeapIterator::Shutdown() {
7156 if (filtering_ != kNoFiltering) {
7161 delete space_iterator_;
7162 space_iterator_ = NULL;
7163 object_iterator_ = NULL;
7169 HeapObject* HeapIterator::next() {
7170 if (filter_ == NULL) return NextObject();
7172 HeapObject* obj = NextObject();
7173 while (obj != NULL && filter_->SkipObject(obj)) obj = NextObject();
7178 HeapObject* HeapIterator::NextObject() {
7180 if (object_iterator_ == NULL) return NULL;
7182 if (HeapObject* obj = object_iterator_->next_object()) {
7187 while (space_iterator_->has_next()) {
7188 object_iterator_ = space_iterator_->next();
7189 if (HeapObject* obj = object_iterator_->next_object()) {
7195 object_iterator_ = NULL;
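// Editorial note: HeapIterator walks every space via SpaceIterator, chaining
// each space's object iterator; with kFilterUnreachable it first runs a mark
// pass from the roots (the UnreachableObjectsFilter defined above) and then
// skips any object whose mark bit was never set, so only live objects are
// surfaced to the caller.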
7200 void HeapIterator::reset() {
7209 Object* const PathTracer::kAnyGlobalObject = NULL;
7211 class PathTracer::MarkVisitor: public ObjectVisitor {
7213 explicit MarkVisitor(PathTracer* tracer) : tracer_(tracer) {}
7216 for (Object** p = start; !tracer_->found() && (p < end); p++) {
7217 if ((*p)->IsHeapObject())
7218 tracer_->MarkRecursively(p, this);
7223 PathTracer* tracer_;
7227 class PathTracer::UnmarkVisitor: public ObjectVisitor {
7229 explicit UnmarkVisitor(PathTracer* tracer) : tracer_(tracer) {}
7232 for (Object** p = start; p < end; p++) {
7233 if ((*p)->IsHeapObject())
7234 tracer_->UnmarkRecursively(p, this);
7239 PathTracer* tracer_;
7243 void PathTracer::VisitPointers(Object** start, Object** end) {
7244 bool done = ((what_to_find_ == FIND_FIRST) && found_target_);
7246 for (Object** p = start; !done && (p < end); p++) {
7247 if ((*p)->IsHeapObject()) {
7249 done = ((what_to_find_ == FIND_FIRST) && found_target_);
7255 void PathTracer::Reset() {
7256 found_target_ = false;
7257 object_stack_.Clear();
7261 void PathTracer::TracePathFrom(Object** root) {
7262 ASSERT((search_target_ == kAnyGlobalObject) ||
7263 search_target_->IsHeapObject());
7264 found_target_in_trace_ = false;
7267 MarkVisitor mark_visitor(this);
7268 MarkRecursively(root, &mark_visitor);
7270 UnmarkVisitor unmark_visitor(this);
7271 UnmarkRecursively(root, &unmark_visitor);
7271 UnmarkRecursively(root, &unmark_visitor);
7277 static bool SafeIsNativeContext(HeapObject* obj) {
7278 return obj->map() == obj->GetHeap()->raw_unchecked_native_context_map();
7282 void PathTracer::MarkRecursively(Object** p, MarkVisitor* mark_visitor) {
7283 if (!(*p)->IsHeapObject()) return;
7289 if (!map->IsHeapObject()) return;
7291 if (found_target_in_trace_) return;
7292 object_stack_.Add(obj);
7293 if (((search_target_ == kAnyGlobalObject) && obj->IsJSGlobalObject()) ||
7294 (obj == search_target_)) {
7295 found_target_in_trace_ = true;
7296 found_target_ = true;
7300 bool is_native_context = SafeIsNativeContext(obj);
7305 Address map_addr = map_p->address();
7307 obj->set_map_no_write_barrier(reinterpret_cast<Map*>(map_addr + kMarkTag));
7312 Object** start = reinterpret_cast<Object**>(obj->address() +
7314 Object** end = reinterpret_cast<Object**>(obj->address() +
7316 mark_visitor->VisitPointers(start, end);
7318 obj->IterateBody(map_p->instance_type(),
7319 obj->SizeFromMap(map_p),
7325 MarkRecursively(&map, mark_visitor);
7327 if (!found_target_in_trace_)
7328 object_stack_.RemoveLast();
7332 void PathTracer::UnmarkRecursively(Object** p, UnmarkVisitor* unmark_visitor) {
7333 if (!(*p)->IsHeapObject()) return;
7337 Object* map = obj->map();
7339 if (map->IsHeapObject()) return;
7343 map_addr -= kMarkTag;
7349 obj->set_map_no_write_barrier(reinterpret_cast<Map*>(map_p));
7351 UnmarkRecursively(reinterpret_cast<Object**>(&map_p), unmark_visitor);
7353 obj->IterateBody(Map::cast(map_p)->instance_type(),
7359 void PathTracer::ProcessResults() {
7360 if (found_target_) {
7361 PrintF("=====================================\n");
7362 PrintF("==== Path to object ====\n");
7363 PrintF("=====================================\n\n");
7365 ASSERT(!object_stack_.is_empty());
7366 for (int i = 0; i < object_stack_.length(); i++) {
7367 if (i > 0) PrintF("\n |\n |\n V\n\n");
7368 Object* obj = object_stack_[i];
7371 PrintF("=====================================\n");
7379 void Heap::TracePathToObjectFrom(Object* target, Object* root) {
7381 tracer.VisitPointer(&root);
7387 void Heap::TracePathToObject(Object* target) {
7396 void Heap::TracePathToGlobal() {
7397 PathTracer tracer(PathTracer::kAnyGlobalObject,
7398 PathTracer::FIND_ALL,
7405 static intptr_t CountTotalHolesSize(Heap* heap) {
7406 intptr_t holes_size = 0;
7407 OldSpaces spaces(heap);
7408 for (OldSpace* space = spaces.next();
7410 space = spaces.next()) {
7411 holes_size += space->Waste() + space->Available();
7417 GCTracer::GCTracer(Heap* heap,
7418 const char* gc_reason,
7419 const char* collector_reason)
7421 start_object_size_(0),
7422 start_memory_size_(0),
7425 allocated_since_last_gc_(0),
7426 spent_in_mutator_(0),
7427 promoted_objects_size_(0),
7428 nodes_died_in_new_space_(0),
7429 nodes_copied_in_new_space_(0),
7432 gc_reason_(gc_reason),
7433 collector_reason_(collector_reason) {
7434 if (!FLAG_trace_gc && !FLAG_print_cumulative_gc_stat) return;
7439 for (int i = 0; i < Scope::kNumberOfScopes; i++) {
7443 in_free_list_or_wasted_before_gc_ = CountTotalHolesSize(heap);
7445 allocated_since_last_gc_ =
7448 if (heap_->last_gc_end_timestamp_ > 0) {
7449 spent_in_mutator_ = Max(start_time_ - heap_->last_gc_end_timestamp_, 0.0);
7455 steps_count_since_last_gc_ =
7457 steps_took_since_last_gc_ =
7462 GCTracer::~GCTracer() {
7464 if (!FLAG_trace_gc && !FLAG_print_cumulative_gc_stat) return;
7466 bool first_gc = (heap_->last_gc_end_timestamp_ == 0);
7468 heap_->alive_after_last_gc_ = heap_->SizeOfObjects();
7471 double time = heap_->last_gc_end_timestamp_ - start_time_;
7474 if (FLAG_print_cumulative_gc_stat) {
7475 heap_->total_gc_time_ms_ += time;
7476 heap_->max_gc_pause_ = Max(heap_->max_gc_pause_, time);
7477 heap_->max_alive_after_gc_ = Max(heap_->max_alive_after_gc_,
7478 heap_->alive_after_last_gc_);
7480 heap_->min_in_mutator_ = Min(heap_->min_in_mutator_,
7483 } else if (FLAG_trace_gc_verbose) {
7484 heap_->total_gc_time_ms_ += time;
7487 if (collector_ == SCAVENGER && FLAG_trace_gc_ignore_scavenger) return;
7489 heap_->AddMarkingTime(scopes_[Scope::MC_MARK]);
7491 if (FLAG_print_cumulative_gc_stat && !FLAG_trace_gc) return;
7492 PrintPID("%8.0f ms: ", heap_->isolate()->time_millis_since_init());
7494 if (!FLAG_trace_gc_nvp) {
7497 double end_memory_size_mb =
7498 static_cast<double>(heap_->isolate()->memory_allocator()->Size()) / MB;
7500 PrintF("%s %.1f (%.1f) -> %.1f (%.1f) MB, ",
7502 static_cast<double>(start_object_size_) / MB,
7503 static_cast<double>(start_memory_size_) / MB,
7504 SizeOfHeapObjects(),
7505 end_memory_size_mb);
7507 if (external_time > 0) PrintF("%d / ", external_time);
7509 if (steps_count_ > 0) {
7511 PrintF(" (+ %.1f ms in %d steps since last GC)",
7512 steps_took_since_last_gc_,
7513 steps_count_since_last_gc_);
7515 PrintF(" (+ %.1f ms in %d steps since start of marking, "
7516 "biggest step %.1f ms)",
7523 if (gc_reason_ != NULL) {
7524 PrintF(" [%s]", gc_reason_);
7527 if (collector_reason_ != NULL) {
7528 PrintF(" [%s]", collector_reason_);
7533 PrintF("pause=%.1f ", time);
7534 PrintF("mutator=%.1f ", spent_in_mutator_);
7536 switch (collector_) {
7549 PrintF("mark=%.1f ", scopes_[Scope::MC_MARK]);
7550 PrintF("sweep=%.2f ", scopes_[Scope::MC_SWEEP]);
7551 PrintF("sweepns=%.2f ", scopes_[Scope::MC_SWEEP_NEWSPACE]);
7552 PrintF("sweepos=%.2f ", scopes_[Scope::MC_SWEEP_OLDSPACE]);
7553 PrintF("evacuate=%.1f ", scopes_[Scope::MC_EVACUATE_PAGES]);
7554 PrintF("new_new=%.1f ", scopes_[Scope::MC_UPDATE_NEW_TO_NEW_POINTERS]);
7555 PrintF("root_new=%.1f ", scopes_[Scope::MC_UPDATE_ROOT_TO_NEW_POINTERS]);
7556 PrintF("old_new=%.1f ", scopes_[Scope::MC_UPDATE_OLD_TO_NEW_POINTERS]);
7557 PrintF("compaction_ptrs=%.1f ",
7558 scopes_[Scope::MC_UPDATE_POINTERS_TO_EVACUATED]);
7559 PrintF("intracompaction_ptrs=%.1f ",
7560 scopes_[Scope::MC_UPDATE_POINTERS_BETWEEN_EVACUATED]);
7561 PrintF("misc_compaction=%.1f ", scopes_[Scope::MC_UPDATE_MISC_POINTERS]);
7562 PrintF("weakcollection_process=%.1f ",
7563 scopes_[Scope::MC_WEAKCOLLECTION_PROCESS]);
7564 PrintF("weakcollection_clear=%.1f ",
7565 scopes_[Scope::MC_WEAKCOLLECTION_CLEAR]);
7570 in_free_list_or_wasted_before_gc_);
7575 PrintF("nodes_died_in_new=%d ", nodes_died_in_new_space_);
7576 PrintF("nodes_copied_in_new=%d ", nodes_copied_in_new_space_);
7577 PrintF("nodes_promoted=%d ", nodes_promoted_);
7580 PrintF("stepscount=%d ", steps_count_since_last_gc_);
7581 PrintF("stepstook=%.1f ", steps_took_since_last_gc_);
7583 PrintF("stepscount=%d ", steps_count_);
7584 PrintF("stepstook=%.1f ", steps_took_);
7585 PrintF("longeststep=%.1f ", longest_step_);
7591 heap_->PrintShortHeapStatistics();
7595 const char* GCTracer::CollectorString() {
7596 switch (collector_) {
7600 return "Mark-sweep";
7602 return "Unknown GC";
7606 int KeyedLookupCache::Hash(Map* map, Name* name) {
7608 uintptr_t addr_hash =
7610 return static_cast<uint32_t>((addr_hash ^ name->Hash()) & kCapacityMask);
7615 int index = (Hash(map, name) & kHashMask);
7617 Key& key = keys_[index + i];
7618 if ((key.map == map) && key.name->Equals(name)) {
7619 return field_offsets_[index + i];
7627 if (!name->IsUniqueName()) {
7628 String* internalized_string;
7633 name = internalized_string;
7639 int index = (Hash(map, name) & kHashMask);
7643 Key& key = keys_[index];
7645 if (key.map == free_entry_indicator) {
7648 field_offsets_[index + i] = field_offset;
7654 for (int i = kEntriesPerBucket - 1; i > 0; i--) {
7655 Key& key = keys_[index + i];
7656 Key& key2 = keys_[index + i - 1];
7658 field_offsets_[index + i] = field_offsets_[index + i - 1];
7662 Key& key = keys_[index];
7665 field_offsets_[index] = field_offset;
7670 for (int index = 0; index < kLength; index++) keys_[index].map = NULL;
7675 for (int index = 0; index < kLength; index++) keys_[index].source = NULL;
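// Editorial note: KeyedLookupCache is a small set-associative cache from
// (map, name) pairs to field offsets. The hash mixes the map's address with
// the name's hash, each bucket holds kEntriesPerBucket entries kept in
// most-recently-used order by the shifting loop in Update() above, and
// Clear() simply NULLs every key so stale maps can never produce a hit.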
7680 void Heap::GarbageCollectionGreedyCheck() {
7683 if (!AllowAllocationFailure::IsAllowed(isolate_)) return;
7691 for (int i = 0; i < new_space_strings_.length(); ++i) {
7692 if (new_space_strings_[i] == heap_->the_hole_value()) {
7695 ASSERT(new_space_strings_[i]->IsExternalString());
7696 if (heap_->InNewSpace(new_space_strings_[i])) {
7697 new_space_strings_[last++] = new_space_strings_[i];
7699 old_space_strings_.Add(new_space_strings_[i]);
7702 new_space_strings_.Rewind(last);
7703 new_space_strings_.Trim();
7706 for (int i = 0; i < old_space_strings_.length(); ++i) {
7707 if (old_space_strings_[i] == heap_->the_hole_value()) {
7710 ASSERT(old_space_strings_[i]->IsExternalString());
7712 old_space_strings_[last++] = old_space_strings_[i];
7714 old_space_strings_.Rewind(last);
7715 old_space_strings_.Trim();
7717 if (FLAG_verify_heap) {
7725 for (int i = 0; i < new_space_strings_.length(); ++i) {
7728 new_space_strings_.Free();
7729 for (int i = 0; i < old_space_strings_.length(); ++i) {
7732 old_space_strings_.Free();
7738 chunks_queued_for_free_ = chunk;
7743 if (chunks_queued_for_free_ == NULL) return;
7746 for (chunk = chunks_queued_for_free_; chunk != NULL; chunk = next) {
7764 while (inner <= inner_last) {
7770 if (area_end < inner->address()) area_end = chunk_end;
7782 for (chunk = chunks_queued_for_free_; chunk != NULL; chunk = next) {
7786 chunks_queued_for_free_ = NULL;
7791 uintptr_t p = reinterpret_cast<uintptr_t>(page);
7798 remembered_unmapped_pages_[remembered_unmapped_pages_index_] =
7800 remembered_unmapped_pages_index_++;
7801 remembered_unmapped_pages_index_ %= kRememberedUnmappedPages;
7805 void Heap::ClearObjectStats(bool clear_last_time_stats) {
7806 memset(object_counts_, 0, sizeof(object_counts_));
7807 memset(object_sizes_, 0, sizeof(object_sizes_));
7808 if (clear_last_time_stats) {
7809 memset(object_counts_last_time_, 0, sizeof(object_counts_last_time_));
7810 memset(object_sizes_last_time_, 0, sizeof(object_sizes_last_time_));
7819 LockGuard<Mutex> lock_guard(checkpoint_object_stats_mutex.Pointer());
7821 #define ADJUST_LAST_TIME_OBJECT_COUNT(name) \
7822 counters->count_of_##name()->Increment( \
7823 static_cast<int>(object_counts_[name])); \
7824 counters->count_of_##name()->Decrement( \
7825 static_cast<int>(object_counts_last_time_[name])); \
7826 counters->size_of_##name()->Increment( \
7827 static_cast<int>(object_sizes_[name])); \
7828 counters->size_of_##name()->Decrement( \
7829 static_cast<int>(object_sizes_last_time_[name]));
7831 #undef ADJUST_LAST_TIME_OBJECT_COUNT
7833 #define ADJUST_LAST_TIME_OBJECT_COUNT(name) \
7834 index = FIRST_CODE_KIND_SUB_TYPE + Code::name; \
7835 counters->count_of_CODE_TYPE_##name()->Increment( \
7836 static_cast<int>(object_counts_[index])); \
7837 counters->count_of_CODE_TYPE_##name()->Decrement( \
7838 static_cast<int>(object_counts_last_time_[index])); \
7839 counters->size_of_CODE_TYPE_##name()->Increment( \
7840 static_cast<int>(object_sizes_[index])); \
7841 counters->size_of_CODE_TYPE_##name()->Decrement( \
7842 static_cast<int>(object_sizes_last_time_[index]));
7844 #undef ADJUST_LAST_TIME_OBJECT_COUNT
7845 #define ADJUST_LAST_TIME_OBJECT_COUNT(name) \
7846 index = FIRST_FIXED_ARRAY_SUB_TYPE + name; \
7847 counters->count_of_FIXED_ARRAY_##name()->Increment( \
7848 static_cast<int>(object_counts_[index])); \
7849 counters->count_of_FIXED_ARRAY_##name()->Decrement( \
7850 static_cast<int>(object_counts_last_time_[index])); \
7851 counters->size_of_FIXED_ARRAY_##name()->Increment( \
7852 static_cast<int>(object_sizes_[index])); \
7853 counters->size_of_FIXED_ARRAY_##name()->Decrement( \
7854 static_cast<int>(object_sizes_last_time_[index]));
7856 #undef ADJUST_LAST_TIME_OBJECT_COUNT
7857 #define ADJUST_LAST_TIME_OBJECT_COUNT(name) \
7859 FIRST_CODE_AGE_SUB_TYPE + Code::k##name##CodeAge - Code::kFirstCodeAge; \
7860 counters->count_of_CODE_AGE_##name()->Increment( \
7861 static_cast<int>(object_counts_[index])); \
7862 counters->count_of_CODE_AGE_##name()->Decrement( \
7863 static_cast<int>(object_counts_last_time_[index])); \
7864 counters->size_of_CODE_AGE_##name()->Increment( \
7865 static_cast<int>(object_sizes_[index])); \
7866 counters->size_of_CODE_AGE_##name()->Decrement( \
7867 static_cast<int>(object_sizes_last_time_[index]));
7869 #undef ADJUST_LAST_TIME_OBJECT_COUNT
7871 OS::MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_));
7872 OS::MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_));
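// Editorial note: CheckpointObjectStats() reports deltas rather than absolute
// values. For each instance type (and for the code-kind, fixed-array and
// code-age sub-types handled by the ADJUST_LAST_TIME_OBJECT_COUNT macros
// above) it increments the counters by the current tally, decrements them by
// the tally captured at the previous checkpoint, and then snapshots the
// current arrays with OS::MemCopy so the next call starts from this baseline.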
static int SizeOfMarkedObject(HeapObject *object)
MUST_USE_RESULT MaybeObject * CopyConstantPoolArray(ConstantPoolArray *src)
Object ** roots_array_start()
void set_length(int value)
static Object * WeakNext(JSArrayBufferView *obj)
static void SetWeakNext(JSArrayBufferView *obj, Object *next)
MUST_USE_RESULT MaybeObject * AllocateJSModule(Context *context, ScopeInfo *scope_info)
ContextSlotCache * context_slot_cache()
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter NULL
const uint32_t kShortcutTypeTag
intptr_t MaximumCommittedMemory()
static const int kEmptyStringHash
#define CODE_AGE_LIST_COMPLETE(V)
static const int kPointerFieldsEndOffset
MUST_USE_RESULT MaybeObject * CopyCode(Code *code)
void set_elements_kind(ElementsKind elements_kind)
size_t CommittedPhysicalMemory()
void RemoveImplicitRefGroups()
static void Clear(FixedArray *cache)
static const int kWeakNextOffset
MUST_USE_RESULT MaybeObject * AllocateFixedTypedArray(int length, ExternalArrayType array_type, PretenureFlag pretenure)
static const int kMaxLength
Code * builtin(Name name)
#define SLOW_ASSERT(condition)
const intptr_t kSmiTagMask
void set_deopt_dependent_code(bool deopt)
static Object * WeakNext(AllocationSite *obj)
MUST_USE_RESULT MaybeObject * AllocateStringFromUtf8Slow(Vector< const char > str, int non_ascii_start, PretenureFlag pretenure=NOT_TENURED)
static void SetWeakNext(Context *context, Object *next)
static ConstantPoolArray * cast(Object *obj)
const intptr_t kDoubleAlignmentMask
MUST_USE_RESULT MaybeObject * AllocateSymbol()
static void VisitPhantomObject(Heap *, JSFunction *)
static void DoWeakList(Heap *heap, Context *context, WeakObjectRetainer *retainer, bool record_slots, int index)
static const int kCodeEntryOffset
size_t CommittedPhysicalMemory()
bool Contains(const T &elm) const
bool NextGCIsLikelyToBeFull()
MUST_USE_RESULT MaybeObject * AllocateOneByteInternalizedString(Vector< const uint8_t > str, uint32_t hash_field)
MUST_USE_RESULT MaybeObject * AllocateExternalStringFromAscii(const ExternalAsciiString::Resource *resource)
void set_constant_pool(Object *constant_pool)
int inobject_properties()
#define PROFILE(IsolateGetter, Call)
#define LAZY_MUTEX_INITIALIZER
MUST_USE_RESULT MaybeObject * AllocateRawOneByteString(int length, PretenureFlag pretenure=NOT_TENURED)
static int WeakNextOffset()
void Callback(MemoryChunk *page, StoreBufferEvent event)
#define INSTANCE_TYPE_LIST(V)
void set_size(Heap *heap, int size_in_bytes)
#define STRUCT_TABLE_ELEMENT(NAME, Name, name)
MUST_USE_RESULT MaybeObject * CopyFixedDoubleArray(FixedDoubleArray *src)
static void VisitPhantomObject(Heap *, Code *)
intptr_t * old_pointer_space_size
int Lookup(Map *map, Name *name)
bool Contains(Address addr)
void set(int index, Object *value)
static int WeakNextOffset()
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths true
CompilationCache * compilation_cache()
intptr_t * cell_space_size
void PopulateConstantPool(ConstantPoolArray *constant_pool)
static const int kMapHashShift
void DeoptMarkedAllocationSites()
void PrintF(const char *format,...)
void PrintStack(StringStream *accumulator)
void SetNewSpaceHighPromotionModeActive(bool mode)
#define ASSERT_TAG_ALIGNED(address)
void CollectAllGarbage(int flags, const char *gc_reason=NULL, const GCCallbackFlags gc_callback_flags=kNoGCCallbackFlags)
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on stack replacement do not emit check maps for constant values that have a leaf map
static Object * WeakNext(JSArrayBuffer *obj)
bool InOldDataSpace(Address address)
void set_function_with_prototype(bool value)
bool InNewSpace(Object *object)
static void VisitPhantomObject(Heap *heap, Context *context)
static String * cast(Object *obj)
void AddGCEpilogueCallback(v8::Isolate::GCEpilogueCallback callback, GCType gc_type_filter, bool pass_isolate=true)
void IdentifyNewSpaceWeakIndependentHandles(WeakSlotCallbackWithHeap f)
MUST_USE_RESULT MaybeObject * AllocateFunctionContext(int length, JSFunction *function)
RootListIndex RootIndexForEmptyExternalArray(ElementsKind kind)
void(* ObjectSlotCallback)(HeapObject **from, HeapObject *to)
HandleScopeImplementer * handle_scope_implementer()
void set_access_flags(v8::AccessControl access_control)
intptr_t MaximumCommittedMemory()
static DescriptorArray * cast(Object *obj)
static Failure * InternalError()
void IterateWeakRoots(ObjectVisitor *v, VisitMode mode)
bool SkipObject(HeapObject *object)
static int SizeOf(Map *map, HeapObject *object)
MUST_USE_RESULT MaybeObject * AllocateRaw(int size_in_bytes)
MUST_USE_RESULT MaybeObject * ReinitializeJSGlobalProxy(JSFunction *constructor, JSGlobalProxy *global)
int unused_property_fields()
void set_length(Smi *length)
V8_INLINE bool IsOutsideAllocatedSpace(const void *address) const
bool SetUp(const size_t requested_size)
void PostGarbageCollectionProcessing(Heap *heap)
void RemoveGCPrologueCallback(v8::Isolate::GCPrologueCallback callback)
MUST_USE_RESULT MaybeObject * CopyFixedDoubleArrayWithMap(FixedDoubleArray *src, Map *map)
void Prepare(GCTracer *tracer)
void set_scan_on_scavenge(bool scan)
static Smi * FromInt(int value)
static void VisitPhantomObject(Heap *heap, AllocationSite *phantom)
#define LOG(isolate, Call)
MUST_USE_RESULT MaybeObject * AllocateJSFunctionProxy(Object *handler, Object *call_trap, Object *construct_trap, Object *prototype)
static Object * GetObjectFromEntryAddress(Address location_of_address)
void FinalizeExternalString(String *string)
MUST_USE_RESULT MaybeObject * CopyJSObject(JSObject *source, AllocationSite *site=NULL)
Map * MapForFixedTypedArray(ExternalArrayType array_type)
void CompletelyClearInstanceofCache()
V8_DECLARE_ONCE(initialize_gc_once)
static MemoryChunk * FromAddress(Address a)
void set_ic_age(int count)
static const int kDataOffset
int InitialPropertiesLength()
static HeapObject * cast(Object *obj)
Map * MapForExternalArrayType(ExternalArrayType array_type)
void SetNumberStringCache(Object *number, String *str)
static const byte kArgumentMarker
static bool RootCanBeWrittenAfterInitialization(RootListIndex root_index)
MUST_USE_RESULT MaybeObject * AllocateModuleContext(ScopeInfo *scope_info)
MUST_USE_RESULT MaybeObject * AllocateCodeCache()
void set_pre_allocated_property_fields(int value)
void CallOnce(OnceType *once, NoArgFunction init_func)
static const byte kUndefined
MUST_USE_RESULT MaybeObject * AllocateNativeContext()
const int kVariableSizeSentinel
RootListIndex RootIndexForExternalArrayType(ExternalArrayType array_type)
void CallGCEpilogueCallbacks(GCType gc_type, GCCallbackFlags flags)
bool SetUp(intptr_t max_capacity, intptr_t capacity_executable)
static ExternalTwoByteString * cast(Object *obj)
void VisitExternalResources(v8::ExternalResourceVisitor *visitor)
void IterateDeferredHandles(ObjectVisitor *visitor)
static Object * WeakNext(JSFunction *function)
static Map * cast(Object *obj)
static uint16_t TrailSurrogate(uint32_t char_code)
MaybeObject * AddWeakObjectToCodeDependency(Object *obj, DependentCode *dep)
void set_has_debug_break_slots(bool value)
void set_start_position(int value)
void ResetAllocationInfo()
static const int kSerializedDataOffset
static const byte kTheHole
static ByteArray * cast(Object *obj)
MUST_USE_RESULT MaybeObject * AllocateJSMessageObject(String *type, JSArray *arguments, int start_position, int end_position, Object *script, Object *stack_frames)
static const int kMaxSize
static AllocationSite * cast(Object *obj)
void set_end_position(int end_position)
void set_context(Object *context)
static FreeSpace * cast(Object *obj)
Bootstrapper * bootstrapper()
bool InFromSpace(Object *object)
MUST_USE_RESULT MaybeObject * Uint32ToString(uint32_t value, bool check_number_string_cache=true)
void Relocate(intptr_t delta)
static SeqOneByteString * cast(Object *obj)
void set_is_crankshafted(bool value)
Object * weak_object_to_code_table()
WriteBarrierMode GetWriteBarrierMode(const DisallowHeapAllocation &promise)
PromotionQueue * promotion_queue()
void SetTop(Object ***top)
static ScopeInfo * Empty(Isolate *isolate)
RootListIndex RootIndexForFixedTypedArray(ExternalArrayType array_type)
static int WeakNextOffset()
intptr_t inline_allocation_limit_step()
intptr_t * code_space_size
MUST_USE_RESULT MaybeObject * InternalizeStringWithKey(HashTableKey *key)
MUST_USE_RESULT MaybeObject * AllocateExternal(void *value)
static void VisitPhantomObject(Heap *, JSArrayBufferView *)
RandomNumberGenerator * random_number_generator()
static void SetWeakNext(Code *code, Object *next)
static int WeakNextOffset()
void IterateStrongRoots(ObjectVisitor *v, VisitMode mode)
intptr_t MaximumCommittedMemory()
#define ASSERT(condition)
bool InSpace(Address addr, AllocationSpace space)
void(* GCPrologueCallback)(GCType type, GCCallbackFlags flags)
MUST_USE_RESULT MaybeObject * AllocateGlobalContext(JSFunction *function, ScopeInfo *scope_info)
static void IncrementLiveBytesFromGC(Address address, int by)
void Step(intptr_t allocated, CompletionAction action)
KeyedLookupCache * keyed_lookup_cache()
static const int kReduceMemoryFootprintMask
void IterateStrongRoots(ObjectVisitor *v)
virtual Object * RetainAs(Object *object)
static Context * cast(Object *context)
void AgeBufferedOsrJobs()
const intptr_t kCodeAlignment
ThreadManager * thread_manager()
#define ADJUST_LAST_TIME_OBJECT_COUNT(name)
MUST_USE_RESULT MaybeObject * AllocateBlockContext(JSFunction *function, Context *previous, ScopeInfo *info)
int SizeFromMap(Map *map)
void IncrementMementoCreateCount()
intptr_t CommittedMemoryExecutable()
int GetInternalFieldCount()
void initialize_elements()
static void VisitLiveObject(Heap *, JSArrayBufferView *obj, WeakObjectRetainer *retainer, bool record_slots)
void VisitPointers(Object **start, Object **end)
#define INTERNALIZED_STRING_LIST(V)
#define STRING_TYPE_LIST(V)
static FixedTypedArrayBase * cast(Object *obj)
static ExternalAsciiString * cast(Object *obj)
static const int kMaxSize
static const int kPageSize
bool DigestPretenuringFeedback()
void init_back_pointer(Object *undefined)
void IterateSmiRoots(ObjectVisitor *v)
void set_foreign_address(Address value)
PerThreadAssertScopeDebugOnly< HEAP_ALLOCATION_ASSERT, true > AllowHeapAllocation
void SeqTwoByteStringSet(int index, uint16_t value)
static Code * cast(Object *obj)
virtual const uint16_t * data() const =0
MUST_USE_RESULT MaybeObject * AllocateJSObject(JSFunction *constructor, PretenureFlag pretenure=NOT_TENURED, AllocationSite *allocation_site=NULL)
static bool IsAtEnd(Address addr)
void AdjustLiveBytes(Address address, int by, InvocationMode mode)
static void SetWeakNext(JSArrayBuffer *obj, Object *next)
#define ELEMENT_KIND_TO_ROOT_INDEX(Type, type, TYPE, ctype, size)
void IterateAndMarkPointersToFromSpace(Address start, Address end, ObjectSlotCallback callback)
static PolymorphicCodeCache * cast(Object *obj)
ArrayStorageAllocationMode
static Symbol * cast(Object *obj)
Failure * ThrowInvalidStringLength()
virtual Object * RetainAs(Object *object)=0
static Object ** RawField(HeapObject *obj, int offset)
StoreBuffer * store_buffer()
ConstantPoolArray * constant_pool()
static Smi * cast(Object *object)
void set_function_token_position(int function_token_position)
MUST_USE_RESULT MaybeObject * AllocateStringFromOneByte(Vector< const uint8_t > str, PretenureFlag pretenure=NOT_TENURED)
#define STRING_TYPE_ELEMENT(type, size, name, camel_name)
intptr_t MaximumCommittedMemory()
FixedTypedArrayBase * EmptyFixedTypedArrayForMap(Map *map)
static MUST_USE_RESULT MaybeObject * InitializeIntrinsicFunctionNames(Heap *heap, Object *dictionary)
void set_closure(JSFunction *closure)
static const int kInstanceClassNameOffset
StackGuard * stack_guard()
void Free(MemoryChunk *chunk)
void set_opt_count_and_bailout_reason(int value)
MUST_USE_RESULT MaybeObject * CopyFixedArrayWithMap(FixedArray *src, Map *map)
#define UPDATE_COUNTERS_FOR_SPACE(space)
static bool IsOneByte(const uc16 *chars, int length)
void EnsureWeakObjectToCodeTable()
GlobalObject * global_object()
int NotifyContextDisposed()
Object * InObjectPropertyAtPut(int index, Object *value, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
static Struct * cast(Object *that)
MUST_USE_RESULT MaybeObject * AllocateHeapNumber(double value, PretenureFlag pretenure=NOT_TENURED)
static int GetBuiltinsCount()
void InitializeBody(int object_size, Object *value)
void RepairFreeListsAfterBoot()
MUST_USE_RESULT MaybeObject * NumberToString(Object *number, bool check_number_string_cache=true)
static const int kWeakFirstViewOffset
UnicodeCache * unicode_cache()
String *(* ExternalStringTableUpdaterCallback)(Heap *heap, Object **pointer)
double sweeping_time() const
void set_the_hole(int index)
static const int kEndMarker
bool IdleNotification(int hint)
virtual size_t length() const =0
void SetEntryCounts(int number_of_int64_entries, int number_of_code_ptr_entries, int number_of_heap_ptr_entries, int number_of_int32_entries)
friend class MarkCompactCollector
void(* GCEpilogueCallback)(Isolate *isolate, GCType type, GCCallbackFlags flags)
void EnsureHeapIsIterable()
MUST_USE_RESULT MaybeObject * AllocateUninitializedFixedDoubleArray(int length, PretenureFlag pretenure=NOT_TENURED)
bool InOldPointerSpace(Address address)
intptr_t * property_cell_space_capacity
int(* HeapObjectCallback)(HeapObject *obj)
static Object * WeakNext(Context *context)
void set_global_object(GlobalObject *object)
static SeededNumberDictionary * cast(Object *obj)
#define ALLOCATE_EMPTY_EXTERNAL_ARRAY(Type, type, TYPE, ctype, size)
void set_num_literals(int value)
static const int kMaxLength
const char * IntToCString(int n, Vector< char > buffer)
bool is_tracking_object_moves() const
#define UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(space)
intptr_t OldGenerationAllocationLimit(intptr_t old_gen_size)
STATIC_ASSERT(sizeof(CPURegister)==sizeof(Register))
bool AdvanceSweeper(intptr_t bytes_to_sweep)
void Register(StaticVisitorBase::VisitorId id, Callback callback)
intptr_t CommittedMemory()
static bool IsMarked(HeapObject *object)
static Cell * cast(Object *obj)
static int NumberOfHandles(Isolate *isolate)
void IteratePointersToNewSpace(ObjectSlotCallback callback)
static void MemCopy(void *dest, const void *src, size_t size)
intptr_t CommittedMemory()
void set_expected_nof_properties(int value)
MUST_USE_RESULT MaybeObject * AllocateJSArrayWithElements(FixedArrayBase *array_base, ElementsKind elements_kind, int length, PretenureFlag pretenure=NOT_TENURED)
void set_instruction_size(int value)
Context * native_context()
void InitializeBody(int object_size)
void LowerInlineAllocationLimit(intptr_t step)
static const int kStoreBufferSize
static const uchar kMaxNonSurrogateCharCode
static bool IsValid(intptr_t value)
void set_resource(const Resource *buffer)
#define MAKE_CASE(NAME, Name, name)
void ClearAllICsByKind(Code::Kind kind)
MUST_USE_RESULT MaybeObject * AllocateWithContext(JSFunction *function, Context *previous, JSReceiver *extension)
void CollectAllAvailableGarbage(const char *gc_reason=NULL)
bool ConfigureHeapDefault()
PagedSpace * paged_space(int idx)
void set_aliased_context_slot(int count)
ElementsKind GetElementsKind()
static const int kNoGCFlags
PropertyCellSpace * property_cell_space()
MUST_USE_RESULT MaybeObject * AllocateFixedArrayWithHoles(int length, PretenureFlag pretenure=NOT_TENURED)
void set_prologue_offset(int offset)
MemoryAllocator * memory_allocator()
EternalHandles * eternal_handles()
static Oddball * cast(Object *obj)
static Address & Address_at(Address addr)
static uint16_t LeadSurrogate(uint32_t char_code)
MUST_USE_RESULT MaybeObject * AllocateForeign(Address address, PretenureFlag pretenure=NOT_TENURED)
const char * DoubleToCString(double v, Vector< char > buffer)
static UnseededNumberDictionary * cast(Object *obj)
void QueueMemoryChunkForFree(MemoryChunk *chunk)
static void VisitLiveObject(Heap *, JSFunction *, WeakObjectRetainer *, bool)
void CheckpointObjectStats()
MUST_USE_RESULT MaybeObject * AllocateExternalArray(int length, ExternalArrayType array_type, void *external_pointer, PretenureFlag pretenure)
intptr_t * cell_space_capacity
bool IsAligned(T value, U alignment)
intptr_t * memory_allocator_size
static DependentCode * cast(Object *object)
void Start(CompactionFlag flag=ALLOW_COMPACTION)
static void VisitLiveObject(Heap *heap, Context *context, WeakObjectRetainer *retainer, bool record_slots)
void set_inobject_properties(int value)
LazyStaticInstance< Mutex, DefaultConstructTrait< Mutex >, ThreadSafeInitOnceTrait >::type LazyMutex
void MarkCompactPrologue()
void Iterate(ObjectVisitor *v)
virtual intptr_t SizeOfObjects()
static void VisitLiveObject(Heap *heap, AllocationSite *site, WeakObjectRetainer *retainer, bool record_slots)
size_t CommittedPhysicalMemory()
GlobalHandles * global_handles()
void IncrementYoungSurvivorsCounter(int survived)
~UnreachableObjectsFilter()
void set_allocation_sites_list(Object *object)
int first_code_ptr_index()
MUST_USE_RESULT MaybeObject * AllocatePolymorphicCodeCache()
static const int kMaxRegularHeapObjectSize
intptr_t * code_space_capacity
void VisitPointer(Object **p)
bool deopt_dependent_code()
static MUST_USE_RESULT MaybeObject * Allocate(Heap *heap, int at_least_space_for, PretenureFlag pretenure=NOT_TENURED)
MUST_USE_RESULT MaybeObject * InternalizeString(String *str)
static void VisitPhantomObject(Heap *heap, JSArrayBuffer *phantom)
static void Enter(Heap *heap, String *key_string, Object *key_pattern, FixedArray *value_array, ResultsCacheType type)
const uint32_t kShortcutTypeMask
static void MemMove(void *dest, const void *src, size_t size)
void ReserveSpace(int *sizes, Address *addresses)
static const int kIsNotArrayIndexMask
static void DeoptimizeAll(Isolate *isolate)
void set_end_position(int value)
int memento_found_count()
ExternalArray * EmptyExternalArrayForMap(Map *map)
OldSpace * old_pointer_space()
void UncommitMarkingDeque()
static const byte kUninitialized
Map * InternalizedStringMapForString(String *str)
bool ConfigureHeap(int max_semispace_size, intptr_t max_old_gen_size, intptr_t max_executable_size)
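The three limits taken by ConfigureHeap cover the new-space semispaces, the old generation, and executable code. A minimal sketch of calling it before Heap::SetUp, assuming internal access to the Isolate and the MB constant from globals.h; the concrete sizes are illustrative assumptions, not values from the source:
  // Sketch only: configure heap limits, falling back to the built-in defaults.
  Heap* heap = isolate->heap();          // 'isolate' assumed to be an internal Isolate*
  if (!heap->ConfigureHeap(8 * MB,       // max_semispace_size
                           700 * MB,     // max_old_gen_size
                           256 * MB)) {  // max_executable_size
    heap->ConfigureHeapDefault();        // ConfigureHeapDefault() is listed above
  }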
T RoundUp(T x, intptr_t m)
intptr_t * map_space_size
void RecordWrites(HeapObject *obj)
Object * Lookup(Object *key)
static Object * WeakNext(Code *code)
static double TimeCurrentMillis()
static FixedDoubleArray * cast(Object *obj)
MUST_USE_RESULT MaybeObject * AllocateTypeFeedbackInfo()
PretenureFlag GetPretenureMode()
void DeoptMarkedAllocationSites()
static NameDictionary * cast(Object *obj)
MaybeObject * AllocateConstantPool(Heap *heap)
MUST_USE_RESULT MaybeObject * AllocateByteArray(int length, PretenureFlag pretenure=NOT_TENURED)
static void SetWeakNext(AllocationSite *obj, Object *next)
void set_age_mark(Address mark)
void IterateAllRoots(ObjectVisitor *v)
bool contains(Address address)
void set_length(int value)
static const int kMakeHeapIterableMask
MUST_USE_RESULT MaybeObject * AllocateJSObjectFromMap(Map *map, PretenureFlag pretenure=NOT_TENURED, bool alloc_props=true, AllocationSite *allocation_site=NULL)
static ExternalString * cast(Object *obj)
MUST_USE_RESULT MaybeObject * AllocateJSArrayAndStorage(ElementsKind elements_kind, int length, int capacity, ArrayStorageAllocationMode mode=DONT_INITIALIZE_ARRAY_ELEMENTS, PretenureFlag pretenure=NOT_TENURED)
void EnsureSpace(intptr_t space_needed)
void Iterate(ObjectVisitor *v)
static const int kNextFunctionLinkOffset
bool InToSpace(Object *object)
void CopyFrom(const CodeDesc &desc)
static int SizeFor(int length)
static void FatalProcessOutOfMemory(const char *location, bool take_snapshot=false)
void set_start_position_and_type(int value)
void set_resource(const Resource *buffer)
static void Iterate(Isolate *isolate, ObjectVisitor *visitor)
static PropertyCell * cast(Object *obj)
void Iterate(ObjectVisitor *v)
RootListIndex RootIndexForEmptyFixedTypedArray(ElementsKind kind)
void Iterate(ObjectVisitor *v)
byte * relocation_start()
#define STATIC_ASCII_VECTOR(x)
LargeObjectSpace * lo_space()
bool RootCanBeTreatedAsConstant(RootListIndex root_index)
const Address kFromSpaceZapValue
bool ToSpaceContains(Address address)
DeoptimizerData * deoptimizer_data()
Callback GetVisitorById(StaticVisitorBase::VisitorId id)
MUST_USE_RESULT MaybeObject * AllocateExternalStringFromTwoByte(const ExternalTwoByteString::Resource *resource)
MUST_USE_RESULT MaybeObject * AllocatePartialMap(InstanceType instance_type, int instance_size)
static Object * Lookup(Heap *heap, String *key_string, Object *key_pattern, ResultsCacheType type)
DescriptorLookupCache * descriptor_lookup_cache()
void set_map_no_write_barrier(Map *value)
static JSMessageObject * cast(Object *obj)
void initialize_storage()
static const int kAbortIncrementalMarkingMask
static const int kNonWeakFieldsEndOffset
Vector< const char > CStrVector(const char *data)
bool CollectGarbage(AllocationSpace space, const char *gc_reason=NULL, const GCCallbackFlags gc_callback_flags=kNoGCCallbackFlags)
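CollectGarbage and CollectAllAvailableGarbage (declared earlier in this list) are the internal entry points for requesting collections. A hedged usage sketch, assuming `heap` is an internal Heap* (e.g. isolate->heap()); the reason strings are arbitrary labels:
  // Sketch only: request a scavenge, a full mark-compact, and a last-resort GC.
  heap->CollectGarbage(NEW_SPACE, "embedder requested scavenge");         // minor GC
  heap->CollectGarbage(OLD_POINTER_SPACE, "embedder requested full GC");  // full mark-compact
  heap->CollectAllAvailableGarbage("low-memory notification");            // repeated full GCs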
void IterateNewSpaceRoots(ObjectVisitor *visitor)
static Local< Context > ToLocal(v8::internal::Handle< v8::internal::Context > obj)
static int OffsetOfElementAt(int index)
static const int kNextCodeLinkOffset
intptr_t CommittedMemory()
Object * GetNumberStringCache(Object *number)
static int SizeFor(int length)
#define T(name, string, precedence)
void SetArea(Address area_start, Address area_end)
void IterateNewSpaceStrongAndDependentRoots(ObjectVisitor *v)
static uchar ValueOf(const byte *str, unsigned length, unsigned *cursor)
bool IsFastSmiOrObjectElementsKind(ElementsKind kind)
friend class GCCallbacksScope
void UpdateMarkingDequeAfterScavenge()
static SeqTwoByteString * cast(Object *obj)
static JSFunctionResultCache * cast(Object *obj)
void Iterate(ObjectVisitor *v)
intptr_t get_max_alive_after_gc()
void UpdateReferencesInExternalStringTable(ExternalStringTableUpdaterCallback updater_func)
MUST_USE_RESULT MaybeObject * AllocateJSProxy(Object *handler, Object *prototype)
void ProcessWeakReferences(WeakObjectRetainer *retainer)
void ClearNormalizedMapCaches()
static const int kHeaderSize
static void VisitPointer(Heap *heap, Object **p)
MUST_USE_RESULT MaybeObject * NumberFromDouble(double value, PretenureFlag pretenure=NOT_TENURED)
Vector< const uint8_t > OneByteVector(const char *data, int length)
bool SlowContains(Address addr)
static void VisitLiveObject(Heap *, Code *, WeakObjectRetainer *, bool)
MUST_USE_RESULT MaybeObject * CreateCode(const CodeDesc &desc, Code::Flags flags, Handle< Object > self_reference, bool immovable=false, bool crankshafted=false, int prologue_offset=Code::kPrologueOffsetNotSet)
void Update(Map *map, Name *name, int field_offset)
intptr_t * old_data_space_capacity
static int SizeFor(int length)
HeapProfiler * heap_profiler() const
bool is_compacting() const
MUST_USE_RESULT MaybeObject * AllocateArgumentsObject(Object *callee, int length)
intptr_t SizeExecutable()
void set_instance_type(InstanceType value)
#define FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(V)
bool PostGarbageCollectionProcessing(GarbageCollector collector, GCTracer *tracer)
static HeapNumber * cast(Object *obj)
void RemoveGCEpilogueCallback(v8::Isolate::GCEpilogueCallback callback)
static void WriteToFlat(String *source, sinkchar *sink, int from, int to)
static MUST_USE_RESULT MaybeObject * Allocate(Heap *heap, int at_least_space_for, MinimumCapacity capacity_option=USE_DEFAULT_MINIMUM_CAPACITY, PretenureFlag pretenure=NOT_TENURED)
void set_value(double value)
MUST_USE_RESULT MaybeObject * CopyFixedArray(FixedArray *src)
virtual size_t length() const =0
void IterateRoots(ObjectVisitor *v, VisitMode mode)
static const int kLengthOffset
static double nan_value()
MUST_USE_RESULT MaybeObject * ReinitializeJSReceiver(JSReceiver *object, InstanceType type, int size)
MUST_USE_RESULT MaybeObject * AllocateAccessorPair()
void set_raw_kind_specific_flags1(int value)
int steps_count_since_last_gc()
void set_counters(int value)
MUST_USE_RESULT MaybeObject * AllocateConstantPoolArray(int number_of_int64_entries, int number_of_code_ptr_entries, int number_of_heap_ptr_entries, int number_of_int32_entries)
MUST_USE_RESULT MaybeObject * AllocateInternalizedStringImpl(T t, int chars, uint32_t hash_field)
MUST_USE_RESULT MaybeObject * AllocateCatchContext(JSFunction *function, Context *previous, String *name, Object *thrown_object)
const uint32_t kFreeListZapValue
static const int kArgumentsLengthIndex
void set_bit_field3(uint32_t bits)
#define CODE_KIND_LIST(V)
#define OBJECT_POINTER_ALIGN(value)
void CheckNewSpaceExpansionCriteria()
const intptr_t kObjectAlignment
CpuProfiler * cpu_profiler() const
INLINE(static HeapObject *EnsureDoubleAligned(Heap *heap, HeapObject *object, int size))
static NewSpacePage * FromLimit(Address address_limit)
void SetInternalField(int index, Object *value)
void RecordStats(HeapStats *stats)
MUST_USE_RESULT MaybeObject * AllocateScopeInfo(int length)
Object * native_contexts_list()
double get_min_in_mutator()
MUST_USE_RESULT MaybeObject * NumberFromUint32(uint32_t value, PretenureFlag pretenure=NOT_TENURED)
IncrementalMarking * incremental_marking()
double get_max_gc_pause()
bool Contains(Address addr)
MUST_USE_RESULT MaybeObject * Put(Object *key, Object *value)
void EnableInlineAllocation()
size_t CommittedPhysicalMemory()
Map * get_initial_js_array_map(ElementsKind kind)
#define ARRAY_TYPE_TO_ROOT_INDEX(Type, type, TYPE, ctype, size)
MUST_USE_RESULT MaybeObject * AllocateUninitializedFixedArray(int length)
void set_extension(Object *object)
intptr_t CommittedMemory()
MUST_USE_RESULT MaybeObject * AllocateAllocationSite()
void MoveElements(FixedArray *array, int dst_index, int src_index, int len)
static const int kStartMarker
void set_bit_field(byte value)
static int SizeFor(int length)
void CopyBytes(uint8_t *target, uint8_t *source)
void Iterate(v8::internal::ObjectVisitor *v)
NewSpacePage * next_page() const
void MemsetPointer(T **dest, U *value, int counter)
void set_owner(Space *space)
static int SizeFor(int number_of_int64_entries, int number_of_code_ptr_entries, int number_of_heap_ptr_entries, int number_of_int32_entries)
ScavengeWeakObjectRetainer(Heap *heap)
void RememberUnmappedPage(Address page, bool compacted)
void NotifyOfHighPromotionRate()
static void IncrementLiveBytesFromMutator(Address address, int by)
static const int kNotFound
static const int kRegExpResultsCacheSize
void PrintPID(const char *format,...)
#define ASSERT_EQ(v1, v2)
void IterateAllRoots(ObjectVisitor *visitor)
static const int kBodyOffset
#define ALLOCATE_EXTERNAL_ARRAY_MAP(Type, type, TYPE, ctype, size)
MUST_USE_RESULT MaybeObject * LookupSingleCharacterStringFromCode(uint16_t code)
InstanceType instance_type()
static void CopyBlock(Address dst, Address src, int byte_size)
#define CONSTANT_STRING_ELEMENT(name, contents)
static bool ShouldZapGarbage()
static HeapObject * FromAddress(Address address)
static const uchar kBadChar
void set_size(size_t size)
MUST_USE_RESULT MaybeObject * AllocateFixedDoubleArrayWithHoles(int length, PretenureFlag pretenure=NOT_TENURED)
int count_of_int32_entries()
ScavengeVisitor(Heap *heap)
MUST_USE_RESULT MaybeObject * AllocateFixedArray(int length, PretenureFlag pretenure=NOT_TENURED)
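The MUST_USE_RESULT MaybeObject* allocators in this list follow the retry-after-GC idiom of this V8 generation: the caller unwraps the result and propagates allocation failures. A sketch of that caller-side pattern, assuming `heap` is an internal Heap* and MaybeObject::ToObject(Object**) as used in this code base; FixedArray::cast and Smi::FromInt also appear in this list:
  // Sketch only: unwrap a MaybeObject and propagate a retry-after-GC failure.
  MaybeObject* maybe = heap->AllocateFixedArray(16, TENURED);
  Object* obj;
  if (!maybe->ToObject(&obj)) return maybe;    // allocation failed; caller retries after GC
  FixedArray* result = FixedArray::cast(obj);
  result->set(0, Smi::FromInt(42));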
static const int kArgumentsCalleeIndex
MUST_USE_RESULT MaybeObject * AllocateHashTable(int length, PretenureFlag pretenure=NOT_TENURED)
PerThreadAssertScopeDebugOnly< HEAP_ALLOCATION_ASSERT, false > DisallowHeapAllocation
#define ALLOCATE_MAP(instance_type, size, field_name)
static FixedArray * cast(Object *obj)
static const int kWeakNextOffset
MUST_USE_RESULT MaybeObject * InternalizeUtf8String(const char *str)
static const int kHeaderSize
void Print(const v8::FunctionCallbackInfo< v8::Value > &args)
void DisableInlineAllocation()
void RemoveObjectGroups()
void UpdateInlineAllocationLimit(int size_in_bytes)
OptimizingCompilerThread * optimizing_compiler_thread()
ElementsKind elements_kind()
void set_previous(Context *context)
intptr_t PromotedSpaceSizeOfObjects()
void IterateNewSpaceWeakIndependentRoots(ObjectVisitor *v)
intptr_t * old_pointer_space_capacity
bool OldGenerationAllocationLimitReached()
void Add(const T &element, AllocationPolicy allocator=AllocationPolicy())
double steps_took_since_last_gc()
int first_heap_ptr_index()
StaticResource< Utf8Decoder > * utf8_decoder()
int OffsetOfElementAt(int index)
static const uint32_t kHashBitMask
void set_instance_size(int value)
static VisitorId GetVisitorId(int instance_type, int instance_size)
void ClearJSFunctionResultCaches()
void set_compiler_hints(int value)
void RecordStats(HeapStats *stats, bool take_snapshot=false)
void set_formal_parameter_count(int value)
static const int kWeakNextOffset
bool HasFastDoubleElements()
static const int kMaxLength
int count_of_code_ptr_entries()
virtual ~HeapObjectsFilter()
static WeakHashTable * cast(Object *obj)
void set_bit_field2(byte value)
void CopyFrom(VisitorDispatchTable *other)
void CreateFillerObjectAt(Address addr, int size)
void set_marked_for_deoptimization(bool flag)
static const int kHashShift
static int GetLastError()
Object * array_buffers_list()
MUST_USE_RESULT MaybeObject * AllocateSharedFunctionInfo(Object *name)
bool AdvanceSweepers(int step_size)
static JSArrayBuffer * cast(Object *obj)
void RegisterSpecializations()
static NormalizedMapCache * cast(Object *obj)
static const int kMaxLength
MUST_USE_RESULT MaybeObject * Allocate(Map *map, AllocationSpace space, AllocationSite *allocation_site=NULL)
intptr_t * map_space_capacity
static int SizeFor(int body_size)
static intptr_t MaxVirtualMemory()
#define ALLOCATE_VARSIZE_MAP(instance_type, field_name)
static const intptr_t kAllocatedThreshold
static const int kCapacityMask
#define ALLOCATE_EMPTY_FIXED_TYPED_ARRAY(Type, type, TYPE, ctype, size)
static void ScavengeObject(HeapObject **p, HeapObject *object)
static void SetWeakNext(JSFunction *function, Object *next)
void set_visitor_id(int visitor_id)
bool IsSweepingComplete()
void set_length(int value)
MUST_USE_RESULT MaybeObject * CopyConstantPoolArrayWithMap(ConstantPoolArray *src, Map *map)
bool SetUp(int reserved_semispace_size_, int max_semispace_size)
void IterateBuiltins(ObjectVisitor *v)
void CopyChars(sinkchar *dest, const sourcechar *src, int chars)
static VisitorDispatchTable< ScavengingCallback > * GetTable()
void set_ast_node_count(int count)
static void DeoptimizeMarkedCode(Isolate *isolate)
bool CanMoveObjectStart(HeapObject *object)
static int SizeFor(int length)
static const int kNoScriptId
intptr_t * memory_allocator_capacity
static const int kEmptyHashField
bool EnsureSweeperProgress(intptr_t size_in_bytes)
MUST_USE_RESULT MaybeObject * AllocateJSArrayStorage(JSArray *array, int length, int capacity, ArrayStorageAllocationMode mode=DONT_INITIALIZE_ARRAY_ELEMENTS)
static ConsString * cast(Object *obj)
virtual intptr_t SizeOfObjects()
double marking_time() const
int count_of_heap_ptr_entries()
static const int kSloppyArgumentsObjectSize
static FixedArrayBase * cast(Object *object)
void set_array_buffers_list(Object *object)
void set_flags(Flags flags)
static const int kInitialMaxFastElementArray
static bool CanTrack(InstanceType type)
MUST_USE_RESULT MaybeObject * AllocateStruct(InstanceType type)
void EnterDirectlyIntoStoreBuffer(Address addr)
void(* GCPrologueCallback)(Isolate *isolate, GCType type, GCCallbackFlags flags)
intptr_t * old_data_space_size
ParallelSweepingState parallel_sweeping()
static void VisitLiveObject(Heap *heap, JSArrayBuffer *array_buffer, WeakObjectRetainer *retainer, bool record_slots)
MUST_USE_RESULT MaybeObject * AllocateRaw(int object_size, Executability executable)
MUST_USE_RESULT MaybeObject * Initialize(Heap *heap, const char *to_string, Object *to_number, byte kind)
static void FatalProcessOutOfMemory(const char *location, bool take_snapshot=false)
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size)
SpaceIterator(Heap *heap)
MUST_USE_RESULT MaybeObject * AllocateMap(InstanceType instance_type, int instance_size, ElementsKind elements_kind=TERMINAL_FAST_ELEMENTS_KIND)
void UpdateMaximumCommitted()
const intptr_t kDoubleAlignment
intptr_t MaxExecutableSize()
static const int32_t kMaxOneByteCharCode
MUST_USE_RESULT MaybeObject * AllocateRaw(int size_in_bytes, AllocationSpace space, AllocationSpace retry_space)
UnreachableObjectsFilter(Heap *heap)
Object * allocation_sites_list()
void set_hash_field(uint32_t value)
void set_next_chunk(MemoryChunk *next)
void PrintShortHeapStatistics()
static JSObject * cast(Object *obj)
static const int kStrictArgumentsObjectSize
void AddGCPrologueCallback(v8::Isolate::GCPrologueCallback callback, GCType gc_type_filter, bool pass_isolate=true)
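AddGCPrologueCallback / AddGCEpilogueCallback pair with the Remove* entries above and accept the (Isolate*, GCType, GCCallbackFlags) callback typedefs also shown in this list. A hedged registration sketch, assuming `heap` is an internal Heap* and the hypothetical callback name OnGCStart:
  // Sketch only: a prologue callback matching v8::Isolate::GCPrologueCallback.
  static void OnGCStart(v8::Isolate* isolate, v8::GCType type,
                        v8::GCCallbackFlags flags) {
    // Record timing or log 'type'; keep this cheap and avoid calling back into V8.
  }
  heap->AddGCPrologueCallback(OnGCStart, v8::kGCTypeAll);  // pass_isolate defaults to true
  heap->RemoveGCPrologueCallback(OnGCStart);               // deregister when done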
static const int kHashMask
static int WeakNextOffset()
static AllocationSpace TargetSpaceId(InstanceType type)
uint32_t RoundUpToPowerOf2(uint32_t x)
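RoundUpToPowerOf2, RoundUp, and IsAligned (all listed here) are the usual alignment helpers. A small worked sketch of what they compute, using the classic bit-twiddling form as an assumed reference implementation rather than V8's exact code in utils.h:
  // Assumed reference implementation for illustration only.
  uint32_t RoundUpToPowerOf2Sketch(uint32_t x) {
    x -= 1;                                  // 37 -> 36
    x |= x >> 1;  x |= x >> 2;               // smear the highest set bit downwards
    x |= x >> 4;  x |= x >> 8;  x |= x >> 16;
    return x + 1;                            // 37 -> 64, 64 -> 64
  }
  // RoundUp(x, m) for power-of-two m is (x + m - 1) & ~(m - 1), so RoundUp(37, 8) == 40.
  // IsAligned(v, a) is (v & (a - 1)) == 0, so IsAligned(40, 8) holds.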
OldSpace * old_data_space()
MUST_USE_RESULT MaybeObject * AllocateRawTwoByteString(int length, PretenureFlag pretenure=NOT_TENURED)
static void AssertValidRange(Address from, Address to)
MarkCompactCollector * mark_compact_collector()
void set_raw_kind_specific_flags2(int value)
void EnableCodeFlushing(bool enable)
MUST_USE_RESULT MaybeObject * AllocatePrivateSymbol()
MUST_USE_RESULT MaybeObject * AllocateFunction(Map *function_map, SharedFunctionInfo *shared, Object *prototype, PretenureFlag pretenure=TENURED)
void UpdateNewSpaceReferencesInExternalStringTable(ExternalStringTableUpdaterCallback updater_func)
#define ALLOCATE_FIXED_TYPED_ARRAY_MAP(Type, type, TYPE, ctype, size)
bool InternalizeStringIfExists(String *str, String **result)
static uint32_t encode(int value)
static const int kAlignedSize
bool CommitFromSpaceIfNeeded()
bool IsFastDoubleElementsKind(ElementsKind kind)
AllocationSpace identity()
static void FreeArrayBuffer(Isolate *isolate, JSArrayBuffer *phantom_array_buffer)
void set_unused_property_fields(int value)
void CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags)
static const int kIsExtensible
MUST_USE_RESULT MaybeObject * AllocateStringFromTwoByte(Vector< const uc16 > str, PretenureFlag pretenure=NOT_TENURED)
static const int kEntriesPerBucket
void PrepareForScavenge()
static const int kPointerFieldsBeginOffset
MUST_USE_RESULT MaybeObject * CopyAndTenureFixedCOWArray(FixedArray *src)
DependentCode * LookupWeakObjectToCodeDependency(Object *obj)
int count_of_int64_entries()
void InitializeBody(Map *map, Object *pre_allocated_value, Object *filler_value)
MUST_USE_RESULT MaybeObject * AllocateAliasedArgumentsEntry(int slot)
intptr_t * property_cell_space_size
MemoryChunk * next_chunk() const
void set_requires_slow_elements()
static int WeakNextOffset()
static JSFunction * cast(Object *obj)