#define STRONG_ROOT_LIST(V) \
  V(Map, byte_array_map, ByteArrayMap) \
  V(Map, free_space_map, FreeSpaceMap) \
  V(Map, one_pointer_filler_map, OnePointerFillerMap) \
  V(Map, two_pointer_filler_map, TwoPointerFillerMap) \
  V(Smi, store_buffer_top, StoreBufferTop) \
  V(Oddball, undefined_value, UndefinedValue) \
  V(Oddball, the_hole_value, TheHoleValue) \
  V(Oddball, null_value, NullValue) \
  V(Oddball, true_value, TrueValue) \
  V(Oddball, false_value, FalseValue) \
  V(Map, global_property_cell_map, GlobalPropertyCellMap) \
  V(Map, shared_function_info_map, SharedFunctionInfoMap) \
  V(Map, meta_map, MetaMap) \
  V(Map, ascii_symbol_map, AsciiSymbolMap) \
  V(Map, ascii_string_map, AsciiStringMap) \
  V(Map, heap_number_map, HeapNumberMap) \
  V(Map, native_context_map, NativeContextMap) \
  V(Map, fixed_array_map, FixedArrayMap) \
  V(Map, code_map, CodeMap) \
  V(Map, scope_info_map, ScopeInfoMap) \
  V(Map, fixed_cow_array_map, FixedCOWArrayMap) \
  V(Map, fixed_double_array_map, FixedDoubleArrayMap) \
  V(Object, no_interceptor_result_sentinel, NoInterceptorResultSentinel) \
  V(Map, hash_table_map, HashTableMap) \
  V(FixedArray, empty_fixed_array, EmptyFixedArray) \
  V(ByteArray, empty_byte_array, EmptyByteArray) \
  V(String, empty_string, EmptyString) \
  V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray) \
  V(Smi, stack_limit, StackLimit) \
  V(Oddball, arguments_marker, ArgumentsMarker) \
  V(FixedArray, number_string_cache, NumberStringCache) \
  V(Object, instanceof_cache_function, InstanceofCacheFunction) \
  V(Object, instanceof_cache_map, InstanceofCacheMap) \
  V(Object, instanceof_cache_answer, InstanceofCacheAnswer) \
  V(FixedArray, single_character_string_cache, SingleCharacterStringCache) \
  V(FixedArray, string_split_cache, StringSplitCache) \
  V(FixedArray, regexp_multiple_cache, RegExpMultipleCache) \
  V(Object, termination_exception, TerminationException) \
  V(Smi, hash_seed, HashSeed) \
  V(Map, string_map, StringMap) \
  V(Map, symbol_map, SymbolMap) \
  V(Map, cons_string_map, ConsStringMap) \
  V(Map, cons_ascii_string_map, ConsAsciiStringMap) \
  V(Map, sliced_string_map, SlicedStringMap) \
  V(Map, sliced_ascii_string_map, SlicedAsciiStringMap) \
  V(Map, cons_symbol_map, ConsSymbolMap) \
  V(Map, cons_ascii_symbol_map, ConsAsciiSymbolMap) \
  V(Map, external_symbol_map, ExternalSymbolMap) \
  V(Map, external_symbol_with_ascii_data_map, ExternalSymbolWithAsciiDataMap) \
  V(Map, external_ascii_symbol_map, ExternalAsciiSymbolMap) \
  V(Map, external_string_map, ExternalStringMap) \
  V(Map, external_string_with_ascii_data_map, ExternalStringWithAsciiDataMap) \
  V(Map, external_ascii_string_map, ExternalAsciiStringMap) \
  V(Map, short_external_symbol_map, ShortExternalSymbolMap) \
  V(Map, \
    short_external_symbol_with_ascii_data_map, \
    ShortExternalSymbolWithAsciiDataMap) \
  V(Map, short_external_ascii_symbol_map, ShortExternalAsciiSymbolMap) \
  V(Map, short_external_string_map, ShortExternalStringMap) \
  V(Map, \
    short_external_string_with_ascii_data_map, \
    ShortExternalStringWithAsciiDataMap) \
  V(Map, short_external_ascii_string_map, ShortExternalAsciiStringMap) \
  V(Map, undetectable_string_map, UndetectableStringMap) \
  V(Map, undetectable_ascii_string_map, UndetectableAsciiStringMap) \
  V(Map, external_pixel_array_map, ExternalPixelArrayMap) \
  V(Map, external_byte_array_map, ExternalByteArrayMap) \
  V(Map, external_unsigned_byte_array_map, ExternalUnsignedByteArrayMap) \
  V(Map, external_short_array_map, ExternalShortArrayMap) \
  V(Map, external_unsigned_short_array_map, ExternalUnsignedShortArrayMap) \
  V(Map, external_int_array_map, ExternalIntArrayMap) \
  V(Map, external_unsigned_int_array_map, ExternalUnsignedIntArrayMap) \
  V(Map, external_float_array_map, ExternalFloatArrayMap) \
  V(Map, external_double_array_map, ExternalDoubleArrayMap) \
  V(Map, non_strict_arguments_elements_map, NonStrictArgumentsElementsMap) \
  V(Map, function_context_map, FunctionContextMap) \
  V(Map, catch_context_map, CatchContextMap) \
  V(Map, with_context_map, WithContextMap) \
  V(Map, block_context_map, BlockContextMap) \
  V(Map, module_context_map, ModuleContextMap) \
  V(Map, global_context_map, GlobalContextMap) \
  V(Map, oddball_map, OddballMap) \
  V(Map, message_object_map, JSMessageObjectMap) \
  V(Map, foreign_map, ForeignMap) \
  V(HeapNumber, nan_value, NanValue) \
  V(HeapNumber, infinity_value, InfinityValue) \
  V(HeapNumber, minus_zero_value, MinusZeroValue) \
  V(Map, neander_map, NeanderMap) \
  V(JSObject, message_listeners, MessageListeners) \
  V(Foreign, prototype_accessors, PrototypeAccessors) \
  V(UnseededNumberDictionary, code_stubs, CodeStubs) \
  V(UnseededNumberDictionary, non_monomorphic_cache, NonMonomorphicCache) \
  V(PolymorphicCodeCache, polymorphic_code_cache, PolymorphicCodeCache) \
  V(Code, js_entry_code, JsEntryCode) \
  V(Code, js_construct_entry_code, JsConstructEntryCode) \
  V(FixedArray, natives_source_cache, NativesSourceCache) \
  V(Object, last_script_id, LastScriptId) \
  V(Script, empty_script, EmptyScript) \
  V(Smi, real_stack_limit, RealStackLimit) \
  V(StringDictionary, intrinsic_function_names, IntrinsicFunctionNames) \
  V(Smi, arguments_adaptor_deopt_pc_offset, ArgumentsAdaptorDeoptPCOffset) \
  V(Smi, construct_stub_deopt_pc_offset, ConstructStubDeoptPCOffset) \
  V(Smi, getter_stub_deopt_pc_offset, GetterStubDeoptPCOffset) \
  V(Smi, setter_stub_deopt_pc_offset, SetterStubDeoptPCOffset)
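// STRONG_ROOT_LIST is an X-macro: each entry gives the C++ type of a root,
// the lower_case accessor name used on Heap, and the CamelCase name used to
// form the k<CamelName>RootIndex constant. Passing different definitions of V
// into the list generates the accessors, setters and root indices further
// down in this file from this single table. As a rough illustration (not
// literal output), the entry V(Map, heap_number_map, HeapNumberMap) yields
// Heap::heap_number_map() and the index kHeapNumberMapRootIndex.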
#define ROOT_LIST(V) \
  STRONG_ROOT_LIST(V) \
  V(SymbolTable, symbol_table, SymbolTable)
#define SYMBOL_LIST(V) \
  V(Array_symbol, "Array") \
  V(Object_symbol, "Object") \
  V(Proto_symbol, "__proto__") \
  V(StringImpl_symbol, "StringImpl") \
  V(arguments_symbol, "arguments") \
  V(Arguments_symbol, "Arguments") \
  V(call_symbol, "call") \
  V(apply_symbol, "apply") \
  V(caller_symbol, "caller") \
  V(boolean_symbol, "boolean") \
  V(Boolean_symbol, "Boolean") \
  V(callee_symbol, "callee") \
  V(constructor_symbol, "constructor") \
  V(code_symbol, ".code") \
  V(result_symbol, ".result") \
  V(dot_for_symbol, ".for.") \
  V(catch_var_symbol, ".catch-var") \
  V(empty_symbol, "") \
  V(eval_symbol, "eval") \
  V(function_symbol, "function") \
  V(length_symbol, "length") \
  V(module_symbol, "module") \
  V(name_symbol, "name") \
  V(native_symbol, "native") \
  V(null_symbol, "null") \
  V(number_symbol, "number") \
  V(Number_symbol, "Number") \
  V(nan_symbol, "NaN") \
  V(RegExp_symbol, "RegExp") \
  V(source_symbol, "source") \
  V(global_symbol, "global") \
  V(ignore_case_symbol, "ignoreCase") \
  V(multiline_symbol, "multiline") \
  V(input_symbol, "input") \
  V(index_symbol, "index") \
  V(last_index_symbol, "lastIndex") \
  V(object_symbol, "object") \
  V(prototype_symbol, "prototype") \
  V(string_symbol, "string") \
  V(String_symbol, "String") \
  V(Date_symbol, "Date") \
  V(this_symbol, "this") \
  V(to_string_symbol, "toString") \
  V(char_at_symbol, "CharAt") \
  V(undefined_symbol, "undefined") \
  V(value_of_symbol, "valueOf") \
  V(InitializeVarGlobal_symbol, "InitializeVarGlobal") \
  V(InitializeConstGlobal_symbol, "InitializeConstGlobal") \
  V(KeyedLoadElementMonomorphic_symbol, \
    "KeyedLoadElementMonomorphic") \
  V(KeyedStoreElementMonomorphic_symbol, \
    "KeyedStoreElementMonomorphic") \
  V(KeyedStoreAndGrowElementMonomorphic_symbol, \
    "KeyedStoreAndGrowElementMonomorphic") \
  V(stack_overflow_symbol, "kStackOverflowBoilerplate") \
  V(illegal_access_symbol, "illegal access") \
  V(out_of_memory_symbol, "out-of-memory") \
  V(illegal_execution_state_symbol, "illegal execution state") \
  V(get_symbol, "get") \
  V(set_symbol, "set") \
  V(function_class_symbol, "Function") \
  V(illegal_argument_symbol, "illegal argument") \
  V(MakeReferenceError_symbol, "MakeReferenceError") \
  V(MakeSyntaxError_symbol, "MakeSyntaxError") \
  V(MakeTypeError_symbol, "MakeTypeError") \
  V(invalid_lhs_in_assignment_symbol, "invalid_lhs_in_assignment") \
  V(invalid_lhs_in_for_in_symbol, "invalid_lhs_in_for_in") \
  V(invalid_lhs_in_postfix_op_symbol, "invalid_lhs_in_postfix_op") \
  V(invalid_lhs_in_prefix_op_symbol, "invalid_lhs_in_prefix_op") \
  V(illegal_return_symbol, "illegal_return") \
  V(illegal_break_symbol, "illegal_break") \
  V(illegal_continue_symbol, "illegal_continue") \
  V(unknown_label_symbol, "unknown_label") \
  V(redeclaration_symbol, "redeclaration") \
  V(failure_symbol, "<failure>") \
  V(space_symbol, " ") \
  V(exec_symbol, "exec") \
  V(zero_symbol, "0") \
  V(global_eval_symbol, "GlobalEval") \
  V(identity_hash_symbol, "v8::IdentityHash") \
  V(closure_symbol, "(closure)") \
  V(use_strict, "use strict") \
  V(anonymous_function_symbol, "(anonymous function)") \
  V(compare_ic_symbol, "==") \
  V(strict_compare_ic_symbol, "===") \
  V(infinity_symbol, "Infinity") \
  V(minus_infinity_symbol, "-Infinity") \
  V(hidden_stack_trace_symbol, "v8::hidden_stack_trace") \
  V(query_colon_symbol, "(?:)")
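// SYMBOL_LIST enumerates strings that are created and interned in the heap at
// startup, so frequently used property names, keywords and internal marker
// strings can be shared and compared by pointer. It is consumed by the same
// X-macro machinery as the root list: the first argument becomes the accessor
// name (e.g. length_symbol()), the second is the string's contents.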
class WeakObjectRetainer;

typedef String* (*ExternalStringTableUpdaterCallback)(Heap* heap,
                                                      Object** pointer);
      : store_buffer_(store_buffer) {

  Object*** start_of_current_page_;

class HeapDebugUtils;

    delete emergency_stack_;
    emergency_stack_ = NULL;

    return Page::FromAllocationTop(reinterpret_cast<Address>(rear_));

    limit_ = reinterpret_cast<intptr_t*>(limit);
    if (limit_ <= rear_) {

    return (front_ == rear_) &&
        (emergency_stack_ == NULL || emergency_stack_->length() == 0);

    if (front_ == rear_) {
      Entry e = emergency_stack_->RemoveLast();

    *target = reinterpret_cast<HeapObject*>(*(--front_));
    *size = static_cast<int>(*(--front_));
        reinterpret_cast<Address>(front_));

  static const int kEntrySizeInWords = 2;

    Entry(HeapObject* obj, int size) : obj_(obj), size_(size) { }

  List<Entry>* emergency_stack_;

  void RelocateQueueHead();
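  // The fragments above are from the scavenger's promotion queue. Roughly:
  // survivors of a young-generation collection are recorded as (object, size)
  // entries (kEntrySizeInWords == 2), the queue lives at the top of to-space
  // (hence Page::FromAllocationTop(rear_) and the limit_ <= rear_ check), and
  // an emergency_stack_ list plus RelocateQueueHead() handle the case where
  // the queue would otherwise collide with the allocation area.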
  inline void Iterate(ObjectVisitor* v);

  inline void Verify();

  inline void AddOldString(String* string);

  inline void ShrinkNewStrings(int position);

                     intptr_t max_old_gen_size,
                     intptr_t max_executable_size);

  bool SetUp(bool create_heap_objects);

    return 4 * reserved_semispace_size_ + max_old_generation_size_;

    return reinterpret_cast<Address>(&always_allocate_scope_depth_);

    return linear_allocation_scope_depth_ != 0;

      uint32_t hash_field);
      uint32_t hash_field);
      uint32_t hash_field);
      void* external_pointer,
      Object* callee, int length);
      bool immovable = false);
      const char* gc_reason,
      const char* collector_reason);
      const char* gc_reason = NULL);
    flush_monomorphic_ics_ = true;
    return ++contexts_disposed_;

    scan_on_scavenge_pages_++;
    if (FLAG_gc_verbose) {
      PrintF("Scan-on-scavenge pages: %d\n", scan_on_scavenge_pages_);

    scan_on_scavenge_pages_--;
    if (FLAG_gc_verbose) {
      PrintF("Scan-on-scavenge pages: %d\n", scan_on_scavenge_pages_);

  void GarbageCollectionGreedyCheck();

    ASSERT((callback == NULL) ^ (global_gc_prologue_callback_ == NULL));
    global_gc_prologue_callback_ = callback;

    ASSERT((callback == NULL) ^ (global_gc_epilogue_callback_ == NULL));
    global_gc_epilogue_callback_ = callback;
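  // The XOR in the asserts above enforces that a global GC callback is either
  // being installed while none is currently set, or being cleared (callback ==
  // NULL) while one is set: exactly one side of each comparison may be NULL.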
#define ROOT_ACCESSOR(type, name, camel_name) \
  type* name() { \
    return type::cast(roots_[k##camel_name##RootIndex]); \
  } \
  type* raw_unchecked_##name() { \
    return reinterpret_cast<type*>(roots_[k##camel_name##RootIndex]); \
  }
#undef ROOT_ACCESSOR

#define STRUCT_MAP_ACCESSOR(NAME, Name, name) \
  Map* name##_map() { \
    return Map::cast(roots_[k##Name##MapRootIndex]); \
  }
#undef STRUCT_MAP_ACCESSOR

#define SYMBOL_ACCESSOR(name, str) String* name() { \
    return String::cast(roots_[k##name##RootIndex]); \
  }
#undef SYMBOL_ACCESSOR
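// Illustrative expansion (a sketch, not literal preprocessor output): applied
// to the undefined_value root, ROOT_ACCESSOR produces approximately
//
//   Oddball* undefined_value() {
//     return Oddball::cast(roots_[kUndefinedValueRootIndex]);
//   }
//   Oddball* raw_unchecked_undefined_value() {
//     return reinterpret_cast<Oddball*>(roots_[kUndefinedValueRootIndex]);
//   }
//
// and SYMBOL_ACCESSOR similarly yields String* length_symbol() and friends,
// all reading out of the single roots_ array.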
    native_contexts_list_ = object;

    roots_[kCodeStubsRootIndex] = value;

    return gc_safe_size_of_old_object_;

    roots_[kNonMonomorphicCacheRootIndex] = value;

    roots_[kEmptyScriptRootIndex] = script;

    roots_[kStoreBufferTopRootIndex] = reinterpret_cast<Smi*>(top);

    return reinterpret_cast<Address*>(&roots_[kStoreBufferTopRootIndex]);

    return &native_contexts_list_;

  void PrintHandles();

  void OldPointerSpaceCheckStoreBuffer();
  void MapSpaceCheckStoreBuffer();
  void LargeObjectSpaceCheckStoreBuffer();

  void ReportHeapStatistics(const char* title);
  void ReportCodeStatistics(const char* title);

    return FLAG_verify_heap;

      const char* str, int length, int hash);

  bool IsAllocationAllowed() { return allocation_allowed_; }
  inline bool allow_allocation(bool enable);

  bool disallow_allocation_failure() {
    return disallow_allocation_failure_;

  void TracePathToObjectFrom(Object* target, Object* root);
  void TracePathToObject(Object* target);
  void TracePathToGlobal();

  static inline void ScavengeObject(HeapObject** p, HeapObject* object);

      intptr_t change_in_bytes);
    const int divisor = FLAG_stress_compaction ? 10 : 3;
    limit *= old_gen_limit_factor_;
    intptr_t halfway_to_the_max = (old_gen_size + max_old_generation_size_) / 2;
    return Min(limit, halfway_to_the_max);

    const int divisor = FLAG_stress_compaction ? 8 : 2;
    limit *= old_gen_limit_factor_;
    intptr_t halfway_to_the_max = (old_gen_size + max_old_generation_size_) / 2;
    return Min(limit, halfway_to_the_max);
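  // These two bodies compute the old-generation promotion and allocation
  // limits used to decide when the next full GC should happen: roughly the
  // old-generation size plus a fraction of itself (a smaller divisor, i.e. a
  // larger margin, when --stress-compaction is off), scaled by
  // old_gen_limit_factor_ and capped halfway between the current size and
  // max_old_generation_size_.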
#define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
#undef ROOT_INDEX_DECLARATION

#define SYMBOL_INDEX_DECLARATION(name, str) k##name##RootIndex,
#undef SYMBOL_INDEX_DECLARATION

#define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
#undef DECLARE_STRUCT_MAP
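// These helper macros populate the RootListIndex enum: every STRONG_ROOT_LIST
// entry contributes k<CamelName>RootIndex, every SYMBOL_LIST entry contributes
// k<name>RootIndex, and every struct map contributes k<Name>MapRootIndex, so
// the roots_ array read by the accessors above has one slot per list entry.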
      Object* number, bool check_number_string_cache = true);
      uint32_t value, bool check_number_string_cache = true);

    young_survivors_after_last_gc_ = survived;
    survived_since_last_expansion_ += survived;

    if (FLAG_gc_global) return true;

    if (FLAG_stress_compaction && (gc_count_ & 1) != 0) return true;

    intptr_t adjusted_promotion_limit =
        old_gen_promotion_limit_ - new_space_.Capacity();

    if (total_promoted >= adjusted_promotion_limit) return true;

    intptr_t adjusted_allocation_limit =
        old_gen_allocation_limit_ - new_space_.Capacity() / 5;
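  // Taken together these checks answer "is the next GC likely to be a full
  // collection?": always under --gc-global, on every other GC under
  // --stress-compaction, and otherwise (roughly) once promoted or allocated
  // old-generation space has crept to within about one new-space capacity of
  // the promotion limit, or within a fifth of a new-space capacity of the
  // allocation limit.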
    total_regexp_code_generated_ += size;

    return &mark_compact_collector_;

    return &store_buffer_;

    return &incremental_marking_;

    return sweeping_complete;

    return &external_string_table_;

    return sweep_generation_;

    if (global_gc_prologue_callback_ != NULL) global_gc_prologue_callback_();

    if (global_gc_epilogue_callback_ != NULL) global_gc_epilogue_callback_();

    scavenging_visitors_table_.GetVisitor(map)(map, slot, obj);

    uint32_t seed = static_cast<uint32_t>(hash_seed()->value());
    ASSERT(FLAG_randomize_hashes || seed == 0);

    set_arguments_adaptor_deopt_pc_offset(Smi::FromInt(pc_offset));

    set_construct_stub_deopt_pc_offset(Smi::FromInt(pc_offset));

    set_getter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));

    set_setter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));

    return global_ic_age_;

    return amount_of_external_allocated_memory_;

    object_counts_[type]++;
    object_sizes_[type] += size;

      if (FLAG_parallel_recompilation) {
        heap_->relocation_mutex_->Lock();

      if (FLAG_parallel_recompilation) {
        heap_->relocation_mutex_->Unlock();
  intptr_t code_range_size_;
  int reserved_semispace_size_;
  int max_semispace_size_;
  int initial_semispace_size_;
  intptr_t max_old_generation_size_;
  intptr_t max_executable_size_;

  int survived_since_last_expansion_;

  int sweep_generation_;

  int always_allocate_scope_depth_;
  int linear_allocation_scope_depth_;

  int contexts_disposed_;

  bool flush_monomorphic_ics_;

  int scan_on_scavenge_pages_;
#if defined(V8_TARGET_ARCH_X64)
  static const int kMaxObjectSizeInNewSpace = 1024*KB;
#else
  static const int kMaxObjectSizeInNewSpace = 512*KB;
#endif
  int gc_post_processing_depth_;

  intptr_t PromotedExternalMemorySize();

  unsigned int ms_count_;
  unsigned int gc_count_;

  static const int kRememberedUnmappedPages = 128;
  int remembered_unmapped_pages_index_;
  Address remembered_unmapped_pages_[kRememberedUnmappedPages];

  int unflattened_strings_length_;
#define ROOT_ACCESSOR(type, name, camel_name) \
  inline void set_##name(type* value) { \
    ASSERT(k##camel_name##RootIndex >= kOldSpaceRoots || !InNewSpace(value)); \
    roots_[k##camel_name##RootIndex] = value; \
  }
#undef ROOT_ACCESSOR
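  // The setter variant of ROOT_ACCESSOR asserts an invariant: root slots with
  // indices below kOldSpaceRoots are expected to hold only old-space objects,
  // so storing a new-space value into one of them trips the ASSERT.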
  bool allocation_allowed_;

  int allocation_timeout_;

  bool disallow_allocation_failure_;

  HeapDebugUtils* debug_utils_;

  bool new_space_high_promotion_mode_active_;

  intptr_t old_gen_promotion_limit_;

  intptr_t old_gen_allocation_limit_;

  int old_gen_limit_factor_;

  intptr_t size_of_old_gen_at_last_old_space_gc_;

  intptr_t external_allocation_limit_;

  intptr_t amount_of_external_allocated_memory_;

  intptr_t amount_of_external_allocated_memory_at_last_global_gc_;

  int old_gen_exhausted_;

  Object* native_contexts_list_;

  StoreBufferRebuilder store_buffer_rebuilder_;

  struct StringTypeTable {

  struct ConstantSymbolTable {
    const char* contents;

  struct StructTable {

  static const StringTypeTable string_type_table[];
  static const ConstantSymbolTable constant_symbol_table[];
  static const StructTable struct_table[];

  String* hidden_symbol_;
  struct GCPrologueCallbackPair {
        : callback(callback), gc_type(gc_type) {
    bool operator==(const GCPrologueCallbackPair& pair) const {
      return pair.callback == callback;

  List<GCPrologueCallbackPair> gc_prologue_callbacks_;

  struct GCEpilogueCallbackPair {
        : callback(callback), gc_type(gc_type) {
    bool operator==(const GCEpilogueCallbackPair& pair) const {
      return pair.callback == callback;

  List<GCEpilogueCallbackPair> gc_epilogue_callbacks_;
  static int GcSafeSizeOfOldObject(HeapObject* object);

  void MarkMapPointersAsEncoded(bool encoded) {
    gc_safe_size_of_old_object_ = &GcSafeSizeOfOldObject;

      const char** reason);

  inline void UpdateOldSpaceLimits();

  void InitializeJSObjectFromMap(JSObject* obj,
                                 FixedArray* properties,

  bool CreateInitialMaps();
  bool CreateInitialObjects();

  NO_INLINE(void CreateJSEntryStub());
  NO_INLINE(void CreateJSConstructEntryStub());

  void CreateFixedStubs();

  MaybeObject* CreateOddball(const char* to_string,

  static String* UpdateNewSpaceReferenceInExternalStringTableEntry(

  Address DoScavenge(ObjectVisitor* scavenge_visitor, Address new_space_front);
  static void ScavengeStoreBufferCallback(Heap* heap,

  void MarkCompactPrologue();

  void ReportStatisticsBeforeGC();
  void ReportStatisticsAfterGC();

  static void ScavengeObjectSlow(HeapObject** p, HeapObject* object);

  inline void InitializeFunction(JSFunction* function,
                                 SharedFunctionInfo* shared,

  double total_regexp_code_generated_;

  void AllocateFullSizeNumberStringCache();

  int FullSizeNumberStringCacheLength();

  void FlushNumberStringCache();

  void UpdateSurvivalRateTrend(int start_new_space_size);

  enum SurvivalRateTrend { INCREASING, STABLE, DECREASING, FLUCTUATING };

  static const int kYoungSurvivalRateHighThreshold = 90;
  static const int kYoungSurvivalRateLowThreshold = 10;
  static const int kYoungSurvivalRateAllowedDeviation = 15;

  int young_survivors_after_last_gc_;
  int high_survival_rate_period_length_;
  int low_survival_rate_period_length_;
  double survival_rate_;
  SurvivalRateTrend previous_survival_rate_trend_;
  SurvivalRateTrend survival_rate_trend_;
  void set_survival_rate_trend(SurvivalRateTrend survival_rate_trend) {
    ASSERT(survival_rate_trend != FLUCTUATING);
    previous_survival_rate_trend_ = survival_rate_trend_;
    survival_rate_trend_ = survival_rate_trend;
  }

  SurvivalRateTrend survival_rate_trend() {
    if (survival_rate_trend_ == STABLE) {
      return STABLE;
    } else if (previous_survival_rate_trend_ == STABLE) {
      return survival_rate_trend_;
    } else if (survival_rate_trend_ != previous_survival_rate_trend_) {
      return FLUCTUATING;
    } else {
      return survival_rate_trend_;
    }
  }
  bool IsStableOrIncreasingSurvivalTrend() {
    switch (survival_rate_trend()) {

  bool IsStableOrDecreasingSurvivalTrend() {
    switch (survival_rate_trend()) {

  bool IsIncreasingSurvivalTrend() {
    return survival_rate_trend() == INCREASING;
  }

  bool IsHighSurvivalRate() {
    return high_survival_rate_period_length_ > 0;
  }

  bool IsLowSurvivalRate() {
    return low_survival_rate_period_length_ > 0;
  }
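  // Survival-rate bookkeeping: after each scavenge the fraction of new space
  // that survived is classified against the thresholds above, the trend is
  // tracked as INCREASING, STABLE, DECREASING or FLUCTUATING, and the
  // Is*SurvivalTrend()/Is*SurvivalRate() predicates feed the heap's growth
  // and promotion heuristics.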
  void SelectScavengingVisitorsTable();

  void StartIdleRound() {
    mark_sweeps_since_idle_round_started_ = 0;
    ms_count_at_last_idle_notification_ = ms_count_;
  }

  void FinishIdleRound() {
    mark_sweeps_since_idle_round_started_ = kMaxMarkSweepsInIdleRound;
    scavenges_since_last_idle_round_ = 0;
  }

  bool EnoughGarbageSinceLastIdleRound() {
    return (scavenges_since_last_idle_round_ >= kIdleScavengeThreshold);
  }

  int TimeMarkSweepWouldTakeInMs() {
    static const int kMbPerMs = 2;

    return heap_size_mb / kMbPerMs;
  }
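  // Idle-notification bookkeeping: an idle round performs at most
  // kMaxMarkSweepsInIdleRound mark-sweeps, and a new round only starts once
  // kIdleScavengeThreshold scavenges have happened since the previous one;
  // TimeMarkSweepWouldTakeInMs() estimates a full mark-sweep at roughly
  // kMbPerMs (2 MB) of heap per millisecond.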
  bool IdleGlobalGC();

  void AdvanceIdleIncrementalMarking(intptr_t step_size);

  void ClearObjectStats(bool clear_last_time_stats = false);

  static const int kInitialSymbolTableSize = 2048;
  static const int kInitialEvalCacheSize = 64;
  static const int kInitialNumberStringCacheSize = 256;

  int total_gc_time_ms_;

  intptr_t max_alive_after_gc_;

  int min_in_mutator_;

  intptr_t alive_after_last_gc_;

  double last_gc_end_timestamp_;

  StoreBuffer store_buffer_;

  IncrementalMarking incremental_marking_;

  int number_idle_notifications_;
  unsigned int last_idle_notification_gc_count_;
  bool last_idle_notification_gc_count_init_;

  int mark_sweeps_since_idle_round_started_;
  int ms_count_at_last_idle_notification_;
  unsigned int gc_count_at_last_idle_gc_;
  int scavenges_since_last_idle_round_;

  static const int kMaxMarkSweepsInIdleRound = 7;
  static const int kIdleScavengeThreshold = 5;

  PromotionQueue promotion_queue_;

  ExternalStringTable external_string_table_;

  VisitorDispatchTable<ScavengingCallback> scavenging_visitors_table_;

  MemoryChunk* chunks_queued_for_free_;

  Mutex* relocation_mutex_;
  explicit HeapIterator(HeapObjectsFiltering filtering);

  HeapObjectsFiltering filtering_;

    for (int i = 0; i < kLength; ++i) {
      keys_[i].map = NULL;
      keys_[i].name = NULL;

  static inline int Hash(Map* map, String* name);

    return reinterpret_cast<Address>(&keys_);

  Address field_offsets_address() {
    return reinterpret_cast<Address>(&field_offsets_);

    if (!StringShape(name).IsSymbol()) return kAbsent;
    int index = Hash(source, name);
    Key& key = keys_[index];
    if ((key.source == source) && (key.name == name)) return results_[index];

    if (StringShape(name).IsSymbol()) {
      int index = Hash(source, name);
      Key& key = keys_[index];
      key.source = source;
      results_[index] = result;

    for (int i = 0; i < kLength; ++i) {
      keys_[i].source = NULL;
      keys_[i].name = NULL;
    uint32_t source_hash =
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(source))
            >> kPointerSizeLog2;
    uint32_t name_hash =
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name))
            >> kPointerSizeLog2;
    return (source_hash ^ name_hash) % kLength;

  static const int kLength = 64;

  int results_[kLength];
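  // The lookup cache hashes a (source, name) pair by XOR-ing the two pointers
  // (shifted right by kPointerSizeLog2 to drop the always-zero low bits) and
  // reducing modulo kLength, so a lookup is one array probe plus two pointer
  // comparisons, and a miss simply overwrites the slot on the next Update().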
      MC_UPDATE_NEW_TO_NEW_POINTERS,
      MC_UPDATE_ROOT_TO_NEW_POINTERS,
      MC_UPDATE_OLD_TO_NEW_POINTERS,
      MC_UPDATE_POINTERS_TO_EVACUATED,
      MC_UPDATE_POINTERS_BETWEEN_EVACUATED,
      MC_UPDATE_MISC_POINTERS,

    Scope(GCTracer* tracer, ScopeId scope)

      ASSERT(scope_ < kNumberOfScopes);

  explicit GCTracer(Heap* heap,
                    const char* gc_reason,
                    const char* collector_reason);

    promoted_objects_size_ += object_size;

  const char* CollectorString();

  inline double SizeOfHeapObjects();

  intptr_t start_object_size_;

  intptr_t start_memory_size_;

  unsigned int gc_count_;

  double scopes_[Scope::kNumberOfScopes];

  intptr_t in_free_list_or_wasted_before_gc_;

  intptr_t allocated_since_last_gc_;

  double spent_in_mutator_;

  intptr_t promoted_objects_size_;

  double longest_step_;
  int steps_count_since_last_gc_;
  double steps_took_since_last_gc_;

  const char* gc_reason_;
  const char* collector_reason_;
  static const int kArrayEntriesPerCacheEntry = 4;
  static const int kStringOffset = 0;
  static const int kPatternOffset = 1;
  static const int kArrayOffset = 2;

  static const int kCacheSize = 512;

  explicit SubCache(Type t);

  inline double Calculate(double input);

    uint32_t integers[2];

  inline static int Hash(const Converter& c) {
    uint32_t hash = (c.integers[0] ^ c.integers[1]);
    hash ^= static_cast<int32_t>(hash) >> 16;
    hash ^= static_cast<int32_t>(hash) >> 8;
    return (hash & (kCacheSize - 1));

  Element elements_[kCacheSize];
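  // The transcendental cache memoizes expensive math functions: the 64-bit
  // input double is reinterpreted as two uint32 halves (the Converter union),
  // hashed by XOR-ing and folding them, and masked with kCacheSize - 1 to
  // index a fixed-size table of cached (input, output) elements.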
  friend class TranscendentalCache;

  TranscendentalCache() {

  inline Address cache_array_address();

    return (object->map_word().ToRawValue() & kNotMarkedBit) == 0;

    uintptr_t map_word = object->map_word().ToRawValue();
    object->set_map_word(MapWord::FromRawValue(map_word | kNotMarkedBit));

    uintptr_t map_word = object->map_word().ToRawValue();
    object->set_map_word(MapWord::FromRawValue(map_word & ~kNotMarkedBit));

    uintptr_t map_word = object->map_word().ToRawValue();
    return MapWord::FromRawValue(map_word | kNotMarkedBit).ToMap();

  static const uintptr_t kNotMarkedBit = 0x1;
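  // These helpers implement intrusive marking by borrowing the low bit of an
  // object's map word: kNotMarkedBit set means "not marked" (IsMarked() tests
  // for the bit being clear), marking clears the bit, and MapOfMarkedObject()
  // recovers the real map by OR-ing the bit back in before decoding the word.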
#if defined(DEBUG) || defined(LIVE_OBJECT_LIST)

class PathTracer : public ObjectVisitor {

  PathTracer(Object* search_target,
             WhatToFind what_to_find,
             VisitMode visit_mode)
      : search_target_(search_target),
        found_target_(false),
        found_target_in_trace_(false),
        what_to_find_(what_to_find),
        visit_mode_(visit_mode),
  virtual void VisitPointers(Object** start, Object** end);

  void TracePathFrom(Object** root);

  bool found() const { return found_target_; }

  static Object* const kAnyGlobalObject;

  class UnmarkVisitor;

  void MarkRecursively(Object** p, MarkVisitor* mark_visitor);
  void UnmarkRecursively(Object** p, UnmarkVisitor* unmark_visitor);
  virtual void ProcessResults();

  static const int kMarkTag = 2;

  bool found_target_in_trace_;
  WhatToFind what_to_find_;

  List<Object*> object_stack_;

  AssertNoAllocation no_alloc;

#endif  // DEBUG || LIVE_OBJECT_LIST

#endif  // V8_HEAP_H_