#define STRONG_ROOT_LIST(V) \
  V(Map, byte_array_map, ByteArrayMap) \
  V(Map, free_space_map, FreeSpaceMap) \
  V(Map, one_pointer_filler_map, OnePointerFillerMap) \
  V(Map, two_pointer_filler_map, TwoPointerFillerMap) \
  V(Smi, store_buffer_top, StoreBufferTop) \
  V(Oddball, undefined_value, UndefinedValue) \
  V(Oddball, the_hole_value, TheHoleValue) \
  V(Oddball, null_value, NullValue) \
  V(Oddball, true_value, TrueValue) \
  V(Oddball, false_value, FalseValue) \
  V(Oddball, uninitialized_value, UninitializedValue) \
  V(Map, cell_map, CellMap) \
  V(Map, global_property_cell_map, GlobalPropertyCellMap) \
  V(Map, shared_function_info_map, SharedFunctionInfoMap) \
  V(Map, meta_map, MetaMap) \
  V(Map, heap_number_map, HeapNumberMap) \
  V(Map, native_context_map, NativeContextMap) \
  V(Map, fixed_array_map, FixedArrayMap) \
  V(Map, code_map, CodeMap) \
  V(Map, scope_info_map, ScopeInfoMap) \
  V(Map, fixed_cow_array_map, FixedCOWArrayMap) \
  V(Map, fixed_double_array_map, FixedDoubleArrayMap) \
  V(Map, constant_pool_array_map, ConstantPoolArrayMap) \
  V(Object, no_interceptor_result_sentinel, NoInterceptorResultSentinel) \
  V(Map, hash_table_map, HashTableMap) \
  V(FixedArray, empty_fixed_array, EmptyFixedArray) \
  V(ByteArray, empty_byte_array, EmptyByteArray) \
  V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray) \
  V(ConstantPoolArray, empty_constant_pool_array, EmptyConstantPoolArray) \
  V(Oddball, arguments_marker, ArgumentsMarker) \
  V(FixedArray, number_string_cache, NumberStringCache) \
  V(Object, instanceof_cache_function, InstanceofCacheFunction) \
  V(Object, instanceof_cache_map, InstanceofCacheMap) \
  V(Object, instanceof_cache_answer, InstanceofCacheAnswer) \
  V(FixedArray, single_character_string_cache, SingleCharacterStringCache) \
  V(FixedArray, string_split_cache, StringSplitCache) \
  V(FixedArray, regexp_multiple_cache, RegExpMultipleCache) \
  V(Object, termination_exception, TerminationException) \
  V(Smi, hash_seed, HashSeed) \
  V(Map, symbol_map, SymbolMap) \
  V(Map, string_map, StringMap) \
  V(Map, ascii_string_map, AsciiStringMap) \
  V(Map, cons_string_map, ConsStringMap) \
  V(Map, cons_ascii_string_map, ConsAsciiStringMap) \
  V(Map, sliced_string_map, SlicedStringMap) \
  V(Map, sliced_ascii_string_map, SlicedAsciiStringMap) \
  V(Map, external_string_map, ExternalStringMap) \
  V(Map, external_string_with_one_byte_data_map, \
    ExternalStringWithOneByteDataMap) \
  V(Map, external_ascii_string_map, ExternalAsciiStringMap) \
  V(Map, short_external_string_map, ShortExternalStringMap) \
  V(Map, short_external_string_with_one_byte_data_map, \
    ShortExternalStringWithOneByteDataMap) \
  V(Map, internalized_string_map, InternalizedStringMap) \
  V(Map, ascii_internalized_string_map, AsciiInternalizedStringMap) \
  V(Map, cons_internalized_string_map, ConsInternalizedStringMap) \
  V(Map, cons_ascii_internalized_string_map, ConsAsciiInternalizedStringMap) \
  V(Map, external_internalized_string_map, ExternalInternalizedStringMap) \
  V(Map, external_internalized_string_with_one_byte_data_map, \
    ExternalInternalizedStringWithOneByteDataMap) \
  V(Map, external_ascii_internalized_string_map, \
    ExternalAsciiInternalizedStringMap) \
  V(Map, short_external_internalized_string_map, \
    ShortExternalInternalizedStringMap) \
  V(Map, short_external_internalized_string_with_one_byte_data_map, \
    ShortExternalInternalizedStringWithOneByteDataMap) \
  V(Map, short_external_ascii_internalized_string_map, \
    ShortExternalAsciiInternalizedStringMap) \
  V(Map, short_external_ascii_string_map, ShortExternalAsciiStringMap) \
  V(Map, undetectable_string_map, UndetectableStringMap) \
  V(Map, undetectable_ascii_string_map, UndetectableAsciiStringMap) \
  V(Map, external_int8_array_map, ExternalInt8ArrayMap) \
  V(Map, external_uint8_array_map, ExternalUint8ArrayMap) \
  V(Map, external_int16_array_map, ExternalInt16ArrayMap) \
  V(Map, external_uint16_array_map, ExternalUint16ArrayMap) \
  V(Map, external_int32_array_map, ExternalInt32ArrayMap) \
  V(Map, external_uint32_array_map, ExternalUint32ArrayMap) \
  V(Map, external_float32_array_map, ExternalFloat32ArrayMap) \
  V(Map, external_float64_array_map, ExternalFloat64ArrayMap) \
  V(Map, external_uint8_clamped_array_map, ExternalUint8ClampedArrayMap) \
  V(ExternalArray, empty_external_int8_array, EmptyExternalInt8Array) \
  V(ExternalArray, empty_external_uint8_array, EmptyExternalUint8Array) \
  V(ExternalArray, empty_external_int16_array, EmptyExternalInt16Array) \
  V(ExternalArray, empty_external_uint16_array, EmptyExternalUint16Array) \
  V(ExternalArray, empty_external_int32_array, EmptyExternalInt32Array) \
  V(ExternalArray, empty_external_uint32_array, EmptyExternalUint32Array) \
  V(ExternalArray, empty_external_float32_array, EmptyExternalFloat32Array) \
  V(ExternalArray, empty_external_float64_array, EmptyExternalFloat64Array) \
  V(ExternalArray, empty_external_uint8_clamped_array, \
    EmptyExternalUint8ClampedArray) \
  V(Map, fixed_uint8_array_map, FixedUint8ArrayMap) \
  V(Map, fixed_int8_array_map, FixedInt8ArrayMap) \
  V(Map, fixed_uint16_array_map, FixedUint16ArrayMap) \
  V(Map, fixed_int16_array_map, FixedInt16ArrayMap) \
  V(Map, fixed_uint32_array_map, FixedUint32ArrayMap) \
  V(Map, fixed_int32_array_map, FixedInt32ArrayMap) \
  V(Map, fixed_float32_array_map, FixedFloat32ArrayMap) \
  V(Map, fixed_float64_array_map, FixedFloat64ArrayMap) \
  V(Map, fixed_uint8_clamped_array_map, FixedUint8ClampedArrayMap) \
  V(FixedTypedArrayBase, empty_fixed_uint8_array, EmptyFixedUint8Array) \
  V(FixedTypedArrayBase, empty_fixed_int8_array, EmptyFixedInt8Array) \
  V(FixedTypedArrayBase, empty_fixed_uint16_array, EmptyFixedUint16Array) \
  V(FixedTypedArrayBase, empty_fixed_int16_array, EmptyFixedInt16Array) \
  V(FixedTypedArrayBase, empty_fixed_uint32_array, EmptyFixedUint32Array) \
  V(FixedTypedArrayBase, empty_fixed_int32_array, EmptyFixedInt32Array) \
  V(FixedTypedArrayBase, empty_fixed_float32_array, EmptyFixedFloat32Array) \
  V(FixedTypedArrayBase, empty_fixed_float64_array, EmptyFixedFloat64Array) \
  V(FixedTypedArrayBase, empty_fixed_uint8_clamped_array, \
    EmptyFixedUint8ClampedArray) \
  V(Map, sloppy_arguments_elements_map, SloppyArgumentsElementsMap) \
  V(Map, function_context_map, FunctionContextMap) \
  V(Map, catch_context_map, CatchContextMap) \
  V(Map, with_context_map, WithContextMap) \
  V(Map, block_context_map, BlockContextMap) \
  V(Map, module_context_map, ModuleContextMap) \
  V(Map, global_context_map, GlobalContextMap) \
  V(Map, oddball_map, OddballMap) \
  V(Map, message_object_map, JSMessageObjectMap) \
  V(Map, foreign_map, ForeignMap) \
  V(HeapNumber, nan_value, NanValue) \
  V(HeapNumber, infinity_value, InfinityValue) \
  V(HeapNumber, minus_zero_value, MinusZeroValue) \
  V(Map, neander_map, NeanderMap) \
  V(JSObject, message_listeners, MessageListeners) \
  V(UnseededNumberDictionary, code_stubs, CodeStubs) \
  V(UnseededNumberDictionary, non_monomorphic_cache, NonMonomorphicCache) \
  V(PolymorphicCodeCache, polymorphic_code_cache, PolymorphicCodeCache) \
  V(Code, js_entry_code, JsEntryCode) \
  V(Code, js_construct_entry_code, JsConstructEntryCode) \
  V(FixedArray, natives_source_cache, NativesSourceCache) \
  V(Script, empty_script, EmptyScript) \
  V(NameDictionary, intrinsic_function_names, IntrinsicFunctionNames) \
  V(Cell, undefined_cell, UndefinedCell) \
  V(JSObject, observation_state, ObservationState) \
  V(Map, external_map, ExternalMap) \
  V(Object, symbol_registry, SymbolRegistry) \
  V(Symbol, frozen_symbol, FrozenSymbol) \
  V(Symbol, nonexistent_symbol, NonExistentSymbol) \
  V(Symbol, elements_transition_symbol, ElementsTransitionSymbol) \
  V(SeededNumberDictionary, empty_slow_element_dictionary, \
    EmptySlowElementDictionary) \
  V(Symbol, observed_symbol, ObservedSymbol) \
  V(Symbol, uninitialized_symbol, UninitializedSymbol) \
  V(Symbol, megamorphic_symbol, MegamorphicSymbol) \
  V(FixedArray, materialized_objects, MaterializedObjects) \
  V(FixedArray, allocation_sites_scratchpad, AllocationSitesScratchpad) \
  V(JSObject, microtask_state, MicrotaskState)
#define SMI_ROOT_LIST(V) \
  V(Smi, stack_limit, StackLimit) \
  V(Smi, real_stack_limit, RealStackLimit) \
  V(Smi, last_script_id, LastScriptId) \
  V(Smi, arguments_adaptor_deopt_pc_offset, ArgumentsAdaptorDeoptPCOffset) \
  V(Smi, construct_stub_deopt_pc_offset, ConstructStubDeoptPCOffset) \
  V(Smi, getter_stub_deopt_pc_offset, GetterStubDeoptPCOffset) \
  V(Smi, setter_stub_deopt_pc_offset, SetterStubDeoptPCOffset)
#define ROOT_LIST(V) \
  STRONG_ROOT_LIST(V) \
  SMI_ROOT_LIST(V) \
  V(StringTable, string_table, StringTable)
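
// Illustrative sketch of how these X-macro lists are consumed: a client
// defines V, instantiates the list, and undefines V again. The enum below
// is a hypothetical example of the pattern (the real consumers, such as
// ROOT_INDEX_DECLARATION and the root accessor macros, appear later in
// this header):
//
//   #define EXAMPLE_INDEX(type, name, camel_name) k##camel_name##RootIndex,
//   enum ExampleRootIndex {
//     ROOT_LIST(EXAMPLE_INDEX)  // kByteArrayMapRootIndex, ...
//     kExampleRootIndexCount
//   };
//   #undef EXAMPLE_INDEX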
#define IMMORTAL_IMMOVABLE_ROOT_LIST(V) \
  V(one_pointer_filler_map) \
  V(two_pointer_filler_map) \
  V(uninitialized_value) \
  V(global_property_cell_map) \
  V(shared_function_info_map) \
  V(native_context_map) \
  V(fixed_cow_array_map) \
  V(fixed_double_array_map) \
  V(constant_pool_array_map) \
  V(no_interceptor_result_sentinel) \
  V(empty_fixed_array) \
  V(empty_byte_array) \
  V(empty_descriptor_array) \
  V(empty_constant_pool_array) \
  V(arguments_marker) \
  V(sloppy_arguments_elements_map) \
  V(function_context_map) \
  V(catch_context_map) \
  V(with_context_map) \
  V(block_context_map) \
  V(module_context_map) \
  V(global_context_map) \
  V(message_object_map)
#define INTERNALIZED_STRING_LIST(V) \
  V(Array_string, "Array") \
  V(Object_string, "Object") \
  V(proto_string, "__proto__") \
  V(arguments_string, "arguments") \
  V(Arguments_string, "Arguments") \
  V(call_string, "call") \
  V(apply_string, "apply") \
  V(caller_string, "caller") \
  V(boolean_string, "boolean") \
  V(Boolean_string, "Boolean") \
  V(callee_string, "callee") \
  V(constructor_string, "constructor") \
  V(dot_result_string, ".result") \
  V(dot_for_string, ".for.") \
  V(dot_iterator_string, ".iterator") \
  V(dot_generator_object_string, ".generator_object") \
  V(eval_string, "eval") \
  V(empty_string, "") \
  V(function_string, "function") \
  V(length_string, "length") \
  V(module_string, "module") \
  V(name_string, "name") \
  V(native_string, "native") \
  V(null_string, "null") \
  V(number_string, "number") \
  V(Number_string, "Number") \
  V(nan_string, "NaN") \
  V(RegExp_string, "RegExp") \
  V(source_string, "source") \
  V(global_string, "global") \
  V(ignore_case_string, "ignoreCase") \
  V(multiline_string, "multiline") \
  V(input_string, "input") \
  V(index_string, "index") \
  V(last_index_string, "lastIndex") \
  V(object_string, "object") \
  V(literals_string, "literals") \
  V(prototype_string, "prototype") \
  V(string_string, "string") \
  V(String_string, "String") \
  V(symbol_string, "symbol") \
  V(Symbol_string, "Symbol") \
  V(for_string, "for") \
  V(for_api_string, "for_api") \
  V(for_intern_string, "for_intern") \
  V(private_api_string, "private_api") \
  V(private_intern_string, "private_intern") \
  V(Date_string, "Date") \
  V(this_string, "this") \
  V(to_string_string, "toString") \
  V(char_at_string, "CharAt") \
  V(undefined_string, "undefined") \
  V(value_of_string, "valueOf") \
  V(stack_string, "stack") \
  V(toJSON_string, "toJSON") \
  V(InitializeVarGlobal_string, "InitializeVarGlobal") \
  V(InitializeConstGlobal_string, "InitializeConstGlobal") \
  V(KeyedLoadElementMonomorphic_string, "KeyedLoadElementMonomorphic") \
  V(KeyedStoreElementMonomorphic_string, "KeyedStoreElementMonomorphic") \
  V(stack_overflow_string, "kStackOverflowBoilerplate") \
  V(illegal_access_string, "illegal access") \
  V(illegal_execution_state_string, "illegal execution state") \
  V(get_string, "get") \
  V(set_string, "set") \
  V(map_field_string, "%map") \
  V(elements_field_string, "%elements") \
  V(length_field_string, "%length") \
  V(cell_value_string, "%cell_value") \
  V(function_class_string, "Function") \
  V(illegal_argument_string, "illegal argument") \
  V(MakeReferenceError_string, "MakeReferenceError") \
  V(MakeSyntaxError_string, "MakeSyntaxError") \
  V(MakeTypeError_string, "MakeTypeError") \
  V(illegal_return_string, "illegal_return") \
  V(illegal_break_string, "illegal_break") \
  V(illegal_continue_string, "illegal_continue") \
  V(unknown_label_string, "unknown_label") \
  V(redeclaration_string, "redeclaration") \
  V(space_string, " ") \
  V(exec_string, "exec") \
  V(zero_string, "0") \
  V(global_eval_string, "GlobalEval") \
  V(identity_hash_string, "v8::IdentityHash") \
  V(closure_string, "(closure)") \
  V(use_strict_string, "use strict") \
  V(anonymous_function_string, "(anonymous function)") \
  V(compare_ic_string, "==") \
  V(strict_compare_ic_string, "===") \
  V(infinity_string, "Infinity") \
  V(minus_infinity_string, "-Infinity") \
  V(hidden_stack_trace_string, "v8::hidden_stack_trace") \
  V(query_colon_string, "(?:)") \
  V(Generator_string, "Generator") \
  V(throw_string, "throw") \
  V(done_string, "done") \
  V(value_string, "value") \
  V(next_string, "next") \
  V(byte_length_string, "byteLength") \
  V(byte_offset_string, "byteOffset") \
  V(buffer_string, "buffer")
class WeakObjectRetainer;

typedef String* (*ExternalStringTableUpdaterCallback)(Heap* heap,
                                                      Object** pointer);
explicit StoreBufferRebuilder(StoreBuffer* store_buffer)
    : store_buffer_(store_buffer) {}

Object*** start_of_current_page_;
delete emergency_stack_;
emergency_stack_ = NULL;

return Page::FromAllocationTop(reinterpret_cast<Address>(rear_));

limit_ = reinterpret_cast<intptr_t*>(limit);
if (limit_ <= rear_) {

return (front_ == rear_) &&
       (emergency_stack_ == NULL || emergency_stack_->length() == 0);

if (front_ == rear_) {
  Entry e = emergency_stack_->RemoveLast();

*target = reinterpret_cast<HeapObject*>(*(--front_));
*size = static_cast<int>(*(--front_));
reinterpret_cast<Address>(front_));

static const int kEntrySizeInWords = 2;

List<Entry>* emergency_stack_;

void RelocateQueueHead();
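
// A sketch of the implied queue layout (an assumption drawn from the
// fragments above, not code from the original header): each entry occupies
// kEntrySizeInWords words, an (object, size) pair written downwards from
// rear_, so remove() above pops the two words in the same order a
// hypothetical insert() would have pushed them:
//
//   void insert(HeapObject* target, int size) {  // hypothetical mirror
//     *(--rear_) = reinterpret_cast<intptr_t>(target);
//     *(--rear_) = size;
//   }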
inline void Iterate(ObjectVisitor* v);

inline void Verify();

inline void AddOldString(String* string);

inline void ShrinkNewStrings(int position);
bool ConfigureHeap(int max_semispace_size,
                   intptr_t max_old_gen_size,
                   intptr_t max_executable_size);
return 4 * reserved_semispace_size_ + max_old_generation_size_;

return property_cell_space_;

return reinterpret_cast<Address>(&always_allocate_scope_depth_);

return linear_allocation_scope_depth_ != 0;

bool alloc_props = true,

uint32_t hash_field);

uint32_t hash_field);

uint32_t hash_field);

T t, int chars, uint32_t hash_field);

template<bool is_one_byte, typename T>
T t, int chars, uint32_t hash_field);
void* external_pointer,

int number_of_int64_entries,
int number_of_code_ptr_entries,
int number_of_heap_ptr_entries,
int number_of_int32_entries);

Object* callee, int length);

bool immovable = false,
bool crankshafted = false,

const char* gc_reason = NULL,

const char* gc_reason = NULL,
scan_on_scavenge_pages_++;
if (FLAG_gc_verbose) {
  PrintF("Scan-on-scavenge pages: %d\n", scan_on_scavenge_pages_);
}

scan_on_scavenge_pages_--;
if (FLAG_gc_verbose) {
  PrintF("Scan-on-scavenge pages: %d\n", scan_on_scavenge_pages_);
}
void GarbageCollectionGreedyCheck();

bool pass_isolate = true);

bool pass_isolate = true);
#define ROOT_ACCESSOR(type, name, camel_name) \
  type* name() { \
    return type::cast(roots_[k##camel_name##RootIndex]); \
  } \
  type* raw_unchecked_##name() { \
    return reinterpret_cast<type*>(roots_[k##camel_name##RootIndex]); \
  }
  ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR
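
// For instance, instantiating ROOT_LIST with ROOT_ACCESSOR turns the entry
// V(Map, heap_number_map, HeapNumberMap) into (sketch of the expansion):
//
//   Map* heap_number_map() {
//     return Map::cast(roots_[kHeapNumberMapRootIndex]);
//   }
//   Map* raw_unchecked_heap_number_map() {
//     return reinterpret_cast<Map*>(roots_[kHeapNumberMapRootIndex]);
//   }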
#define STRUCT_MAP_ACCESSOR(NAME, Name, name) \
  Map* name##_map() { \
    return Map::cast(roots_[k##Name##MapRootIndex]); \
  }
  STRUCT_LIST(STRUCT_MAP_ACCESSOR)
#undef STRUCT_MAP_ACCESSOR
#define STRING_ACCESSOR(name, str) \
  String* name() { \
    return String::cast(roots_[k##name##RootIndex]); \
  }
  INTERNALIZED_STRING_LIST(STRING_ACCESSOR)
#undef STRING_ACCESSOR
native_contexts_list_ = object;

array_buffers_list_ = object;

allocation_sites_list_ = object;

roots_[kCodeStubsRootIndex] = value;

return gc_safe_size_of_old_object_;

roots_[kNonMonomorphicCacheRootIndex] = value;

roots_[kEmptyScriptRootIndex] = script;

roots_[kStoreBufferTopRootIndex] = reinterpret_cast<Smi*>(top);

roots_[kMaterializedObjectsRootIndex] = objects;

return reinterpret_cast<Address*>(&roots_[kStoreBufferTopRootIndex]);

return &native_contexts_list_;
bool weak_embedded_objects_verification_enabled() {
  return no_weak_object_verification_scope_depth_ == 0;
}

void PrintHandles();

void OldPointerSpaceCheckStoreBuffer();
void MapSpaceCheckStoreBuffer();
void LargeObjectSpaceCheckStoreBuffer();

void ReportHeapStatistics(const char* title);
void ReportCodeStatistics(const char* title);

return FLAG_verify_heap;
INLINE(void RecordWrites(Address address, int start, int len));

void set_allocation_timeout(int timeout) {
  allocation_timeout_ = timeout;
}

void TracePathToObjectFrom(Object* target, Object* root);
void TracePathToObject(Object* target);
void TracePathToGlobal();
static inline void ScavengeObject(HeapObject** p, HeapObject* object);

int64_t change_in_bytes);

new_space_high_promotion_mode_active_ = mode;

return FLAG_pretenuring && new_space_high_promotion_mode_active_;

return reinterpret_cast<Address*>(&new_space_high_promotion_mode_active_);

if (total < 0) return 0;
return static_cast<intptr_t>(total);
intptr_t OldGenerationAllocationLimit(intptr_t old_gen_size) {
  const int divisor = FLAG_stress_compaction ? 10 : 1;
  intptr_t limit = Max(old_gen_size + old_gen_size / divisor,
                       kMinimumOldGenerationAllocationLimit);
  intptr_t halfway_to_the_max = (old_gen_size + max_old_generation_size_) / 2;
  return Min(limit, halfway_to_the_max);
}
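
// Worked example (illustrative numbers, not from the original source): with
// old_gen_size = 128 MB and FLAG_stress_compaction off, the divisor is 1,
// so the candidate limit is 128 + 128/1 = 256 MB; under stress compaction
// (divisor 10) it is only ~141 MB, forcing far more frequent old-space GCs.
// Either candidate is then capped at the halfway point between the current
// old generation size and max_old_generation_size_.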
#define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
  STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION)
#undef ROOT_INDEX_DECLARATION

#define STRING_INDEX_DECLARATION(name, str) k##name##RootIndex,
  INTERNALIZED_STRING_LIST(STRING_INDEX_DECLARATION)
#undef STRING_INDEX_DECLARATION

#define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
  STRUCT_LIST(DECLARE_STRUCT_MAP)
#undef DECLARE_STRUCT_MAP

#define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
  SMI_ROOT_LIST(ROOT_INDEX_DECLARATION)
#undef ROOT_INDEX_DECLARATION
Object* number, bool check_number_string_cache = true);

uint32_t value, bool check_number_string_cache = true);
young_survivors_after_last_gc_ = survived;
survived_since_last_expansion_ += survived;

if (FLAG_gc_global) return true;

if (FLAG_stress_compaction && (gc_count_ & 1) != 0) return true;

intptr_t adjusted_allocation_limit =
    old_generation_allocation_limit_ - new_space_.Capacity();
total_regexp_code_generated_ += size;

if (is_crankshafted) {
  crankshaft_codegen_bytes_generated_ += size;
} else {
  full_codegen_bytes_generated_ += size;
}
return marking_time_;

return sweeping_time_;

return &mark_compact_collector_;

return &store_buffer_;

return &incremental_marking_;

return sweeping_complete;

return &external_string_table_;

return sweep_generation_;
uint32_t seed = static_cast<uint32_t>(hash_seed()->value());
ASSERT(FLAG_randomize_hashes || seed == 0);

set_arguments_adaptor_deopt_pc_offset(Smi::FromInt(pc_offset));

set_construct_stub_deopt_pc_offset(Smi::FromInt(pc_offset));

set_getter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));

set_setter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
return global_ic_age_;

return amount_of_external_allocated_memory_;

object_counts_[type]++;
object_sizes_[type] += size;

int code_age_index =

object_counts_[code_sub_type_index]++;
object_sizes_[code_sub_type_index] += size;
object_counts_[code_age_index]++;
object_sizes_[code_age_index] += size;
heap_->relocation_mutex_.Lock();

heap_->relocation_mutex_.Unlock();

set_weak_object_to_code_table(undefined_value());

bool take_snapshot = false);
intptr_t code_range_size_;
int reserved_semispace_size_;
int max_semispace_size_;
int initial_semispace_size_;
intptr_t max_old_generation_size_;
intptr_t max_executable_size_;
intptr_t maximum_committed_;

int survived_since_last_expansion_;

int sweep_generation_;

int always_allocate_scope_depth_;
int linear_allocation_scope_depth_;

int contexts_disposed_;

bool flush_monomorphic_ics_;

int scan_on_scavenge_pages_;

int gc_post_processing_depth_;

int64_t PromotedExternalMemorySize();

unsigned int ms_count_;
unsigned int gc_count_;

static const int kRememberedUnmappedPages = 128;
int remembered_unmapped_pages_index_;
Address remembered_unmapped_pages_[kRememberedUnmappedPages];

int unflattened_strings_length_;
#define ROOT_ACCESSOR(type, name, camel_name) \
  inline void set_##name(type* value) { \
    ASSERT(k##camel_name##RootIndex >= kOldSpaceRoots || !InNewSpace(value)); \
    roots_[k##camel_name##RootIndex] = value; \
  }
  ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR
int allocation_timeout_;

intptr_t new_space_high_promotion_mode_active_;

intptr_t old_generation_allocation_limit_;

intptr_t size_of_old_gen_at_last_old_space_gc_;

intptr_t external_allocation_limit_;

int64_t amount_of_external_allocated_memory_;

int64_t amount_of_external_allocated_memory_at_last_global_gc_;

bool old_gen_exhausted_;

bool inline_allocation_disabled_;

Object* native_contexts_list_;
Object* array_buffers_list_;
Object* allocation_sites_list_;

Object* weak_object_to_code_table_;

StoreBufferRebuilder store_buffer_rebuilder_;
struct StringTypeTable {

struct ConstantStringTable {
  const char* contents;

struct StructTable {

static const StringTypeTable string_type_table[];
static const ConstantStringTable constant_string_table[];
static const StructTable struct_table[];
String* hidden_string_;

struct GCPrologueCallbackPair {
    : callback(callback), gc_type(gc_type), pass_isolate_(pass_isolate) {
  bool operator==(const GCPrologueCallbackPair& pair) const {
    return pair.callback == callback;

List<GCPrologueCallbackPair> gc_prologue_callbacks_;
struct GCEpilogueCallbackPair {
    : callback(callback), gc_type(gc_type), pass_isolate_(pass_isolate) {
  bool operator==(const GCEpilogueCallbackPair& pair) const {
    return pair.callback == callback;

List<GCEpilogueCallbackPair> gc_epilogue_callbacks_;
static int GcSafeSizeOfOldObject(HeapObject* object);

void MarkMapPointersAsEncoded(bool encoded) {
  gc_safe_size_of_old_object_ = &GcSafeSizeOfOldObject;
void GarbageCollectionPrologue();
void GarbageCollectionEpilogue();

void ProcessPretenuringFeedback();

const char** reason);

void EnsureFillerObjectAtTop();

const char* gc_reason,
const char* collector_reason,

bool PerformGarbageCollection(

inline void UpdateOldSpaceLimits();

void InitializeJSObjectFromMap(JSObject* obj,
                               FixedArray* properties,

void InitializeAllocationMemento(AllocationMemento* memento,
                                 AllocationSite* allocation_site);

bool CreateInitialMaps();
bool CreateInitialObjects();

NO_INLINE(void CreateJSEntryStub());
NO_INLINE(void CreateJSConstructEntryStub());

void CreateFixedStubs();

void EnsureFromSpaceIsCommitted();

void ZapFromSpace();

static String* UpdateNewSpaceReferenceInExternalStringTableEntry(

Address DoScavenge(ObjectVisitor* scavenge_visitor, Address new_space_front);

static void ScavengeStoreBufferCallback(Heap* heap,
void MarkCompactPrologue();

void ProcessNativeContexts(WeakObjectRetainer* retainer, bool record_slots);
void ProcessArrayBuffers(WeakObjectRetainer* retainer, bool record_slots);
void ProcessAllocationSites(WeakObjectRetainer* retainer, bool record_slots);

void EvaluateOldSpaceLocalPretenuring(uint64_t size_of_objects_before_gc);

void TearDownArrayBuffers();

void ReportStatisticsBeforeGC();
void ReportStatisticsAfterGC();

static void ScavengeObjectSlow(HeapObject** p, HeapObject* object);

inline void InitializeFunction(JSFunction* function,
                               SharedFunctionInfo* shared,
double total_regexp_code_generated_;

void AllocateFullSizeNumberStringCache();
int FullSizeNumberStringCacheLength();
void FlushNumberStringCache();

void FlushAllocationSitesScratchpad();
void InitializeAllocationSitesScratchpad();
void AddAllocationSiteToScratchpad(AllocationSite* site,

void UpdateSurvivalRateTrend(int start_new_space_size);
enum SurvivalRateTrend { INCREASING, STABLE, DECREASING, FLUCTUATING };

static const int kYoungSurvivalRateHighThreshold = 90;
static const int kYoungSurvivalRateLowThreshold = 10;
static const int kYoungSurvivalRateAllowedDeviation = 15;

static const int kOldSurvivalRateLowThreshold = 20;

int young_survivors_after_last_gc_;
int high_survival_rate_period_length_;
int low_survival_rate_period_length_;
double survival_rate_;
SurvivalRateTrend previous_survival_rate_trend_;
SurvivalRateTrend survival_rate_trend_;
void set_survival_rate_trend(SurvivalRateTrend survival_rate_trend) {
  ASSERT(survival_rate_trend != FLUCTUATING);
  previous_survival_rate_trend_ = survival_rate_trend_;
  survival_rate_trend_ = survival_rate_trend;
}

SurvivalRateTrend survival_rate_trend() {
  if (survival_rate_trend_ == STABLE) {
    return STABLE;
  } else if (previous_survival_rate_trend_ == STABLE) {
    return survival_rate_trend_;
  } else if (survival_rate_trend_ != previous_survival_rate_trend_) {
    return FLUCTUATING;
  } else {
    return survival_rate_trend_;
  }
}
bool IsStableOrIncreasingSurvivalTrend() {
  switch (survival_rate_trend()) {

bool IsStableOrDecreasingSurvivalTrend() {
  switch (survival_rate_trend()) {

bool IsIncreasingSurvivalTrend() {
  return survival_rate_trend() == INCREASING;
}

bool IsHighSurvivalRate() {
  return high_survival_rate_period_length_ > 0;
}

bool IsLowSurvivalRate() {
  return low_survival_rate_period_length_ > 0;
}
void SelectScavengingVisitorsTable();

void StartIdleRound() {
  mark_sweeps_since_idle_round_started_ = 0;
}

void FinishIdleRound() {
  mark_sweeps_since_idle_round_started_ = kMaxMarkSweepsInIdleRound;
  scavenges_since_last_idle_round_ = 0;
}

bool EnoughGarbageSinceLastIdleRound() {
  return (scavenges_since_last_idle_round_ >= kIdleScavengeThreshold);
}

int TimeMarkSweepWouldTakeInMs() {
  // Rough estimate of how many megabytes of heap can be processed in 1 ms.
  static const int kMbPerMs = 2;
  int heap_size_mb = static_cast<int>(SizeOfObjects() / MB);
  return heap_size_mb / kMbPerMs;
}
bool IdleGlobalGC();

void AdvanceIdleIncrementalMarking(intptr_t step_size);

void ClearObjectStats(bool clear_last_time_stats = false);

void set_weak_object_to_code_table(Object* value) {
  weak_object_to_code_table_ = value;
}

Object** weak_object_to_code_table_address() {
  return &weak_object_to_code_table_;
}

static const int kInitialStringTableSize = 2048;
static const int kInitialEvalCacheSize = 64;
static const int kInitialNumberStringCacheSize = 256;
double max_gc_pause_;

double total_gc_time_ms_;

intptr_t max_alive_after_gc_;

double min_in_mutator_;

intptr_t alive_after_last_gc_;

double last_gc_end_timestamp_;

double marking_time_;

double sweeping_time_;

StoreBuffer store_buffer_;

IncrementalMarking incremental_marking_;

int number_idle_notifications_;
unsigned int last_idle_notification_gc_count_;
bool last_idle_notification_gc_count_init_;

int mark_sweeps_since_idle_round_started_;
unsigned int gc_count_at_last_idle_gc_;
int scavenges_since_last_idle_round_;

size_t full_codegen_bytes_generated_;
size_t crankshaft_codegen_bytes_generated_;

int gcs_since_last_deopt_;

int no_weak_object_verification_scope_depth_;

static const int kAllocationSiteScratchpadSize = 256;
int allocation_sites_scratchpad_length_;

static const int kMaxMarkSweepsInIdleRound = 7;
static const int kIdleScavengeThreshold = 5;

PromotionQueue promotion_queue_;

ExternalStringTable external_string_table_;

VisitorDispatchTable<ScavengingCallback> scavenging_visitors_table_;

MemoryChunk* chunks_queued_for_free_;

Mutex relocation_mutex_;

int gc_callbacks_depth_;
friend class NoWeakObjectVerificationScope;

class NoWeakObjectVerificationScope {
  inline NoWeakObjectVerificationScope();
  inline ~NoWeakObjectVerificationScope();

explicit HeapIterator(Heap* heap);
HeapIterator(Heap* heap, HeapObjectsFiltering filtering);

HeapObjectsFiltering filtering_;
for (int i = 0; i < kLength; ++i) {
  keys_[i].map = NULL;
  keys_[i].name = NULL;
}

static inline int Hash(Map* map, Name* name);

return reinterpret_cast<Address>(&keys_);

Address field_offsets_address() {
  return reinterpret_cast<Address>(&field_offsets_);
}
if (!name->IsUniqueName()) return kAbsent;
int index = Hash(source, name);
Key& key = keys_[index];
if ((key.source == source) && (key.name == name)) return results_[index];

if (name->IsUniqueName()) {
  int index = Hash(source, name);
  Key& key = keys_[index];
  key.source = source;
  key.name = name;
  results_[index] = result;
}
for (int i = 0; i < kLength; ++i) {
  keys_[i].source = NULL;
  keys_[i].name = NULL;
}
static int Hash(Object* source, Name* name) {
  // Uses only the lower 32 bits when pointers are larger.
  uint32_t source_hash =
      static_cast<uint32_t>(reinterpret_cast<uintptr_t>(source))
          >> kPointerSizeLog2;
  uint32_t name_hash =
      static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name))
          >> kPointerSizeLog2;
  return (source_hash ^ name_hash) % kLength;
}
static const int kLength = 64;

int results_[kLength];
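
// Illustrative flow (a reading of the fragments above, stated as an
// interpretation): the cache is direct-mapped with kLength slots. Lookup()
// hashes the (source, name) pair to one slot and returns results_[index]
// on a key match, kAbsent otherwise; Update() unconditionally overwrites
// that slot, so a collision simply evicts the previous entry.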
ASSERT(scope_ < kNumberOfScopes);

explicit GCTracer(Heap* heap,
                  const char* gc_reason,
                  const char* collector_reason);

promoted_objects_size_ += object_size;

nodes_died_in_new_space_++;

nodes_copied_in_new_space_++;

const char* CollectorString();

inline double SizeOfHeapObjects();

intptr_t start_object_size_;

intptr_t start_memory_size_;

unsigned int gc_count_;

double scopes_[Scope::kNumberOfScopes];

intptr_t in_free_list_or_wasted_before_gc_;

intptr_t allocated_since_last_gc_;

double spent_in_mutator_;

intptr_t promoted_objects_size_;

int nodes_died_in_new_space_;

int nodes_copied_in_new_space_;

int nodes_promoted_;

double longest_step_;
int steps_count_since_last_gc_;
double steps_took_since_last_gc_;

const char* gc_reason_;
const char* collector_reason_;
static const int kArrayEntriesPerCacheEntry = 4;
static const int kStringOffset = 0;
static const int kPatternOffset = 1;
static const int kArrayOffset = 2;
return (object->map_word().ToRawValue() & kNotMarkedBit) == 0;

uintptr_t map_word = object->map_word().ToRawValue();
object->set_map_word(MapWord::FromRawValue(map_word | kNotMarkedBit));

uintptr_t map_word = object->map_word().ToRawValue();
object->set_map_word(MapWord::FromRawValue(map_word & ~kNotMarkedBit));

uintptr_t map_word = object->map_word().ToRawValue();
return MapWord::FromRawValue(map_word | kNotMarkedBit).ToMap();

static const uintptr_t kNotMarkedBit = 0x1;
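
// Note on the scheme above (an inference from the fragments, stated as an
// assumption): the map word normally holds a tagged pointer whose low bit
// is already set, so clearing kNotMarkedBit marks an object in place while
// leaving the rest of the map word intact, and MapOfMarkedObject() ORs the
// bit back in to recover the valid tagged map pointer from a marked
// object's map word.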
class PathTracer : public ObjectVisitor {

PathTracer(Object* search_target,
           WhatToFind what_to_find,
    : search_target_(search_target),
      found_target_(false),
      found_target_in_trace_(false),
      what_to_find_(what_to_find),
      visit_mode_(visit_mode),

virtual void VisitPointers(Object** start, Object** end);

void TracePathFrom(Object** root);

bool found() const { return found_target_; }

static Object* const kAnyGlobalObject;

class UnmarkVisitor;

void MarkRecursively(Object** p, MarkVisitor* mark_visitor);
void UnmarkRecursively(Object** p, UnmarkVisitor* unmark_visitor);
virtual void ProcessResults();

static const int kMarkTag = 2;

bool found_target_in_trace_;
WhatToFind what_to_find_;

List<Object*> object_stack_;
#endif  // V8_HEAP_H_
static int SizeOfMarkedObject(HeapObject *object)
MUST_USE_RESULT MaybeObject * CopyConstantPoolArray(ConstantPoolArray *src)
Object ** roots_array_start()
MUST_USE_RESULT MaybeObject * AllocateJSModule(Context *context, ScopeInfo *scope_info)
void RecordFixedArraySubTypeStats(int array_sub_type, size_t size)
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter NULL
MUST_USE_RESULT MaybeObject * CopyCode(Code *code)
static void Clear(FixedArray *cache)
MUST_USE_RESULT MaybeObject * AllocateFixedTypedArray(int length, ExternalArrayType array_type, PretenureFlag pretenure)
MUST_USE_RESULT MaybeObject * AllocateStringFromUtf8Slow(Vector< const char > str, int non_ascii_start, PretenureFlag pretenure=NOT_TENURED)
MUST_USE_RESULT MaybeObject * AllocateSymbol()
double total_regexp_code_generated()
bool NextGCIsLikelyToBeFull()
MUST_USE_RESULT MaybeObject * AllocateOneByteInternalizedString(Vector< const uint8_t > str, uint32_t hash_field)
MUST_USE_RESULT MaybeObject * AllocateExternalStringFromAscii(const ExternalAsciiString::Resource *resource)
void set_full_gc_count(int count)
MUST_USE_RESULT MaybeObject * AllocateRawOneByteString(int length, PretenureFlag pretenure=NOT_TENURED)
intptr_t OldGenerationCapacityAvailable()
void Callback(MemoryChunk *page, StoreBufferEvent event)
MUST_USE_RESULT MaybeObject * CopyFixedDoubleArray(FixedDoubleArray *src)
intptr_t * old_pointer_space_size
int Lookup(Map *map, Name *name)
Object ** native_contexts_list_address()
intptr_t * cell_space_size
static const int kMapHashShift
void DeoptMarkedAllocationSites()
int ReservedSemiSpaceSize()
void PrintF(const char *format,...)
void SetNewSpaceHighPromotionModeActive(bool mode)
void CollectAllGarbage(int flags, const char *gc_reason=NULL, const GCCallbackFlags gc_callback_flags=kNoGCCallbackFlags)
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on stack replacement do not emit check maps for constant values that have a leaf map
bool InOldDataSpace(Address address)
bool InNewSpace(Object *object)
Address * OldPointerSpaceAllocationTopAddress()
void AddGCEpilogueCallback(v8::Isolate::GCEpilogueCallback callback, GCType gc_type_filter, bool pass_isolate=true)
MUST_USE_RESULT MaybeObject * AllocateFunctionContext(int length, JSFunction *function)
RootListIndex RootIndexForEmptyExternalArray(ElementsKind kind)
void(* ObjectSlotCallback)(HeapObject **from, HeapObject *to)
void SetConstructStubDeoptPCOffset(int pc_offset)
void IterateWeakRoots(ObjectVisitor *v, VisitMode mode)
Object * ToBoolean(bool condition)
MUST_USE_RESULT MaybeObject * ReinitializeJSGlobalProxy(JSFunction *constructor, JSGlobalProxy *global)
int64_t AdjustAmountOfExternalAllocatedMemory(int64_t change_in_bytes)
void RemoveGCPrologueCallback(v8::Isolate::GCPrologueCallback callback)
MUST_USE_RESULT MaybeObject * CopyFixedDoubleArrayWithMap(FixedDoubleArray *src, Map *map)
static Smi * FromInt(int value)
MUST_USE_RESULT MaybeObject * AllocateJSFunctionProxy(Object *handler, Object *call_trap, Object *construct_trap, Object *prototype)
bool flush_monomorphic_ics()
void FinalizeExternalString(String *string)
bool EnsureSweepersProgressed(int step_size)
MUST_USE_RESULT MaybeObject * CopyJSObject(JSObject *source, AllocationSite *site=NULL)
void Update(Map *source, Name *name, int result)
Map * MapForFixedTypedArray(ExternalArrayType array_type)
void CompletelyClearInstanceofCache()
Address * OldDataSpaceAllocationLimitAddress()
Map * MapForExternalArrayType(ExternalArrayType array_type)
void SetNumberStringCache(Object *number, String *str)
static const int kNullValueRootIndex
static bool RootCanBeWrittenAfterInitialization(RootListIndex root_index)
MUST_USE_RESULT MaybeObject * AllocateModuleContext(ScopeInfo *scope_info)
MUST_USE_RESULT MaybeObject * AllocateCodeCache()
void AddString(String *string)
MUST_USE_RESULT MaybeObject * AllocateNativeContext()
RootListIndex RootIndexForExternalArrayType(ExternalArrayType array_type)
void CallGCEpilogueCallbacks(GCType gc_type, GCCallbackFlags flags)
static const int kOldSpaceRoots
void VisitExternalResources(v8::ExternalResourceVisitor *visitor)
static bool IsOneByte(T t, int chars)
MaybeObject * AddWeakObjectToCodeDependency(Object *obj, DependentCode *dep)
PretenureFlag GetPretenureMode()
kSerializedDataOffset Object
static const intptr_t kMinimumOldGenerationAllocationLimit
Address * allocation_top_address()
MUST_USE_RESULT MaybeObject * AllocateJSMessageObject(String *type, JSArray *arguments, int start_position, int end_position, Object *script, Object *stack_frames)
void DoScavengeObject(Map *map, HeapObject **slot, HeapObject *obj)
void ClearInstanceofCache()
HeapObjectCallback GcSafeSizeOfOldObjectFunction()
bool InFromSpace(Object *object)
MUST_USE_RESULT MaybeObject * Uint32ToString(uint32_t value, bool check_number_string_cache=true)
Object * weak_object_to_code_table()
PromotionQueue * promotion_queue()
void SetGetterStubDeoptPCOffset(int pc_offset)
RootListIndex RootIndexForFixedTypedArray(ExternalArrayType array_type)
bool IsConcurrentSweepingInProgress()
intptr_t * code_space_size
MUST_USE_RESULT MaybeObject * InternalizeStringWithKey(HashTableKey *key)
MUST_USE_RESULT MaybeObject * AllocateExternal(void *value)
bool InternalizeTwoCharsStringIfExists(String *str, String **result)
static void MoveBlock(Address dst, Address src, int byte_size)
void IterateStrongRoots(ObjectVisitor *v, VisitMode mode)
intptr_t MaximumCommittedMemory()
#define ASSERT(condition)
bool InSpace(Address addr, AllocationSpace space)
MUST_USE_RESULT MaybeObject * AllocateGlobalContext(JSFunction *function, ScopeInfo *scope_info)
void public_set_code_stubs(UnseededNumberDictionary *value)
bool InNewSpacePage(Address address)
static const int kReduceMemoryFootprintMask
OldSpace * TargetSpace(HeapObject *object)
void set_collector(GarbageCollector collector)
const int kPointerSizeLog2
void RecordObjectStats(InstanceType type, size_t size)
Address * NewSpaceAllocationLimitAddress()
#define STRONG_ROOT_LIST(V)
Address * OldDataSpaceAllocationTopAddress()
MUST_USE_RESULT MaybeObject * AllocateBlockContext(JSFunction *function, Context *previous, ScopeInfo *info)
void InitializeWeakObjectToCodeTable()
MUST_USE_RESULT MaybeObject * AllocateTwoByteInternalizedString(Vector< const uc16 > str, uint32_t hash_field)
void(* ScavengingCallback)(Map *map, HeapObject **slot, HeapObject *object)
intptr_t CommittedMemoryExecutable()
friend class ExternalReference
RelocationLock(Heap *heap)
#define INTERNALIZED_STRING_LIST(V)
static const int kPageSize
int * pending_global_handle_count
Address * store_buffer_top_address()
void IterateSmiRoots(ObjectVisitor *v)
MUST_USE_RESULT MaybeObject * AllocateJSObject(JSFunction *constructor, PretenureFlag pretenure=NOT_TENURED, AllocationSite *allocation_site=NULL)
void AdjustLiveBytes(Address address, int by, InvocationMode mode)
Address always_allocate_scope_depth_address()
void IterateAndMarkPointersToFromSpace(Address start, Address end, ObjectSlotCallback callback)
void public_set_materialized_objects(FixedArray *objects)
ArrayStorageAllocationMode
STATIC_CHECK(kUndefinedValueRootIndex==Internals::kUndefinedValueRootIndex)
void increment_nodes_copied_in_new_space()
virtual Object * RetainAs(Object *object)=0
StoreBuffer * store_buffer()
MUST_USE_RESULT MaybeObject * AllocateStringFromOneByte(Vector< const uint8_t > str, PretenureFlag pretenure=NOT_TENURED)
Address * NewSpaceHighPromotionModeActiveAddress()
FixedTypedArrayBase * EmptyFixedTypedArrayForMap(Map *map)
kInstanceClassNameOffset flag
GCCallbacksScope(Heap *heap)
MUST_USE_RESULT MaybeObject * CopyFixedArrayWithMap(FixedArray *src, Map *map)
void EnsureWeakObjectToCodeTable()
INLINE(void RecordWrite(Address address, int offset))
int NotifyContextDisposed()
MUST_USE_RESULT MaybeObject * AllocateHeapNumber(double value, PretenureFlag pretenure=NOT_TENURED)
#define STRUCT_MAP_ACCESSOR(NAME, Name, name)
void RepairFreeListsAfterBoot()
MUST_USE_RESULT MaybeObject * NumberToString(Object *number, bool check_number_string_cache=true)
void public_set_empty_script(Script *script)
int * near_death_global_handle_count
STATIC_ASSERT((kEntriesPerBucket &(kEntriesPerBucket-1))==0)
String *(* ExternalStringTableUpdaterCallback)(Heap *heap, Object **pointer)
double sweeping_time() const
void SetArgumentsAdaptorDeoptPCOffset(int pc_offset)
static void ClearMark(HeapObject *object)
static const int kEndMarker
bool IdleNotification(int hint)
intptr_t MaxOldGenerationSize()
void IncreaseTotalRegexpCodeGenerated(int size)
friend class MarkCompactCollector
void(* GCEpilogueCallback)(Isolate *isolate, GCType type, GCCallbackFlags flags)
void EnsureHeapIsIterable()
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on stack replacement do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes number of stack frames inspected by the profiler percentage of ICs that must have type info to allow optimization extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long mode(MIPS only)") DEFINE_string(expose_natives_as
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object size
MUST_USE_RESULT MaybeObject * AllocateUninitializedFixedDoubleArray(int length, PretenureFlag pretenure=NOT_TENURED)
bool InOldPointerSpace(Address address)
intptr_t * property_cell_space_capacity
int(* HeapObjectCallback)(HeapObject *obj)
MUST_USE_RESULT MaybeObject * AllocateStringFromUtf8(Vector< const char > str, PretenureFlag pretenure=NOT_TENURED)
intptr_t OldGenerationAllocationLimit(intptr_t old_gen_size)
Address * allocation_top_address()
static bool IsMarked(HeapObject *object)
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on stack replacement do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes number of stack frames inspected by the profiler percentage of ICs that must have type info to allow optimization extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long expose natives in global object expose freeBuffer extension expose gc extension under the specified name expose externalize string extension number of stack frames to capture disable builtin natives files print name of functions for which code is generated use random jit cookie to mask large constants trace lazy optimization use adaptive optimizations always try to OSR functions trace optimize function deoptimization minimum length for automatic enable preparsing maximum number of optimization attempts before giving up cache prototype transitions trace debugging JSON request response trace out of bounds accesses to external arrays trace_js_array_abuse automatically set the debug break flag when debugger commands are in the queue abort by crashing maximum length of function source code printed in a 
stack trace max size of the new max size of the old max size of executable always perform global GCs print one trace line following each garbage collection do not print trace line after scavenger collection print statistics of the maximum memory committed for the heap in only print modified registers Don t break for ASM_UNIMPLEMENTED_BREAK macros print stack trace when an illegal exception is thrown randomize hashes to avoid predictable hash Fixed seed to use to hash property Print the time it takes to deserialize the snapshot testing_bool_flag testing_int_flag string flag tmp file in which to serialize heap Print the time it takes to lazily compile hydrogen code stubs concurrent_recompilation concurrent_sweeping Print usage including flags
static void UpdateAllocationSiteFeedback(HeapObject *object, ScratchpadSlotMode mode)
MUST_USE_RESULT MaybeObject * AllocateJSArrayWithElements(FixedArrayBase *array_base, ElementsKind elements_kind, int length, PretenureFlag pretenure=NOT_TENURED)
static NewSpacePage * FromAddress(Address address_in_page)
void ClearAllICsByKind(Code::Kind kind)
MUST_USE_RESULT MaybeObject * AllocateWithContext(JSFunction *function, Context *previous, JSReceiver *extension)
void CollectAllAvailableGarbage(const char *gc_reason=NULL)
void increment_nodes_died_in_new_space()
bool AllowedToBeMigrated(HeapObject *object, AllocationSpace dest)
bool ConfigureHeapDefault()
PagedSpace * paged_space(int idx)
static const int kNoGCFlags
PropertyCellSpace * property_cell_space()
MUST_USE_RESULT MaybeObject * AllocateFixedArrayWithHoles(int length, PretenureFlag pretenure=NOT_TENURED)
void VisitPointers(Object **start, Object **end)
int * global_handle_count
MUST_USE_RESULT MaybeObject * AllocateForeign(Address address, PretenureFlag pretenure=NOT_TENURED)
void QueueMemoryChunkForFree(MemoryChunk *chunk)
void CheckpointObjectStats()
MUST_USE_RESULT MaybeObject * AllocateExternalArray(int length, ExternalArrayType array_type, void *external_pointer, PretenureFlag pretenure)
static void ScavengePointer(HeapObject **p)
intptr_t * cell_space_capacity
intptr_t * memory_allocator_size
#define DISALLOW_IMPLICIT_CONSTRUCTORS(TypeName)
void VisitPointers(Object **start, Object **end)
void decrement_scan_on_scavenge_pages()
void IncrementYoungSurvivorsCounter(int survived)
void set_allocation_sites_list(Object *object)
MUST_USE_RESULT MaybeObject * AllocatePolymorphicCodeCache()
static const int kMaxRegularHeapObjectSize
intptr_t * code_space_capacity
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on stack replacement do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes number of stack frames inspected by the profiler percentage of ICs that must have type info to allow optimization extra verbose compilation tracing generate extra code(assertions) for debugging") DEFINE_bool(code_comments
MUST_USE_RESULT MaybeObject * InternalizeString(String *str)
static void Enter(Heap *heap, String *key_string, Object *key_pattern, FixedArray *value_array, ResultsCacheType type)
#define DISALLOW_COPY_AND_ASSIGN(TypeName)
void ReserveSpace(int *sizes, Address *addresses)
bool inline_allocation_disabled()
static Map * MapOfMarkedObject(HeapObject *object)
ExternalArray * EmptyExternalArrayForMap(Map *map)
OldSpace * old_pointer_space()
Map * InternalizedStringMapForString(String *str)
bool ConfigureHeap(int max_semispace_size, intptr_t max_old_gen_size, intptr_t max_executable_size)
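ConfigureHeap is meant to be called before the heap is set up; the boolean result reports whether the configuration was accepted. A hedged sketch with purely illustrative sizes (MB comes from globals.h):

    // Illustrative values, not tuning advice.
    bool ConfigureExampleHeap(Heap* heap) {
      int max_semispace = 8 * MB;          // size of each new-space semispace
      intptr_t max_old_gen = 700 * MB;     // old-generation ceiling
      intptr_t max_executable = 256 * MB;  // executable (code) memory ceiling
      return heap->ConfigureHeap(max_semispace, max_old_gen, max_executable);
    }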
intptr_t * map_space_size
static double TimeCurrentMillis()
MUST_USE_RESULT MaybeObject * AllocateTypeFeedbackInfo()
MUST_USE_RESULT MaybeObject * AllocateByteArray(int length, PretenureFlag pretenure=NOT_TENURED)
static const int kMakeHeapIterableMask
MUST_USE_RESULT MaybeObject * AllocateJSObjectFromMap(Map *map, PretenureFlag pretenure=NOT_TENURED, bool alloc_props=true, AllocationSite *allocation_site=NULL)
void public_set_store_buffer_top(Address *top)
MUST_USE_RESULT MaybeObject * AllocateJSArrayAndStorage(ElementsKind elements_kind, int length, int capacity, ArrayStorageAllocationMode mode=DONT_INITIALIZE_ARRAY_ELEMENTS, PretenureFlag pretenure=NOT_TENURED)
static const int kUndefinedValueRootIndex
void Iterate(ObjectVisitor *v)
bool InToSpace(Object *object)
static void FatalProcessOutOfMemory(const char *location, bool take_snapshot=false)
RootListIndex RootIndexForEmptyFixedTypedArray(ElementsKind kind)
#define ROOT_ACCESSOR(type, name, camel_name)
LargeObjectSpace * lo_space()
bool RootCanBeTreatedAsConstant(RootListIndex root_index)
static const int kFalseValueRootIndex
MUST_USE_RESULT MaybeObject * AllocateExternalStringFromTwoByte(const ExternalTwoByteString::Resource *resource)
MUST_USE_RESULT MaybeObject * AllocatePartialMap(InstanceType instance_type, int instance_size)
int * free_global_handle_count
static Object * Lookup(Heap *heap, String *key_string, Object *key_pattern, ResultsCacheType type)
void set_gc_count(unsigned int count)
static const int kAbortIncrementalMarkingMask
Vector< const char > CStrVector(const char *data)
bool CollectGarbage(AllocationSpace space, const char *gc_reason=NULL, const GCCallbackFlags gc_callback_flags=kNoGCCallbackFlags)
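The gc_reason string is free-form and only surfaces in GC tracing output. A minimal sketch of requesting a new-space collection:

    // Hypothetical helper: force a scavenge with a traceable reason.
    // See heap.h for the exact contract of the boolean result.
    bool ScavengeNowExample(Heap* heap) {
      return heap->CollectGarbage(NEW_SPACE, "example: allocation pressure");
    }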
intptr_t CommittedMemory()
void increment_promoted_objects_size(int object_size)
Object * GetNumberStringCache(Object *number)
#define T(name, string, precedence)
MUST_USE_RESULT MaybeObject * AllocateEmptyJSArray(ElementsKind elements_kind, PretenureFlag pretenure=NOT_TENURED)
virtual ~WeakObjectRetainer()
Address * NewSpaceAllocationTopAddress()
MUST_USE_RESULT MaybeObject * AllocateJSArrayBuffer()
static const int kEmptyStringRootIndex
intptr_t get_max_alive_after_gc()
void UpdateReferencesInExternalStringTable(ExternalStringTableUpdaterCallback updater_func)
MUST_USE_RESULT MaybeObject * AllocateJSProxy(Object *handler, Object *prototype)
void ProcessWeakReferences(WeakObjectRetainer *retainer)
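ProcessWeakReferences walks the heap's weak lists and asks the retainer, entry by entry, what to keep. A hedged sketch assuming the single RetainAs() hook (return the object to retain it, NULL to drop it):

    // Hypothetical retainer that keeps every entry unchanged.
    class KeepAllRetainer : public WeakObjectRetainer {
     public:
      virtual Object* RetainAs(Object* object) {
        return object;  // returning NULL here would drop the entry
      }
    };

    // Usage: KeepAllRetainer retainer;
    //        heap->ProcessWeakReferences(&retainer);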
void ClearNormalizedMapCaches()
void RecordCodeSubTypeStats(int code_sub_type, int code_age, size_t size)
MUST_USE_RESULT MaybeObject * NumberFromDouble(double value, PretenureFlag pretenure=NOT_TENURED)
MUST_USE_RESULT MaybeObject * CreateCode(const CodeDesc &desc, Code::Flags flags, Handle< Object > self_reference, bool immovable=false, bool crankshafted=false, int prologue_offset=Code::kPrologueOffsetNotSet)
void Update(Map *map, Name *name, int field_offset)
intptr_t * old_data_space_capacity
MUST_USE_RESULT MaybeObject * AllocateArgumentsObject(Object *callee, int length)
int InitialSemiSpaceSize()
void RemoveGCEpilogueCallback(v8::Isolate::GCEpilogueCallback callback)
void SetSetterStubDeoptPCOffset(int pc_offset)
Scope(GCTracer *tracer, ScopeId scope)
MUST_USE_RESULT MaybeObject * CopyFixedArray(FixedArray *src)
void IterateRoots(ObjectVisitor *v, VisitMode mode)
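IterateRoots drives an ObjectVisitor over the root set selected by mode. A hypothetical diagnostic visitor that counts heap-object root slots:

    class RootCounter : public ObjectVisitor {
     public:
      RootCounter() : count_(0) {}
      virtual void VisitPointers(Object** start, Object** end) {
        for (Object** p = start; p < end; p++) {
          if ((*p)->IsHeapObject()) count_++;  // skip Smi slots
        }
      }
      int count() const { return count_; }
     private:
      int count_;
    };

    // Usage: RootCounter counter;
    //        heap->IterateRoots(&counter, VISIT_ALL);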
Address * allocation_limit_address()
void IncrementCodeGeneratedBytes(bool is_crankshafted, int size)
MUST_USE_RESULT MaybeObject * ReinitializeJSReceiver(JSReceiver *object, InstanceType type, int size)
MUST_USE_RESULT MaybeObject * AllocateAccessorPair()
int * weak_global_handle_count
MUST_USE_RESULT MaybeObject * AllocateConstantPoolArray(int number_of_int64_entries, int number_of_code_ptr_entries, int number_of_heap_ptr_entries, int number_of_int32_entries)
MUST_USE_RESULT MaybeObject * AllocateInternalizedStringImpl(T t, int chars, uint32_t hash_field)
MUST_USE_RESULT MaybeObject * AllocateCatchContext(JSFunction *function, Context *previous, String *name, Object *thrown_object)
static const int kArgumentsLengthIndex
void CheckNewSpaceExpansionCriteria()
#define STRING_INDEX_DECLARATION(name, str)
MUST_USE_RESULT MaybeObject * AllocateScopeInfo(int length)
Object * native_contexts_list()
double get_min_in_mutator()
ExternalStringTable * external_string_table()
MUST_USE_RESULT MaybeObject * NumberFromUint32(uint32_t value, PretenureFlag pretenure=NOT_TENURED)
IncrementalMarking * incremental_marking()
double get_max_gc_pause()
bool Contains(Address addr)
void EnableInlineAllocation()
size_t CommittedPhysicalMemory()
Address * OldPointerSpaceAllocationLimitAddress()
MUST_USE_RESULT MaybeObject * AllocateUninitializedFixedArray(int length)
MUST_USE_RESULT MaybeObject * AllocateAllocationSite()
void MoveElements(FixedArray *array, int dst_index, int src_index, int len)
static const int kStartMarker
static const int kPrologueOffsetNotSet
bool ShouldBePromoted(Address old_address, int object_size)
bool IsLazySweepingComplete()
void RememberUnmappedPage(Address page, bool compacted)
static const int kNotFound
static const int kRegExpResultsCacheSize
MUST_USE_RESULT MaybeObject * LookupSingleCharacterStringFromCode(uint16_t code)
intptr_t PromotedTotalSize()
static void CopyBlock(Address dst, Address src, int byte_size)
void AddSweepingTime(double sweeping_time)
static bool ShouldZapGarbage()
MUST_USE_RESULT MaybeObject * AllocateFixedDoubleArrayWithHoles(int length, PretenureFlag pretenure=NOT_TENURED)
MUST_USE_RESULT MaybeObject * AllocateFixedArray(int length, PretenureFlag pretenure=NOT_TENURED)
#define STRING_ACCESSOR(name, str)
static const int kArgumentsCalleeIndex
MUST_USE_RESULT MaybeObject * AllocateHashTable(int length, PretenureFlag pretenure=NOT_TENURED)
void public_set_non_monomorphic_cache(UnseededNumberDictionary *value)
PerThreadAssertScopeDebugOnly< HEAP_ALLOCATION_ASSERT, false > DisallowHeapAllocation
MUST_USE_RESULT MaybeObject * AllocateInternalizedStringFromUtf8(Vector< const char > str, int chars, uint32_t hash_field)
MUST_USE_RESULT MaybeObject * InternalizeUtf8String(const char *str)
static const int kHeaderSize
void Print(const v8::FunctionCallbackInfo< v8::Value > &args)
void DisableInlineAllocation()
int64_t amount_of_external_allocated_memory()
#define DECLARE_STRUCT_MAP(NAME, Name, name)
intptr_t PromotedSpaceSizeOfObjects()
intptr_t * old_pointer_space_capacity
bool OldGenerationAllocationLimitReached()
#define ROOT_INDEX_DECLARATION(type, name, camel_name)
MUST_USE_RESULT MaybeObject * NumberFromInt32(int32_t value, PretenureFlag pretenure=NOT_TENURED)
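The NumberFrom* entry points return a Smi when the value fits in Smi range and allocate a HeapNumber otherwise; only the HeapNumber path can fail. A sketch spelling out the same contract from the caller's side (NumberFromInt32 already performs this check internally):

    MaybeObject* BoxInt32Example(Heap* heap, int32_t value) {
      if (Smi::IsValid(value)) {
        return Smi::FromInt(value);         // immediate; never fails
      }
      return heap->NumberFromInt32(value);  // may allocate a HeapNumber
    }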
void ClearJSFunctionResultCaches()
void RecordStats(HeapStats *stats, bool take_snapshot=false)
NewSpacePage * prev_page() const
bool IsInGCPostProcessing()
void CreateFillerObjectAt(Address addr, int size)
Object * array_buffers_list()
MUST_USE_RESULT MaybeObject * AllocateSharedFunctionInfo(Object *name)
bool AdvanceSweepers(int step_size)
void increment_nodes_promoted()
Object ** allocation_sites_list_address()
void increment_scan_on_scavenge_pages()
MUST_USE_RESULT MaybeObject * Allocate(Map *map, AllocationSpace space, AllocationSite *allocation_site=NULL)
intptr_t * map_space_capacity
static const int kTrueValueRootIndex
static const int kCapacityMask
static void ScavengeObject(HeapObject **p, HeapObject *object)
bool IsSweepingComplete()
MUST_USE_RESULT MaybeObject * CopyConstantPoolArrayWithMap(ConstantPoolArray *src, Map *map)
AlwaysAllocateScope(Isolate *isolate)
MUST_USE_RESULT MaybeObject * AllocateStringFromOneByte(Vector< const char > str, PretenureFlag pretenure=NOT_TENURED)
bool CanMoveObjectStart(HeapObject *object)
MUST_USE_RESULT MaybeObject * PrepareForCompare(String *str)
static bool IsAtStart(Address addr)
intptr_t * memory_allocator_capacity
bool EnsureSweeperProgress(intptr_t size_in_bytes)
MUST_USE_RESULT MaybeObject * AllocateJSArrayStorage(JSArray *array, int length, int capacity, ArrayStorageAllocationMode mode=DONT_INITIALIZE_ARRAY_ELEMENTS)
void AddMarkingTime(double marking_time)
double marking_time() const
static const int kSweepPreciselyMask
static const int kSloppyArgumentsObjectSize
void set_array_buffers_list(Object *object)
MUST_USE_RESULT MaybeObject * AllocateStruct(InstanceType type)
void(* GCPrologueCallback)(Isolate *isolate, GCType type, GCCallbackFlags flags)
intptr_t * old_data_space_size
intptr_t OldGenerationSpaceAvailable()
SpaceIterator(Heap *heap)
MUST_USE_RESULT MaybeObject * AllocateMap(InstanceType instance_type, int instance_size, ElementsKind elements_kind=TERMINAL_FAST_ELEMENTS_KIND)
void UpdateMaximumCommitted()
intptr_t MaxExecutableSize()
MUST_USE_RESULT MaybeObject * AllocateRaw(int size_in_bytes, AllocationSpace space, AllocationSpace retry_space)
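AllocateRaw sits at the bottom of the allocator stack: space is tried first and retry_space is used when the preferred space cannot satisfy the request inline. A hedged sketch:

    // Hypothetical wrapper: prefer new space, fall back to old pointer
    // space. The caller must still unwrap the MaybeObject* result.
    MaybeObject* RawAllocateExample(Heap* heap, int size_in_bytes) {
      return heap->AllocateRaw(size_in_bytes, NEW_SPACE, OLD_POINTER_SPACE);
    }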
Object * allocation_sites_list()
void PrintShortHeapStatistics()
static const int kStrictArgumentsObjectSize
void AddGCPrologueCallback(v8::Isolate::GCPrologueCallback callback, GCType gc_type_filter, bool pass_isolate=true)
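The callback must match the v8::Isolate::GCPrologueCallback typedef listed above, and gc_type_filter limits which collections trigger it. A minimal sketch:

    // Hypothetical tracing hook; fires before each matching collection.
    static void OnGCStartExample(v8::Isolate* isolate, v8::GCType type,
                                 v8::GCCallbackFlags flags) {
      // e.g. start a timer keyed by `type` here
    }

    // Registration (kGCTypeAll covers scavenges and mark-compacts):
    // heap->AddGCPrologueCallback(OnGCStartExample, v8::kGCTypeAll);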
static const int kHashMask
static AllocationSpace TargetSpaceId(InstanceType type)
OldSpace * old_data_space()
MUST_USE_RESULT MaybeObject * AllocateRawTwoByteString(int length, PretenureFlag pretenure=NOT_TENURED)
static void AssertValidRange(Address from, Address to)
static void SetMark(HeapObject *object)
MarkCompactCollector * mark_compact_collector()
MUST_USE_RESULT MaybeObject * AllocatePrivateSymbol()
int Lookup(Map *source, Name *name)
MUST_USE_RESULT MaybeObject * AllocateFunction(Map *function_map, SharedFunctionInfo *shared, Object *prototype, PretenureFlag pretenure=TENURED)
void UpdateNewSpaceReferencesInExternalStringTable(ExternalStringTableUpdaterCallback updater_func)
bool InternalizeStringIfExists(String *str, String **result)
StoreBufferRebuilder(StoreBuffer *store_buffer)
void CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags)
MUST_USE_RESULT MaybeObject * AllocateStringFromTwoByte(Vector< const uc16 > str, PretenureFlag pretenure=NOT_TENURED)
static const int kEntriesPerBucket
MUST_USE_RESULT MaybeObject * CopyAndTenureFixedCOWArray(FixedArray *src)
DependentCode * LookupWeakObjectToCodeDependency(Object *obj)
MUST_USE_RESULT MaybeObject * AllocateAliasedArgumentsEntry(int slot)
intptr_t * property_cell_space_size
Callback GetVisitor(Map *map)
void set_native_contexts_list(Object *object)