v8 3.25.30 (node 0.11.13)
V8 is Google's open source JavaScript engine.
heap.h
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_HEAP_H_
#define V8_HEAP_H_

#include <cmath>

#include "allocation.h"
#include "assert-scope.h"
#include "globals.h"
#include "incremental-marking.h"
#include "list.h"
#include "mark-compact.h"
#include "objects-visiting.h"
#include "spaces.h"
#include "splay-tree-inl.h"
#include "store-buffer.h"
#include "v8-counters.h"
#include "v8globals.h"

namespace v8 {
namespace internal {

// Defines all the roots in Heap.
#define STRONG_ROOT_LIST(V) \
  V(Map, byte_array_map, ByteArrayMap) \
  V(Map, free_space_map, FreeSpaceMap) \
  V(Map, one_pointer_filler_map, OnePointerFillerMap) \
  V(Map, two_pointer_filler_map, TwoPointerFillerMap) \
  /* Cluster the most popular ones in a few cache lines here at the top. */ \
  V(Smi, store_buffer_top, StoreBufferTop) \
  V(Oddball, undefined_value, UndefinedValue) \
  V(Oddball, the_hole_value, TheHoleValue) \
  V(Oddball, null_value, NullValue) \
  V(Oddball, true_value, TrueValue) \
  V(Oddball, false_value, FalseValue) \
  V(Oddball, uninitialized_value, UninitializedValue) \
  V(Map, cell_map, CellMap) \
  V(Map, global_property_cell_map, GlobalPropertyCellMap) \
  V(Map, shared_function_info_map, SharedFunctionInfoMap) \
  V(Map, meta_map, MetaMap) \
  V(Map, heap_number_map, HeapNumberMap) \
  V(Map, native_context_map, NativeContextMap) \
  V(Map, fixed_array_map, FixedArrayMap) \
  V(Map, code_map, CodeMap) \
  V(Map, scope_info_map, ScopeInfoMap) \
  V(Map, fixed_cow_array_map, FixedCOWArrayMap) \
  V(Map, fixed_double_array_map, FixedDoubleArrayMap) \
  V(Map, constant_pool_array_map, ConstantPoolArrayMap) \
  V(Object, no_interceptor_result_sentinel, NoInterceptorResultSentinel) \
  V(Map, hash_table_map, HashTableMap) \
  V(FixedArray, empty_fixed_array, EmptyFixedArray) \
  V(ByteArray, empty_byte_array, EmptyByteArray) \
  V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray) \
  V(ConstantPoolArray, empty_constant_pool_array, EmptyConstantPoolArray) \
  V(Oddball, arguments_marker, ArgumentsMarker) \
  /* The roots above this line should be boring from a GC point of view. */ \
  /* This means they are never in new space and never on a page that is */ \
  /* being compacted. */ \
  V(FixedArray, number_string_cache, NumberStringCache) \
  V(Object, instanceof_cache_function, InstanceofCacheFunction) \
  V(Object, instanceof_cache_map, InstanceofCacheMap) \
  V(Object, instanceof_cache_answer, InstanceofCacheAnswer) \
  V(FixedArray, single_character_string_cache, SingleCharacterStringCache) \
  V(FixedArray, string_split_cache, StringSplitCache) \
  V(FixedArray, regexp_multiple_cache, RegExpMultipleCache) \
  V(Object, termination_exception, TerminationException) \
  V(Smi, hash_seed, HashSeed) \
  V(Map, symbol_map, SymbolMap) \
  V(Map, string_map, StringMap) \
  V(Map, ascii_string_map, AsciiStringMap) \
  V(Map, cons_string_map, ConsStringMap) \
  V(Map, cons_ascii_string_map, ConsAsciiStringMap) \
  V(Map, sliced_string_map, SlicedStringMap) \
  V(Map, sliced_ascii_string_map, SlicedAsciiStringMap) \
  V(Map, external_string_map, ExternalStringMap) \
  V(Map, \
    external_string_with_one_byte_data_map, \
    ExternalStringWithOneByteDataMap) \
  V(Map, external_ascii_string_map, ExternalAsciiStringMap) \
  V(Map, short_external_string_map, ShortExternalStringMap) \
  V(Map, \
    short_external_string_with_one_byte_data_map, \
    ShortExternalStringWithOneByteDataMap) \
  V(Map, internalized_string_map, InternalizedStringMap) \
  V(Map, ascii_internalized_string_map, AsciiInternalizedStringMap) \
  V(Map, cons_internalized_string_map, ConsInternalizedStringMap) \
  V(Map, cons_ascii_internalized_string_map, ConsAsciiInternalizedStringMap) \
  V(Map, \
    external_internalized_string_map, \
    ExternalInternalizedStringMap) \
  V(Map, \
    external_internalized_string_with_one_byte_data_map, \
    ExternalInternalizedStringWithOneByteDataMap) \
  V(Map, \
    external_ascii_internalized_string_map, \
    ExternalAsciiInternalizedStringMap) \
  V(Map, \
    short_external_internalized_string_map, \
    ShortExternalInternalizedStringMap) \
  V(Map, \
    short_external_internalized_string_with_one_byte_data_map, \
    ShortExternalInternalizedStringWithOneByteDataMap) \
  V(Map, \
    short_external_ascii_internalized_string_map, \
    ShortExternalAsciiInternalizedStringMap) \
  V(Map, short_external_ascii_string_map, ShortExternalAsciiStringMap) \
  V(Map, undetectable_string_map, UndetectableStringMap) \
  V(Map, undetectable_ascii_string_map, UndetectableAsciiStringMap) \
  V(Map, external_int8_array_map, ExternalInt8ArrayMap) \
  V(Map, external_uint8_array_map, ExternalUint8ArrayMap) \
  V(Map, external_int16_array_map, ExternalInt16ArrayMap) \
  V(Map, external_uint16_array_map, ExternalUint16ArrayMap) \
  V(Map, external_int32_array_map, ExternalInt32ArrayMap) \
  V(Map, external_uint32_array_map, ExternalUint32ArrayMap) \
  V(Map, external_float32_array_map, ExternalFloat32ArrayMap) \
  V(Map, external_float64_array_map, ExternalFloat64ArrayMap) \
  V(Map, external_uint8_clamped_array_map, ExternalUint8ClampedArrayMap) \
  V(ExternalArray, empty_external_int8_array, EmptyExternalInt8Array) \
  V(ExternalArray, empty_external_uint8_array, EmptyExternalUint8Array) \
  V(ExternalArray, empty_external_int16_array, EmptyExternalInt16Array) \
  V(ExternalArray, empty_external_uint16_array, EmptyExternalUint16Array) \
  V(ExternalArray, empty_external_int32_array, EmptyExternalInt32Array) \
  V(ExternalArray, empty_external_uint32_array, EmptyExternalUint32Array) \
  V(ExternalArray, empty_external_float32_array, EmptyExternalFloat32Array) \
  V(ExternalArray, empty_external_float64_array, EmptyExternalFloat64Array) \
  V(ExternalArray, empty_external_uint8_clamped_array, \
    EmptyExternalUint8ClampedArray) \
  V(Map, fixed_uint8_array_map, FixedUint8ArrayMap) \
  V(Map, fixed_int8_array_map, FixedInt8ArrayMap) \
  V(Map, fixed_uint16_array_map, FixedUint16ArrayMap) \
  V(Map, fixed_int16_array_map, FixedInt16ArrayMap) \
  V(Map, fixed_uint32_array_map, FixedUint32ArrayMap) \
  V(Map, fixed_int32_array_map, FixedInt32ArrayMap) \
  V(Map, fixed_float32_array_map, FixedFloat32ArrayMap) \
  V(Map, fixed_float64_array_map, FixedFloat64ArrayMap) \
  V(Map, fixed_uint8_clamped_array_map, FixedUint8ClampedArrayMap) \
  V(FixedTypedArrayBase, empty_fixed_uint8_array, EmptyFixedUint8Array) \
  V(FixedTypedArrayBase, empty_fixed_int8_array, EmptyFixedInt8Array) \
  V(FixedTypedArrayBase, empty_fixed_uint16_array, EmptyFixedUint16Array) \
  V(FixedTypedArrayBase, empty_fixed_int16_array, EmptyFixedInt16Array) \
  V(FixedTypedArrayBase, empty_fixed_uint32_array, EmptyFixedUint32Array) \
  V(FixedTypedArrayBase, empty_fixed_int32_array, EmptyFixedInt32Array) \
  V(FixedTypedArrayBase, empty_fixed_float32_array, EmptyFixedFloat32Array) \
  V(FixedTypedArrayBase, empty_fixed_float64_array, EmptyFixedFloat64Array) \
  V(FixedTypedArrayBase, empty_fixed_uint8_clamped_array, \
    EmptyFixedUint8ClampedArray) \
  V(Map, sloppy_arguments_elements_map, SloppyArgumentsElementsMap) \
  V(Map, function_context_map, FunctionContextMap) \
  V(Map, catch_context_map, CatchContextMap) \
  V(Map, with_context_map, WithContextMap) \
  V(Map, block_context_map, BlockContextMap) \
  V(Map, module_context_map, ModuleContextMap) \
  V(Map, global_context_map, GlobalContextMap) \
  V(Map, oddball_map, OddballMap) \
  V(Map, message_object_map, JSMessageObjectMap) \
  V(Map, foreign_map, ForeignMap) \
  V(HeapNumber, nan_value, NanValue) \
  V(HeapNumber, infinity_value, InfinityValue) \
  V(HeapNumber, minus_zero_value, MinusZeroValue) \
  V(Map, neander_map, NeanderMap) \
  V(JSObject, message_listeners, MessageListeners) \
  V(UnseededNumberDictionary, code_stubs, CodeStubs) \
  V(UnseededNumberDictionary, non_monomorphic_cache, NonMonomorphicCache) \
  V(PolymorphicCodeCache, polymorphic_code_cache, PolymorphicCodeCache) \
  V(Code, js_entry_code, JsEntryCode) \
  V(Code, js_construct_entry_code, JsConstructEntryCode) \
  V(FixedArray, natives_source_cache, NativesSourceCache) \
  V(Script, empty_script, EmptyScript) \
  V(NameDictionary, intrinsic_function_names, IntrinsicFunctionNames) \
  V(Cell, undefined_cell, UndefineCell) \
  V(JSObject, observation_state, ObservationState) \
  V(Map, external_map, ExternalMap) \
  V(Object, symbol_registry, SymbolRegistry) \
  V(Symbol, frozen_symbol, FrozenSymbol) \
  V(Symbol, nonexistent_symbol, NonExistentSymbol) \
  V(Symbol, elements_transition_symbol, ElementsTransitionSymbol) \
  V(SeededNumberDictionary, empty_slow_element_dictionary, \
    EmptySlowElementDictionary) \
  V(Symbol, observed_symbol, ObservedSymbol) \
  V(Symbol, uninitialized_symbol, UninitializedSymbol) \
  V(Symbol, megamorphic_symbol, MegamorphicSymbol) \
  V(FixedArray, materialized_objects, MaterializedObjects) \
  V(FixedArray, allocation_sites_scratchpad, AllocationSitesScratchpad) \
  V(JSObject, microtask_state, MicrotaskState)

// Entries in this list are limited to Smis and are not visited during GC.
#define SMI_ROOT_LIST(V) \
  V(Smi, stack_limit, StackLimit) \
  V(Smi, real_stack_limit, RealStackLimit) \
  V(Smi, last_script_id, LastScriptId) \
  V(Smi, arguments_adaptor_deopt_pc_offset, ArgumentsAdaptorDeoptPCOffset) \
  V(Smi, construct_stub_deopt_pc_offset, ConstructStubDeoptPCOffset) \
  V(Smi, getter_stub_deopt_pc_offset, GetterStubDeoptPCOffset) \
  V(Smi, setter_stub_deopt_pc_offset, SetterStubDeoptPCOffset)

#define ROOT_LIST(V) \
  STRONG_ROOT_LIST(V) \
  SMI_ROOT_LIST(V) \
  V(StringTable, string_table, StringTable)

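// Illustration (not part of the original header): ROOT_LIST and friends are
// X-macros. A client defines V to stamp out one declaration per root and then
// applies the list to it, exactly as the ROOT_ACCESSOR and STRING_ACCESSOR
// macros do further down in this file. A minimal sketch with a hypothetical
// macro and enum name:
//
//   #define HYPOTHETICAL_ROOT_INDEX(type, name, camel_name) \
//     k##camel_name##RootIndex,
//   enum HypotheticalRootIndex {
//     ROOT_LIST(HYPOTHETICAL_ROOT_INDEX)  // one enumerator per root
//     kHypotheticalRootCount
//   };
//   #undef HYPOTHETICAL_ROOT_INDEX
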
// Heap roots that are known to be immortal immovable, for which we can safely
// skip write barriers.
#define IMMORTAL_IMMOVABLE_ROOT_LIST(V) \
  V(byte_array_map) \
  V(free_space_map) \
  V(one_pointer_filler_map) \
  V(two_pointer_filler_map) \
  V(undefined_value) \
  V(the_hole_value) \
  V(null_value) \
  V(true_value) \
  V(false_value) \
  V(uninitialized_value) \
  V(cell_map) \
  V(global_property_cell_map) \
  V(shared_function_info_map) \
  V(meta_map) \
  V(heap_number_map) \
  V(native_context_map) \
  V(fixed_array_map) \
  V(code_map) \
  V(scope_info_map) \
  V(fixed_cow_array_map) \
  V(fixed_double_array_map) \
  V(constant_pool_array_map) \
  V(no_interceptor_result_sentinel) \
  V(hash_table_map) \
  V(empty_fixed_array) \
  V(empty_byte_array) \
  V(empty_descriptor_array) \
  V(empty_constant_pool_array) \
  V(arguments_marker) \
  V(symbol_map) \
  V(sloppy_arguments_elements_map) \
  V(function_context_map) \
  V(catch_context_map) \
  V(with_context_map) \
  V(block_context_map) \
  V(module_context_map) \
  V(global_context_map) \
  V(oddball_map) \
  V(message_object_map) \
  V(foreign_map) \
  V(neander_map)

#define INTERNALIZED_STRING_LIST(V) \
  V(Array_string, "Array") \
  V(Object_string, "Object") \
  V(proto_string, "__proto__") \
  V(arguments_string, "arguments") \
  V(Arguments_string, "Arguments") \
  V(call_string, "call") \
  V(apply_string, "apply") \
  V(caller_string, "caller") \
  V(boolean_string, "boolean") \
  V(Boolean_string, "Boolean") \
  V(callee_string, "callee") \
  V(constructor_string, "constructor") \
  V(dot_result_string, ".result") \
  V(dot_for_string, ".for.") \
  V(dot_iterator_string, ".iterator") \
  V(dot_generator_object_string, ".generator_object") \
  V(eval_string, "eval") \
  V(empty_string, "") \
  V(function_string, "function") \
  V(length_string, "length") \
  V(module_string, "module") \
  V(name_string, "name") \
  V(native_string, "native") \
  V(null_string, "null") \
  V(number_string, "number") \
  V(Number_string, "Number") \
  V(nan_string, "NaN") \
  V(RegExp_string, "RegExp") \
  V(source_string, "source") \
  V(global_string, "global") \
  V(ignore_case_string, "ignoreCase") \
  V(multiline_string, "multiline") \
  V(input_string, "input") \
  V(index_string, "index") \
  V(last_index_string, "lastIndex") \
  V(object_string, "object") \
  V(literals_string, "literals") \
  V(prototype_string, "prototype") \
  V(string_string, "string") \
  V(String_string, "String") \
  V(symbol_string, "symbol") \
  V(Symbol_string, "Symbol") \
  V(for_string, "for") \
  V(for_api_string, "for_api") \
  V(for_intern_string, "for_intern") \
  V(private_api_string, "private_api") \
  V(private_intern_string, "private_intern") \
  V(Date_string, "Date") \
  V(this_string, "this") \
  V(to_string_string, "toString") \
  V(char_at_string, "CharAt") \
  V(undefined_string, "undefined") \
  V(value_of_string, "valueOf") \
  V(stack_string, "stack") \
  V(toJSON_string, "toJSON") \
  V(InitializeVarGlobal_string, "InitializeVarGlobal") \
  V(InitializeConstGlobal_string, "InitializeConstGlobal") \
  V(KeyedLoadElementMonomorphic_string, \
    "KeyedLoadElementMonomorphic") \
  V(KeyedStoreElementMonomorphic_string, \
    "KeyedStoreElementMonomorphic") \
  V(stack_overflow_string, "kStackOverflowBoilerplate") \
  V(illegal_access_string, "illegal access") \
  V(illegal_execution_state_string, "illegal execution state") \
  V(get_string, "get") \
  V(set_string, "set") \
  V(map_field_string, "%map") \
  V(elements_field_string, "%elements") \
  V(length_field_string, "%length") \
  V(cell_value_string, "%cell_value") \
  V(function_class_string, "Function") \
  V(illegal_argument_string, "illegal argument") \
  V(MakeReferenceError_string, "MakeReferenceError") \
  V(MakeSyntaxError_string, "MakeSyntaxError") \
  V(MakeTypeError_string, "MakeTypeError") \
  V(illegal_return_string, "illegal_return") \
  V(illegal_break_string, "illegal_break") \
  V(illegal_continue_string, "illegal_continue") \
  V(unknown_label_string, "unknown_label") \
  V(redeclaration_string, "redeclaration") \
  V(space_string, " ") \
  V(exec_string, "exec") \
  V(zero_string, "0") \
  V(global_eval_string, "GlobalEval") \
  V(identity_hash_string, "v8::IdentityHash") \
  V(closure_string, "(closure)") \
  V(use_strict_string, "use strict") \
  V(dot_string, ".") \
  V(anonymous_function_string, "(anonymous function)") \
  V(compare_ic_string, "==") \
  V(strict_compare_ic_string, "===") \
  V(infinity_string, "Infinity") \
  V(minus_infinity_string, "-Infinity") \
  V(hidden_stack_trace_string, "v8::hidden_stack_trace") \
  V(query_colon_string, "(?:)") \
  V(Generator_string, "Generator") \
  V(throw_string, "throw") \
  V(done_string, "done") \
  V(value_string, "value") \
  V(next_string, "next") \
  V(byte_length_string, "byteLength") \
  V(byte_offset_string, "byteOffset") \
  V(buffer_string, "buffer")

// Forward declarations.
class GCTracer;
class HeapStats;
class Isolate;
class WeakObjectRetainer;


typedef String* (*ExternalStringTableUpdaterCallback)(Heap* heap,
                                                      Object** pointer);

class StoreBufferRebuilder {
 public:
  explicit StoreBufferRebuilder(StoreBuffer* store_buffer)
      : store_buffer_(store_buffer) {
  }

  void Callback(MemoryChunk* page, StoreBufferEvent event);

 private:
  StoreBuffer* store_buffer_;

  // We record in this variable how full the store buffer was when we started
  // iterating over the current page, finding pointers to new space. If the
  // store buffer overflows again we can exempt the page from the store buffer
  // by rewinding to this point instead of having to search the store buffer.
  Object*** start_of_current_page_;
  // The current page we are scanning in the store buffer iterator.
  MemoryChunk* current_page_;
};


// A queue of objects promoted during scavenge. Each object is accompanied
// by its size to avoid dereferencing a map pointer for scanning.
class PromotionQueue {
 public:
  explicit PromotionQueue(Heap* heap)
      : front_(NULL),
        rear_(NULL),
        limit_(NULL),
        emergency_stack_(0),
        heap_(heap) { }

  void Initialize();

  void Destroy() {
    ASSERT(is_empty());
    delete emergency_stack_;
    emergency_stack_ = NULL;
  }

  inline void ActivateGuardIfOnTheSamePage();

  Page* GetHeadPage() {
    return Page::FromAllocationTop(reinterpret_cast<Address>(rear_));
  }

  void SetNewLimit(Address limit) {
    if (!guard_) {
      return;
    }

    ASSERT(GetHeadPage() == Page::FromAllocationTop(limit));
    limit_ = reinterpret_cast<intptr_t*>(limit);

    if (limit_ <= rear_) {
      return;
    }

    RelocateQueueHead();
  }

  bool is_empty() {
    return (front_ == rear_) &&
        (emergency_stack_ == NULL || emergency_stack_->length() == 0);
  }

  inline void insert(HeapObject* target, int size);

  void remove(HeapObject** target, int* size) {
    ASSERT(!is_empty());
    if (front_ == rear_) {
      Entry e = emergency_stack_->RemoveLast();
      *target = e.obj_;
      *size = e.size_;
      return;
    }

    if (NewSpacePage::IsAtStart(reinterpret_cast<Address>(front_))) {
      NewSpacePage* front_page =
          NewSpacePage::FromAddress(reinterpret_cast<Address>(front_));
      ASSERT(!front_page->prev_page()->is_anchor());
      front_ =
          reinterpret_cast<intptr_t*>(front_page->prev_page()->area_end());
    }
    *target = reinterpret_cast<HeapObject*>(*(--front_));
    *size = static_cast<int>(*(--front_));
    // Assert no underflow.
    SemiSpace::AssertValidRange(reinterpret_cast<Address>(rear_),
                                reinterpret_cast<Address>(front_));
  }

 private:
  // The front of the queue is higher in the memory page chain than the rear.
  intptr_t* front_;
  intptr_t* rear_;
  intptr_t* limit_;

  bool guard_;

  static const int kEntrySizeInWords = 2;

  struct Entry {
    Entry(HeapObject* obj, int size) : obj_(obj), size_(size) { }

    HeapObject* obj_;
    int size_;
  };
  List<Entry>* emergency_stack_;

  Heap* heap_;

  void RelocateQueueHead();

  DISALLOW_COPY_AND_ASSIGN(PromotionQueue);
};
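
// Usage sketch (illustrative, not part of the original header): the scavenger
// pushes each promoted object together with its size, two words per entry
// (see kEntrySizeInWords), and later drains the queue without having to load
// each object's map to find its size:
//
//   HeapObject* target;
//   int size;
//   while (!queue->is_empty()) {
//     queue->remove(&target, &size);
//     // ... scan the 'size' bytes of 'target' for pointers to new space ...
//   }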


typedef void (*ScavengingCallback)(Map* map,
                                   HeapObject** slot,
                                   HeapObject* object);


// External strings table is a place where all external strings are
// registered. We need to keep track of such strings to properly
// finalize them.
class ExternalStringTable {
 public:
  // Registers an external string.
  inline void AddString(String* string);

  inline void Iterate(ObjectVisitor* v);

  // Restores internal invariant and gets rid of collected strings.
  // Must be called after each Iterate() that modified the strings.
  void CleanUp();

  // Destroys all allocated memory.
  void TearDown();

 private:
  explicit ExternalStringTable(Heap* heap) : heap_(heap) { }

  friend class Heap;

  inline void Verify();

  inline void AddOldString(String* string);

  // Notifies the table that only a prefix of the new list is valid.
  inline void ShrinkNewStrings(int position);

  // To speed up scavenge collections, new space strings are kept
  // separate from old space strings.
  List<Object*> new_space_strings_;
  List<Object*> old_space_strings_;

  Heap* heap_;

  DISALLOW_COPY_AND_ASSIGN(ExternalStringTable);
};
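
// Usage sketch (illustrative, not part of the original header; the table is
// private to Heap, which registers strings on behalf of the embedder):
//
//   String* result = ...;  // freshly allocated external string
//   heap->external_string_table_.AddString(result);
//   // A later GC calls Iterate()/CleanUp() on the table, and
//   // Heap::FinalizeExternalString() releases each dead string's resource.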


enum ArrayStorageAllocationMode {
  DONT_INITIALIZE_ARRAY_ELEMENTS,
  INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE
};


class Heap {
 public:
  // Configure heap size before setup. Return false if the heap has been
  // set up already.
  bool ConfigureHeap(int max_semispace_size,
                     intptr_t max_old_gen_size,
                     intptr_t max_executable_size);
  bool ConfigureHeapDefault();

  // Prepares the heap, setting up memory areas that are needed in the isolate
  // without actually creating any objects.
  bool SetUp();

  // Bootstraps the object heap with the core set of objects required to run.
  // Returns whether it succeeded.
  bool CreateHeapObjects();

  // Destroys all memory allocated by the heap.
  void TearDown();

  // Set the stack limit in the roots_ array. Some architectures generate
  // code that looks here, because it is faster than loading from the static
  // jslimit_/real_jslimit_ variable in the StackGuard.
  void SetStackLimits();

  // Returns whether SetUp has been called.
  bool HasBeenSetUp();

  // Returns the maximum amount of memory reserved for the heap. For
  // the young generation, we reserve 4 times the amount needed for a
  // semi space. The young generation consists of two semi spaces and
  // we reserve twice the amount needed for those in order to ensure
  // that new space can be aligned to its size.
  intptr_t MaxReserved() {
    return 4 * reserved_semispace_size_ + max_old_generation_size_;
  }
  int MaxSemiSpaceSize() { return max_semispace_size_; }
  int ReservedSemiSpaceSize() { return reserved_semispace_size_; }
  int InitialSemiSpaceSize() { return initial_semispace_size_; }
  intptr_t MaxOldGenerationSize() { return max_old_generation_size_; }
  intptr_t MaxExecutableSize() { return max_executable_size_; }

  // Returns the capacity of the heap in bytes w/o growing. Heap grows when
  // more spaces are needed until it reaches the limit.
  intptr_t Capacity();

  // Returns the amount of memory currently committed for the heap.
  intptr_t CommittedMemory();

  // Returns the amount of executable memory currently committed for the heap.
  intptr_t CommittedMemoryExecutable();

  // Returns the amount of physical memory currently committed for the heap.
  size_t CommittedPhysicalMemory();

  // Returns the maximum amount of memory ever committed for the heap.
  intptr_t MaximumCommittedMemory() { return maximum_committed_; }

  // Updates the maximum committed memory for the heap. Should be called
  // whenever a space grows.
  void UpdateMaximumCommitted();

  // Returns the available bytes in space w/o growing.
  // Heap doesn't guarantee that it can allocate an object that requires
  // all available bytes. Check MaxHeapObjectSize() instead.
  intptr_t Available();

  // Returns the size of all objects residing in the heap.
  intptr_t SizeOfObjects();

  // Return the starting address and a mask for the new space. And-masking an
  // address with the mask will result in the start address of the new space
  // for all addresses in either semispace.
  Address NewSpaceStart() { return new_space_.start(); }
  uintptr_t NewSpaceMask() { return new_space_.mask(); }
  Address NewSpaceTop() { return new_space_.top(); }

  NewSpace* new_space() { return &new_space_; }
  OldSpace* old_pointer_space() { return old_pointer_space_; }
  OldSpace* old_data_space() { return old_data_space_; }
  OldSpace* code_space() { return code_space_; }
  MapSpace* map_space() { return map_space_; }
  CellSpace* cell_space() { return cell_space_; }
  PropertyCellSpace* property_cell_space() {
    return property_cell_space_;
  }
  LargeObjectSpace* lo_space() { return lo_space_; }
  PagedSpace* paged_space(int idx) {
    switch (idx) {
      case OLD_POINTER_SPACE:
        return old_pointer_space();
      case OLD_DATA_SPACE:
        return old_data_space();
      case MAP_SPACE:
        return map_space();
      case CELL_SPACE:
        return cell_space();
      case PROPERTY_CELL_SPACE:
        return property_cell_space();
      case CODE_SPACE:
        return code_space();
      case NEW_SPACE:
      case LO_SPACE:
        UNREACHABLE();
    }
    return NULL;
  }
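
  // Illustration (not part of the original header): paged_space() lets
  // callers treat the paged spaces uniformly, e.g. a sketch of a loop over
  // them (the FIRST_PAGED_SPACE/LAST_PAGED_SPACE bounds are assumed here):
  //
  //   for (int i = FIRST_PAGED_SPACE; i <= LAST_PAGED_SPACE; i++) {
  //     PagedSpace* space = heap->paged_space(i);
  //     // ... e.g. accumulate space->CommittedMemory() ...
  //   }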

  bool always_allocate() { return always_allocate_scope_depth_ != 0; }
  Address always_allocate_scope_depth_address() {
    return reinterpret_cast<Address>(&always_allocate_scope_depth_);
  }
  bool linear_allocation() {
    return linear_allocation_scope_depth_ != 0;
  }

  Address* NewSpaceAllocationTopAddress() {
    return new_space_.allocation_top_address();
  }
  Address* NewSpaceAllocationLimitAddress() {
    return new_space_.allocation_limit_address();
  }

  Address* OldPointerSpaceAllocationTopAddress() {
    return old_pointer_space_->allocation_top_address();
  }
  Address* OldPointerSpaceAllocationLimitAddress() {
    return old_pointer_space_->allocation_limit_address();
  }

  Address* OldDataSpaceAllocationTopAddress() {
    return old_data_space_->allocation_top_address();
  }
  Address* OldDataSpaceAllocationLimitAddress() {
    return old_data_space_->allocation_limit_address();
  }

  // Allocates and initializes a new JavaScript object based on a
  // constructor.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // If allocation_site is non-null, then a memento is emitted after the object
  // that points to the site.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateJSObject(
      JSFunction* constructor,
      PretenureFlag pretenure = NOT_TENURED,
      AllocationSite* allocation_site = NULL);

  MUST_USE_RESULT MaybeObject* AllocateJSModule(Context* context,
                                                ScopeInfo* scope_info);

  // Allocates a JSArray with no elements.
  MUST_USE_RESULT MaybeObject* AllocateEmptyJSArray(
      ElementsKind elements_kind,
      PretenureFlag pretenure = NOT_TENURED) {
    return AllocateJSArrayAndStorage(elements_kind, 0, 0,
                                     DONT_INITIALIZE_ARRAY_ELEMENTS,
                                     pretenure);
  }

  // Allocates a JSArray with a specified length but elements that are left
  // uninitialized.
  MUST_USE_RESULT MaybeObject* AllocateJSArrayAndStorage(
      ElementsKind elements_kind,
      int length,
      int capacity,
      ArrayStorageAllocationMode mode = DONT_INITIALIZE_ARRAY_ELEMENTS,
      PretenureFlag pretenure = NOT_TENURED);

  MUST_USE_RESULT MaybeObject* AllocateJSArrayStorage(
      JSArray* array,
      int length,
      int capacity,
      ArrayStorageAllocationMode mode = DONT_INITIALIZE_ARRAY_ELEMENTS);

  // Allocates a JSArray with the given elements.
  MUST_USE_RESULT MaybeObject* AllocateJSArrayWithElements(
      FixedArrayBase* array_base,
      ElementsKind elements_kind,
      int length,
      PretenureFlag pretenure = NOT_TENURED);

  // Returns a deep copy of the JavaScript object.
  // Properties and elements are copied too.
  // Returns failure if allocation failed.
  // Optionally takes an AllocationSite to be appended in an AllocationMemento.
  MUST_USE_RESULT MaybeObject* CopyJSObject(JSObject* source,
                                            AllocationSite* site = NULL);

  // Allocates a JS ArrayBuffer object.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateJSArrayBuffer();

  // Allocates a Harmony proxy or function proxy.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateJSProxy(Object* handler,
                                               Object* prototype);

  MUST_USE_RESULT MaybeObject* AllocateJSFunctionProxy(Object* handler,
                                                       Object* call_trap,
                                                       Object* construct_trap,
                                                       Object* prototype);

  // Reinitialize a JSReceiver into an (empty) JS object of respective type
  // and size, but keeping the original prototype. The receiver must have at
  // least the size of the new object. The object is reinitialized and behaves
  // as an object that has been freshly allocated.
  // Returns failure if an error occurred, otherwise object.
  MUST_USE_RESULT MaybeObject* ReinitializeJSReceiver(JSReceiver* object,
                                                      InstanceType type,
                                                      int size);

  // Reinitialize a JSGlobalProxy based on a constructor. The object
  // must have the same size as objects allocated using the
  // constructor. The object is reinitialized and behaves as an
  // object that has been freshly allocated using the constructor.
  MUST_USE_RESULT MaybeObject* ReinitializeJSGlobalProxy(
      JSFunction* constructor, JSGlobalProxy* global);

  // Allocates and initializes a new JavaScript object based on a map.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Passing an allocation site means that a memento will be created that
  // points to the site.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateJSObjectFromMap(
      Map* map,
      PretenureFlag pretenure = NOT_TENURED,
      bool alloc_props = true,
      AllocationSite* allocation_site = NULL);

  // Allocates a heap object based on the map.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this function does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* Allocate(Map* map, AllocationSpace space,
                                        AllocationSite* allocation_site = NULL);

  // Allocates a JS Map in the heap.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this function does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateMap(
      InstanceType instance_type,
      int instance_size,
      ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND);

  // Allocates a partial map for bootstrapping.
  MUST_USE_RESULT MaybeObject* AllocatePartialMap(InstanceType instance_type,
                                                  int instance_size);

  // Allocates an empty code cache.
  MUST_USE_RESULT MaybeObject* AllocateCodeCache();

  // Allocates a serialized scope info.
  MUST_USE_RESULT MaybeObject* AllocateScopeInfo(int length);

  // Allocates an External object for v8's external API.
  MUST_USE_RESULT MaybeObject* AllocateExternal(void* value);

  // Allocates an empty PolymorphicCodeCache.
  MUST_USE_RESULT MaybeObject* AllocatePolymorphicCodeCache();

  // Allocates a pre-tenured empty AccessorPair.
  MUST_USE_RESULT MaybeObject* AllocateAccessorPair();

  // Allocates an empty TypeFeedbackInfo.
  MUST_USE_RESULT MaybeObject* AllocateTypeFeedbackInfo();

  // Allocates an AliasedArgumentsEntry.
  MUST_USE_RESULT MaybeObject* AllocateAliasedArgumentsEntry(int slot);

  // Clear the Instanceof cache (used when a prototype changes).
  inline void ClearInstanceofCache();

  // Iterates the whole code space to clear all ICs of the given kind.
  void ClearAllICsByKind(Code::Kind kind);

  // For use during bootup.
  void RepairFreeListsAfterBoot();

  // Allocates and fully initializes a String. There are two String
  // encodings: ASCII and two byte. One should choose between the three string
  // allocation functions based on the encoding of the string buffer used to
  // initialize the string.
  //  - ...FromAscii initializes the string from a buffer that is ASCII
  //    encoded (it does not check that the buffer is ASCII encoded) and the
  //    result will be ASCII encoded.
  //  - ...FromUTF8 initializes the string from a buffer that is UTF-8
  //    encoded. If the characters are all single-byte characters, the
  //    result will be ASCII encoded, otherwise it will be converted to two
  //    byte.
  //  - ...FromTwoByte initializes the string from a buffer that is two-byte
  //    encoded. If the characters are all single-byte characters, the
  //    result will be converted to ASCII, otherwise it will be left as
  //    two-byte.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateStringFromOneByte(
      Vector<const uint8_t> str,
      PretenureFlag pretenure = NOT_TENURED);
  // TODO(dcarney): remove this function.
  MUST_USE_RESULT inline MaybeObject* AllocateStringFromOneByte(
      Vector<const char> str,
      PretenureFlag pretenure = NOT_TENURED) {
    return AllocateStringFromOneByte(Vector<const uint8_t>::cast(str),
                                     pretenure);
  }
  MUST_USE_RESULT inline MaybeObject* AllocateStringFromUtf8(
      Vector<const char> str,
      PretenureFlag pretenure = NOT_TENURED);
  MUST_USE_RESULT MaybeObject* AllocateStringFromUtf8Slow(
      Vector<const char> str,
      int non_ascii_start,
      PretenureFlag pretenure = NOT_TENURED);
  MUST_USE_RESULT MaybeObject* AllocateStringFromTwoByte(
      Vector<const uc16> str,
      PretenureFlag pretenure = NOT_TENURED);

  // Allocates an internalized string in old space based on the character
  // stream. Returns Failure::RetryAfterGC(requested_bytes, space) if the
  // allocation failed.
  // Please note this function does not perform a garbage collection.
  MUST_USE_RESULT inline MaybeObject* AllocateInternalizedStringFromUtf8(
      Vector<const char> str,
      int chars,
      uint32_t hash_field);

  MUST_USE_RESULT inline MaybeObject* AllocateOneByteInternalizedString(
      Vector<const uint8_t> str,
      uint32_t hash_field);

  MUST_USE_RESULT inline MaybeObject* AllocateTwoByteInternalizedString(
      Vector<const uc16> str,
      uint32_t hash_field);

  template<typename T>
  static inline bool IsOneByte(T t, int chars);

  template<typename T>
  MUST_USE_RESULT inline MaybeObject* AllocateInternalizedStringImpl(
      T t, int chars, uint32_t hash_field);

  template<bool is_one_byte, typename T>
  MUST_USE_RESULT MaybeObject* AllocateInternalizedStringImpl(
      T t, int chars, uint32_t hash_field);

  // Allocates and partially initializes a String. There are two String
  // encodings: ASCII and two byte. These functions allocate a string of the
  // given length and set its map and length fields. The characters of the
  // string are uninitialized.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateRawOneByteString(
      int length,
      PretenureFlag pretenure = NOT_TENURED);
  MUST_USE_RESULT MaybeObject* AllocateRawTwoByteString(
      int length,
      PretenureFlag pretenure = NOT_TENURED);

  // Computes a single character string where the character has code.
  // A cache is used for ASCII codes.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed. Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* LookupSingleCharacterStringFromCode(
      uint16_t code);

  // Allocate a byte array of the specified length.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateByteArray(
      int length,
      PretenureFlag pretenure = NOT_TENURED);

  // Allocates an external array of the specified length and type.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateExternalArray(
      int length,
      ExternalArrayType array_type,
      void* external_pointer,
      PretenureFlag pretenure);

  // Allocates a fixed typed array of the specified length and type.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateFixedTypedArray(
      int length,
      ExternalArrayType array_type,
      PretenureFlag pretenure);

  // Allocate a symbol in old space.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateSymbol();
  MUST_USE_RESULT MaybeObject* AllocatePrivateSymbol();

  // Allocate a tenured AllocationSite. Its payload is null.
  MUST_USE_RESULT MaybeObject* AllocateAllocationSite();

  // Allocates a fixed array initialized with undefined values.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateFixedArray(
      int length,
      PretenureFlag pretenure = NOT_TENURED);

  // Allocates an uninitialized fixed array. It must be filled by the caller.
  //
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateUninitializedFixedArray(int length);

  // Move len elements within a given array from src_index index to dst_index
  // index.
  void MoveElements(FixedArray* array, int dst_index, int src_index, int len);

  // Make a copy of src and return it. Returns
  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
  MUST_USE_RESULT inline MaybeObject* CopyFixedArray(FixedArray* src);

  // Make a copy of src and return it. Returns
  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
  MUST_USE_RESULT MaybeObject* CopyAndTenureFixedCOWArray(FixedArray* src);

  // Make a copy of src, set the map, and return the copy. Returns
  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
  MUST_USE_RESULT MaybeObject* CopyFixedArrayWithMap(FixedArray* src, Map* map);

  // Make a copy of src and return it. Returns
  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
  MUST_USE_RESULT inline MaybeObject* CopyFixedDoubleArray(
      FixedDoubleArray* src);

  // Make a copy of src, set the map, and return the copy. Returns
  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
  MUST_USE_RESULT MaybeObject* CopyFixedDoubleArrayWithMap(
      FixedDoubleArray* src, Map* map);

  // Make a copy of src and return it. Returns
  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
  MUST_USE_RESULT inline MaybeObject* CopyConstantPoolArray(
      ConstantPoolArray* src);

  // Make a copy of src, set the map, and return the copy. Returns
  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
  MUST_USE_RESULT MaybeObject* CopyConstantPoolArrayWithMap(
      ConstantPoolArray* src, Map* map);

  // Allocates a fixed array initialized with the hole values.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateFixedArrayWithHoles(
      int length,
      PretenureFlag pretenure = NOT_TENURED);

  MUST_USE_RESULT MaybeObject* AllocateConstantPoolArray(
      int number_of_int64_entries,
      int number_of_code_ptr_entries,
      int number_of_heap_ptr_entries,
      int number_of_int32_entries);

  // Allocates a fixed double array with uninitialized values. Returns
  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateUninitializedFixedDoubleArray(
      int length,
      PretenureFlag pretenure = NOT_TENURED);

  // Allocates a fixed double array with hole values. Returns
  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateFixedDoubleArrayWithHoles(
      int length,
      PretenureFlag pretenure = NOT_TENURED);

  // AllocateHashTable is identical to AllocateFixedArray except
  // that the resulting object has hash_table_map as map.
  MUST_USE_RESULT MaybeObject* AllocateHashTable(
      int length, PretenureFlag pretenure = NOT_TENURED);

  // Allocate a native (but otherwise uninitialized) context.
  MUST_USE_RESULT MaybeObject* AllocateNativeContext();

  // Allocate a global context.
  MUST_USE_RESULT MaybeObject* AllocateGlobalContext(JSFunction* function,
                                                     ScopeInfo* scope_info);

  // Allocate a module context.
  MUST_USE_RESULT MaybeObject* AllocateModuleContext(ScopeInfo* scope_info);

  // Allocate a function context.
  MUST_USE_RESULT MaybeObject* AllocateFunctionContext(int length,
                                                       JSFunction* function);

  // Allocate a catch context.
  MUST_USE_RESULT MaybeObject* AllocateCatchContext(JSFunction* function,
                                                    Context* previous,
                                                    String* name,
                                                    Object* thrown_object);
  // Allocate a 'with' context.
  MUST_USE_RESULT MaybeObject* AllocateWithContext(JSFunction* function,
                                                   Context* previous,
                                                   JSReceiver* extension);

  // Allocate a block context.
  MUST_USE_RESULT MaybeObject* AllocateBlockContext(JSFunction* function,
                                                    Context* previous,
                                                    ScopeInfo* info);

  // Allocates a new utility object in the old generation.
  MUST_USE_RESULT MaybeObject* AllocateStruct(InstanceType type);

  // Allocates a function initialized with a shared part.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateFunction(
      Map* function_map,
      SharedFunctionInfo* shared,
      Object* prototype,
      PretenureFlag pretenure = TENURED);

  // Sloppy mode arguments object size.
  static const int kSloppyArgumentsObjectSize =
      JSObject::kHeaderSize + 2 * kPointerSize;
  // Strict mode arguments has no callee so it is smaller.
  static const int kStrictArgumentsObjectSize =
      JSObject::kHeaderSize + 1 * kPointerSize;
  // Indices for direct access into argument objects.
  static const int kArgumentsLengthIndex = 0;
  // callee is only valid in sloppy mode.
  static const int kArgumentsCalleeIndex = 1;

  // Allocates an arguments object - optionally with an elements array.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateArgumentsObject(
      Object* callee, int length);

  // Same as NewNumberFromDouble, but may return a preallocated/immutable
  // number object (e.g., minus_zero_value_, nan_value_).
  MUST_USE_RESULT MaybeObject* NumberFromDouble(
      double value, PretenureFlag pretenure = NOT_TENURED);

  // Allocates a HeapNumber from value.
  MUST_USE_RESULT MaybeObject* AllocateHeapNumber(
      double value, PretenureFlag pretenure = NOT_TENURED);

  // Converts an int into either a Smi or a HeapNumber object.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT inline MaybeObject* NumberFromInt32(
      int32_t value, PretenureFlag pretenure = NOT_TENURED);

  // Converts an unsigned int into either a Smi or a HeapNumber object.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT inline MaybeObject* NumberFromUint32(
      uint32_t value, PretenureFlag pretenure = NOT_TENURED);

  // Allocates a new foreign object.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateForeign(
      Address address, PretenureFlag pretenure = NOT_TENURED);

  // Allocates a new SharedFunctionInfo object.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateSharedFunctionInfo(Object* name);

  // Allocates a new JSMessageObject object.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note that this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateJSMessageObject(
      String* type,
      JSArray* arguments,
      int start_position,
      int end_position,
      Object* script,
      Object* stack_frames);

  // Allocate a new external string object, which is backed by a string
  // resource that resides outside the V8 heap.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateExternalStringFromAscii(
      const ExternalAsciiString::Resource* resource);
  MUST_USE_RESULT MaybeObject* AllocateExternalStringFromTwoByte(
      const ExternalTwoByteString::Resource* resource);

  // Finalizes an external string by deleting the associated external
  // data and clearing the resource pointer.
  inline void FinalizeExternalString(String* string);

  // Allocates an uninitialized object. The memory is non-executable if the
  // hardware and OS allow.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this function does not perform a garbage collection.
  MUST_USE_RESULT inline MaybeObject* AllocateRaw(int size_in_bytes,
                                                  AllocationSpace space,
                                                  AllocationSpace retry_space);

  // Initialize a filler object to keep the ability to iterate over the heap
  // when shortening objects.
  void CreateFillerObjectAt(Address addr, int size);

  bool CanMoveObjectStart(HeapObject* object);

  enum InvocationMode { FROM_GC, FROM_MUTATOR };

  // Maintain marking consistency for IncrementalMarking.
  void AdjustLiveBytes(Address address, int by, InvocationMode mode);

  // Makes a new native code object.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed. On success, the pointer to the Code object is stored in the
  // self_reference. This allows generated code to reference its own Code
  // object by containing this pointer.
  // Please note this function does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* CreateCode(
      const CodeDesc& desc,
      Code::Flags flags,
      Handle<Object> self_reference,
      bool immovable = false,
      bool crankshafted = false,
      int prologue_offset = Code::kPrologueOffsetNotSet);

  MUST_USE_RESULT MaybeObject* CopyCode(Code* code);

  // Copy the code and scope info part of the code object, but insert
  // the provided data as the relocation information.
  MUST_USE_RESULT MaybeObject* CopyCode(Code* code, Vector<byte> reloc_info);

  // Finds the internalized copy for string in the string table.
  // If not found, a new string is added to the table and returned.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if allocation
  // failed.
  // Please note this function does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* InternalizeUtf8String(const char* str) {
    return InternalizeUtf8String(CStrVector(str));
  }
  MUST_USE_RESULT MaybeObject* InternalizeUtf8String(Vector<const char> str);

  MUST_USE_RESULT MaybeObject* InternalizeString(String* str);
  MUST_USE_RESULT MaybeObject* InternalizeStringWithKey(HashTableKey* key);

  bool InternalizeStringIfExists(String* str, String** result);
  bool InternalizeTwoCharsStringIfExists(String* str, String** result);

  // Compute the matching internalized string map for a string if possible.
  // NULL is returned if string is in new space or not flattened.
  Map* InternalizedStringMapForString(String* str);

  // Tries to flatten a string before compare operation.
  //
  // Returns a failure in case it was decided that flattening was
  // necessary and failed. Note, if flattening is not necessary the
  // string might stay non-flat even when a failure is not returned.
  //
  // Please note this function does not perform a garbage collection.
  MUST_USE_RESULT inline MaybeObject* PrepareForCompare(String* str);

  // Converts the given boolean condition to JavaScript boolean value.
  inline Object* ToBoolean(bool condition);

  // Performs garbage collection operation.
  // Returns whether there is a chance that another major GC could
  // collect more garbage.
  inline bool CollectGarbage(
      AllocationSpace space,
      const char* gc_reason = NULL,
      const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);

  static const int kNoGCFlags = 0;
  static const int kSweepPreciselyMask = 1;
  static const int kReduceMemoryFootprintMask = 2;
  static const int kAbortIncrementalMarkingMask = 4;

  // Making the heap iterable requires us to sweep precisely and abort any
  // incremental marking as well.
  static const int kMakeHeapIterableMask =
      kSweepPreciselyMask | kAbortIncrementalMarkingMask;

  // Performs a full garbage collection. If (flags & kMakeHeapIterableMask) is
  // non-zero, then the slower precise sweeper is used, which leaves the heap
  // in a state where we can iterate over the heap visiting all objects.
  void CollectAllGarbage(
      int flags,
      const char* gc_reason = NULL,
      const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);

  // Last hope GC, should try to squeeze as much as possible.
  void CollectAllAvailableGarbage(const char* gc_reason = NULL);

  // Check whether the heap is currently iterable.
  bool IsHeapIterable();

  // Ensure that we have swept all spaces in such a way that we can iterate
  // over all objects. May cause a GC.
  void EnsureHeapIsIterable();

  // Notify the heap that a context has been disposed.
  int NotifyContextDisposed();

  void increment_scan_on_scavenge_pages() {
    scan_on_scavenge_pages_++;
    if (FLAG_gc_verbose) {
      PrintF("Scan-on-scavenge pages: %d\n", scan_on_scavenge_pages_);
    }
  }

  void decrement_scan_on_scavenge_pages() {
    scan_on_scavenge_pages_--;
    if (FLAG_gc_verbose) {
      PrintF("Scan-on-scavenge pages: %d\n", scan_on_scavenge_pages_);
    }
  }

  PromotionQueue* promotion_queue() { return &promotion_queue_; }

#ifdef DEBUG
  // Utility used with flag gc-greedy.
  void GarbageCollectionGreedyCheck();
#endif

  void AddGCPrologueCallback(v8::Isolate::GCPrologueCallback callback,
                             GCType gc_type_filter,
                             bool pass_isolate = true);
  void RemoveGCPrologueCallback(v8::Isolate::GCPrologueCallback callback);

  void AddGCEpilogueCallback(v8::Isolate::GCEpilogueCallback callback,
                             GCType gc_type_filter,
                             bool pass_isolate = true);
  void RemoveGCEpilogueCallback(v8::Isolate::GCEpilogueCallback callback);

  // Heap root getters. We have versions with and without type::cast() here.
  // You can't use type::cast during GC because the assert fails.
  // TODO(1490): Try removing the unchecked accessors, now that GC marking does
  // not corrupt the map.
#define ROOT_ACCESSOR(type, name, camel_name) \
  type* name() { \
    return type::cast(roots_[k##camel_name##RootIndex]); \
  } \
  type* raw_unchecked_##name() { \
    return reinterpret_cast<type*>(roots_[k##camel_name##RootIndex]); \
  }
  ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR

  // Utility type maps.
#define STRUCT_MAP_ACCESSOR(NAME, Name, name) \
  Map* name##_map() { \
    return Map::cast(roots_[k##Name##MapRootIndex]); \
  }
  STRUCT_LIST(STRUCT_MAP_ACCESSOR)
#undef STRUCT_MAP_ACCESSOR

#define STRING_ACCESSOR(name, str) String* name() { \
    return String::cast(roots_[k##name##RootIndex]); \
  }
  INTERNALIZED_STRING_LIST(STRING_ACCESSOR)
#undef STRING_ACCESSOR

  // The hidden_string is special because it is the empty string, but does
  // not match the empty string.
  String* hidden_string() { return hidden_string_; }

  void set_native_contexts_list(Object* object) {
    native_contexts_list_ = object;
  }
  Object* native_contexts_list() { return native_contexts_list_; }

  void set_array_buffers_list(Object* object) {
    array_buffers_list_ = object;
  }
  Object* array_buffers_list() { return array_buffers_list_; }

  void set_allocation_sites_list(Object* object) {
    allocation_sites_list_ = object;
  }
  Object* allocation_sites_list() { return allocation_sites_list_; }
  Object** allocation_sites_list_address() { return &allocation_sites_list_; }

  Object* weak_object_to_code_table() { return weak_object_to_code_table_; }

  // Number of mark-sweeps.
  unsigned int ms_count() { return ms_count_; }

  // Iterates over all roots in the heap.
  void IterateRoots(ObjectVisitor* v, VisitMode mode);
  // Iterates over all strong roots in the heap.
  void IterateStrongRoots(ObjectVisitor* v, VisitMode mode);
  // Iterates over entries in the smi roots list. Only interesting to the
  // serializer/deserializer, since GC does not care about smis.
  void IterateSmiRoots(ObjectVisitor* v);
  // Iterates over all the other roots in the heap.
  void IterateWeakRoots(ObjectVisitor* v, VisitMode mode);

  // Iterate pointers to from semispace of new space found in memory interval
  // from start to end.
  void IterateAndMarkPointersToFromSpace(Address start,
                                         Address end,
                                         ObjectSlotCallback callback);

  // Returns whether the object resides in new space.
  inline bool InNewSpace(Object* object);
  inline bool InNewSpace(Address address);
  inline bool InNewSpacePage(Address address);
  inline bool InFromSpace(Object* object);
  inline bool InToSpace(Object* object);

  // Returns whether the object resides in old pointer space.
  inline bool InOldPointerSpace(Address address);
  inline bool InOldPointerSpace(Object* object);

  // Returns whether the object resides in old data space.
  inline bool InOldDataSpace(Address address);
  inline bool InOldDataSpace(Object* object);

  // Checks whether an address/object is in the heap (including auxiliary
  // area and unused area).
  bool Contains(Address addr);
  bool Contains(HeapObject* value);

  // Checks whether an address/object is in a space.
  // Currently used by tests, serialization and heap verification only.
  bool InSpace(Address addr, AllocationSpace space);
  bool InSpace(HeapObject* value, AllocationSpace space);

  // Finds out which space an object should get promoted to based on its type.
  inline OldSpace* TargetSpace(HeapObject* object);
  static inline AllocationSpace TargetSpaceId(InstanceType type);

  // Checks whether the given object is allowed to be migrated from its
  // current space into the given destination space. Used for debugging.
  inline bool AllowedToBeMigrated(HeapObject* object, AllocationSpace dest);

  // Sets the stub_cache_ (only used when expanding the dictionary).
  void public_set_code_stubs(UnseededNumberDictionary* value) {
    roots_[kCodeStubsRootIndex] = value;
  }

  // Support for computing object sizes for old objects during GCs. Returns
  // a function that is guaranteed to be safe for computing object sizes in
  // the current GC phase.
  HeapObjectCallback GcSafeSizeOfOldObjectFunction() {
    return gc_safe_size_of_old_object_;
  }

  // Sets the non_monomorphic_cache_ (only used when expanding the dictionary).
  void public_set_non_monomorphic_cache(UnseededNumberDictionary* value) {
    roots_[kNonMonomorphicCacheRootIndex] = value;
  }

  void public_set_empty_script(Script* script) {
    roots_[kEmptyScriptRootIndex] = script;
  }

  void public_set_store_buffer_top(Address* top) {
    roots_[kStoreBufferTopRootIndex] = reinterpret_cast<Smi*>(top);
  }

  void public_set_materialized_objects(FixedArray* objects) {
    roots_[kMaterializedObjectsRootIndex] = objects;
  }

  // Generated code can embed this address to get access to the roots.
  Object** roots_array_start() { return roots_; }

  Address* store_buffer_top_address() {
    return reinterpret_cast<Address*>(&roots_[kStoreBufferTopRootIndex]);
  }

  // Get address of native contexts list for serialization support.
  Object** native_contexts_list_address() {
    return &native_contexts_list_;
  }

#ifdef VERIFY_HEAP
  // Verify the heap is in its normal state before or after a GC.
  void Verify();


  bool weak_embedded_objects_verification_enabled() {
    return no_weak_object_verification_scope_depth_ == 0;
  }
#endif

#ifdef DEBUG
  void Print();
  void PrintHandles();

  void OldPointerSpaceCheckStoreBuffer();
  void MapSpaceCheckStoreBuffer();
  void LargeObjectSpaceCheckStoreBuffer();

  // Report heap statistics.
  void ReportHeapStatistics(const char* title);
  void ReportCodeStatistics(const char* title);
#endif

  // Zapping is needed for verify heap, and always done in debug builds.
  static inline bool ShouldZapGarbage() {
#ifdef DEBUG
    return true;
#else
#ifdef VERIFY_HEAP
    return FLAG_verify_heap;
#else
    return false;
#endif
#endif
  }
1497 
1498  // Print short heap statistics.
1499  void PrintShortHeapStatistics();
1500 
1501  // Write barrier support for address[offset] = o.
1502  INLINE(void RecordWrite(Address address, int offset));
1503 
1504  // Write barrier support for address[start : start + len[ = o.
1505  INLINE(void RecordWrites(Address address, int start, int len));
1506 
1507  enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT };
1508  inline HeapState gc_state() { return gc_state_; }
1509 
1510  inline bool IsInGCPostProcessing() { return gc_post_processing_depth_ > 0; }
1511 
1512 #ifdef DEBUG
1513  void set_allocation_timeout(int timeout) {
1514  allocation_timeout_ = timeout;
1515  }
1516 
1517  void TracePathToObjectFrom(Object* target, Object* root);
1518  void TracePathToObject(Object* target);
1519  void TracePathToGlobal();
1520 #endif
1521 
1522  // Callback function passed to Heap::Iterate etc. Copies an object if
1523  // necessary; the object might be promoted to an old space. The caller must
1524  // ensure the precondition that the object is (a) a heap object and (b) in
1525  // the heap's from space.
1526  static inline void ScavengePointer(HeapObject** p);
1527  static inline void ScavengeObject(HeapObject** p, HeapObject* object);
1528 
1529  enum ScratchpadSlotMode {
1530  IGNORE_SCRATCHPAD_SLOT,
1531  RECORD_SCRATCHPAD_SLOT
1532  };
1533 
1534  // An object may have an AllocationSite associated with it through a trailing
1535  // AllocationMemento. Its feedback should be updated when objects are found
1536  // in the heap.
1537  static inline void UpdateAllocationSiteFeedback(
1538  HeapObject* object, ScratchpadSlotMode mode);
1539 
1540  // Support for partial snapshots. After calling this we have a linear
1541  // space to write objects in each space.
1542  void ReserveSpace(int *sizes, Address* addresses);
1543 
1544  //
1545  // Support for the API.
1546  //
1547 
1548  bool CreateApiObjects();
1549 
1550  // Attempt to find the number in a small cache. If we find it, return
1551  // the string representation of the number. Otherwise return undefined.
1552  Object* GetNumberStringCache(Object* number);
1553 
1554  // Update the cache with a new number-string pair.
1555  void SetNumberStringCache(Object* number, String* str);
1556 
1557  // Adjusts the amount of registered external memory.
1558  // Returns the adjusted value.
1559  inline int64_t AdjustAmountOfExternalAllocatedMemory(
1560  int64_t change_in_bytes);
1561 
1562  // This is only needed for testing high promotion mode.
1563  inline void SetNewSpaceHighPromotionModeActive(bool mode) {
1564  new_space_high_promotion_mode_active_ = mode;
1565  }
1566 
1567  // Returns the allocation mode (pre-tenuring) based on observed promotion
1568  // rates of previous collections.
1569  inline PretenureFlag GetPretenureMode() {
1570  return FLAG_pretenuring && new_space_high_promotion_mode_active_
1571  ? TENURED : NOT_TENURED;
1572  }
1573 
1574  inline Address* NewSpaceHighPromotionModeActiveAddress() {
1575  return reinterpret_cast<Address*>(&new_space_high_promotion_mode_active_);
1576  }
1577 
1578  inline intptr_t PromotedTotalSize() {
1579  int64_t total = PromotedSpaceSizeOfObjects() + PromotedExternalMemorySize();
1580  if (total > kMaxInt) return static_cast<intptr_t>(kMaxInt);
1581  if (total < 0) return 0;
1582  return static_cast<intptr_t>(total);
1583  }
1584 
1585  inline intptr_t OldGenerationSpaceAvailable() {
1586  return old_generation_allocation_limit_ - PromotedTotalSize();
1587  }
1588 
1589  intptr_t OldGenerationCapacityAvailable() {
1590  return max_old_generation_size_ - PromotedTotalSize();
1591  }
1592 
1593  static const intptr_t kMinimumOldGenerationAllocationLimit =
1594  8 * (Page::kPageSize > MB ? Page::kPageSize : MB);
1595 
1596  intptr_t OldGenerationAllocationLimit(intptr_t old_gen_size) {
1597  const int divisor = FLAG_stress_compaction ? 10 : 1;
1598  intptr_t limit =
1599  Max(old_gen_size + old_gen_size / divisor,
1600  kMinimumOldGenerationAllocationLimit);
1601  limit += new_space_.Capacity();
1602  intptr_t halfway_to_the_max = (old_gen_size + max_old_generation_size_) / 2;
1603  return Min(limit, halfway_to_the_max);
1604  }
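The limit formula above roughly doubles the old generation (or grows it by only 10% under --stress-compaction), adds headroom for a full new-space evacuation, and caps the result halfway to the configured maximum. A minimal standalone sketch with illustrative numbers; the constants are stand-ins for the class members, not values read from a real heap:

  #include <algorithm>
  #include <cstdint>
  #include <cstdio>

  int64_t OldGenAllocationLimitSketch(int64_t old_gen_size) {
    const int64_t kMinimumLimit = 8 * (1LL << 20);      // assumed 8 MB floor
    const int64_t kMaxOldGenerationSize = 700LL << 20;  // assumed old-space maximum
    const int64_t kNewSpaceCapacity = 16LL << 20;       // assumed new-space capacity
    const int divisor = 1;                              // 10 under --stress-compaction
    int64_t limit = std::max(old_gen_size + old_gen_size / divisor, kMinimumLimit);
    limit += kNewSpaceCapacity;
    int64_t halfway_to_the_max = (old_gen_size + kMaxOldGenerationSize) / 2;
    return std::min(limit, halfway_to_the_max);
  }

  int main() {
    // 100 MB promoted -> 216 MB limit, well under the 400 MB halfway cap.
    std::printf("%lld MB\n",
                (long long)(OldGenAllocationLimitSketch(100LL << 20) >> 20));
  }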
1605 
1606  // Indicates whether inline bump-pointer allocation has been disabled.
1607  bool inline_allocation_disabled() { return inline_allocation_disabled_; }
1608 
1609  // Switch whether inline bump-pointer allocation should be used.
1610  void EnableInlineAllocation();
1611  void DisableInlineAllocation();
1612 
1613  // Implements the corresponding V8 API function.
1614  bool IdleNotification(int hint);
1615 
1616  // Declare all the root indices. This defines the root list order.
1617  enum RootListIndex {
1618 #define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
1619  STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION)
1620 #undef ROOT_INDEX_DECLARATION
1621 
1622 #define STRING_INDEX_DECLARATION(name, str) k##name##RootIndex,
1623  INTERNALIZED_STRING_LIST(STRING_INDEX_DECLARATION)
1624 #undef STRING_DECLARATION
1625 
1626  // Utility type maps
1627 #define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
1628  STRUCT_LIST(DECLARE_STRUCT_MAP)
1629 #undef DECLARE_STRUCT_MAP
1630 
1631  kStringTableRootIndex,
1632 
1633 #define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
1634  SMI_ROOT_LIST(ROOT_INDEX_DECLARATION)
1635 #undef ROOT_INDEX_DECLARATION
1636 
1637  kRootListLength,
1638  kStrongRootListLength = kStringTableRootIndex,
1639  kSmiRootsStart = kStringTableRootIndex + 1
1640  };
1641 
1642  STATIC_CHECK(kUndefinedValueRootIndex == Internals::kUndefinedValueRootIndex);
1643  STATIC_CHECK(kNullValueRootIndex == Internals::kNullValueRootIndex);
1644  STATIC_CHECK(kTrueValueRootIndex == Internals::kTrueValueRootIndex);
1645  STATIC_CHECK(kFalseValueRootIndex == Internals::kFalseValueRootIndex);
1646  STATIC_CHECK(kempty_stringRootIndex == Internals::kEmptyStringRootIndex);
1647 
1648  // Generated code can embed direct references to non-writable roots if
1649  // they are in new space.
1650  static bool RootCanBeWrittenAfterInitialization(RootListIndex root_index);
1651  // Generated code can treat direct references to this root as constant.
1652  bool RootCanBeTreatedAsConstant(RootListIndex root_index);
1653 
1654  MUST_USE_RESULT MaybeObject* NumberToString(
1655  Object* number, bool check_number_string_cache = true);
1656  MUST_USE_RESULT MaybeObject* Uint32ToString(
1657  uint32_t value, bool check_number_string_cache = true);
1658 
1659  Map* MapForExternalArrayType(ExternalArrayType array_type);
1660  RootListIndex RootIndexForExternalArrayType(
1661  ExternalArrayType array_type);
1662 
1663  Map* MapForFixedTypedArray(ExternalArrayType array_type);
1664  RootListIndex RootIndexForFixedTypedArray(
1665  ExternalArrayType array_type);
1666 
1667  RootListIndex RootIndexForEmptyExternalArray(ElementsKind kind);
1668  RootListIndex RootIndexForEmptyFixedTypedArray(ElementsKind kind);
1669  ExternalArray* EmptyExternalArrayForMap(Map* map);
1670  FixedTypedArrayBase* EmptyFixedTypedArrayForMap(Map* map);
1671 
1672  void RecordStats(HeapStats* stats, bool take_snapshot = false);
1673 
1674  // Copy a block of memory from src to dst. The block size must be a
1675  // multiple of the pointer size.
1676  static inline void CopyBlock(Address dst, Address src, int byte_size);
1677 
1678  // Optimized version of memmove for blocks with pointer size aligned sizes and
1679  // pointer size aligned addresses.
1680  static inline void MoveBlock(Address dst, Address src, int byte_size);
1681 
1682  // Check new space expansion criteria and expand semispaces if it was hit.
1683  void CheckNewSpaceExpansionCriteria();
1684 
1685  inline void IncrementYoungSurvivorsCounter(int survived) {
1686  ASSERT(survived >= 0);
1687  young_survivors_after_last_gc_ = survived;
1688  survived_since_last_expansion_ += survived;
1689  }
1690 
1691  inline bool NextGCIsLikelyToBeFull() {
1692  if (FLAG_gc_global) return true;
1693 
1694  if (FLAG_stress_compaction && (gc_count_ & 1) != 0) return true;
1695 
1696  intptr_t adjusted_allocation_limit =
1697  old_generation_allocation_limit_ - new_space_.Capacity();
1698 
1699  if (PromotedTotalSize() >= adjusted_allocation_limit) return true;
1700 
1701  return false;
1702  }
1703 
1704  void UpdateNewSpaceReferencesInExternalStringTable(
1705  ExternalStringTableUpdaterCallback updater_func);
1706 
1707  void UpdateReferencesInExternalStringTable(
1708  ExternalStringTableUpdaterCallback updater_func);
1709 
1710  void ProcessWeakReferences(WeakObjectRetainer* retainer);
1711 
1712  void VisitExternalResources(v8::ExternalResourceVisitor* visitor);
1713 
1714  // Helper function that governs the promotion policy from new space to
1715  // old. If the object's old address lies below the new space's age
1716  // mark or if we've already filled the bottom 1/16th of the to space,
1717  // we try to promote this object.
1718  inline bool ShouldBePromoted(Address old_address, int object_size);
1719 
1720  void ClearJSFunctionResultCaches();
1721 
1722  void ClearNormalizedMapCaches();
1723 
1724  GCTracer* tracer() { return tracer_; }
1725 
1726  // Returns the size of objects residing in non new spaces.
1727  intptr_t PromotedSpaceSizeOfObjects();
1728 
1729  double total_regexp_code_generated() { return total_regexp_code_generated_; }
1730  void IncreaseTotalRegexpCodeGenerated(int size) {
1731  total_regexp_code_generated_ += size;
1732  }
1733 
1734  void IncrementCodeGeneratedBytes(bool is_crankshafted, int size) {
1735  if (is_crankshafted) {
1736  crankshaft_codegen_bytes_generated_ += size;
1737  } else {
1738  full_codegen_bytes_generated_ += size;
1739  }
1740  }
1741 
1742  // Returns maximum GC pause.
1743  double get_max_gc_pause() { return max_gc_pause_; }
1744 
1745  // Returns maximum size of objects alive after GC.
1746  intptr_t get_max_alive_after_gc() { return max_alive_after_gc_; }
1747 
1748  // Returns minimal interval between two subsequent collections.
1749  double get_min_in_mutator() { return min_in_mutator_; }
1750 
1751  // TODO(hpayer): remove, should be handled by GCTracer
1752  void AddMarkingTime(double marking_time) {
1753  marking_time_ += marking_time;
1754  }
1755 
1756  double marking_time() const {
1757  return marking_time_;
1758  }
1759 
1760  // TODO(hpayer): remove, should be handled by GCTracer
1761  void AddSweepingTime(double sweeping_time) {
1762  sweeping_time_ += sweeping_time;
1763  }
1764 
1765  double sweeping_time() const {
1766  return sweeping_time_;
1767  }
1768 
1769  MarkCompactCollector* mark_compact_collector() {
1770  return &mark_compact_collector_;
1771  }
1772 
1773  StoreBuffer* store_buffer() {
1774  return &store_buffer_;
1775  }
1776 
1777  Marking* marking() {
1778  return &marking_;
1779  }
1780 
1781  IncrementalMarking* incremental_marking() {
1782  return &incremental_marking_;
1783  }
1784 
1785  bool IsSweepingComplete() {
1786  return !mark_compact_collector()->IsConcurrentSweepingInProgress() &&
1787  old_data_space()->IsLazySweepingComplete() &&
1788  old_pointer_space()->IsLazySweepingComplete();
1789  }
1790 
1791  bool AdvanceSweepers(int step_size);
1792 
1793  bool EnsureSweepersProgressed(int step_size) {
1794  bool sweeping_complete = old_data_space()->EnsureSweeperProgress(step_size);
1795  sweeping_complete &= old_pointer_space()->EnsureSweeperProgress(step_size);
1796  return sweeping_complete;
1797  }
1798 
1799  ExternalStringTable* external_string_table() {
1800  return &external_string_table_;
1801  }
1802 
1803  // Returns the current sweep generation.
1804  int sweep_generation() {
1805  return sweep_generation_;
1806  }
1807 
1808  inline Isolate* isolate();
1809 
1810  void CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags);
1811  void CallGCEpilogueCallbacks(GCType gc_type, GCCallbackFlags flags);
1812 
1813  inline bool OldGenerationAllocationLimitReached();
1814 
1815  inline void DoScavengeObject(Map* map, HeapObject** slot, HeapObject* obj) {
1816  scavenging_visitors_table_.GetVisitor(map)(map, slot, obj);
1817  }
1818 
1819  void QueueMemoryChunkForFree(MemoryChunk* chunk);
1820  void FreeQueuedChunks();
1821 
1822  int gc_count() const { return gc_count_; }
1823 
1824  // Completely clear the Instanceof cache (to stop it keeping objects alive
1825  // around a GC).
1826  inline void CompletelyClearInstanceofCache();
1827 
1828  // The roots that have an index less than this are always in old space.
1829  static const int kOldSpaceRoots = 0x20;
1830 
1831  uint32_t HashSeed() {
1832  uint32_t seed = static_cast<uint32_t>(hash_seed()->value());
1833  ASSERT(FLAG_randomize_hashes || seed == 0);
1834  return seed;
1835  }
1836 
1837  void SetArgumentsAdaptorDeoptPCOffset(int pc_offset) {
1838  ASSERT(arguments_adaptor_deopt_pc_offset() == Smi::FromInt(0));
1839  set_arguments_adaptor_deopt_pc_offset(Smi::FromInt(pc_offset));
1840  }
1841 
1842  void SetConstructStubDeoptPCOffset(int pc_offset) {
1843  ASSERT(construct_stub_deopt_pc_offset() == Smi::FromInt(0));
1844  set_construct_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
1845  }
1846 
1847  void SetGetterStubDeoptPCOffset(int pc_offset) {
1848  ASSERT(getter_stub_deopt_pc_offset() == Smi::FromInt(0));
1849  set_getter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
1850  }
1851 
1852  void SetSetterStubDeoptPCOffset(int pc_offset) {
1853  ASSERT(setter_stub_deopt_pc_offset() == Smi::FromInt(0));
1854  set_setter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
1855  }
1856 
1857  // For post mortem debugging.
1858  void RememberUnmappedPage(Address page, bool compacted);
1859 
1860  // Global inline caching age: it is incremented on some GCs after context
1861  // disposal. We use it to flush inline caches.
1862  int global_ic_age() {
1863  return global_ic_age_;
1864  }
1865 
1866  void AgeInlineCaches() {
1867  global_ic_age_ = (global_ic_age_ + 1) & SharedFunctionInfo::ICAgeBits::kMax;
1868  }
1869 
1870  bool flush_monomorphic_ics() { return flush_monomorphic_ics_; }
1871 
1872  int64_t amount_of_external_allocated_memory() {
1873  return amount_of_external_allocated_memory_;
1874  }
1875 
1876  void DeoptMarkedAllocationSites();
1877 
1878  // ObjectStats are kept in two arrays, counts and sizes. Related stats are
1879  // stored in a contiguous linear buffer. Stats groups are stored one after
1880  // another.
1881  enum {
1882  FIRST_CODE_KIND_SUB_TYPE = LAST_TYPE + 1,
1883  FIRST_FIXED_ARRAY_SUB_TYPE =
1884  FIRST_CODE_KIND_SUB_TYPE + Code::NUMBER_OF_KINDS,
1885  FIRST_CODE_AGE_SUB_TYPE =
1886  FIRST_FIXED_ARRAY_SUB_TYPE + LAST_FIXED_ARRAY_SUB_TYPE + 1,
1887  OBJECT_STATS_COUNT = FIRST_CODE_AGE_SUB_TYPE + Code::kCodeAgeCount + 1
1888  };
1889 
1890  void RecordObjectStats(InstanceType type, size_t size) {
1891  ASSERT(type <= LAST_TYPE);
1892  object_counts_[type]++;
1893  object_sizes_[type] += size;
1894  }
1895 
1896  void RecordCodeSubTypeStats(int code_sub_type, int code_age, size_t size) {
1897  int code_sub_type_index = FIRST_CODE_KIND_SUB_TYPE + code_sub_type;
1898  int code_age_index =
1899  FIRST_CODE_AGE_SUB_TYPE + code_age - Code::kFirstCodeAge;
1900  ASSERT(code_sub_type_index >= FIRST_CODE_KIND_SUB_TYPE &&
1901  code_sub_type_index < FIRST_CODE_AGE_SUB_TYPE);
1902  ASSERT(code_age_index >= FIRST_CODE_AGE_SUB_TYPE &&
1903  code_age_index < OBJECT_STATS_COUNT);
1904  object_counts_[code_sub_type_index]++;
1905  object_sizes_[code_sub_type_index] += size;
1906  object_counts_[code_age_index]++;
1907  object_sizes_[code_age_index] += size;
1908  }
1909 
1910  void RecordFixedArraySubTypeStats(int array_sub_type, size_t size) {
1911  ASSERT(array_sub_type <= LAST_FIXED_ARRAY_SUB_TYPE);
1912  object_counts_[FIRST_FIXED_ARRAY_SUB_TYPE + array_sub_type]++;
1913  object_sizes_[FIRST_FIXED_ARRAY_SUB_TYPE + array_sub_type] += size;
1914  }
1915 
1916  void CheckpointObjectStats();
1917 
1918  // We don't use a LockGuard here since we want to lock the heap
1919  // only when FLAG_concurrent_recompilation is true.
1920  class RelocationLock {
1921  public:
1922  explicit RelocationLock(Heap* heap) : heap_(heap) {
1923  heap_->relocation_mutex_.Lock();
1924  }
1925 
1926 
1927  ~RelocationLock() {
1928  heap_->relocation_mutex_.Unlock();
1929  }
1930 
1931  private:
1932  Heap* heap_;
1933  };
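RelocationLock is a plain RAII guard: the constructor takes the heap's relocation mutex and the destructor releases it, so relocation is excluded for exactly the lexical scope that holds the lock. A standalone sketch of the same pattern, with std::mutex standing in for V8's internal Mutex:

  #include <mutex>

  class RelocationLockSketch {
   public:
    // Lock on entry to the scope...
    explicit RelocationLockSketch(std::mutex* mutex) : mutex_(mutex) {
      mutex_->lock();
    }
    // ...unlock on any exit from the scope, including early returns.
    ~RelocationLockSketch() { mutex_->unlock(); }

   private:
    std::mutex* mutex_;
  };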
1934 
1935  void AddWeakObjectToCodeDependency(Object* obj, DependentCode* dep);
1936 
1937  DependentCode* LookupWeakObjectToCodeDependency(Object* obj);
1938 
1939  void InitializeWeakObjectToCodeTable() {
1940  set_weak_object_to_code_table(undefined_value());
1941  }
1942 
1943  void EnsureWeakObjectToCodeTable();
1944 
1945  static void FatalProcessOutOfMemory(const char* location,
1946  bool take_snapshot = false);
1947 
1948  private:
1949  Heap();
1950 
1951  // This can be calculated directly from a pointer to the heap; however, it is
1952  // more expedient to get at the isolate directly from within Heap methods.
1953  Isolate* isolate_;
1954 
1955  Object* roots_[kRootListLength];
1956 
1957  intptr_t code_range_size_;
1958  int reserved_semispace_size_;
1959  int max_semispace_size_;
1960  int initial_semispace_size_;
1961  intptr_t max_old_generation_size_;
1962  intptr_t max_executable_size_;
1963  intptr_t maximum_committed_;
1964 
1965  // For keeping track of how much data has survived
1966  // scavenge since last new space expansion.
1967  int survived_since_last_expansion_;
1968 
1969  // For keeping track of when to flush RegExp code.
1970  int sweep_generation_;
1971 
1972  int always_allocate_scope_depth_;
1973  int linear_allocation_scope_depth_;
1974 
1975  // For keeping track of context disposals.
1976  int contexts_disposed_;
1977 
1978  int global_ic_age_;
1979 
1980  bool flush_monomorphic_ics_;
1981 
1982  int scan_on_scavenge_pages_;
1983 
1984  NewSpace new_space_;
1985  OldSpace* old_pointer_space_;
1986  OldSpace* old_data_space_;
1987  OldSpace* code_space_;
1988  MapSpace* map_space_;
1989  CellSpace* cell_space_;
1990  PropertyCellSpace* property_cell_space_;
1991  LargeObjectSpace* lo_space_;
1992  HeapState gc_state_;
1993  int gc_post_processing_depth_;
1994 
1995  // Returns the amount of external memory registered since last global gc.
1996  int64_t PromotedExternalMemorySize();
1997 
1998  unsigned int ms_count_; // how many mark-sweep collections happened
1999  unsigned int gc_count_; // how many GCs happened
2000 
2001  // For post mortem debugging.
2002  static const int kRememberedUnmappedPages = 128;
2003  int remembered_unmapped_pages_index_;
2004  Address remembered_unmapped_pages_[kRememberedUnmappedPages];
2005 
2006  // Total length of the strings we failed to flatten since the last GC.
2007  int unflattened_strings_length_;
2008 
2009 #define ROOT_ACCESSOR(type, name, camel_name) \
2010  inline void set_##name(type* value) { \
2011  /* The deserializer makes use of the fact that these common roots are */ \
2012  /* never in new space and never on a page that is being compacted. */ \
2013  ASSERT(k##camel_name##RootIndex >= kOldSpaceRoots || !InNewSpace(value)); \
2014  roots_[k##camel_name##RootIndex] = value; \
2015  }
2016  ROOT_LIST(ROOT_ACCESSOR)
2017 #undef ROOT_ACCESSOR
2018 
2019 #ifdef DEBUG
2020  // If the --gc-interval flag is set to a positive value, this
2021  // variable holds the number of allocations that remain until the
2022  // next failure and garbage collection.
2023  int allocation_timeout_;
2024 #endif // DEBUG
2025 
2026  // Indicates that the new space should be kept small due to high promotion
2027  // rates caused by the mutator allocating a lot of long-lived objects.
2028  // TODO(hpayer): change to bool if no longer accessed from generated code
2029  intptr_t new_space_high_promotion_mode_active_;
2030 
2031  // Limit that triggers a global GC on the next (normally caused) GC. This
2032  // is checked when we have already decided to do a GC to help determine
2033  // which collector to invoke, before expanding a paged space in the old
2034  // generation and on every allocation in large object space.
2035  intptr_t old_generation_allocation_limit_;
2036 
2037  // Used to adjust the limits that control the timing of the next GC.
2038  intptr_t size_of_old_gen_at_last_old_space_gc_;
2039 
2040  // Limit on the amount of externally allocated memory allowed
2041  // between global GCs. If reached a global GC is forced.
2042  intptr_t external_allocation_limit_;
2043 
2044  // The amount of external memory registered through the API kept alive
2045  // by global handles
2046  int64_t amount_of_external_allocated_memory_;
2047 
2048  // Caches the amount of external memory registered at the last global gc.
2049  int64_t amount_of_external_allocated_memory_at_last_global_gc_;
2050 
2051  // Indicates that an allocation has failed in the old generation since the
2052  // last GC.
2053  bool old_gen_exhausted_;
2054 
2055  // Indicates that inline bump-pointer allocation has been globally disabled
2056  // for all spaces. This is used to disable allocations in generated code.
2057  bool inline_allocation_disabled_;
2058 
2059  // Weak list heads, threaded through the objects.
2060  // List heads are initialized lazily and contain the undefined_value at start.
2061  Object* native_contexts_list_;
2062  Object* array_buffers_list_;
2063  Object* allocation_sites_list_;
2064 
2065  // WeakHashTable that maps objects embedded in optimized code to dependent
2066  // code list. It is initialized lazily and contains the undefined_value at
2067  // start.
2068  Object* weak_object_to_code_table_;
2069 
2070  StoreBufferRebuilder store_buffer_rebuilder_;
2071 
2072  struct StringTypeTable {
2073  InstanceType type;
2074  int size;
2075  RootListIndex index;
2076  };
2077 
2078  struct ConstantStringTable {
2079  const char* contents;
2080  RootListIndex index;
2081  };
2082 
2083  struct StructTable {
2084  InstanceType type;
2085  int size;
2086  RootListIndex index;
2087  };
2088 
2089  static const StringTypeTable string_type_table[];
2090  static const ConstantStringTable constant_string_table[];
2091  static const StructTable struct_table[];
2092 
2093  // The special hidden string which is an empty string, but does not match
2094  // any string when looked up in properties.
2095  String* hidden_string_;
2096 
2097  // GC callback function, called before and after mark-compact GC.
2098  // Allocations in the callback function are disallowed.
2099  struct GCPrologueCallbackPair {
2100  GCPrologueCallbackPair(v8::Isolate::GCPrologueCallback callback,
2101  GCType gc_type,
2102  bool pass_isolate)
2103  : callback(callback), gc_type(gc_type), pass_isolate_(pass_isolate) {
2104  }
2105  bool operator==(const GCPrologueCallbackPair& pair) const {
2106  return pair.callback == callback;
2107  }
2108  v8::Isolate::GCPrologueCallback callback;
2109  GCType gc_type;
2110  // TODO(dcarney): remove variable
2111  bool pass_isolate_;
2112  };
2113  List<GCPrologueCallbackPair> gc_prologue_callbacks_;
2114 
2115  struct GCEpilogueCallbackPair {
2116  GCEpilogueCallbackPair(v8::Isolate::GCPrologueCallback callback,
2117  GCType gc_type,
2118  bool pass_isolate)
2119  : callback(callback), gc_type(gc_type), pass_isolate_(pass_isolate) {
2120  }
2121  bool operator==(const GCEpilogueCallbackPair& pair) const {
2122  return pair.callback == callback;
2123  }
2124  v8::Isolate::GCPrologueCallback callback;
2125  GCType gc_type;
2126  // TODO(dcarney): remove variable
2127  bool pass_isolate_;
2128  };
2129  List<GCEpilogueCallbackPair> gc_epilogue_callbacks_;
2130 
2131  // Support for computing object sizes during GC.
2132  HeapObjectCallback gc_safe_size_of_old_object_;
2133  static int GcSafeSizeOfOldObject(HeapObject* object);
2134 
2135  // Update the GC state. Called from the mark-compact collector.
2136  void MarkMapPointersAsEncoded(bool encoded) {
2137  ASSERT(!encoded);
2138  gc_safe_size_of_old_object_ = &GcSafeSizeOfOldObject;
2139  }
2140 
2141  // Code that should be run before and after each GC. Includes some
2142  // reporting/verification activities when compiled with DEBUG set.
2143  void GarbageCollectionPrologue();
2144  void GarbageCollectionEpilogue();
2145 
2146  // Pretenuring decisions are made based on feedback collected during new
2147  // space evacuation. Note that between feedback collection and calling this
2148  // method object in old space must not move.
2149  // Right now we only process pretenuring feedback in high promotion mode.
2150  void ProcessPretenuringFeedback();
2151 
2152  // Checks whether a global GC is necessary
2153  GarbageCollector SelectGarbageCollector(AllocationSpace space,
2154  const char** reason);
2155 
2156  // Make sure there is a filler value behind the top of the new space
2157  // so that the GC does not confuse some uninitialized/stale memory
2158  // with the allocation memento of the object at the top
2159  void EnsureFillerObjectAtTop();
2160 
2161  // Performs garbage collection operation.
2162  // Returns whether there is a chance that another major GC could
2163  // collect more garbage.
2164  bool CollectGarbage(
2165  GarbageCollector collector,
2166  const char* gc_reason,
2167  const char* collector_reason,
2168  const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
2169 
2170  // Performs garbage collection
2171  // Returns whether there is a chance another major GC could
2172  // collect more garbage.
2173  bool PerformGarbageCollection(
2174  GarbageCollector collector,
2175  GCTracer* tracer,
2176  const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
2177 
2178  inline void UpdateOldSpaceLimits();
2179 
2180  // Selects the proper allocation space depending on the given object
2181  // size, pretenuring decision, and preferred old-space.
2182  static AllocationSpace SelectSpace(int object_size,
2183  AllocationSpace preferred_old_space,
2184  PretenureFlag pretenure) {
2185  ASSERT(preferred_old_space == OLD_POINTER_SPACE ||
2186  preferred_old_space == OLD_DATA_SPACE);
2187  if (object_size > Page::kMaxRegularHeapObjectSize) return LO_SPACE;
2188  return (pretenure == TENURED) ? preferred_old_space : NEW_SPACE;
2189  }
2190 
2191  // Allocate an uninitialized fixed array.
2192  MUST_USE_RESULT MaybeObject* AllocateRawFixedArray(
2193  int length, PretenureFlag pretenure);
2194 
2195  // Allocate an uninitialized fixed double array.
2196  MUST_USE_RESULT MaybeObject* AllocateRawFixedDoubleArray(
2197  int length, PretenureFlag pretenure);
2198 
2199  // Allocate an initialized fixed array with the given filler value.
2200  MUST_USE_RESULT MaybeObject* AllocateFixedArrayWithFiller(
2201  int length, PretenureFlag pretenure, Object* filler);
2202 
2203  // Initializes a JSObject based on its map.
2204  void InitializeJSObjectFromMap(JSObject* obj,
2205  FixedArray* properties,
2206  Map* map);
2207  void InitializeAllocationMemento(AllocationMemento* memento,
2208  AllocationSite* allocation_site);
2209 
2210  bool CreateInitialMaps();
2211  bool CreateInitialObjects();
2212 
2213  // These Create*EntryStub functions are here and forced to not be inlined
2214  // because of a gcc-4.4 bug that assigns wrong vtable entries.
2215  NO_INLINE(void CreateJSEntryStub());
2216  NO_INLINE(void CreateJSConstructEntryStub());
2217 
2218  void CreateFixedStubs();
2219 
2220  MUST_USE_RESULT MaybeObject* CreateOddball(const char* to_string,
2221  Object* to_number,
2222  byte kind);
2223 
2224  // Allocate a JSArray with no elements
2225  MUST_USE_RESULT MaybeObject* AllocateJSArray(
2226  ElementsKind elements_kind,
2227  PretenureFlag pretenure = NOT_TENURED);
2228 
2229  // Allocate empty fixed array.
2230  MUST_USE_RESULT MaybeObject* AllocateEmptyFixedArray();
2231 
2232  // Allocate empty external array of given type.
2233  MUST_USE_RESULT MaybeObject* AllocateEmptyExternalArray(
2234  ExternalArrayType array_type);
2235 
2236  // Allocate empty fixed typed array of given type.
2237  MUST_USE_RESULT MaybeObject* AllocateEmptyFixedTypedArray(
2238  ExternalArrayType array_type);
2239 
2240  // Allocate empty fixed double array.
2241  MUST_USE_RESULT MaybeObject* AllocateEmptyFixedDoubleArray();
2242 
2243  // Allocate empty constant pool array.
2244  MUST_USE_RESULT MaybeObject* AllocateEmptyConstantPoolArray();
2245 
2246  // Allocate a tenured simple cell.
2247  MUST_USE_RESULT MaybeObject* AllocateCell(Object* value);
2248 
2249  // Allocate a tenured JS global property cell initialized with the hole.
2250  MUST_USE_RESULT MaybeObject* AllocatePropertyCell();
2251 
2252  // Allocate Box.
2253  MUST_USE_RESULT MaybeObject* AllocateBox(Object* value,
2254  PretenureFlag pretenure);
2255 
2256  // Performs a minor collection in new generation.
2257  void Scavenge();
2258 
2259  // Commits from space if it is uncommitted.
2260  void EnsureFromSpaceIsCommitted();
2261 
2262  // Uncommit unused semi space.
2263  bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); }
2264 
2265  // Fill in bogus values in from space
2266  void ZapFromSpace();
2267 
2268  static String* UpdateNewSpaceReferenceInExternalStringTableEntry(
2269  Heap* heap,
2270  Object** pointer);
2271 
2272  Address DoScavenge(ObjectVisitor* scavenge_visitor, Address new_space_front);
2273  static void ScavengeStoreBufferCallback(Heap* heap,
2274  MemoryChunk* page,
2275  StoreBufferEvent event);
2276 
2277  // Performs a major collection in the whole heap.
2278  void MarkCompact(GCTracer* tracer);
2279 
2280  // Code to be run before and after mark-compact.
2281  void MarkCompactPrologue();
2282 
2283  void ProcessNativeContexts(WeakObjectRetainer* retainer, bool record_slots);
2284  void ProcessArrayBuffers(WeakObjectRetainer* retainer, bool record_slots);
2285  void ProcessAllocationSites(WeakObjectRetainer* retainer, bool record_slots);
2286 
2287  // Deopts all code that contains allocation instructions which are tenured or
2288  // not tenured. Moreover, it clears the pretenuring allocation site statistics.
2289  void ResetAllAllocationSitesDependentCode(PretenureFlag flag);
2290 
2291  // Evaluates local pretenuring for the old space and calls
2292  // ResetAllTenuredAllocationSitesDependentCode if too many objects died in
2293  // the old space.
2294  void EvaluateOldSpaceLocalPretenuring(uint64_t size_of_objects_before_gc);
2295 
2296  // Called on heap tear-down.
2297  void TearDownArrayBuffers();
2298 
2299  // Record statistics before and after garbage collection.
2300  void ReportStatisticsBeforeGC();
2301  void ReportStatisticsAfterGC();
2302 
2303  // Slow part of scavenge object.
2304  static void ScavengeObjectSlow(HeapObject** p, HeapObject* object);
2305 
2306  // Initializes a function with a shared part and prototype.
2307  // Note: this code was factored out of AllocateFunction such that
2308  // other parts of the VM could use it. Specifically, a function that creates
2309  // instances of type JS_FUNCTION_TYPE benefits from the use of this function.
2310  // Please note this does not perform a garbage collection.
2311  inline void InitializeFunction(
2312  JSFunction* function,
2313  SharedFunctionInfo* shared,
2314  Object* prototype);
2315 
2316  // Total RegExp code ever generated
2317  double total_regexp_code_generated_;
2318 
2319  GCTracer* tracer_;
2320 
2321  // Allocates a small number-to-string cache.
2322  MUST_USE_RESULT MaybeObject* AllocateInitialNumberStringCache();
2323  // Creates and installs the full-sized number string cache.
2324  void AllocateFullSizeNumberStringCache();
2325  // Get the length of the number-to-string cache based on the max semispace
2326  // size.
2327  int FullSizeNumberStringCacheLength();
2328  // Flush the number-to-string cache.
2329  void FlushNumberStringCache();
2330 
2331  // Allocates a fixed-size allocation sites scratchpad.
2332  MUST_USE_RESULT MaybeObject* AllocateAllocationSitesScratchpad();
2333 
2334  // Sets used allocation sites entries to undefined.
2335  void FlushAllocationSitesScratchpad();
2336 
2337  // Initializes the allocation sites scratchpad with undefined values.
2338  void InitializeAllocationSitesScratchpad();
2339 
2340  // Adds an allocation site to the scratchpad if there is space left.
2341  void AddAllocationSiteToScratchpad(AllocationSite* site,
2342  ScratchpadSlotMode mode);
2343 
2344  void UpdateSurvivalRateTrend(int start_new_space_size);
2345 
2346  enum SurvivalRateTrend { INCREASING, STABLE, DECREASING, FLUCTUATING };
2347 
2348  static const int kYoungSurvivalRateHighThreshold = 90;
2349  static const int kYoungSurvivalRateLowThreshold = 10;
2350  static const int kYoungSurvivalRateAllowedDeviation = 15;
2351 
2352  static const int kOldSurvivalRateLowThreshold = 20;
2353 
2354  int young_survivors_after_last_gc_;
2355  int high_survival_rate_period_length_;
2356  int low_survival_rate_period_length_;
2357  double survival_rate_;
2358  SurvivalRateTrend previous_survival_rate_trend_;
2359  SurvivalRateTrend survival_rate_trend_;
2360 
2361  void set_survival_rate_trend(SurvivalRateTrend survival_rate_trend) {
2362  ASSERT(survival_rate_trend != FLUCTUATING);
2363  previous_survival_rate_trend_ = survival_rate_trend_;
2364  survival_rate_trend_ = survival_rate_trend;
2365  }
2366 
2367  SurvivalRateTrend survival_rate_trend() {
2368  if (survival_rate_trend_ == STABLE) {
2369  return STABLE;
2370  } else if (previous_survival_rate_trend_ == STABLE) {
2371  return survival_rate_trend_;
2372  } else if (survival_rate_trend_ != previous_survival_rate_trend_) {
2373  return FLUCTUATING;
2374  } else {
2375  return survival_rate_trend_;
2376  }
2377  }
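The accessor above smooths the raw trend: a STABLE current reading wins outright, a STABLE previous reading defers to the current one, and two differing non-stable readings are reported as FLUCTUATING. A standalone restatement of the same rule (enum copied from this header):

  enum SurvivalRateTrend { INCREASING, STABLE, DECREASING, FLUCTUATING };

  SurvivalRateTrend CombineTrends(SurvivalRateTrend previous,
                                  SurvivalRateTrend current) {
    if (current == STABLE) return STABLE;
    if (previous == STABLE) return current;
    if (current != previous) return FLUCTUATING;
    return current;  // two equal non-stable readings confirm the trend
  }
  // e.g. CombineTrends(INCREASING, DECREASING) yields FLUCTUATING,
  // while CombineTrends(DECREASING, DECREASING) yields DECREASING.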
2378 
2379  bool IsStableOrIncreasingSurvivalTrend() {
2380  switch (survival_rate_trend()) {
2381  case STABLE:
2382  case INCREASING:
2383  return true;
2384  default:
2385  return false;
2386  }
2387  }
2388 
2389  bool IsStableOrDecreasingSurvivalTrend() {
2390  switch (survival_rate_trend()) {
2391  case STABLE:
2392  case DECREASING:
2393  return true;
2394  default:
2395  return false;
2396  }
2397  }
2398 
2399  bool IsIncreasingSurvivalTrend() {
2400  return survival_rate_trend() == INCREASING;
2401  }
2402 
2403  bool IsHighSurvivalRate() {
2404  return high_survival_rate_period_length_ > 0;
2405  }
2406 
2407  bool IsLowSurvivalRate() {
2408  return low_survival_rate_period_length_ > 0;
2409  }
2410 
2411  void SelectScavengingVisitorsTable();
2412 
2413  void StartIdleRound() {
2414  mark_sweeps_since_idle_round_started_ = 0;
2415  }
2416 
2417  void FinishIdleRound() {
2418  mark_sweeps_since_idle_round_started_ = kMaxMarkSweepsInIdleRound;
2419  scavenges_since_last_idle_round_ = 0;
2420  }
2421 
2422  bool EnoughGarbageSinceLastIdleRound() {
2423  return (scavenges_since_last_idle_round_ >= kIdleScavengeThreshold);
2424  }
2425 
2426  // Estimates how many milliseconds a Mark-Sweep would take to complete.
2427  // In the idle notification handler we assume that this function will return:
2428  // - a number less than 10 for small heaps, which are less than 8 MB.
2429  // - a number greater than 10 for large heaps, which are greater than 32 MB.
2430  int TimeMarkSweepWouldTakeInMs() {
2431  // Rough estimate of how many megabytes of heap can be processed in 1 ms.
2432  static const int kMbPerMs = 2;
2433 
2434  int heap_size_mb = static_cast<int>(SizeOfObjects() / MB);
2435  return heap_size_mb / kMbPerMs;
2436  }
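At the assumed throughput of 2 MB per millisecond, a 16 MB heap is estimated at 8 ms (under the small-heap bound above) and a 64 MB heap at 32 ms. A one-line check of the arithmetic:

  #include <cstdio>

  int main() {
    const int kMbPerMs = 2;    // constant from the estimate above
    int heap_size_mb = 16;     // illustrative heap size
    std::printf("%d ms\n", heap_size_mb / kMbPerMs);  // prints "8 ms"
  }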
2437 
2438  // Returns true if no more GC work is left.
2439  bool IdleGlobalGC();
2440 
2441  void AdvanceIdleIncrementalMarking(intptr_t step_size);
2442 
2443  void ClearObjectStats(bool clear_last_time_stats = false);
2444 
2445  void set_weak_object_to_code_table(Object* value) {
2446  ASSERT(!InNewSpace(value));
2447  weak_object_to_code_table_ = value;
2448  }
2449 
2450  Object** weak_object_to_code_table_address() {
2451  return &weak_object_to_code_table_;
2452  }
2453 
2454  static const int kInitialStringTableSize = 2048;
2455  static const int kInitialEvalCacheSize = 64;
2456  static const int kInitialNumberStringCacheSize = 256;
2457 
2458  // Object counts and used memory by InstanceType
2459  size_t object_counts_[OBJECT_STATS_COUNT];
2460  size_t object_counts_last_time_[OBJECT_STATS_COUNT];
2461  size_t object_sizes_[OBJECT_STATS_COUNT];
2462  size_t object_sizes_last_time_[OBJECT_STATS_COUNT];
2463 
2464  // Maximum GC pause.
2465  double max_gc_pause_;
2466 
2467  // Total time spent in GC.
2468  double total_gc_time_ms_;
2469 
2470  // Maximum size of objects alive after GC.
2471  intptr_t max_alive_after_gc_;
2472 
2473  // Minimal interval between two subsequent collections.
2474  double min_in_mutator_;
2475 
2476  // Size of objects alive after last GC.
2477  intptr_t alive_after_last_gc_;
2478 
2479  double last_gc_end_timestamp_;
2480 
2481  // Cumulative GC time spent in marking
2482  double marking_time_;
2483 
2484  // Cumulative GC time spent in sweeping
2485  double sweeping_time_;
2486 
2487  MarkCompactCollector mark_compact_collector_;
2488 
2489  StoreBuffer store_buffer_;
2490 
2491  Marking marking_;
2492 
2493  IncrementalMarking incremental_marking_;
2494 
2495  int number_idle_notifications_;
2496  unsigned int last_idle_notification_gc_count_;
2497  bool last_idle_notification_gc_count_init_;
2498 
2499  int mark_sweeps_since_idle_round_started_;
2500  unsigned int gc_count_at_last_idle_gc_;
2501  int scavenges_since_last_idle_round_;
2502 
2503  // These two counters are monotonically increasing and never reset.
2504  size_t full_codegen_bytes_generated_;
2505  size_t crankshaft_codegen_bytes_generated_;
2506 
2507  // If the --deopt_every_n_garbage_collections flag is set to a positive value,
2508  // this variable holds the number of garbage collections since the last
2509  // deoptimization triggered by garbage collection.
2510  int gcs_since_last_deopt_;
2511 
2512 #ifdef VERIFY_HEAP
2513  int no_weak_object_verification_scope_depth_;
2514 #endif
2515 
2516  static const int kAllocationSiteScratchpadSize = 256;
2517  int allocation_sites_scratchpad_length_;
2518 
2519  static const int kMaxMarkSweepsInIdleRound = 7;
2520  static const int kIdleScavengeThreshold = 5;
2521 
2522  // Shared state read by the scavenge collector and set by ScavengeObject.
2523  PromotionQueue promotion_queue_;
2524 
2525  // Flag is set when the heap has been configured. The heap can be repeatedly
2526  // configured through the API until it is set up.
2527  bool configured_;
2528 
2529  ExternalStringTable external_string_table_;
2530 
2531  VisitorDispatchTable<ScavengingCallback> scavenging_visitors_table_;
2532 
2533  MemoryChunk* chunks_queued_for_free_;
2534 
2535  Mutex relocation_mutex_;
2536 
2537  int gc_callbacks_depth_;
2538 
2539  friend class Factory;
2540  friend class GCTracer;
2541  friend class AlwaysAllocateScope;
2542  friend class Page;
2543  friend class Isolate;
2544  friend class MarkCompactCollector;
2545  friend class MarkCompactMarkingVisitor;
2546  friend class MapCompact;
2547 #ifdef VERIFY_HEAP
2548  friend class NoWeakObjectVerificationScope;
2549 #endif
2550  friend class GCCallbacksScope;
2551 
2552  DISALLOW_COPY_AND_ASSIGN(Heap);
2553 };
2554 
2555 
2556 class HeapStats {
2557  public:
2558  static const int kStartMarker = 0xDECADE00;
2559  static const int kEndMarker = 0xDECADE01;
2560 
2561  int* start_marker; // 0
2562  int* new_space_size; // 1
2563  int* new_space_capacity; // 2
2564  intptr_t* old_pointer_space_size; // 3
2565  intptr_t* old_pointer_space_capacity; // 4
2566  intptr_t* old_data_space_size; // 5
2567  intptr_t* old_data_space_capacity; // 6
2568  intptr_t* code_space_size; // 7
2569  intptr_t* code_space_capacity; // 8
2570  intptr_t* map_space_size; // 9
2571  intptr_t* map_space_capacity; // 10
2572  intptr_t* cell_space_size; // 11
2573  intptr_t* cell_space_capacity; // 12
2574  intptr_t* lo_space_size; // 13
2575  int* global_handle_count; // 14
2576  int* weak_global_handle_count; // 15
2577  int* pending_global_handle_count; // 16
2578  int* near_death_global_handle_count; // 17
2579  int* free_global_handle_count; // 18
2580  intptr_t* memory_allocator_size; // 19
2581  intptr_t* memory_allocator_capacity; // 20
2582  int* objects_per_type; // 21
2583  int* size_per_type; // 22
2584  int* os_error; // 23
2585  int* end_marker; // 24
2586  intptr_t* property_cell_space_size; // 25
2587  intptr_t* property_cell_space_capacity; // 26
2588 };
2589 
2590 
2591 class AlwaysAllocateScope {
2592  public:
2593  explicit inline AlwaysAllocateScope(Isolate* isolate);
2594  inline ~AlwaysAllocateScope();
2595 
2596  private:
2597  // Implicitly disable artificial allocation failures.
2598  Heap* heap_;
2599  DisallowAllocationFailure daf_;
2600 };
2601 
2602 
2603 #ifdef VERIFY_HEAP
2604 class NoWeakObjectVerificationScope {
2605  public:
2606  inline NoWeakObjectVerificationScope();
2607  inline ~NoWeakObjectVerificationScope();
2608 };
2609 #endif
2610 
2611 
2612 class GCCallbacksScope {
2613  public:
2614  explicit inline GCCallbacksScope(Heap* heap);
2615  inline ~GCCallbacksScope();
2616 
2617  inline bool CheckReenter();
2618 
2619  private:
2620  Heap* heap_;
2621 };
2622 
2623 
2624 // Visitor class to verify interior pointers in spaces that do not contain
2625 // or care about intergenerational references. All heap object pointers have to
2626 // point into the heap to a location that has a map pointer at its first word.
2627 // Caveat: Heap::Contains is an approximation because it can return true for
2628 // objects in a heap space but above the allocation pointer.
2629 class VerifyPointersVisitor: public ObjectVisitor {
2630  public:
2631  inline void VisitPointers(Object** start, Object** end);
2632 };
2633 
2634 
2635 // Verify that all objects are Smis.
2636 class VerifySmisVisitor: public ObjectVisitor {
2637  public:
2638  inline void VisitPointers(Object** start, Object** end);
2639 };
2640 
2641 
2642 // Space iterator for iterating over all spaces of the heap. Returns each space
2643 // in turn, and null when it is done.
2644 class AllSpaces BASE_EMBEDDED {
2645  public:
2646  explicit AllSpaces(Heap* heap) : heap_(heap), counter_(FIRST_SPACE) {}
2647  Space* next();
2648  private:
2649  Heap* heap_;
2650  int counter_;
2651 };
2652 
2653 
2654 // Space iterator for iterating over all old spaces of the heap: Old pointer
2655 // space, old data space and code space. Returns each space in turn, and null
2656 // when it is done.
2657 class OldSpaces BASE_EMBEDDED {
2658  public:
2659  explicit OldSpaces(Heap* heap) : heap_(heap), counter_(OLD_POINTER_SPACE) {}
2660  OldSpace* next();
2661  private:
2662  Heap* heap_;
2663  int counter_;
2664 };
2665 
2666 
2667 // Space iterator for iterating over all the paged spaces of the heap: Map
2668 // space, old pointer space, old data space, code space and cell space. Returns
2669 // each space in turn, and null when it is done.
2670 class PagedSpaces BASE_EMBEDDED {
2671  public:
2672  explicit PagedSpaces(Heap* heap) : heap_(heap), counter_(OLD_POINTER_SPACE) {}
2673  PagedSpace* next();
2674  private:
2675  Heap* heap_;
2676  int counter_;
2677 };
2678 
2679 
2680 // Space iterator for iterating over all spaces of the heap.
2681 // For each space an object iterator is provided. The deallocation of the
2682 // returned object iterators is handled by the space iterator.
2683 class SpaceIterator : public Malloced {
2684  public:
2685  explicit SpaceIterator(Heap* heap);
2686  SpaceIterator(Heap* heap, HeapObjectCallback size_func);
2687  virtual ~SpaceIterator();
2688 
2689  bool has_next();
2690  ObjectIterator* next();
2691 
2692  private:
2693  ObjectIterator* CreateIterator();
2694 
2695  Heap* heap_;
2696  int current_space_; // from enum AllocationSpace.
2697  ObjectIterator* iterator_; // object iterator for the current space.
2698  HeapObjectCallback size_func_;
2699 };
2700 
2701 
2702 // A HeapIterator provides iteration over the whole heap. It
2703 // aggregates the specific iterators for the different spaces as
2704 // each of these can iterate over one space only.
2705 //
2706 // HeapIterator can skip free list nodes (that is, de-allocated heap
2707 // objects that still remain in the heap). As the implementation of
2708 // free-node filtering uses GC marks, it can't be used during MS/MC GC
2709 // phases. Also, it is forbidden to interrupt iteration in this mode,
2710 // as this will leave heap objects marked (and thus, unusable).
2711 class HeapObjectsFilter;
2712 
2713 class HeapIterator BASE_EMBEDDED {
2714  public:
2715  enum HeapObjectsFiltering {
2716  kNoFiltering,
2717  kFilterUnreachable
2718  };
2719 
2720  explicit HeapIterator(Heap* heap);
2721  HeapIterator(Heap* heap, HeapObjectsFiltering filtering);
2722  ~HeapIterator();
2723 
2724  HeapObject* next();
2725  void reset();
2726 
2727  private:
2728  // Perform the initialization.
2729  void Init();
2730  // Perform all necessary shutdown (destruction) work.
2731  void Shutdown();
2732  HeapObject* NextObject();
2733 
2734  Heap* heap_;
2735  HeapObjectsFiltering filtering_;
2736  HeapObjectsFilter* filter_;
2737  // Space iterator for iterating all the spaces.
2738  SpaceIterator* space_iterator_;
2739  // Object iterator for the space currently being iterated.
2740  ObjectIterator* object_iterator_;
2741 };
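A typical consumer drives next() until it returns NULL. A minimal usage sketch (a hypothetical helper, shown only to illustrate the iteration protocol declared above):

  // Walk every live object in the heap and tally the total size.
  intptr_t TotalLiveObjectSize(Heap* heap) {
    intptr_t total = 0;
    HeapIterator iterator(heap);
    for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
      total += obj->Size();  // HeapObject::Size(), per objects.h
    }
    return total;
  }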
2742 
2743 
2744 // Cache for mapping (map, property name) into field offset.
2745 // Cleared at startup and prior to mark sweep collection.
2746 class KeyedLookupCache {
2747  public:
2748  // Lookup field offset for (map, name). If absent, -1 is returned.
2749  int Lookup(Map* map, Name* name);
2750 
2751  // Update an element in the cache.
2752  void Update(Map* map, Name* name, int field_offset);
2753 
2754  // Clear the cache.
2755  void Clear();
2756 
2757  static const int kLength = 256;
2758  static const int kCapacityMask = kLength - 1;
2759  static const int kMapHashShift = 5;
2760  static const int kHashMask = -4; // Zero the last two bits.
2761  static const int kEntriesPerBucket = 4;
2762  static const int kNotFound = -1;
2763 
2764  // kEntriesPerBucket should be a power of 2.
2765  STATIC_ASSERT((kEntriesPerBucket & (kEntriesPerBucket - 1)) == 0);
2766  STATIC_ASSERT(kEntriesPerBucket == -kHashMask);
2767 
2768  private:
2769  KeyedLookupCache() {
2770  for (int i = 0; i < kLength; ++i) {
2771  keys_[i].map = NULL;
2772  keys_[i].name = NULL;
2773  field_offsets_[i] = kNotFound;
2774  }
2775  }
2776 
2777  static inline int Hash(Map* map, Name* name);
2778 
2779  // Get the address of the keys and field_offsets arrays. Used in
2780  // generated code to perform cache lookups.
2781  Address keys_address() {
2782  return reinterpret_cast<Address>(&keys_);
2783  }
2784 
2785  Address field_offsets_address() {
2786  return reinterpret_cast<Address>(&field_offsets_);
2787  }
2788 
2789  struct Key {
2790  Map* map;
2791  Name* name;
2792  };
2793 
2794  Key keys_[kLength];
2795  int field_offsets_[kLength];
2796 
2797  friend class ExternalReference;
2798  friend class Isolate;
2799  DISALLOW_COPY_AND_ASSIGN(KeyedLookupCache);
2800 };
2801 
2802 
2803 // Cache for mapping (map, property name) into descriptor index.
2804 // The cache contains both positive and negative results.
2805 // Descriptor index equals kNotFound means the property is absent.
2806 // Cleared at startup and prior to any gc.
2807 class DescriptorLookupCache {
2808  public:
2809  // Lookup descriptor index for (map, name).
2810  // If absent, kAbsent is returned.
2811  int Lookup(Map* source, Name* name) {
2812  if (!name->IsUniqueName()) return kAbsent;
2813  int index = Hash(source, name);
2814  Key& key = keys_[index];
2815  if ((key.source == source) && (key.name == name)) return results_[index];
2816  return kAbsent;
2817  }
2818 
2819  // Update an element in the cache.
2820  void Update(Map* source, Name* name, int result) {
2821  ASSERT(result != kAbsent);
2822  if (name->IsUniqueName()) {
2823  int index = Hash(source, name);
2824  Key& key = keys_[index];
2825  key.source = source;
2826  key.name = name;
2827  results_[index] = result;
2828  }
2829  }
2830 
2831  // Clear the cache.
2832  void Clear();
2833 
2834  static const int kAbsent = -2;
2835 
2836  private:
2837  DescriptorLookupCache() {
2838  for (int i = 0; i < kLength; ++i) {
2839  keys_[i].source = NULL;
2840  keys_[i].name = NULL;
2841  results_[i] = kAbsent;
2842  }
2843  }
2844 
2845  static int Hash(Object* source, Name* name) {
2846  // Uses only lower 32 bits if pointers are larger.
2847  uint32_t source_hash =
2848  static_cast<uint32_t>(reinterpret_cast<uintptr_t>(source))
2849  >> kPointerSizeLog2;
2850  uint32_t name_hash =
2851  static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name))
2852  >> kPointerSizeLog2;
2853  return (source_hash ^ name_hash) % kLength;
2854  }
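The hash drops the low tag bits of each pointer with a shift, XORs the two, and reduces modulo the table length. A standalone sketch over raw pointers, with the constants inlined for illustration (kPointerSizeLog2 is 3 on 64-bit targets):

  #include <cstdint>

  int PointerPairHash(const void* source, const void* name) {
    const int kPointerSizeLog2 = 3;  // assumed 64-bit build
    const int kLength = 64;          // table size from this header
    uint32_t source_hash =
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(source)) >>
        kPointerSizeLog2;
    uint32_t name_hash =
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name)) >>
        kPointerSizeLog2;
    return static_cast<int>((source_hash ^ name_hash) % kLength);
  }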
2855 
2856  static const int kLength = 64;
2857  struct Key {
2858  Map* source;
2859  Name* name;
2860  };
2861 
2862  Key keys_[kLength];
2863  int results_[kLength];
2864 
2865  friend class Isolate;
2866  DISALLOW_COPY_AND_ASSIGN(DescriptorLookupCache);
2867 };
2868 
2869 
2870 // GCTracer collects and prints ONE line after each garbage collector
2871 // invocation IFF --trace_gc is used.
2872 
2873 class GCTracer BASE_EMBEDDED {
2874  public:
2875  class Scope BASE_EMBEDDED {
2876  public:
2877  enum ScopeId {
2878  EXTERNAL,
2879  MC_MARK,
2880  MC_SWEEP,
2881  MC_SWEEP_NEWSPACE,
2882  MC_SWEEP_OLDSPACE,
2883  MC_EVACUATE_PAGES,
2884  MC_UPDATE_NEW_TO_NEW_POINTERS,
2885  MC_UPDATE_ROOT_TO_NEW_POINTERS,
2886  MC_UPDATE_OLD_TO_NEW_POINTERS,
2887  MC_UPDATE_POINTERS_TO_EVACUATED,
2888  MC_UPDATE_POINTERS_BETWEEN_EVACUATED,
2889  MC_UPDATE_MISC_POINTERS,
2890  MC_WEAKCOLLECTION_PROCESS,
2891  MC_WEAKCOLLECTION_CLEAR,
2892  MC_FLUSH_CODE,
2893  kNumberOfScopes
2894  };
2895 
2896  Scope(GCTracer* tracer, ScopeId scope)
2897  : tracer_(tracer),
2898  scope_(scope) {
2899  start_time_ = OS::TimeCurrentMillis();
2900  }
2901 
2902  ~Scope() {
2903  ASSERT(scope_ < kNumberOfScopes); // scope_ is unsigned.
2904  tracer_->scopes_[scope_] += OS::TimeCurrentMillis() - start_time_;
2905  }
2906 
2907  private:
2908  GCTracer* tracer_;
2909  ScopeId scope_;
2910  double start_time_;
2911  };
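Scope is an RAII timer: the constructor records a start timestamp and the destructor adds the elapsed wall-clock time to the tracer's per-phase bucket. A usage fragment (hypothetical call site; MC_MARK is one of the scope ids declared above):

  {
    GCTracer::Scope gc_scope(tracer, GCTracer::Scope::MC_MARK);
    // ... marking work being timed ...
  }  // ~Scope() adds the elapsed milliseconds to scopes_[MC_MARK]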
2912 
2913  explicit GCTracer(Heap* heap,
2914  const char* gc_reason,
2915  const char* collector_reason);
2916  ~GCTracer();
2917 
2918  // Sets the collector.
2919  void set_collector(GarbageCollector collector) { collector_ = collector; }
2920 
2921  // Sets the GC count.
2922  void set_gc_count(unsigned int count) { gc_count_ = count; }
2923 
2924  // Sets the full GC count.
2925  void set_full_gc_count(int count) { full_gc_count_ = count; }
2926 
2927  void increment_promoted_objects_size(int object_size) {
2928  promoted_objects_size_ += object_size;
2929  }
2930 
2931  void increment_nodes_died_in_new_space() {
2932  nodes_died_in_new_space_++;
2933  }
2934 
2935  void increment_nodes_copied_in_new_space() {
2936  nodes_copied_in_new_space_++;
2937  }
2938 
2939  void increment_nodes_promoted() {
2940  nodes_promoted_++;
2941  }
2942 
2943  private:
2944  // Returns a string matching the collector.
2945  const char* CollectorString();
2946 
2947  // Returns size of object in heap (in MB).
2948  inline double SizeOfHeapObjects();
2949 
2950  // Timestamp set in the constructor.
2951  double start_time_;
2952 
2953  // Size of objects in heap set in constructor.
2954  intptr_t start_object_size_;
2955 
2956  // Size of memory allocated from OS set in constructor.
2957  intptr_t start_memory_size_;
2958 
2959  // Type of collector.
2960  GarbageCollector collector_;
2961 
2962  // A count (including this one, e.g. the first collection is 1) of the
2963  // number of garbage collections.
2964  unsigned int gc_count_;
2965 
2966  // A count (including this one) of the number of full garbage collections.
2967  int full_gc_count_;
2968 
2969  // Amounts of time spent in different scopes during GC.
2970  double scopes_[Scope::kNumberOfScopes];
2971 
2972  // Total amount of space either wasted or contained in one of free lists
2973  // before the current GC.
2974  intptr_t in_free_list_or_wasted_before_gc_;
2975 
2976  // Difference between space used in the heap at the beginning of the current
2977  // collection and the end of the previous collection.
2978  intptr_t allocated_since_last_gc_;
2979 
2980  // Amount of time spent in the mutator, i.e. time elapsed between the end of the
2981  // previous collection and the beginning of the current one.
2982  double spent_in_mutator_;
2983 
2984  // Size of objects promoted during the current collection.
2985  intptr_t promoted_objects_size_;
2986 
2987  // Number of nodes that died in the new space.
2988  int nodes_died_in_new_space_;
2989 
2990  // Number of nodes copied in the new space.
2991  int nodes_copied_in_new_space_;
2992 
2993  // Number of nodes promoted to the old space.
2994  int nodes_promoted_;
2995 
2996  // Incremental marking steps counters.
2997  int steps_count_;
2998  double steps_took_;
2999  double longest_step_;
3000  int steps_count_since_last_gc_;
3001  double steps_took_since_last_gc_;
3002 
3003  Heap* heap_;
3004 
3005  const char* gc_reason_;
3006  const char* collector_reason_;
3007 };
3008 
3009 
3010 class RegExpResultsCache {
3011  public:
3012  enum ResultsCacheType { REGEXP_MULTIPLE_INDICES, STRING_SPLIT_SUBSTRINGS };
3013 
3014  // Attempt to retrieve a cached result. On failure, 0 is returned as a Smi.
3015  // On success, the returned result is guaranteed to be a COW-array.
3016  static Object* Lookup(Heap* heap,
3017  String* key_string,
3018  Object* key_pattern,
3019  ResultsCacheType type);
3020  // Attempt to add value_array to the cache specified by type. On success,
3021  // value_array is turned into a COW-array.
3022  static void Enter(Heap* heap,
3023  String* key_string,
3024  Object* key_pattern,
3025  FixedArray* value_array,
3026  ResultsCacheType type);
3027  static void Clear(FixedArray* cache);
3028  static const int kRegExpResultsCacheSize = 0x100;
3029 
3030  private:
3031  static const int kArrayEntriesPerCacheEntry = 4;
3032  static const int kStringOffset = 0;
3033  static const int kPatternOffset = 1;
3034  static const int kArrayOffset = 2;
3035 };
3036 
3037 
3038 // Abstract base class for checking whether a weak object should be retained.
3039 class WeakObjectRetainer {
3040  public:
3041  virtual ~WeakObjectRetainer() {}
3042 
3043  // Return whether this object should be retained. If NULL is returned the
3044  // object has no references. Otherwise the address of the retained object
3045  // should be returned as in some GC situations the object has been moved.
3046  virtual Object* RetainAs(Object* object) = 0;
3047 };
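Implementations return NULL to drop an object from a weak list, or the object's (possibly relocated) address to keep it. A minimal hypothetical retainer that keeps everything:

  // Sketch only: retains every object it is asked about.
  class KeepAllRetainer : public WeakObjectRetainer {
   public:
    virtual Object* RetainAs(Object* object) { return object; }
  };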
3048 
3049 
3050 // Intrusive object marking uses the least significant bit of a
3051 // heap object's map word to mark objects.
3052 // Normally all map words have the least significant bit set
3053 // because they contain a tagged map pointer.
3054 // If the bit is not set, the object is marked.
3055 // All objects should be unmarked before resuming
3056 // JavaScript execution.
3057 class IntrusiveMarking {
3058  public:
3059  static bool IsMarked(HeapObject* object) {
3060  return (object->map_word().ToRawValue() & kNotMarkedBit) == 0;
3061  }
3062 
3063  static void ClearMark(HeapObject* object) {
3064  uintptr_t map_word = object->map_word().ToRawValue();
3065  object->set_map_word(MapWord::FromRawValue(map_word | kNotMarkedBit));
3066  ASSERT(!IsMarked(object));
3067  }
3068 
3069  static void SetMark(HeapObject* object) {
3070  uintptr_t map_word = object->map_word().ToRawValue();
3071  object->set_map_word(MapWord::FromRawValue(map_word & ~kNotMarkedBit));
3072  ASSERT(IsMarked(object));
3073  }
3074 
3075  static Map* MapOfMarkedObject(HeapObject* object) {
3076  uintptr_t map_word = object->map_word().ToRawValue();
3077  return MapWord::FromRawValue(map_word | kNotMarkedBit).ToMap();
3078  }
3079 
3080  static int SizeOfMarkedObject(HeapObject* object) {
3081  return object->SizeFromMap(MapOfMarkedObject(object));
3082  }
3083 
3084  private:
3085  static const uintptr_t kNotMarkedBit = 0x1;
3086  STATIC_ASSERT((kHeapObjectTag & kNotMarkedBit) != 0);
3087 };
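The trick relies on heap-object tagging: a map word normally holds a tagged pointer whose low bit is 1, so clearing that bit both marks the object and keeps the map recoverable by OR-ing the bit back in. A standalone demonstration on a raw word (the value is illustrative, not a real map pointer):

  #include <cassert>
  #include <cstdint>

  int main() {
    const uintptr_t kNotMarkedBit = 0x1;
    uintptr_t map_word = 0x100021;                   // tagged pointer: low bit set
    uintptr_t marked = map_word & ~kNotMarkedBit;    // SetMark clears the bit
    assert((marked & kNotMarkedBit) == 0);           // IsMarked sees bit == 0
    uintptr_t recovered = marked | kNotMarkedBit;    // MapOfMarkedObject restores it
    assert(recovered == map_word);
    return 0;
  }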
3088 
3089 
3090 #ifdef DEBUG
3091 // Helper class for tracing paths to a search target Object from all roots.
3092 // The TracePathFrom() method can be used to trace paths from a specific
3093 // object to the search target object.
3094 class PathTracer : public ObjectVisitor {
3095  public:
3096  enum WhatToFind {
3097  FIND_ALL, // Will find all matches.
3098  FIND_FIRST // Will stop the search after first match.
3099  };
3100 
3101  // For the WhatToFind arg, if FIND_FIRST is specified, tracing will stop
3102  // after the first match. If FIND_ALL is specified, then tracing will be
3103  // done for all matches.
3104  PathTracer(Object* search_target,
3105  WhatToFind what_to_find,
3106  VisitMode visit_mode)
3107  : search_target_(search_target),
3108  found_target_(false),
3109  found_target_in_trace_(false),
3110  what_to_find_(what_to_find),
3111  visit_mode_(visit_mode),
3112  object_stack_(20),
3113  no_allocation() {}
3114 
3115  virtual void VisitPointers(Object** start, Object** end);
3116 
3117  void Reset();
3118  void TracePathFrom(Object** root);
3119 
3120  bool found() const { return found_target_; }
3121 
3122  static Object* const kAnyGlobalObject;
3123 
3124  protected:
3125  class MarkVisitor;
3126  class UnmarkVisitor;
3127 
3128  void MarkRecursively(Object** p, MarkVisitor* mark_visitor);
3129  void UnmarkRecursively(Object** p, UnmarkVisitor* unmark_visitor);
3130  virtual void ProcessResults();
3131 
3132  // Tags 0, 1, and 3 are used. Use 2 for marking visited HeapObject.
3133  static const int kMarkTag = 2;
3134 
3135  Object* search_target_;
3136  bool found_target_;
3137  bool found_target_in_trace_;
3138  WhatToFind what_to_find_;
3139  VisitMode visit_mode_;
3140  List<Object*> object_stack_;
3141 
3142  DisallowHeapAllocation no_allocation; // i.e. no gc allowed.
3143 
3144  private:
3145  DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer);
3146 };
3147 #endif // DEBUG
3148 
3149 } } // namespace v8::internal
3150 
3151 #endif // V8_HEAP_H_
static int SizeOfMarkedObject(HeapObject *object)
Definition: heap.h:3080
MUST_USE_RESULT MaybeObject * CopyConstantPoolArray(ConstantPoolArray *src)
Definition: heap-inl.h:212
byte * Address
Definition: globals.h:186
Object ** roots_array_start()
Definition: heap.h:1451
Address NewSpaceStart()
Definition: heap.h:633
MUST_USE_RESULT MaybeObject * AllocateJSModule(Context *context, ScopeInfo *scope_info)
Definition: heap.cc:4549
void RecordFixedArraySubTypeStats(int array_sub_type, size_t size)
Definition: heap.h:1910
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter NULL
Definition: flags.cc:269
MUST_USE_RESULT MaybeObject * CopyCode(Code *code)
Definition: heap.cc:4220
static void Clear(FixedArray *cache)
Definition: heap.cc:3534
MUST_USE_RESULT MaybeObject * AllocateFixedTypedArray(int length, ExternalArrayType array_type, PretenureFlag pretenure)
Definition: heap.cc:4087
MUST_USE_RESULT MaybeObject * AllocateStringFromUtf8Slow(Vector< const char > str, int non_ascii_start, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:4905
MUST_USE_RESULT MaybeObject * AllocateSymbol()
Definition: heap.cc:5469
double total_regexp_code_generated()
Definition: heap.h:1729
void TearDown()
Definition: heap.cc:6705
void SetStackLimits()
Definition: heap.cc:6688
PromotionQueue(Heap *heap)
Definition: heap.h:417
void SetNewLimit(Address limit)
Definition: heap.h:438
bool NextGCIsLikelyToBeFull()
Definition: heap.h:1691
MUST_USE_RESULT MaybeObject * AllocateOneByteInternalizedString(Vector< const uint8_t > str, uint32_t hash_field)
Definition: heap-inl.h:138
MUST_USE_RESULT MaybeObject * AllocateExternalStringFromAscii(const ExternalAsciiString::Resource *resource)
Definition: heap.cc:3912
void set_full_gc_count(int count)
Definition: heap.h:2925
MUST_USE_RESULT MaybeObject * AllocateRawOneByteString(int length, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:5087
intptr_t OldGenerationCapacityAvailable()
Definition: heap.h:1589
void Callback(MemoryChunk *page, StoreBufferEvent event)
Definition: heap.cc:1399
MUST_USE_RESULT MaybeObject * CopyFixedDoubleArray(FixedDoubleArray *src)
Definition: heap-inl.h:207
intptr_t * old_pointer_space_size
Definition: heap.h:2564
int Lookup(Map *map, Name *name)
Definition: heap.cc:7614
Object ** native_contexts_list_address()
Definition: heap.h:1458
intptr_t * cell_space_size
Definition: heap.h:2572
static const int kMapHashShift
Definition: heap.h:2759
void DeoptMarkedAllocationSites()
Definition: heap.cc:571
int ReservedSemiSpaceSize()
Definition: heap.h:597
void PrintF(const char *format,...)
Definition: v8utils.cc:40
void SetNewSpaceHighPromotionModeActive(bool mode)
Definition: heap.h:1563
void CollectAllGarbage(int flags, const char *gc_reason=NULL, const GCCallbackFlags gc_callback_flags=kNoGCCallbackFlags)
Definition: heap.cc:731
(flattened DEFINE_* flag help text; mangled by macro expansion)
Definition: flags.cc:350
bool InOldDataSpace(Address address)
Definition: heap-inl.h:341
bool InNewSpace(Object *object)
Definition: heap-inl.h:307
Address * OldPointerSpaceAllocationTopAddress()
Definition: heap.h:683
void AddGCEpilogueCallback(v8::Isolate::GCEpilogueCallback callback, GCType gc_type_filter, bool pass_isolate=true)
Definition: heap.cc:6834
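Hedged sketch of registering an epilogue hook. The callback type is the v8::Isolate::GCEpilogueCallback typedef listed further down (void(*)(Isolate*, GCType, GCCallbackFlags)); kGCTypeMarkSweepCompact is the public filter constant from v8.h.
  // Sketch (inside namespace v8::internal): run a hook after each full GC.
  static void OnFullGC(v8::Isolate* isolate, v8::GCType type,
                       v8::GCCallbackFlags flags) {
    // Inspect statistics here; allocating on the V8 heap inside a GC
    // callback is unsafe.
  }

  void InstallEpilogueHook(Heap* heap) {
    // pass_isolate defaults to true, so the hook receives the v8::Isolate*.
    heap->AddGCEpilogueCallback(OnFullGC, v8::kGCTypeMarkSweepCompact);
  }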
bool IsHeapIterable()
Definition: heap.cc:5673
MUST_USE_RESULT MaybeObject * AllocateFunctionContext(int length, JSFunction *function)
Definition: heap.cc:5556
RootListIndex RootIndexForEmptyExternalArray(ElementsKind kind)
Definition: heap.cc:3774
int * new_space_capacity
Definition: heap.h:2563
void(* ObjectSlotCallback)(HeapObject **from, HeapObject *to)
Definition: store-buffer.h:44
void SetConstructStubDeoptPCOffset(int pc_offset)
Definition: heap.h:1842
void IterateWeakRoots(ObjectVisitor *v, VisitMode mode)
Definition: heap.cc:6272
uint32_t HashSeed()
Definition: heap.h:1831
Object * ToBoolean(bool condition)
Definition: heap-inl.h:759
Isolate * isolate()
Definition: heap-inl.h:624
MUST_USE_RESULT MaybeObject * ReinitializeJSGlobalProxy(JSFunction *constructor, JSGlobalProxy *global)
Definition: heap.cc:4859
int64_t AdjustAmountOfExternalAllocatedMemory(int64_t change_in_bytes)
Definition: heap-inl.h:582
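Hedged sketch of external-memory accounting: the embedder reports off-heap bytes kept alive by JS objects, and the running total feeds the heuristics that schedule GCs (the public mirror is v8::Isolate::AdjustAmountOfExternalAllocatedMemory).
  // Sketch (inside namespace v8::internal): attribute an external backing
  // store to the heap, then release the accounting when it is freed.
  void OnBufferAllocated(Heap* heap, int64_t bytes) {
    heap->AdjustAmountOfExternalAllocatedMemory(bytes);
  }
  void OnBufferFreed(Heap* heap, int64_t bytes) {
    heap->AdjustAmountOfExternalAllocatedMemory(-bytes);  // negative releases
  }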
void RemoveGCPrologueCallback(v8::Isolate::GCPrologueCallback callback)
Definition: heap.cc:6822
MUST_USE_RESULT MaybeObject * CopyFixedDoubleArrayWithMap(FixedDoubleArray *src, Map *map)
Definition: heap.cc:5224
static Smi * FromInt(int value)
Definition: objects-inl.h:1209
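A tiny hedged example: Smis are immediate tagged integers, so FromInt allocates nothing and cannot fail, which is why it returns Smi* rather than MaybeObject*.
  // Sketch (inside namespace v8::internal): build a small integer with no
  // heap allocation.
  Smi* forty_two = Smi::FromInt(42);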
bool CreateHeapObjects()
Definition: heap.cc:6672
MUST_USE_RESULT MaybeObject * AllocateJSFunctionProxy(Object *handler, Object *call_trap, Object *construct_trap, Object *prototype)
Definition: heap.cc:4686
bool flush_monomorphic_ics()
Definition: heap.h:1870
void FinalizeExternalString(String *string)
Definition: heap-inl.h:291
bool EnsureSweepersProgressed(int step_size)
Definition: heap.h:1793
MUST_USE_RESULT MaybeObject * CopyJSObject(JSObject *source, AllocationSite *site=NULL)
Definition: heap.cc:4712
int sweep_generation()
Definition: heap.h:1804
void Update(Map *source, Name *name, int result)
Definition: heap.h:2820
intptr_t MaxReserved()
Definition: heap.h:593
Map * MapForFixedTypedArray(ExternalArrayType array_type)
Definition: heap.cc:3752
void CompletelyClearInstanceofCache()
Definition: heap-inl.h:764
Address * OldDataSpaceAllocationLimitAddress()
Definition: heap.h:693
Map * MapForExternalArrayType(ExternalArrayType array_type)
Definition: heap.cc:3730
void SetNumberStringCache(Object *number, String *str)
Definition: heap.cc:3619
static const int kNullValueRootIndex
Definition: v8.h:5571
static bool RootCanBeWrittenAfterInitialization(RootListIndex root_index)
Definition: heap.cc:3409
void AgeInlineCaches()
Definition: heap.h:1866
MUST_USE_RESULT MaybeObject * AllocateModuleContext(ScopeInfo *scope_info)
Definition: heap.cc:5542
MUST_USE_RESULT MaybeObject * AllocateCodeCache()
Definition: heap.cc:2677
void AddString(String *string)
Definition: heap-inl.h:697
T Max(T a, T b)
Definition: utils.h:227
MUST_USE_RESULT MaybeObject * AllocateNativeContext()
Definition: heap.cc:5508
RootListIndex RootIndexForExternalArrayType(ExternalArrayType array_type)
Definition: heap.cc:3735
void CallGCEpilogueCallbacks(GCType gc_type, GCCallbackFlags flags)
Definition: heap.cc:1238
static const int kOldSpaceRoots
Definition: heap.h:1829
void VisitExternalResources(v8::ExternalResourceVisitor *visitor)
Definition: heap.cc:2073
static bool IsOneByte(T t, int chars)
MaybeObject * AddWeakObjectToCodeDependency(Object *obj, DependentCode *dep)
Definition: heap.cc:6856
PretenureFlag GetPretenureMode()
Definition: heap.h:1569
kSerializedDataOffset Object
Definition: objects-inl.h:5016
static const intptr_t kMinimumOldGenerationAllocationLimit
Definition: heap.h:1593
Address * allocation_top_address()
Definition: spaces.h:2534
MUST_USE_RESULT MaybeObject * AllocateJSMessageObject(String *type, JSArray *arguments, int start_position, int end_position, Object *script, Object *stack_frames)
Definition: heap.cc:3888
void DoScavengeObject(Map *map, HeapObject **slot, HeapObject *obj)
Definition: heap.h:1815
int int32_t
Definition: unicode.cc:47
void ClearInstanceofCache()
Definition: heap-inl.h:754
HeapObjectCallback GcSafeSizeOfOldObjectFunction()
Definition: heap.h:1429
bool InFromSpace(Object *object)
Definition: heap-inl.h:321
MUST_USE_RESULT MaybeObject * Uint32ToString(uint32_t value, bool check_number_string_cache=true)
Definition: heap.cc:3673
Object * weak_object_to_code_table()
Definition: heap.h:1367
PromotionQueue * promotion_queue()
Definition: heap.h:1302
const int kMaxInt
Definition: globals.h:248
void SetGetterStubDeoptPCOffset(int pc_offset)
Definition: heap.h:1847
RootListIndex RootIndexForFixedTypedArray(ExternalArrayType array_type)
Definition: heap.cc:3757
Marking * marking()
Definition: heap.h:1777
intptr_t * code_space_size
Definition: heap.h:2568
uint32_t Flags
Definition: objects.h:5184
MUST_USE_RESULT MaybeObject * InternalizeStringWithKey(HashTableKey *key)
Definition: heap.cc:6034
MUST_USE_RESULT MaybeObject * AllocateExternal(void *value)
Definition: heap.cc:5637
bool InternalizeTwoCharsStringIfExists(String *str, String **result)
static void MoveBlock(Address dst, Address src, int byte_size)
Definition: heap-inl.h:469
bool linear_allocation()
Definition: heap.h:672
void IterateStrongRoots(ObjectVisitor *v, VisitMode mode)
Definition: heap.cc:6292
intptr_t MaximumCommittedMemory()
Definition: heap.h:616
#define ASSERT(condition)
Definition: checks.h:329
bool InSpace(Address addr, AllocationSpace space)
Definition: heap.cc:5948
MUST_USE_RESULT MaybeObject * AllocateGlobalContext(JSFunction *function, ScopeInfo *scope_info)
Definition: heap.cc:5523
void public_set_code_stubs(UnseededNumberDictionary *value)
Definition: heap.h:1422
bool InNewSpacePage(Address address)
static const int kReduceMemoryFootprintMask
Definition: heap.h:1259
OldSpace * TargetSpace(HeapObject *object)
Definition: heap-inl.h:384
void set_collector(GarbageCollector collector)
Definition: heap.h:2919
const int kPointerSizeLog2
Definition: globals.h:281
ExternalArrayType
Definition: v8.h:2113
unsigned short uint16_t
Definition: unicode.cc:46
void RecordObjectStats(InstanceType type, size_t size)
Definition: heap.h:1890
Address * NewSpaceAllocationLimitAddress()
Definition: heap.h:679
#define STRONG_ROOT_LIST(V)
Definition: heap.h:50
Address * OldDataSpaceAllocationTopAddress()
Definition: heap.h:690
MUST_USE_RESULT MaybeObject * AllocateBlockContext(JSFunction *function, Context *previous, ScopeInfo *info)
Definition: heap.cc:5610
int global_ic_age()
Definition: heap.h:1862
void InitializeWeakObjectToCodeTable()
Definition: heap.h:1939
MUST_USE_RESULT MaybeObject * AllocateTwoByteInternalizedString(Vector< const uc16 > str, uint32_t hash_field)
Definition: heap-inl.h:171
void(* ScavengingCallback)(Map *map, HeapObject **slot, HeapObject *object)
Definition: heap.h:509
intptr_t CommittedMemoryExecutable()
Definition: heap.cc:230
ObjectIterator * next()
Definition: heap.cc:7000
friend class ExternalReference
Definition: heap.h:2797
#define INTERNALIZED_STRING_LIST(V)
Definition: heap.h:276
PagedSpaces(Heap *heap)
Definition: heap.h:2672
static const int kPageSize
Definition: spaces.h:814
int * pending_global_handle_count
Definition: heap.h:2577
Address * store_buffer_top_address()
Definition: heap.h:1453
void IterateSmiRoots(ObjectVisitor *v)
Definition: heap.cc:6284
friend class GCTracer
Definition: heap.h:2540
MUST_USE_RESULT MaybeObject * AllocateJSObject(JSFunction *constructor, PretenureFlag pretenure=NOT_TENURED, AllocationSite *allocation_site=NULL)
Definition: heap.cc:4530
void AdjustLiveBytes(Address address, int by, InvocationMode mode)
Definition: heap.cc:4034
Address always_allocate_scope_depth_address()
Definition: heap.h:669
void IterateAndMarkPointersToFromSpace(Address start, Address end, ObjectSlotCallback callback)
Definition: heap.cc:6063
void public_set_materialized_objects(FixedArray *objects)
Definition: heap.h:1446
ArrayStorageAllocationMode
Definition: heap.h:554
STATIC_CHECK(kUndefinedValueRootIndex==Internals::kUndefinedValueRootIndex)
void increment_nodes_copied_in_new_space()
Definition: heap.h:2935
virtual Object * RetainAs(Object *object)=0
StoreBuffer * store_buffer()
Definition: heap.h:1773
MUST_USE_RESULT MaybeObject * AllocateStringFromOneByte(Vector< const uint8_t > str, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:4885
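Every MUST_USE_RESULT MaybeObject* allocator in this index shares one protocol: unwrap the result, and propagate a failed allocation so the caller can collect garbage and retry. A hedged sketch of that idiom, using the Vector<const char> overload and the CStrVector helper listed elsewhere in this index; MaybeObject::ToObject is assumed to be this release's unwrap call.
  // Sketch (inside namespace v8::internal): allocate a one-byte string,
  // forwarding allocation failure to the caller.
  MaybeObject* MakeHelloString(Heap* heap) {
    MaybeObject* maybe =
        heap->AllocateStringFromOneByte(CStrVector("hello"), TENURED);
    Object* result;
    if (!maybe->ToObject(&result)) return maybe;  // caller GCs and retries
    return result;  // now known to be a String
  }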
Address * NewSpaceHighPromotionModeActiveAddress()
Definition: heap.h:1574
FixedTypedArrayBase * EmptyFixedTypedArrayForMap(Map *map)
Definition: heap.cc:3813
kInstanceClassNameOffset flag
Definition: objects-inl.h:5115
intptr_t * lo_space_size
Definition: heap.h:2574
MUST_USE_RESULT MaybeObject * CopyFixedArrayWithMap(FixedArray *src, Map *map)
Definition: heap.cc:5198
uint8_t byte
Definition: globals.h:185
void EnsureWeakObjectToCodeTable()
Definition: heap.cc:6880
INLINE(void RecordWrite(Address address, int offset))
int NotifyContextDisposed()
Definition: heap.cc:872
MUST_USE_RESULT MaybeObject * AllocateHeapNumber(double value, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:2969
#define STRUCT_MAP_ACCESSOR(NAME, Name, name)
Definition: heap.h:1334
void RepairFreeListsAfterBoot()
Definition: heap.cc:497
MUST_USE_RESULT MaybeObject * NumberToString(Object *number, bool check_number_string_cache=true)
Definition: heap.cc:3639
void public_set_empty_script(Script *script)
Definition: heap.h:1438
int * near_death_global_handle_count
Definition: heap.h:2578
STATIC_ASSERT((kEntriesPerBucket &(kEntriesPerBucket-1))==0)
String *(* ExternalStringTableUpdaterCallback)(Heap *heap, Object **pointer)
Definition: heap.h:388
double sweeping_time() const
Definition: heap.h:1765
void SetArgumentsAdaptorDeoptPCOffset(int pc_offset)
Definition: heap.h:1837
static void ClearMark(HeapObject *object)
Definition: heap.h:3063
unsigned int ms_count()
Definition: heap.h:1370
static const int kEndMarker
Definition: heap.h:2559
bool IdleNotification(int hint)
Definition: heap.cc:5710
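IdleNotification(hint) performs a bounded slice of incremental work and returns true once the heap has nothing further to do; embedders typically loop on the public wrapper. A hedged sketch, assuming v8::V8::IdleNotification is the public entry point in this release:
  // Sketch: donate idle time until V8 reports its bookkeeping is finished.
  while (!v8::V8::IdleNotification(100 /* hint; roughly idle ms */)) {
    // false means incremental marking/sweeping work remains.
  }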
intptr_t MaxOldGenerationSize()
Definition: heap.h:599
#define UNREACHABLE()
Definition: checks.h:52
void IncreaseTotalRegexpCodeGenerated(int size)
Definition: heap.h:1730
friend class MarkCompactCollector
Definition: heap.h:2544
void(* GCEpilogueCallback)(Isolate *isolate, GCType type, GCCallbackFlags flags)
Definition: v8.h:4348
friend class MapCompact
Definition: heap.h:2546
void EnsureHeapIsIterable()
Definition: heap.cc:5679
(flattened DEFINE_* flag help text, including DEFINE_string(expose_natives_as, ...); mangled by macro expansion)
Definition: flags.cc:211
MUST_USE_RESULT MaybeObject * AllocateUninitializedFixedDoubleArray(int length, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:5338
bool InOldPointerSpace(Address address)
Definition: heap-inl.h:331
intptr_t * property_cell_space_capacity
Definition: heap.h:2587
int(* HeapObjectCallback)(HeapObject *obj)
Definition: v8globals.h:248
bool always_allocate()
Definition: heap.h:668
String * hidden_string()
Definition: heap.h:1349
MUST_USE_RESULT MaybeObject * AllocateStringFromUtf8(Vector< const char > str, PretenureFlag pretenure=NOT_TENURED)
Definition: heap-inl.h:88
intptr_t OldGenerationAllocationLimit(intptr_t old_gen_size)
Definition: heap.h:1596
Address * allocation_top_address()
Definition: spaces.h:1804
static bool IsMarked(HeapObject *object)
Definition: heap.h:3059
#define MUST_USE_RESULT
Definition: globals.h:381
(flattened DEFINE_* flag help text; mangled by macro expansion)
Definition: flags.cc:665
static void UpdateAllocationSiteFeedback(HeapObject *object, ScratchpadSlotMode mode)
Definition: heap-inl.h:493
MUST_USE_RESULT MaybeObject * AllocateJSArrayWithElements(FixedArrayBase *array_base, ElementsKind elements_kind, int length, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:4650
static NewSpacePage * FromAddress(Address address_in_page)
Definition: spaces.h:2089
AllSpaces(Heap *heap)
Definition: heap.h:2646
void ClearAllICsByKind(Code::Kind kind)
Definition: heap.cc:483
MUST_USE_RESULT MaybeObject * AllocateWithContext(JSFunction *function, Context *previous, JSReceiver *extension)
Definition: heap.cc:5593
void CollectAllAvailableGarbage(const char *gc_reason=NULL)
Definition: heap.cc:743
void increment_nodes_died_in_new_space()
Definition: heap.h:2931
bool AllowedToBeMigrated(HeapObject *object, AllocationSpace dest)
Definition: heap-inl.h:421
#define SMI_ROOT_LIST(V)
Definition: heap.h:217
bool ConfigureHeapDefault()
Definition: heap.cc:6456
PagedSpace * paged_space(int idx)
Definition: heap.h:647
#define ROOT_LIST(V)
Definition: heap.h:226
static const int kNoGCFlags
Definition: heap.h:1257
PropertyCellSpace * property_cell_space()
Definition: heap.h:643
MUST_USE_RESULT MaybeObject * AllocateFixedArrayWithHoles(int length, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:5302
const int kPointerSize
Definition: globals.h:268
void VisitPointers(Object **start, Object **end)
Definition: heap-inl.h:827
int * global_handle_count
Definition: heap.h:2575
MUST_USE_RESULT MaybeObject * AllocateForeign(Address address, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:3837
void QueueMemoryChunkForFree(MemoryChunk *chunk)
Definition: heap.cc:7736
void CheckpointObjectStats()
Definition: heap.cc:7818
MUST_USE_RESULT MaybeObject * AllocateExternalArray(int length, ExternalArrayType array_type, void *external_pointer, PretenureFlag pretenure)
Definition: heap.cc:4046
const int kHeapObjectTag
Definition: v8.h:5473
static void ScavengePointer(HeapObject **p)
Definition: heap-inl.h:488
intptr_t * cell_space_capacity
Definition: heap.h:2573
intptr_t * memory_allocator_size
Definition: heap.h:2580
#define DISALLOW_IMPLICIT_CONSTRUCTORS(TypeName)
Definition: globals.h:370
void VisitPointers(Object **start, Object **end)
Definition: heap-inl.h:816
void decrement_scan_on_scavenge_pages()
Definition: heap.h:1295
void IncrementYoungSurvivorsCounter(int survived)
Definition: heap.h:1685
void set_allocation_sites_list(Object *object)
Definition: heap.h:1361
MUST_USE_RESULT MaybeObject * AllocatePolymorphicCodeCache()
Definition: heap.cc:2688
static const int kMaxRegularHeapObjectSize
Definition: spaces.h:820
intptr_t * code_space_capacity
Definition: heap.h:2569
(flattened DEFINE_* flag help text, ending in DEFINE_bool(code_comments, ...); mangled by macro expansion)
MUST_USE_RESULT MaybeObject * InternalizeString(String *str)
Definition: heap.cc:6009
static void Enter(Heap *heap, String *key_string, Object *key_pattern, FixedArray *value_array, ResultsCacheType type)
Definition: heap.cc:3477
#define DISALLOW_COPY_AND_ASSIGN(TypeName)
Definition: globals.h:359
void ReserveSpace(int *sizes, Address *addresses)
Definition: heap.cc:941
bool inline_allocation_disabled()
Definition: heap.h:1607
static Map * MapOfMarkedObject(HeapObject *object)
Definition: heap.h:3075
ExternalArray * EmptyExternalArrayForMap(Map *map)
Definition: heap.cc:3807
OldSpace * old_pointer_space()
Definition: heap.h:638
Map * InternalizedStringMapForString(String *str)
Definition: heap.cc:4959
bool ConfigureHeap(int max_semispace_size, intptr_t max_old_gen_size, intptr_t max_executable_size)
Definition: heap.cc:6375
intptr_t * map_space_size
Definition: heap.h:2570
static double TimeCurrentMillis()
MUST_USE_RESULT MaybeObject * AllocateTypeFeedbackInfo()
Definition: heap.cc:2705
bool CreateApiObjects()
Definition: heap.cc:3056
GCType
Definition: v8.h:4067
HeapState gc_state()
Definition: heap.h:1508
MUST_USE_RESULT MaybeObject * AllocateByteArray(int length, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:3987
OldSpace * code_space()
Definition: heap.h:640
static const int kMakeHeapIterableMask
Definition: heap.h:1264
MUST_USE_RESULT MaybeObject * AllocateJSObjectFromMap(Map *map, PretenureFlag pretenure=NOT_TENURED, bool alloc_props=true, AllocationSite *allocation_site=NULL)
Definition: heap.cc:4488
void public_set_store_buffer_top(Address *top)
Definition: heap.h:1442
MUST_USE_RESULT MaybeObject * AllocateJSArrayAndStorage(ElementsKind elements_kind, int length, int capacity, ArrayStorageAllocationMode mode=DONT_INITIALIZE_ARRAY_ELEMENTS, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:4564
static const int kUndefinedValueRootIndex
Definition: v8.h:5570
void Iterate(ObjectVisitor *v)
Definition: heap-inl.h:707
bool InToSpace(Object *object)
Definition: heap-inl.h:326
static void FatalProcessOutOfMemory(const char *location, bool take_snapshot=false)
Definition: heap.cc:6887
RootListIndex RootIndexForEmptyFixedTypedArray(ElementsKind kind)
Definition: heap.cc:3791
#define ROOT_ACCESSOR(type, name, camel_name)
Definition: heap.h:2009
bool HasBeenSetUp()
Definition: heap.cc:260
LargeObjectSpace * lo_space()
Definition: heap.h:646
#define BASE_EMBEDDED
Definition: allocation.h:68
bool RootCanBeTreatedAsConstant(RootListIndex root_index)
Definition: heap.cc:3438
static const int kFalseValueRootIndex
Definition: v8.h:5573
MUST_USE_RESULT MaybeObject * AllocateExternalStringFromTwoByte(const ExternalTwoByteString::Resource *resource)
Definition: heap.cc:3934
MUST_USE_RESULT MaybeObject * AllocatePartialMap(InstanceType instance_type, int instance_size)
Definition: heap.cc:2619
int * free_global_handle_count
Definition: heap.h:2579
static Object * Lookup(Heap *heap, String *key_string, Object *key_pattern, ResultsCacheType type)
Definition: heap.cc:3444
void set_gc_count(unsigned int count)
Definition: heap.h:2922
static const int kAbortIncrementalMarkingMask
Definition: heap.h:1260
Vector< const char > CStrVector(const char *data)
Definition: utils.h:574
bool CollectGarbage(AllocationSpace space, const char *gc_reason=NULL, const GCCallbackFlags gc_callback_flags=kNoGCCallbackFlags)
Definition: heap-inl.h:554
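CollectGarbage is the per-space entry point: NEW_SPACE selects a scavenge, any other space implies a full collection. Minimal hedged sketch:
  // Sketch (inside namespace v8::internal): scavenge the young generation.
  void ScavengeYoungGeneration(Heap* heap) {
    heap->CollectGarbage(NEW_SPACE, "young-generation pressure");
  }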
void FreeQueuedChunks()
Definition: heap.cc:7742
CellSpace * cell_space()
Definition: heap.h:642
intptr_t CommittedMemory()
Definition: heap.cc:202
void increment_promoted_objects_size(int object_size)
Definition: heap.h:2927
Object * GetNumberStringCache(Object *number)
Definition: heap.cc:3599
intptr_t SizeOfObjects()
Definition: heap.cc:473
#define T(name, string, precedence)
Definition: token.cc:48
MUST_USE_RESULT MaybeObject * AllocateEmptyJSArray(ElementsKind elements_kind, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.h:713
Address * NewSpaceAllocationTopAddress()
Definition: heap.h:676
MUST_USE_RESULT MaybeObject * AllocateJSArrayBuffer()
static const int kEmptyStringRootIndex
Definition: v8.h:5574
intptr_t get_max_alive_after_gc()
Definition: heap.h:1746
void UpdateReferencesInExternalStringTable(ExternalStringTableUpdaterCallback updater_func)
Definition: heap.cc:1708
MUST_USE_RESULT MaybeObject * AllocateJSProxy(Object *handler, Object *prototype)
Definition: heap.cc:4666
void ProcessWeakReferences(WeakObjectRetainer *retainer)
Definition: heap.cc:1896
void ClearNormalizedMapCaches()
Definition: heap.cc:1018
void RecordCodeSubTypeStats(int code_sub_type, int code_age, size_t size)
Definition: heap.h:1896
MUST_USE_RESULT MaybeObject * NumberFromDouble(double value, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:3819
static const int kLength
Definition: heap.h:2757
MUST_USE_RESULT MaybeObject * CreateCode(const CodeDesc &desc, Code::Flags flags, Handle< Object > self_reference, bool immovable=false, bool crankshafted=false, int prologue_offset=Code::kPrologueOffsetNotSet)
Definition: heap.cc:4119
void Update(Map *map, Name *name, int field_offset)
Definition: heap.cc:7626
intptr_t * old_data_space_capacity
Definition: heap.h:2567
intptr_t Available()
Definition: heap.cc:247
MUST_USE_RESULT MaybeObject * AllocateArgumentsObject(Object *callee, int length)
Definition: heap.cc:4401
int InitialSemiSpaceSize()
Definition: heap.h:598
void RemoveGCEpilogueCallback(v8::Isolate::GCEpilogueCallback callback)
Definition: heap.cc:6844
void SetSetterStubDeoptPCOffset(int pc_offset)
Definition: heap.h:1852
intptr_t Capacity()
Definition: spaces.h:2455
Scope(GCTracer *tracer, ScopeId scope)
Definition: heap.h:2896
MUST_USE_RESULT MaybeObject * CopyFixedArray(FixedArray *src)
Definition: heap-inl.h:202
void IterateRoots(ObjectVisitor *v, VisitMode mode)
Definition: heap.cc:6266
Address * allocation_limit_address()
Definition: spaces.h:1809
void IncrementCodeGeneratedBytes(bool is_crankshafted, int size)
Definition: heap.h:1734
Address NewSpaceTop()
Definition: heap.h:635
MUST_USE_RESULT MaybeObject * ReinitializeJSReceiver(JSReceiver *object, InstanceType type, int size)
Definition: heap.cc:4797
MUST_USE_RESULT MaybeObject * AllocateAccessorPair()
Definition: heap.cc:2693
int * weak_global_handle_count
Definition: heap.h:2576
MUST_USE_RESULT MaybeObject * AllocateConstantPoolArray(int number_of_int64_entries, int number_of_code_ptr_entries, int number_of_heap_ptr_entries, int number_of_int32_entries)
Definition: heap.cc:5396
MUST_USE_RESULT MaybeObject * AllocateInternalizedStringImpl(T t, int chars, uint32_t hash_field)
MUST_USE_RESULT MaybeObject * AllocateCatchContext(JSFunction *function, Context *previous, String *name, Object *thrown_object)
Definition: heap.cc:5572
#define STRUCT_LIST(V)
Definition: objects.h:590
(flattened DEFINE_* flag help text; mangled by macro expansion)
Definition: flags.cc:317
static const int kArgumentsLengthIndex
Definition: heap.h:1104
bool UncommitFromSpace()
Definition: spaces.h:2615
void CheckNewSpaceExpansionCriteria()
Definition: heap.cc:1372
#define STRING_INDEX_DECLARATION(name, str)
Definition: heap.h:1622
MUST_USE_RESULT MaybeObject * AllocateScopeInfo(int length)
Definition: heap.cc:5628
Object * native_contexts_list()
Definition: heap.h:1354
double get_min_in_mutator()
Definition: heap.h:1749
ExternalStringTable * external_string_table()
Definition: heap.h:1799
MUST_USE_RESULT MaybeObject * NumberFromUint32(uint32_t value, PretenureFlag pretenure=NOT_TENURED)
Definition: heap-inl.h:280
IncrementalMarking * incremental_marking()
Definition: heap.h:1781
double get_max_gc_pause()
Definition: heap.h:1743
bool Contains(Address addr)
Definition: heap.cc:5929
void EnableInlineAllocation()
Definition: heap.cc:6529
size_t CommittedPhysicalMemory()
Definition: heap.cc:216
void insert(HeapObject *target, int size)
Definition: heap-inl.h:46
Address * OldPointerSpaceAllocationLimitAddress()
Definition: heap.h:686
MUST_USE_RESULT MaybeObject * AllocateUninitializedFixedArray(int length)
Definition: heap.cc:5308
MUST_USE_RESULT MaybeObject * AllocateAllocationSite()
Definition: heap.cc:3031
void MoveElements(FixedArray *array, int dst_index, int src_index, int len)
Definition: heap.cc:883
static const int kStartMarker
Definition: heap.h:2558
static const int kPrologueOffsetNotSet
Definition: objects.h:5227
bool ShouldBePromoted(Address old_address, int object_size)
Definition: heap-inl.h:357
uintptr_t NewSpaceMask()
Definition: heap.h:634
bool IsLazySweepingComplete()
Definition: spaces.h:1929
int gc_count() const
Definition: heap.h:1822
int MaxSemiSpaceSize()
Definition: heap.h:596
void RememberUnmappedPage(Address page, bool compacted)
Definition: heap.cc:7790
static const int kNotFound
Definition: heap.h:2762
static const int kRegExpResultsCacheSize
Definition: heap.h:3028
MUST_USE_RESULT MaybeObject * LookupSingleCharacterStringFromCode(uint16_t code)
Definition: heap.cc:3962
intptr_t PromotedTotalSize()
Definition: heap.h:1578
static void CopyBlock(Address dst, Address src, int byte_size)
Definition: heap-inl.h:462
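CopyBlock and MoveBlock (listed above) copy byte_size bytes of tagged heap words between addresses. A hedged usage sketch; the alignment expectation (byte_size a multiple of kPointerSize) is an assumption following from the word-wise copy, and MoveBlock is the variant that tolerates overlapping ranges.
  // Sketch (inside namespace v8::internal): word-wise copy of an object
  // body, e.g. while duplicating a fixed array.
  void CopyObjectBody(Address dst, Address src, int byte_size) {
    Heap::CopyBlock(dst, src, byte_size);  // assumes pointer-aligned size
  }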
void AddSweepingTime(double sweeping_time)
Definition: heap.h:1761
static bool ShouldZapGarbage()
Definition: heap.h:1486
MUST_USE_RESULT MaybeObject * AllocateFixedDoubleArrayWithHoles(int length, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:5355
MUST_USE_RESULT MaybeObject * AllocateFixedArray(int length, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:5297
#define STRING_ACCESSOR(name, str)
Definition: heap.h:1341
static const int kArgumentsCalleeIndex
Definition: heap.h:1106
MUST_USE_RESULT MaybeObject * AllocateHashTable(int length, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:5457
void public_set_non_monomorphic_cache(UnseededNumberDictionary *value)
Definition: heap.h:1434
PerThreadAssertScopeDebugOnly< HEAP_ALLOCATION_ASSERT, false > DisallowHeapAllocation
Definition: assert-scope.h:214
MUST_USE_RESULT MaybeObject * AllocateInternalizedStringFromUtf8(Vector< const char > str, int chars, uint32_t hash_field)
Definition: heap-inl.h:118
MUST_USE_RESULT MaybeObject * InternalizeUtf8String(const char *str)
Definition: heap.h:1222
static const int kHeaderSize
Definition: objects.h:2757
void Print(const v8::FunctionCallbackInfo< v8::Value > &args)
void DisableInlineAllocation()
Definition: heap.cc:6538
int64_t amount_of_external_allocated_memory()
Definition: heap.h:1872
#define DECLARE_STRUCT_MAP(NAME, Name, name)
Definition: heap.h:1627
MapSpace * map_space()
Definition: heap.h:641
intptr_t PromotedSpaceSizeOfObjects()
Definition: heap.cc:6502
intptr_t * old_pointer_space_capacity
Definition: heap.h:2565
bool OldGenerationAllocationLimitReached()
Definition: heap-inl.h:351
#define ROOT_INDEX_DECLARATION(type, name, camel_name)
Definition: heap.h:1633
MUST_USE_RESULT MaybeObject * NumberFromInt32(int32_t value, PretenureFlag pretenure=NOT_TENURED)
Definition: heap-inl.h:272
void ClearJSFunctionResultCaches()
Definition: heap.cc:995
GCCallbackFlags
Definition: v8.h:4073
void RecordStats(HeapStats *stats, bool take_snapshot=false)
Definition: heap.cc:6463
NewSpacePage * prev_page() const
Definition: spaces.h:2061
bool IsInGCPostProcessing()
Definition: heap.h:1510
HeapObject * obj
void CreateFillerObjectAt(Address addr, int size)
Definition: heap.cc:4005
Object * array_buffers_list()
Definition: heap.h:1359
MUST_USE_RESULT MaybeObject * AllocateSharedFunctionInfo(Object *name)
Definition: heap.cc:3849
bool AdvanceSweepers(int step_size)
Definition: heap.cc:6513
void increment_nodes_promoted()
Definition: heap.h:2939
Object ** allocation_sites_list_address()
Definition: heap.h:1365
void increment_scan_on_scavenge_pages()
Definition: heap.h:1288
Address * allocation_limit_address()
Definition: spaces.h:2539
MUST_USE_RESULT MaybeObject * Allocate(Map *map, AllocationSpace space, AllocationSite *allocation_site=NULL)
Definition: heap.cc:4345
intptr_t * map_space_capacity
Definition: heap.h:2571
static const int kTrueValueRootIndex
Definition: v8.h:5572
static const int kCapacityMask
Definition: heap.h:2758
static void ScavengeObject(HeapObject **p, HeapObject *object)
Definition: heap-inl.h:527
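ScavengeObject is the evacuation fast path: when the object's map word already holds a forwarding address the slot is simply updated; otherwise the map-indexed scavenging callback copies or promotes the object. A hedged reconstruction of that check (MapWord and its forwarding accessors are from objects.h):
  // Sketch (inside namespace v8::internal) of the heap-inl.h:527 fast path.
  void ScavengeObjectSketch(HeapObject** p, HeapObject* object) {
    MapWord first_word = object->map_word();
    if (first_word.IsForwardingAddress()) {
      *p = first_word.ToForwardingAddress();  // already copied; fix the slot
      return;
    }
    // Slow path: dispatch through the scavenging callback (DoScavengeObject).
  }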
bool IsSweepingComplete()
Definition: heap.h:1785
MUST_USE_RESULT MaybeObject * CopyConstantPoolArrayWithMap(ConstantPoolArray *src, Map *map)
Definition: heap.cc:5241
AlwaysAllocateScope(Isolate *isolate)
Definition: heap-inl.h:770
MUST_USE_RESULT MaybeObject * AllocateStringFromOneByte(Vector< const char > str, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.h:868
bool CanMoveObjectStart(HeapObject *object)
Definition: heap.cc:4019
MUST_USE_RESULT MaybeObject * PrepareForCompare(String *str)
Definition: heap-inl.h:563
static bool IsAtStart(Address addr)
Definition: spaces.h:2075
T Min(T a, T b)
Definition: utils.h:234
intptr_t * memory_allocator_capacity
Definition: heap.h:2581
bool EnsureSweeperProgress(intptr_t size_in_bytes)
Definition: spaces.cc:2632
MUST_USE_RESULT MaybeObject * AllocateJSArrayStorage(JSArray *array, int length, int capacity, ArrayStorageAllocationMode mode=DONT_INITIALIZE_ARRAY_ELEMENTS)
Definition: heap.cc:4610
OldSpaces(Heap *heap)
Definition: heap.h:2659
void AddMarkingTime(double marking_time)
Definition: heap.h:1752
double marking_time() const
Definition: heap.h:1756
static const int kSweepPreciselyMask
Definition: heap.h:1258
static const int kSloppyArgumentsObjectSize
Definition: heap.h:1098
void set_array_buffers_list(Object *object)
Definition: heap.h:1356
intptr_t Capacity()
Definition: heap.cc:189
MUST_USE_RESULT MaybeObject * AllocateStruct(InstanceType type)
Definition: heap.cc:5651
void(* GCPrologueCallback)(Isolate *isolate, GCType type, GCCallbackFlags flags)
Definition: v8.h:4345
intptr_t * old_data_space_size
Definition: heap.h:2566
intptr_t OldGenerationSpaceAvailable()
Definition: heap.h:1585
(flattened DEFINE_* flag help text; mangled by macro expansion)
Definition: flags.cc:505
GCTracer * tracer()
Definition: heap.h:1724
NewSpace * new_space()
Definition: heap.h:637
SpaceIterator(Heap *heap)
Definition: heap.cc:6972
MUST_USE_RESULT MaybeObject * AllocateMap(InstanceType instance_type, int instance_size, ElementsKind elements_kind=TERMINAL_FAST_ELEMENTS_KIND)
Definition: heap.cc:2643
void UpdateMaximumCommitted()
Definition: heap.cc:237
intptr_t MaxExecutableSize()
Definition: heap.h:600
MUST_USE_RESULT MaybeObject * AllocateRaw(int size_in_bytes, AllocationSpace space, AllocationSpace retry_space)
Definition: heap-inl.h:217
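AllocateRaw is the lowest-level allocator: it attempts a linear allocation in space and, as the parameter name suggests, retries in retry_space when the first space cannot satisfy the request. Hedged sketch:
  // Sketch (inside namespace v8::internal): reserve raw, uninitialized
  // bytes, preferring new space with an old-space fallback.
  MaybeObject* RawReserve(Heap* heap, int size_in_bytes) {
    return heap->AllocateRaw(size_in_bytes, NEW_SPACE, OLD_POINTER_SPACE);
  }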
uintptr_t mask()
Definition: spaces.h:2520
Object * allocation_sites_list()
Definition: heap.h:1364
void PrintShortHeapStatistics()
Definition: heap.cc:354
static const int kStrictArgumentsObjectSize
Definition: heap.h:1101
void AddGCPrologueCallback(v8::Isolate::GCPrologueCallback callback, GCType gc_type_filter, bool pass_isolate=true)
Definition: heap.cc:6812
static const int kHashMask
Definition: heap.h:2760
static AllocationSpace TargetSpaceId(InstanceType type)
Definition: heap-inl.h:393
OldSpace * old_data_space()
Definition: heap.h:639
MUST_USE_RESULT MaybeObject * AllocateRawTwoByteString(int length, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:5111
static void AssertValidRange(Address from, Address to)
Definition: spaces.h:2242
static void SetMark(HeapObject *object)
Definition: heap.h:3069
MarkCompactCollector * mark_compact_collector()
Definition: heap.h:1769
MUST_USE_RESULT MaybeObject * AllocatePrivateSymbol()
Definition: heap.cc:5499
int Lookup(Map *source, Name *name)
Definition: heap.h:2811
MUST_USE_RESULT MaybeObject * AllocateFunction(Map *function_map, SharedFunctionInfo *shared, Object *prototype, PretenureFlag pretenure=TENURED)
Definition: heap.cc:4386
void UpdateNewSpaceReferencesInExternalStringTable(ExternalStringTableUpdaterCallback updater_func)
Definition: heap.cc:1671
bool InternalizeStringIfExists(String *str, String **result)
Definition: heap.cc:6025
StoreBufferRebuilder(StoreBuffer *store_buffer)
Definition: heap.h:393
void CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags)
Definition: heap.cc:1221
MUST_USE_RESULT MaybeObject * AllocateStringFromTwoByte(Vector< const uc16 > str, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:4939
static const int kEntriesPerBucket
Definition: heap.h:2761
MUST_USE_RESULT MaybeObject * CopyAndTenureFixedCOWArray(FixedArray *src)
Definition: heap.cc:5166
DependentCode * LookupWeakObjectToCodeDependency(Object *obj)
Definition: heap.cc:6873
MUST_USE_RESULT MaybeObject * AllocateAliasedArgumentsEntry(int slot)
Definition: heap.cc:2716
intptr_t * property_cell_space_size
Definition: heap.h:2586
void set_native_contexts_list(Object *object)
Definition: heap.h:1351
const int MB
Definition: globals.h:246