v8 3.14.5 (node 0.10.28)
V8 is Google's open source JavaScript engine
 All Data Structures Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
heap.h
Go to the documentation of this file.
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #ifndef V8_HEAP_H_
29 #define V8_HEAP_H_
30 
31 #include <math.h>
32 
33 #include "allocation.h"
34 #include "globals.h"
35 #include "incremental-marking.h"
36 #include "list.h"
37 #include "mark-compact.h"
38 #include "objects-visiting.h"
39 #include "spaces.h"
40 #include "splay-tree-inl.h"
41 #include "store-buffer.h"
42 #include "v8-counters.h"
43 #include "v8globals.h"
44 
45 namespace v8 {
46 namespace internal {
47 
48 // Defines all the roots in Heap.
// NOTE(review): this listing was scraped from a Doxygen page; the leading
// integer on each line is a display artifact, not part of the source.
// X-macro: each entry is V(type, accessor_name, RootListIndexCamelName).
// The macro is expanded with different definitions of V to generate the
// root accessor methods, the RootListIndex enum, and root initialization.
// Entry order is significant — see the "first 32 roots" comment below.
49 #define STRONG_ROOT_LIST(V) \
50  V(Map, byte_array_map, ByteArrayMap) \
51  V(Map, free_space_map, FreeSpaceMap) \
52  V(Map, one_pointer_filler_map, OnePointerFillerMap) \
53  V(Map, two_pointer_filler_map, TwoPointerFillerMap) \
54  /* Cluster the most popular ones in a few cache lines here at the top. */ \
55  V(Smi, store_buffer_top, StoreBufferTop) \
56  V(Oddball, undefined_value, UndefinedValue) \
57  V(Oddball, the_hole_value, TheHoleValue) \
58  V(Oddball, null_value, NullValue) \
59  V(Oddball, true_value, TrueValue) \
60  V(Oddball, false_value, FalseValue) \
61  V(Map, global_property_cell_map, GlobalPropertyCellMap) \
62  V(Map, shared_function_info_map, SharedFunctionInfoMap) \
63  V(Map, meta_map, MetaMap) \
64  V(Map, ascii_symbol_map, AsciiSymbolMap) \
65  V(Map, ascii_string_map, AsciiStringMap) \
66  V(Map, heap_number_map, HeapNumberMap) \
67  V(Map, native_context_map, NativeContextMap) \
68  V(Map, fixed_array_map, FixedArrayMap) \
69  V(Map, code_map, CodeMap) \
70  V(Map, scope_info_map, ScopeInfoMap) \
71  V(Map, fixed_cow_array_map, FixedCOWArrayMap) \
72  V(Map, fixed_double_array_map, FixedDoubleArrayMap) \
73  V(Object, no_interceptor_result_sentinel, NoInterceptorResultSentinel) \
74  V(Map, hash_table_map, HashTableMap) \
75  V(FixedArray, empty_fixed_array, EmptyFixedArray) \
76  V(ByteArray, empty_byte_array, EmptyByteArray) \
77  V(String, empty_string, EmptyString) \
78  V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray) \
79  V(Smi, stack_limit, StackLimit) \
80  V(Oddball, arguments_marker, ArgumentsMarker) \
81  /* The first 32 roots above this line should be boring from a GC point of */ \
82  /* view. This means they are never in new space and never on a page that */ \
83  /* is being compacted. */ \
84  V(FixedArray, number_string_cache, NumberStringCache) \
85  V(Object, instanceof_cache_function, InstanceofCacheFunction) \
86  V(Object, instanceof_cache_map, InstanceofCacheMap) \
87  V(Object, instanceof_cache_answer, InstanceofCacheAnswer) \
88  V(FixedArray, single_character_string_cache, SingleCharacterStringCache) \
89  V(FixedArray, string_split_cache, StringSplitCache) \
90  V(FixedArray, regexp_multiple_cache, RegExpMultipleCache) \
91  V(Object, termination_exception, TerminationException) \
92  V(Smi, hash_seed, HashSeed) \
93  V(Map, string_map, StringMap) \
94  V(Map, symbol_map, SymbolMap) \
95  V(Map, cons_string_map, ConsStringMap) \
96  V(Map, cons_ascii_string_map, ConsAsciiStringMap) \
97  V(Map, sliced_string_map, SlicedStringMap) \
98  V(Map, sliced_ascii_string_map, SlicedAsciiStringMap) \
99  V(Map, cons_symbol_map, ConsSymbolMap) \
100  V(Map, cons_ascii_symbol_map, ConsAsciiSymbolMap) \
101  V(Map, external_symbol_map, ExternalSymbolMap) \
102  V(Map, external_symbol_with_ascii_data_map, ExternalSymbolWithAsciiDataMap) \
103  V(Map, external_ascii_symbol_map, ExternalAsciiSymbolMap) \
104  V(Map, external_string_map, ExternalStringMap) \
105  V(Map, external_string_with_ascii_data_map, ExternalStringWithAsciiDataMap) \
106  V(Map, external_ascii_string_map, ExternalAsciiStringMap) \
107  V(Map, short_external_symbol_map, ShortExternalSymbolMap) \
108  V(Map, \
109  short_external_symbol_with_ascii_data_map, \
110  ShortExternalSymbolWithAsciiDataMap) \
111  V(Map, short_external_ascii_symbol_map, ShortExternalAsciiSymbolMap) \
112  V(Map, short_external_string_map, ShortExternalStringMap) \
113  V(Map, \
114  short_external_string_with_ascii_data_map, \
115  ShortExternalStringWithAsciiDataMap) \
116  V(Map, short_external_ascii_string_map, ShortExternalAsciiStringMap) \
117  V(Map, undetectable_string_map, UndetectableStringMap) \
118  V(Map, undetectable_ascii_string_map, UndetectableAsciiStringMap) \
119  V(Map, external_pixel_array_map, ExternalPixelArrayMap) \
120  V(Map, external_byte_array_map, ExternalByteArrayMap) \
121  V(Map, external_unsigned_byte_array_map, ExternalUnsignedByteArrayMap) \
122  V(Map, external_short_array_map, ExternalShortArrayMap) \
123  V(Map, external_unsigned_short_array_map, ExternalUnsignedShortArrayMap) \
124  V(Map, external_int_array_map, ExternalIntArrayMap) \
125  V(Map, external_unsigned_int_array_map, ExternalUnsignedIntArrayMap) \
126  V(Map, external_float_array_map, ExternalFloatArrayMap) \
127  V(Map, external_double_array_map, ExternalDoubleArrayMap) \
128  V(Map, non_strict_arguments_elements_map, NonStrictArgumentsElementsMap) \
129  V(Map, function_context_map, FunctionContextMap) \
130  V(Map, catch_context_map, CatchContextMap) \
131  V(Map, with_context_map, WithContextMap) \
132  V(Map, block_context_map, BlockContextMap) \
133  V(Map, module_context_map, ModuleContextMap) \
134  V(Map, global_context_map, GlobalContextMap) \
135  V(Map, oddball_map, OddballMap) \
136  V(Map, message_object_map, JSMessageObjectMap) \
137  V(Map, foreign_map, ForeignMap) \
138  V(HeapNumber, nan_value, NanValue) \
139  V(HeapNumber, infinity_value, InfinityValue) \
140  V(HeapNumber, minus_zero_value, MinusZeroValue) \
141  V(Map, neander_map, NeanderMap) \
142  V(JSObject, message_listeners, MessageListeners) \
143  V(Foreign, prototype_accessors, PrototypeAccessors) \
144  V(UnseededNumberDictionary, code_stubs, CodeStubs) \
145  V(UnseededNumberDictionary, non_monomorphic_cache, NonMonomorphicCache) \
146  V(PolymorphicCodeCache, polymorphic_code_cache, PolymorphicCodeCache) \
147  V(Code, js_entry_code, JsEntryCode) \
148  V(Code, js_construct_entry_code, JsConstructEntryCode) \
149  V(FixedArray, natives_source_cache, NativesSourceCache) \
150  V(Object, last_script_id, LastScriptId) \
151  V(Script, empty_script, EmptyScript) \
152  V(Smi, real_stack_limit, RealStackLimit) \
153  V(StringDictionary, intrinsic_function_names, IntrinsicFunctionNames) \
154  V(Smi, arguments_adaptor_deopt_pc_offset, ArgumentsAdaptorDeoptPCOffset) \
155  V(Smi, construct_stub_deopt_pc_offset, ConstructStubDeoptPCOffset) \
156  V(Smi, getter_stub_deopt_pc_offset, GetterStubDeoptPCOffset) \
157  V(Smi, setter_stub_deopt_pc_offset, SetterStubDeoptPCOffset)
158 
// Full root list: the strong roots above plus the symbol table, which is
// treated specially during GC (its entries are weakly held).
159 #define ROOT_LIST(V) \
160  STRONG_ROOT_LIST(V) \
161  V(SymbolTable, symbol_table, SymbolTable)
162 
// X-macro of pre-interned symbols: each entry is V(accessor_name, "literal").
// The string literals are runtime-visible (property names, error strings,
// internal markers) and must not be altered. Expanded to generate symbol
// accessors on Heap and their initialization at bootstrap.
163 #define SYMBOL_LIST(V) \
164  V(Array_symbol, "Array") \
165  V(Object_symbol, "Object") \
166  V(Proto_symbol, "__proto__") \
167  V(StringImpl_symbol, "StringImpl") \
168  V(arguments_symbol, "arguments") \
169  V(Arguments_symbol, "Arguments") \
170  V(call_symbol, "call") \
171  V(apply_symbol, "apply") \
172  V(caller_symbol, "caller") \
173  V(boolean_symbol, "boolean") \
174  V(Boolean_symbol, "Boolean") \
175  V(callee_symbol, "callee") \
176  V(constructor_symbol, "constructor") \
177  V(code_symbol, ".code") \
178  V(result_symbol, ".result") \
179  V(dot_for_symbol, ".for.") \
180  V(catch_var_symbol, ".catch-var") \
181  V(empty_symbol, "") \
182  V(eval_symbol, "eval") \
183  V(function_symbol, "function") \
184  V(length_symbol, "length") \
185  V(module_symbol, "module") \
186  V(name_symbol, "name") \
187  V(native_symbol, "native") \
188  V(null_symbol, "null") \
189  V(number_symbol, "number") \
190  V(Number_symbol, "Number") \
191  V(nan_symbol, "NaN") \
192  V(RegExp_symbol, "RegExp") \
193  V(source_symbol, "source") \
194  V(global_symbol, "global") \
195  V(ignore_case_symbol, "ignoreCase") \
196  V(multiline_symbol, "multiline") \
197  V(input_symbol, "input") \
198  V(index_symbol, "index") \
199  V(last_index_symbol, "lastIndex") \
200  V(object_symbol, "object") \
201  V(prototype_symbol, "prototype") \
202  V(string_symbol, "string") \
203  V(String_symbol, "String") \
204  V(Date_symbol, "Date") \
205  V(this_symbol, "this") \
206  V(to_string_symbol, "toString") \
207  V(char_at_symbol, "CharAt") \
208  V(undefined_symbol, "undefined") \
209  V(value_of_symbol, "valueOf") \
210  V(InitializeVarGlobal_symbol, "InitializeVarGlobal") \
211  V(InitializeConstGlobal_symbol, "InitializeConstGlobal") \
212  V(KeyedLoadElementMonomorphic_symbol, \
213  "KeyedLoadElementMonomorphic") \
214  V(KeyedStoreElementMonomorphic_symbol, \
215  "KeyedStoreElementMonomorphic") \
216  V(KeyedStoreAndGrowElementMonomorphic_symbol, \
217  "KeyedStoreAndGrowElementMonomorphic") \
218  V(stack_overflow_symbol, "kStackOverflowBoilerplate") \
219  V(illegal_access_symbol, "illegal access") \
220  V(out_of_memory_symbol, "out-of-memory") \
221  V(illegal_execution_state_symbol, "illegal execution state") \
222  V(get_symbol, "get") \
223  V(set_symbol, "set") \
224  V(function_class_symbol, "Function") \
225  V(illegal_argument_symbol, "illegal argument") \
226  V(MakeReferenceError_symbol, "MakeReferenceError") \
227  V(MakeSyntaxError_symbol, "MakeSyntaxError") \
228  V(MakeTypeError_symbol, "MakeTypeError") \
229  V(invalid_lhs_in_assignment_symbol, "invalid_lhs_in_assignment") \
230  V(invalid_lhs_in_for_in_symbol, "invalid_lhs_in_for_in") \
231  V(invalid_lhs_in_postfix_op_symbol, "invalid_lhs_in_postfix_op") \
232  V(invalid_lhs_in_prefix_op_symbol, "invalid_lhs_in_prefix_op") \
233  V(illegal_return_symbol, "illegal_return") \
234  V(illegal_break_symbol, "illegal_break") \
235  V(illegal_continue_symbol, "illegal_continue") \
236  V(unknown_label_symbol, "unknown_label") \
237  V(redeclaration_symbol, "redeclaration") \
238  V(failure_symbol, "<failure>") \
239  V(space_symbol, " ") \
240  V(exec_symbol, "exec") \
241  V(zero_symbol, "0") \
242  V(global_eval_symbol, "GlobalEval") \
243  V(identity_hash_symbol, "v8::IdentityHash") \
244  V(closure_symbol, "(closure)") \
245  V(use_strict, "use strict") \
246  V(dot_symbol, ".") \
247  V(anonymous_function_symbol, "(anonymous function)") \
248  V(compare_ic_symbol, "==") \
249  V(strict_compare_ic_symbol, "===") \
250  V(infinity_symbol, "Infinity") \
251  V(minus_infinity_symbol, "-Infinity") \
252  V(hidden_stack_trace_symbol, "v8::hidden_stack_trace") \
253  V(query_colon_symbol, "(?:)")
254 
255 // Forward declarations.
256 class GCTracer;
257 class HeapStats;
258 class Isolate;
259 class WeakObjectRetainer;
260 
261 
// Callback invoked while updating the external string table: receives the
// heap and a slot holding an external string, and returns the (possibly
// relocated) String*, or a value telling the caller to drop the entry —
// exact NULL-return semantics are defined by the callers, not visible here.
262 typedef String* (*ExternalStringTableUpdaterCallback)(Heap* heap,
263  Object** pointer);
264 
// Helper used while rebuilding the store buffer after it overflows during a
// scavenge; receives StoreBufferEvent notifications per memory chunk.
// NOTE(review): the class declaration line (original line ~265,
// `class StoreBufferRebuilder {`) is missing from this scraped listing —
// restore it from the original heap.h before compiling.
266  public:
267  explicit StoreBufferRebuilder(StoreBuffer* store_buffer)
268  : store_buffer_(store_buffer) {
269  }
270 
// Invoked by the store buffer for each page/event during iteration.
271  void Callback(MemoryChunk* page, StoreBufferEvent event);
272 
273  private:
274  StoreBuffer* store_buffer_;
275 
276  // We record in this variable how full the store buffer was when we started
277  // iterating over the current page, finding pointers to new space. If the
278  // store buffer overflows again we can exempt the page from the store buffer
279  // by rewinding to this point instead of having to search the store buffer.
280  Object*** start_of_current_page_;
281  // The current page we are scanning in the store buffer iterator.
282  MemoryChunk* current_page_;
283 };
284 
285 
286 
287 // The all static Heap captures the interface to the global object heap.
288 // All JavaScript contexts by this process share the same object heap.
289 
290 #ifdef DEBUG
291 class HeapDebugUtils;
292 #endif
293 
294 
295 // A queue of objects promoted during scavenge. Each object is accompanied
296 // by its size to avoid dereferencing a map pointer for scanning.
// Entries are (object, size) word pairs written downward in new space
// (front_ grows toward rear_); an emergency_stack_ list takes over when the
// in-place queue cannot grow.
// NOTE(review): this scraped listing is missing the class declaration line
// (original line ~297, `class PromotionQueue {`) and the signature line of
// GetHeadPage() (original line ~316) — restore both from the original
// heap.h before compiling.
298  public:
299  explicit PromotionQueue(Heap* heap)
300  : front_(NULL),
301  rear_(NULL),
302  limit_(NULL),
303  emergency_stack_(0),
304  heap_(heap) { }
305 
306  void Initialize();
307 
// Frees the emergency stack; the queue must already be drained.
308  void Destroy() {
309  ASSERT(is_empty());
310  delete emergency_stack_;
311  emergency_stack_ = NULL;
312  }
313 
314  inline void ActivateGuardIfOnTheSamePage();
315 
// Body of GetHeadPage() — its signature line was lost by the scraper.
317  return Page::FromAllocationTop(reinterpret_cast<Address>(rear_));
318  }
319 
// Called when new-space allocation advances; relocates the queue head if
// the guard is active and the new limit would collide with the queue.
320  void SetNewLimit(Address limit) {
321  if (!guard_) {
322  return;
323  }
324 
325  ASSERT(GetHeadPage() == Page::FromAllocationTop(limit))
326  limit_ = reinterpret_cast<intptr_t*>(limit);
327 
328  if (limit_ <= rear_) {
329  return;
330  }
331 
332  RelocateQueueHead();
333  }
334 
// Empty only when the in-place queue has no pairs AND the emergency stack
// is absent or drained.
335  bool is_empty() {
336  return (front_ == rear_) &&
337  (emergency_stack_ == NULL || emergency_stack_->length() == 0);
338  }
339 
340  inline void insert(HeapObject* target, int size);
341 
// Pops the next (object, size) pair. Prefers the emergency stack when the
// in-place queue is exhausted; otherwise walks front_ backwards, hopping to
// the previous new-space page when front_ sits at a page start.
342  void remove(HeapObject** target, int* size) {
343  ASSERT(!is_empty());
344  if (front_ == rear_) {
345  Entry e = emergency_stack_->RemoveLast();
346  *target = e.obj_;
347  *size = e.size_;
348  return;
349  }
350 
351  if (NewSpacePage::IsAtStart(reinterpret_cast<Address>(front_))) {
352  NewSpacePage* front_page =
353  NewSpacePage::FromAddress(reinterpret_cast<Address>(front_));
354  ASSERT(!front_page->prev_page()->is_anchor());
355  front_ =
356  reinterpret_cast<intptr_t*>(front_page->prev_page()->area_end());
357  }
358  *target = reinterpret_cast<HeapObject*>(*(--front_));
359  *size = static_cast<int>(*(--front_));
360  // Assert no underflow.
361  SemiSpace::AssertValidRange(reinterpret_cast<Address>(rear_),
362  reinterpret_cast<Address>(front_));
363  }
364 
365  private:
366  // The front of the queue is higher in the memory page chain than the rear.
367  intptr_t* front_;
368  intptr_t* rear_;
369  intptr_t* limit_;
370 
371  bool guard_;
372 
// Each queued entry occupies two words: object pointer and size.
373  static const int kEntrySizeInWords = 2;
374 
375  struct Entry {
376  Entry(HeapObject* obj, int size) : obj_(obj), size_(size) { }
377 
378  HeapObject* obj_;
379  int size_;
380  };
// Overflow storage used when the in-place queue runs out of room.
381  List<Entry>* emergency_stack_;
382 
383  Heap* heap_;
384 
385  void RelocateQueueHead();
386 
387  DISALLOW_COPY_AND_ASSIGN(PromotionQueue);
388 };
389 
390 
391 typedef void (*ScavengingCallback)(Map* map,
392  HeapObject** slot,
393  HeapObject* object);
394 
395 
396 // External strings table is a place where all external strings are
397 // registered. We need to keep track of such strings to properly
398 // finalize them.
// NOTE(review): this scraped listing is missing the class declaration line
// (original line ~399, `class ExternalStringTable {`) and original line
// ~432 (presumably DISALLOW_COPY_AND_ASSIGN(ExternalStringTable) —
// compare against the original heap.h).
400  public:
401  // Registers an external string.
402  inline void AddString(String* string);
403 
// Visits every registered external string with the given visitor.
404  inline void Iterate(ObjectVisitor* v);
405 
406  // Restores internal invariant and gets rid of collected strings.
407  // Must be called after each Iterate() that modified the strings.
408  void CleanUp();
409 
410  // Destroys all allocated memory.
411  void TearDown();
412 
413  private:
// Only Heap may construct this table (see friend declaration below).
414  ExternalStringTable() { }
415 
416  friend class Heap;
417 
418  inline void Verify();
419 
420  inline void AddOldString(String* string);
421 
422  // Notifies the table that only a prefix of the new list is valid.
423  inline void ShrinkNewStrings(int position);
424 
425  // To speed up scavenge collections new space strings are kept
426  // separate from old space strings.
427  List<Object*> new_space_strings_;
428  List<Object*> old_space_strings_;
429 
430  Heap* heap_;
431 
432 
433 };
434 
435 
439 };
440 
441 class Heap {
442  public:
443  // Configure heap size before setup. Return false if the heap has been
444  // set up already.
445  bool ConfigureHeap(int max_semispace_size,
446  intptr_t max_old_gen_size,
447  intptr_t max_executable_size);
448  bool ConfigureHeapDefault();
449 
450  // Initializes the global object heap. If create_heap_objects is true,
451  // also creates the basic non-mutable objects.
452  // Returns whether it succeeded.
453  bool SetUp(bool create_heap_objects);
454 
455  // Destroys all memory allocated by the heap.
456  void TearDown();
457 
458  // Set the stack limit in the roots_ array. Some architectures generate
459  // code that looks here, because it is faster than loading from the static
460  // jslimit_/real_jslimit_ variable in the StackGuard.
461  void SetStackLimits();
462 
463  // Returns whether SetUp has been called.
464  bool HasBeenSetUp();
465 
466  // Returns the maximum amount of memory reserved for the heap. For
467  // the young generation, we reserve 4 times the amount needed for a
468  // semi space. The young generation consists of two semi spaces and
469  // we reserve twice the amount needed for those in order to ensure
470  // that new space can be aligned to its size.
471  intptr_t MaxReserved() {
472  return 4 * reserved_semispace_size_ + max_old_generation_size_;
473  }
474  int MaxSemiSpaceSize() { return max_semispace_size_; }
475  int ReservedSemiSpaceSize() { return reserved_semispace_size_; }
476  int InitialSemiSpaceSize() { return initial_semispace_size_; }
477  intptr_t MaxOldGenerationSize() { return max_old_generation_size_; }
478  intptr_t MaxExecutableSize() { return max_executable_size_; }
479 
480  // Returns the capacity of the heap in bytes w/o growing. Heap grows when
481  // more spaces are needed until it reaches the limit.
482  intptr_t Capacity();
483 
484  // Returns the amount of memory currently committed for the heap.
485  intptr_t CommittedMemory();
486 
487  // Returns the amount of executable memory currently committed for the heap.
488  intptr_t CommittedMemoryExecutable();
489 
490  // Returns the available bytes in space w/o growing.
491  // Heap doesn't guarantee that it can allocate an object that requires
492  // all available bytes. Check MaxHeapObjectSize() instead.
493  intptr_t Available();
494 
495  // Returns of size of all objects residing in the heap.
496  intptr_t SizeOfObjects();
497 
498  // Return the starting address and a mask for the new space. And-masking an
499  // address with the mask will result in the start address of the new space
500  // for all addresses in either semispace.
501  Address NewSpaceStart() { return new_space_.start(); }
502  uintptr_t NewSpaceMask() { return new_space_.mask(); }
503  Address NewSpaceTop() { return new_space_.top(); }
504 
505  NewSpace* new_space() { return &new_space_; }
506  OldSpace* old_pointer_space() { return old_pointer_space_; }
507  OldSpace* old_data_space() { return old_data_space_; }
508  OldSpace* code_space() { return code_space_; }
509  MapSpace* map_space() { return map_space_; }
510  CellSpace* cell_space() { return cell_space_; }
511  LargeObjectSpace* lo_space() { return lo_space_; }
513  switch (idx) {
514  case OLD_POINTER_SPACE:
515  return old_pointer_space();
516  case OLD_DATA_SPACE:
517  return old_data_space();
518  case MAP_SPACE:
519  return map_space();
520  case CELL_SPACE:
521  return cell_space();
522  case CODE_SPACE:
523  return code_space();
524  case NEW_SPACE:
525  case LO_SPACE:
526  UNREACHABLE();
527  }
528  return NULL;
529  }
530 
531  bool always_allocate() { return always_allocate_scope_depth_ != 0; }
533  return reinterpret_cast<Address>(&always_allocate_scope_depth_);
534  }
536  return linear_allocation_scope_depth_ != 0;
537  }
538 
540  return new_space_.allocation_top_address();
541  }
543  return new_space_.allocation_limit_address();
544  }
545 
546  // Uncommit unused semi space.
547  bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); }
548 
549  // Allocates and initializes a new JavaScript object based on a
550  // constructor.
551  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
552  // failed.
553  // Please note this does not perform a garbage collection.
554  MUST_USE_RESULT MaybeObject* AllocateJSObject(
555  JSFunction* constructor, PretenureFlag pretenure = NOT_TENURED);
556 
557  MUST_USE_RESULT MaybeObject* AllocateJSModule(Context* context,
558  ScopeInfo* scope_info);
559 
560  // Allocate a JSArray with no elements
562  ElementsKind elements_kind,
563  PretenureFlag pretenure = NOT_TENURED) {
564  return AllocateJSArrayAndStorage(elements_kind, 0, 0,
566  pretenure);
567  }
568 
569  // Allocate a JSArray with a specified length but elements that are left
570  // uninitialized.
572  ElementsKind elements_kind,
573  int length,
574  int capacity,
576  PretenureFlag pretenure = NOT_TENURED);
577 
578  // Allocate a JSArray with no elements
580  FixedArrayBase* array_base,
581  ElementsKind elements_kind,
582  PretenureFlag pretenure = NOT_TENURED);
583 
584  // Allocates and initializes a new global object based on a constructor.
585  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
586  // failed.
587  // Please note this does not perform a garbage collection.
588  MUST_USE_RESULT MaybeObject* AllocateGlobalObject(JSFunction* constructor);
589 
590  // Returns a deep copy of the JavaScript object.
591  // Properties and elements are copied too.
592  // Returns failure if allocation failed.
593  MUST_USE_RESULT MaybeObject* CopyJSObject(JSObject* source);
594 
595  // Allocates the function prototype.
596  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
597  // failed.
598  // Please note this does not perform a garbage collection.
599  MUST_USE_RESULT MaybeObject* AllocateFunctionPrototype(JSFunction* function);
600 
601  // Allocates a Harmony proxy or function proxy.
602  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
603  // failed.
604  // Please note this does not perform a garbage collection.
605  MUST_USE_RESULT MaybeObject* AllocateJSProxy(Object* handler,
606  Object* prototype);
607 
608  MUST_USE_RESULT MaybeObject* AllocateJSFunctionProxy(Object* handler,
609  Object* call_trap,
610  Object* construct_trap,
611  Object* prototype);
612 
613  // Reinitialize a JSReceiver into an (empty) JS object of respective type and
614  // size, but keeping the original prototype. The receiver must have at least
615  // the size of the new object. The object is reinitialized and behaves as an
616  // object that has been freshly allocated.
617  // Returns failure if an error occured, otherwise object.
619  InstanceType type,
620  int size);
621 
622  // Reinitialize an JSGlobalProxy based on a constructor. The object
623  // must have the same size as objects allocated using the
624  // constructor. The object is reinitialized and behaves as an
625  // object that has been freshly allocated using the constructor.
627  JSFunction* constructor, JSGlobalProxy* global);
628 
629  // Allocates and initializes a new JavaScript object based on a map.
630  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
631  // failed.
632  // Please note this does not perform a garbage collection.
634  Map* map, PretenureFlag pretenure = NOT_TENURED);
635 
636  // Allocates a heap object based on the map.
637  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
638  // failed.
639  // Please note this function does not perform a garbage collection.
640  MUST_USE_RESULT MaybeObject* Allocate(Map* map, AllocationSpace space);
641 
642  // Allocates a JS Map in the heap.
643  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
644  // failed.
645  // Please note this function does not perform a garbage collection.
646  MUST_USE_RESULT MaybeObject* AllocateMap(
647  InstanceType instance_type,
648  int instance_size,
649  ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND);
650 
651  // Allocates a partial map for bootstrapping.
652  MUST_USE_RESULT MaybeObject* AllocatePartialMap(InstanceType instance_type,
653  int instance_size);
654 
655  // Allocate a map for the specified function
656  MUST_USE_RESULT MaybeObject* AllocateInitialMap(JSFunction* fun);
657 
658  // Allocates an empty code cache.
659  MUST_USE_RESULT MaybeObject* AllocateCodeCache();
660 
661  // Allocates a serialized scope info.
662  MUST_USE_RESULT MaybeObject* AllocateScopeInfo(int length);
663 
664  // Allocates an empty PolymorphicCodeCache.
666 
667  // Allocates a pre-tenured empty AccessorPair.
668  MUST_USE_RESULT MaybeObject* AllocateAccessorPair();
669 
670  // Allocates an empty TypeFeedbackInfo.
672 
673  // Allocates an AliasedArgumentsEntry.
674  MUST_USE_RESULT MaybeObject* AllocateAliasedArgumentsEntry(int slot);
675 
676  // Clear the Instanceof cache (used when a prototype changes).
677  inline void ClearInstanceofCache();
678 
679  // For use during bootup.
681 
682  // Allocates and fully initializes a String. There are two String
683  // encodings: ASCII and two byte. One should choose between the three string
684  // allocation functions based on the encoding of the string buffer used to
685  // initialized the string.
686  // - ...FromAscii initializes the string from a buffer that is ASCII
687  // encoded (it does not check that the buffer is ASCII encoded) and the
688  // result will be ASCII encoded.
689  // - ...FromUTF8 initializes the string from a buffer that is UTF-8
690  // encoded. If the characters are all single-byte characters, the
691  // result will be ASCII encoded, otherwise it will converted to two
692  // byte.
693  // - ...FromTwoByte initializes the string from a buffer that is two-byte
694  // encoded. If the characters are all single-byte characters, the
695  // result will be converted to ASCII, otherwise it will be left as
696  // two-byte.
697  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
698  // failed.
699  // Please note this does not perform a garbage collection.
701  Vector<const char> str,
702  PretenureFlag pretenure = NOT_TENURED);
703  MUST_USE_RESULT inline MaybeObject* AllocateStringFromUtf8(
704  Vector<const char> str,
705  PretenureFlag pretenure = NOT_TENURED);
707  Vector<const char> str,
708  int non_ascii_start,
709  PretenureFlag pretenure = NOT_TENURED);
711  Vector<const uc16> str,
712  PretenureFlag pretenure = NOT_TENURED);
713 
714  // Allocates a symbol in old space based on the character stream.
715  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
716  // failed.
717  // Please note this function does not perform a garbage collection.
718  MUST_USE_RESULT inline MaybeObject* AllocateSymbol(Vector<const char> str,
719  int chars,
720  uint32_t hash_field);
721 
722  MUST_USE_RESULT inline MaybeObject* AllocateAsciiSymbol(
723  Vector<const char> str,
724  uint32_t hash_field);
725 
726  MUST_USE_RESULT inline MaybeObject* AllocateTwoByteSymbol(
727  Vector<const uc16> str,
728  uint32_t hash_field);
729 
731  unibrow::CharacterStream* buffer, int chars, uint32_t hash_field);
732 
734  Vector<const char> str,
735  int chars);
736 
737  // Allocates and partially initializes a String. There are two String
738  // encodings: ASCII and two byte. These functions allocate a string of the
739  // given length and set its map and length fields. The characters of the
740  // string are uninitialized.
741  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
742  // failed.
743  // Please note this does not perform a garbage collection.
745  int length,
746  PretenureFlag pretenure = NOT_TENURED);
748  int length,
749  PretenureFlag pretenure = NOT_TENURED);
750 
751  // Computes a single character string where the character has code.
752  // A cache is used for ASCII codes.
753  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
754  // failed. Please note this does not perform a garbage collection.
756  uint16_t code);
757 
758  // Allocate a byte array of the specified length
759  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
760  // failed.
761  // Please note this does not perform a garbage collection.
762  MUST_USE_RESULT MaybeObject* AllocateByteArray(int length,
763  PretenureFlag pretenure);
764 
765  // Allocate a non-tenured byte array of the specified length
766  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
767  // failed.
768  // Please note this does not perform a garbage collection.
769  MUST_USE_RESULT MaybeObject* AllocateByteArray(int length);
770 
771  // Allocates an external array of the specified length and type.
772  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
773  // failed.
774  // Please note this does not perform a garbage collection.
776  int length,
777  ExternalArrayType array_type,
778  void* external_pointer,
779  PretenureFlag pretenure);
780 
781  // Allocate a tenured JS global property cell.
782  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
783  // failed.
784  // Please note this does not perform a garbage collection.
786 
787  // Allocates a fixed array initialized with undefined values
788  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
789  // failed.
790  // Please note this does not perform a garbage collection.
791  MUST_USE_RESULT MaybeObject* AllocateFixedArray(int length,
792  PretenureFlag pretenure);
793  // Allocates a fixed array initialized with undefined values
794  MUST_USE_RESULT MaybeObject* AllocateFixedArray(int length);
795 
796  // Allocates an uninitialized fixed array. It must be filled by the caller.
797  //
798  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
799  // failed.
800  // Please note this does not perform a garbage collection.
801  MUST_USE_RESULT MaybeObject* AllocateUninitializedFixedArray(int length);
802 
803  // Make a copy of src and return it. Returns
804  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
805  MUST_USE_RESULT inline MaybeObject* CopyFixedArray(FixedArray* src);
806 
807  // Make a copy of src, set the map, and return the copy. Returns
808  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
809  MUST_USE_RESULT MaybeObject* CopyFixedArrayWithMap(FixedArray* src, Map* map);
810 
811  // Make a copy of src and return it. Returns
812  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
813  MUST_USE_RESULT inline MaybeObject* CopyFixedDoubleArray(
814  FixedDoubleArray* src);
815 
816  // Make a copy of src, set the map, and return the copy. Returns
817  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
819  FixedDoubleArray* src, Map* map);
820 
821  // Allocates a fixed array initialized with the hole values.
822  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
823  // failed.
824  // Please note this does not perform a garbage collection.
826  int length,
827  PretenureFlag pretenure = NOT_TENURED);
828 
830  int length,
831  PretenureFlag pretenure);
832 
833  // Allocates a fixed double array with uninitialized values. Returns
834  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
835  // Please note this does not perform a garbage collection.
837  int length,
838  PretenureFlag pretenure = NOT_TENURED);
839 
840  // Allocates a fixed double array with hole values. Returns
841  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
842  // Please note this does not perform a garbage collection.
844  int length,
845  PretenureFlag pretenure = NOT_TENURED);
846 
847  // AllocateHashTable is identical to AllocateFixedArray except
848  // that the resulting object has hash_table_map as map.
849  MUST_USE_RESULT MaybeObject* AllocateHashTable(
850  int length, PretenureFlag pretenure = NOT_TENURED);
851 
852  // Allocate a native (but otherwise uninitialized) context.
853  MUST_USE_RESULT MaybeObject* AllocateNativeContext();
854 
855  // Allocate a global context.
856  MUST_USE_RESULT MaybeObject* AllocateGlobalContext(JSFunction* function,
857  ScopeInfo* scope_info);
858 
859  // Allocate a module context.
860  MUST_USE_RESULT MaybeObject* AllocateModuleContext(ScopeInfo* scope_info);
861 
862  // Allocate a function context.
863  MUST_USE_RESULT MaybeObject* AllocateFunctionContext(int length,
864  JSFunction* function);
865 
866  // Allocate a catch context.
867  MUST_USE_RESULT MaybeObject* AllocateCatchContext(JSFunction* function,
868  Context* previous,
869  String* name,
870  Object* thrown_object);
871  // Allocate a 'with' context.
872  MUST_USE_RESULT MaybeObject* AllocateWithContext(JSFunction* function,
873  Context* previous,
874  JSObject* extension);
875 
876  // Allocate a block context.
877  MUST_USE_RESULT MaybeObject* AllocateBlockContext(JSFunction* function,
878  Context* previous,
879  ScopeInfo* info);
880 
881  // Allocates a new utility object in the old generation.
882  MUST_USE_RESULT MaybeObject* AllocateStruct(InstanceType type);
883 
884  // Allocates a function initialized with a shared part.
885  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
886  // failed.
887  // Please note this does not perform a garbage collection.
888  MUST_USE_RESULT MaybeObject* AllocateFunction(
889  Map* function_map,
890  SharedFunctionInfo* shared,
891  Object* prototype,
892  PretenureFlag pretenure = TENURED);
893 
894  // Arguments object size.
895  static const int kArgumentsObjectSize =
897  // Strict mode arguments has no callee so it is smaller.
898  static const int kArgumentsObjectSizeStrict =
 900  // Indices for direct access into argument objects.
901  static const int kArgumentsLengthIndex = 0;
902  // callee is only valid in non-strict mode.
903  static const int kArgumentsCalleeIndex = 1;
904 
905  // Allocates an arguments object - optionally with an elements array.
906  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
907  // failed.
908  // Please note this does not perform a garbage collection.
910  Object* callee, int length);
911 
912  // Same as NewNumberFromDouble, but may return a preallocated/immutable
913  // number object (e.g., minus_zero_value_, nan_value_)
914  MUST_USE_RESULT MaybeObject* NumberFromDouble(
915  double value, PretenureFlag pretenure = NOT_TENURED);
916 
 917  // Allocates a HeapNumber from value.
918  MUST_USE_RESULT MaybeObject* AllocateHeapNumber(
919  double value,
920  PretenureFlag pretenure);
921  // pretenure = NOT_TENURED
922  MUST_USE_RESULT MaybeObject* AllocateHeapNumber(double value);
923 
924  // Converts an int into either a Smi or a HeapNumber object.
925  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
926  // failed.
927  // Please note this does not perform a garbage collection.
928  MUST_USE_RESULT inline MaybeObject* NumberFromInt32(
929  int32_t value, PretenureFlag pretenure = NOT_TENURED);
930 
931  // Converts an int into either a Smi or a HeapNumber object.
932  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
933  // failed.
934  // Please note this does not perform a garbage collection.
935  MUST_USE_RESULT inline MaybeObject* NumberFromUint32(
936  uint32_t value, PretenureFlag pretenure = NOT_TENURED);
937 
938  // Allocates a new foreign object.
939  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
940  // failed.
941  // Please note this does not perform a garbage collection.
942  MUST_USE_RESULT MaybeObject* AllocateForeign(
943  Address address, PretenureFlag pretenure = NOT_TENURED);
944 
945  // Allocates a new SharedFunctionInfo object.
946  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
947  // failed.
948  // Please note this does not perform a garbage collection.
950 
951  // Allocates a new JSMessageObject object.
952  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
953  // failed.
954  // Please note that this does not perform a garbage collection.
956  String* type,
957  JSArray* arguments,
958  int start_position,
959  int end_position,
960  Object* script,
961  Object* stack_trace,
962  Object* stack_frames);
963 
964  // Allocates a new cons string object.
965  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
966  // failed.
967  // Please note this does not perform a garbage collection.
968  MUST_USE_RESULT MaybeObject* AllocateConsString(String* first,
969  String* second);
970 
971  // Allocates a new sub string object which is a substring of an underlying
972  // string buffer stretching from the index start (inclusive) to the index
973  // end (exclusive).
974  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
975  // failed.
976  // Please note this does not perform a garbage collection.
977  MUST_USE_RESULT MaybeObject* AllocateSubString(
978  String* buffer,
979  int start,
980  int end,
981  PretenureFlag pretenure = NOT_TENURED);
982 
983  // Allocate a new external string object, which is backed by a string
984  // resource that resides outside the V8 heap.
985  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
986  // failed.
987  // Please note this does not perform a garbage collection.
989  const ExternalAsciiString::Resource* resource);
991  const ExternalTwoByteString::Resource* resource);
992 
993  // Finalizes an external string by deleting the associated external
994  // data and clearing the resource pointer.
995  inline void FinalizeExternalString(String* string);
996 
997  // Allocates an uninitialized object. The memory is non-executable if the
998  // hardware and OS allow.
999  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
1000  // failed.
1001  // Please note this function does not perform a garbage collection.
1002  MUST_USE_RESULT inline MaybeObject* AllocateRaw(int size_in_bytes,
1003  AllocationSpace space,
1004  AllocationSpace retry_space);
1005 
1006  // Initialize a filler object to keep the ability to iterate over the heap
1007  // when shortening objects.
1008  void CreateFillerObjectAt(Address addr, int size);
1009 
1010  // Makes a new native code object
1011  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
1012  // failed. On success, the pointer to the Code object is stored in the
1013  // self_reference. This allows generated code to reference its own Code
1014  // object by containing this pointer.
1015  // Please note this function does not perform a garbage collection.
1016  MUST_USE_RESULT MaybeObject* CreateCode(const CodeDesc& desc,
1018  Handle<Object> self_reference,
1019  bool immovable = false);
1020 
1021  MUST_USE_RESULT MaybeObject* CopyCode(Code* code);
1022 
1023  // Copy the code and scope info part of the code object, but insert
1024  // the provided data as the relocation information.
1025  MUST_USE_RESULT MaybeObject* CopyCode(Code* code, Vector<byte> reloc_info);
1026 
1027  // Finds the symbol for string in the symbol table.
1028  // If not found, a new symbol is added to the table and returned.
1029  // Returns Failure::RetryAfterGC(requested_bytes, space) if allocation
1030  // failed.
1031  // Please note this function does not perform a garbage collection.
1035  MUST_USE_RESULT MaybeObject* LookupAsciiSymbol(const char* str) {
1036  return LookupSymbol(CStrVector(str));
1037  }
1038  MUST_USE_RESULT MaybeObject* LookupSymbol(String* str);
1040  int from,
1041  int length);
1042 
1043  bool LookupSymbolIfExists(String* str, String** symbol);
1044  bool LookupTwoCharsSymbolIfExists(String* str, String** symbol);
1045 
1046  // Compute the matching symbol map for a string if possible.
1047  // NULL is returned if string is in new space or not flattened.
1048  Map* SymbolMapForString(String* str);
1049 
1050  // Tries to flatten a string before compare operation.
1051  //
1052  // Returns a failure in case it was decided that flattening was
1053  // necessary and failed. Note, if flattening is not necessary the
 1054  // string might stay non-flat even when no failure is returned.
1055  //
1056  // Please note this function does not perform a garbage collection.
1057  MUST_USE_RESULT inline MaybeObject* PrepareForCompare(String* str);
1058 
1059  // Converts the given boolean condition to JavaScript boolean value.
1060  inline Object* ToBoolean(bool condition);
1061 
1062  // Code that should be run before and after each GC. Includes some
1063  // reporting/verification activities when compiled with DEBUG set.
1066 
1067  // Performs garbage collection operation.
1068  // Returns whether there is a chance that another major GC could
1069  // collect more garbage.
1070  bool CollectGarbage(AllocationSpace space,
1071  GarbageCollector collector,
1072  const char* gc_reason,
1073  const char* collector_reason);
1074 
1075  // Performs garbage collection operation.
1076  // Returns whether there is a chance that another major GC could
1077  // collect more garbage.
1078  inline bool CollectGarbage(AllocationSpace space,
1079  const char* gc_reason = NULL);
1080 
1081  static const int kNoGCFlags = 0;
1082  static const int kSweepPreciselyMask = 1;
1083  static const int kReduceMemoryFootprintMask = 2;
1084  static const int kAbortIncrementalMarkingMask = 4;
1085 
1086  // Making the heap iterable requires us to sweep precisely and abort any
1087  // incremental marking as well.
1088  static const int kMakeHeapIterableMask =
1090 
1091  // Performs a full garbage collection. If (flags & kMakeHeapIterableMask) is
1092  // non-zero, then the slower precise sweeper is used, which leaves the heap
1093  // in a state where we can iterate over the heap visiting all objects.
1094  void CollectAllGarbage(int flags, const char* gc_reason = NULL);
1095 
1096  // Last hope GC, should try to squeeze as much as possible.
1097  void CollectAllAvailableGarbage(const char* gc_reason = NULL);
1098 
1099  // Check whether the heap is currently iterable.
1100  bool IsHeapIterable();
1101 
1102  // Ensure that we have swept all spaces in such a way that we can iterate
1103  // over all objects. May cause a GC.
1104  void EnsureHeapIsIterable();
1105 
1106  // Notify the heap that a context has been disposed.
1108  flush_monomorphic_ics_ = true;
1109  return ++contexts_disposed_;
1110  }
1111 
1112  // Utility to invoke the scavenger. This is needed in test code to
1113  // ensure correct callback for weak global handles.
1114  void PerformScavenge();
1115 
1117  scan_on_scavenge_pages_++;
1118  if (FLAG_gc_verbose) {
1119  PrintF("Scan-on-scavenge pages: %d\n", scan_on_scavenge_pages_);
1120  }
1121  }
1122 
1124  scan_on_scavenge_pages_--;
1125  if (FLAG_gc_verbose) {
1126  PrintF("Scan-on-scavenge pages: %d\n", scan_on_scavenge_pages_);
1127  }
1128  }
1129 
1130  PromotionQueue* promotion_queue() { return &promotion_queue_; }
1131 
1132 #ifdef DEBUG
1133  // Utility used with flag gc-greedy.
1134  void GarbageCollectionGreedyCheck();
1135 #endif
1136 
1137  void AddGCPrologueCallback(
1138  GCPrologueCallback callback, GCType gc_type_filter);
1140 
1141  void AddGCEpilogueCallback(
1142  GCEpilogueCallback callback, GCType gc_type_filter);
1144 
1146  ASSERT((callback == NULL) ^ (global_gc_prologue_callback_ == NULL));
1147  global_gc_prologue_callback_ = callback;
1148  }
1150  ASSERT((callback == NULL) ^ (global_gc_epilogue_callback_ == NULL));
1151  global_gc_epilogue_callback_ = callback;
1152  }
1153 
1154  // Heap root getters. We have versions with and without type::cast() here.
1155  // You can't use type::cast during GC because the assert fails.
1156  // TODO(1490): Try removing the unchecked accessors, now that GC marking does
1157  // not corrupt the map.
1158 #define ROOT_ACCESSOR(type, name, camel_name) \
1159  type* name() { \
1160  return type::cast(roots_[k##camel_name##RootIndex]); \
1161  } \
1162  type* raw_unchecked_##name() { \
1163  return reinterpret_cast<type*>(roots_[k##camel_name##RootIndex]); \
1164  }
1166 #undef ROOT_ACCESSOR
1167 
1168 // Utility type maps
1169 #define STRUCT_MAP_ACCESSOR(NAME, Name, name) \
1170  Map* name##_map() { \
1171  return Map::cast(roots_[k##Name##MapRootIndex]); \
1172  }
1174 #undef STRUCT_MAP_ACCESSOR
1175 
1176 #define SYMBOL_ACCESSOR(name, str) String* name() { \
1177  return String::cast(roots_[k##name##RootIndex]); \
1178  }
1180 #undef SYMBOL_ACCESSOR
1181 
1182  // The hidden_symbol is special because it is the empty string, but does
1183  // not match the empty string.
1184  String* hidden_symbol() { return hidden_symbol_; }
1185 
1187  native_contexts_list_ = object;
1188  }
1189  Object* native_contexts_list() { return native_contexts_list_; }
1190 
1191  // Number of mark-sweeps.
1192  unsigned int ms_count() { return ms_count_; }
1193 
1194  // Iterates over all roots in the heap.
1195  void IterateRoots(ObjectVisitor* v, VisitMode mode);
1196  // Iterates over all strong roots in the heap.
1197  void IterateStrongRoots(ObjectVisitor* v, VisitMode mode);
1198  // Iterates over all the other roots in the heap.
1199  void IterateWeakRoots(ObjectVisitor* v, VisitMode mode);
1200 
1201  // Iterate pointers to from semispace of new space found in memory interval
1202  // from start to end.
1204  Address end,
1205  ObjectSlotCallback callback);
1206 
1207  // Returns whether the object resides in new space.
1208  inline bool InNewSpace(Object* object);
1209  inline bool InNewSpace(Address addr);
1210  inline bool InNewSpacePage(Address addr);
1211  inline bool InFromSpace(Object* object);
1212  inline bool InToSpace(Object* object);
1213 
 1214  // Checks whether an address/object is in the heap (including auxiliary
 1215  // area and unused area).
1216  bool Contains(Address addr);
1217  bool Contains(HeapObject* value);
1218 
 1219  // Checks whether an address/object is in a space.
1220  // Currently used by tests, serialization and heap verification only.
1221  bool InSpace(Address addr, AllocationSpace space);
1222  bool InSpace(HeapObject* value, AllocationSpace space);
1223 
1224  // Finds out which space an object should get promoted to based on its type.
1225  inline OldSpace* TargetSpace(HeapObject* object);
1227 
1228  // Sets the stub_cache_ (only used when expanding the dictionary).
1230  roots_[kCodeStubsRootIndex] = value;
1231  }
1232 
1233  // Support for computing object sizes for old objects during GCs. Returns
1234  // a function that is guaranteed to be safe for computing object sizes in
1235  // the current GC phase.
1237  return gc_safe_size_of_old_object_;
1238  }
1239 
1240  // Sets the non_monomorphic_cache_ (only used when expanding the dictionary).
1242  roots_[kNonMonomorphicCacheRootIndex] = value;
1243  }
1244 
1246  roots_[kEmptyScriptRootIndex] = script;
1247  }
1248 
1250  roots_[kStoreBufferTopRootIndex] = reinterpret_cast<Smi*>(top);
1251  }
1252 
1253  // Update the next script id.
1254  inline void SetLastScriptId(Object* last_script_id);
1255 
1256  // Generated code can embed this address to get access to the roots.
1257  Object** roots_array_start() { return roots_; }
1258 
1260  return reinterpret_cast<Address*>(&roots_[kStoreBufferTopRootIndex]);
1261  }
1262 
1263  // Get address of native contexts list for serialization support.
1265  return &native_contexts_list_;
1266  }
1267 
1268 #ifdef VERIFY_HEAP
1269  // Verify the heap is in its normal state before or after a GC.
1270  void Verify();
1271 #endif
1272 
1273 #ifdef DEBUG
1274  void Print();
1275  void PrintHandles();
1276 
1277  void OldPointerSpaceCheckStoreBuffer();
1278  void MapSpaceCheckStoreBuffer();
1279  void LargeObjectSpaceCheckStoreBuffer();
1280 
1281  // Report heap statistics.
1282  void ReportHeapStatistics(const char* title);
1283  void ReportCodeStatistics(const char* title);
1284 #endif
1285 
1286  // Zapping is needed for verify heap, and always done in debug builds.
1287  static inline bool ShouldZapGarbage() {
1288 #ifdef DEBUG
1289  return true;
1290 #else
1291 #ifdef VERIFY_HEAP
1292  return FLAG_verify_heap;
1293 #else
1294  return false;
1295 #endif
1296 #endif
1297  }
1298 
1299  // Fill in bogus values in from space
1300  void ZapFromSpace();
1301 
1302  // Print short heap statistics.
1303  void PrintShortHeapStatistics();
1304 
1305  // Makes a new symbol object
1306  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
1307  // failed.
1308  // Please note this function does not perform a garbage collection.
1309  MUST_USE_RESULT MaybeObject* CreateSymbol(
1310  const char* str, int length, int hash);
1311  MUST_USE_RESULT MaybeObject* CreateSymbol(String* str);
1312 
1313  // Write barrier support for address[offset] = o.
1314  inline void RecordWrite(Address address, int offset);
1315 
1316  // Write barrier support for address[start : start + len[ = o.
1317  inline void RecordWrites(Address address, int start, int len);
1318 
1319  // Given an address occupied by a live code object, return that object.
1321 
1322  // Invoke Shrink on shrinkable spaces.
1323  void Shrink();
1324 
1326  inline HeapState gc_state() { return gc_state_; }
1327 
1328  inline bool IsInGCPostProcessing() { return gc_post_processing_depth_ > 0; }
1329 
1330 #ifdef DEBUG
1331  bool IsAllocationAllowed() { return allocation_allowed_; }
1332  inline bool allow_allocation(bool enable);
1333 
1334  bool disallow_allocation_failure() {
1335  return disallow_allocation_failure_;
1336  }
1337 
1338  void TracePathToObjectFrom(Object* target, Object* root);
1339  void TracePathToObject(Object* target);
1340  void TracePathToGlobal();
1341 #endif
1342 
1343  // Callback function passed to Heap::Iterate etc. Copies an object if
1344  // necessary, the object might be promoted to an old space. The caller must
1345  // ensure the precondition that the object is (a) a heap object and (b) in
1346  // the heap's from space.
1347  static inline void ScavengePointer(HeapObject** p);
1348  static inline void ScavengeObject(HeapObject** p, HeapObject* object);
1349 
1350  // Commits from space if it is uncommitted.
1352 
1353  // Support for partial snapshots. After calling this we have a linear
1354  // space to write objects in each space.
1355  void ReserveSpace(int *sizes, Address* addresses);
1356 
1357  //
1358  // Support for the API.
1359  //
1360 
1361  bool CreateApiObjects();
1362 
 1363  // Attempts to find the number in a small cache. If we find it, return
1364  // the string representation of the number. Otherwise return undefined.
1366 
1367  // Update the cache with a new number-string pair.
1368  void SetNumberStringCache(Object* number, String* str);
1369 
1370  // Adjusts the amount of registered external memory.
1371  // Returns the adjusted value.
1372  inline intptr_t AdjustAmountOfExternalAllocatedMemory(
1373  intptr_t change_in_bytes);
1374 
1375  // Allocate uninitialized fixed array.
1376  MUST_USE_RESULT MaybeObject* AllocateRawFixedArray(int length);
1377  MUST_USE_RESULT MaybeObject* AllocateRawFixedArray(int length,
1378  PretenureFlag pretenure);
1379 
1380  inline intptr_t PromotedTotalSize() {
1381  return PromotedSpaceSizeOfObjects() + PromotedExternalMemorySize();
1382  }
1383 
1384  // True if we have reached the allocation limit in the old generation that
1385  // should force the next GC (caused normally) to be a full one.
1387  return PromotedTotalSize() > old_gen_promotion_limit_;
1388  }
1389 
1390  inline intptr_t OldGenerationSpaceAvailable() {
1391  return old_gen_allocation_limit_ - PromotedTotalSize();
1392  }
1393 
1395  return max_old_generation_size_ - PromotedTotalSize();
1396  }
1397 
1398  static const intptr_t kMinimumPromotionLimit = 5 * Page::kPageSize;
1399  static const intptr_t kMinimumAllocationLimit =
1400  8 * (Page::kPageSize > MB ? Page::kPageSize : MB);
1401 
1402  intptr_t OldGenPromotionLimit(intptr_t old_gen_size) {
1403  const int divisor = FLAG_stress_compaction ? 10 : 3;
1404  intptr_t limit =
1405  Max(old_gen_size + old_gen_size / divisor, kMinimumPromotionLimit);
1406  limit += new_space_.Capacity();
1407  limit *= old_gen_limit_factor_;
1408  intptr_t halfway_to_the_max = (old_gen_size + max_old_generation_size_) / 2;
1409  return Min(limit, halfway_to_the_max);
1410  }
1411 
1412  intptr_t OldGenAllocationLimit(intptr_t old_gen_size) {
1413  const int divisor = FLAG_stress_compaction ? 8 : 2;
1414  intptr_t limit =
1415  Max(old_gen_size + old_gen_size / divisor, kMinimumAllocationLimit);
1416  limit += new_space_.Capacity();
1417  limit *= old_gen_limit_factor_;
1418  intptr_t halfway_to_the_max = (old_gen_size + max_old_generation_size_) / 2;
1419  return Min(limit, halfway_to_the_max);
1420  }
1421 
1422  // Implements the corresponding V8 API function.
1423  bool IdleNotification(int hint);
1424 
1425  // Declare all the root indices.
1427 #define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
1429 #undef ROOT_INDEX_DECLARATION
1430 
1431 #define SYMBOL_INDEX_DECLARATION(name, str) k##name##RootIndex,
1433 #undef SYMBOL_DECLARATION
1434 
1435  // Utility type maps
1436 #define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
1438 #undef DECLARE_STRUCT_MAP
1439 
1443  };
1444 
1445  STATIC_CHECK(kUndefinedValueRootIndex == Internals::kUndefinedValueRootIndex);
1446  STATIC_CHECK(kNullValueRootIndex == Internals::kNullValueRootIndex);
1447  STATIC_CHECK(kTrueValueRootIndex == Internals::kTrueValueRootIndex);
1448  STATIC_CHECK(kFalseValueRootIndex == Internals::kFalseValueRootIndex);
1449  STATIC_CHECK(kempty_symbolRootIndex == Internals::kEmptySymbolRootIndex);
1450 
1451  MUST_USE_RESULT MaybeObject* NumberToString(
1452  Object* number, bool check_number_string_cache = true);
1453  MUST_USE_RESULT MaybeObject* Uint32ToString(
1454  uint32_t value, bool check_number_string_cache = true);
1455 
1458  ExternalArrayType array_type);
1459 
1460  void RecordStats(HeapStats* stats, bool take_snapshot = false);
1461 
1462  // Copy block of memory from src to dst. Size of block should be aligned
1463  // by pointer size.
1464  static inline void CopyBlock(Address dst, Address src, int byte_size);
1465 
1466  // Optimized version of memmove for blocks with pointer size aligned sizes and
1467  // pointer size aligned addresses.
1468  static inline void MoveBlock(Address dst, Address src, int byte_size);
1469 
1470  // Check new space expansion criteria and expand semispaces if it was hit.
1472 
1473  inline void IncrementYoungSurvivorsCounter(int survived) {
1474  ASSERT(survived >= 0);
1475  young_survivors_after_last_gc_ = survived;
1476  survived_since_last_expansion_ += survived;
1477  }
1478 
1479  inline bool NextGCIsLikelyToBeFull() {
1480  if (FLAG_gc_global) return true;
1481 
1482  if (FLAG_stress_compaction && (gc_count_ & 1) != 0) return true;
1483 
1484  intptr_t total_promoted = PromotedTotalSize();
1485 
1486  intptr_t adjusted_promotion_limit =
1487  old_gen_promotion_limit_ - new_space_.Capacity();
1488 
1489  if (total_promoted >= adjusted_promotion_limit) return true;
1490 
1491  intptr_t adjusted_allocation_limit =
1492  old_gen_allocation_limit_ - new_space_.Capacity() / 5;
1493 
1494  if (PromotedSpaceSizeOfObjects() >= adjusted_allocation_limit) return true;
1495 
1496  return false;
1497  }
1498 
1499 
1501  ExternalStringTableUpdaterCallback updater_func);
1502 
1504  ExternalStringTableUpdaterCallback updater_func);
1505 
1506  void ProcessWeakReferences(WeakObjectRetainer* retainer);
1507 
1509 
1510  // Helper function that governs the promotion policy from new space to
1511  // old. If the object's old address lies below the new space's age
1512  // mark or if we've already filled the bottom 1/16th of the to space,
1513  // we try to promote this object.
1514  inline bool ShouldBePromoted(Address old_address, int object_size);
1515 
1516  int MaxObjectSizeInNewSpace() { return kMaxObjectSizeInNewSpace; }
1517 
1519 
1520  void ClearNormalizedMapCaches();
1521 
1522  GCTracer* tracer() { return tracer_; }
1523 
1524  // Returns the size of objects residing in non new spaces.
1525  intptr_t PromotedSpaceSizeOfObjects();
1526 
1527  double total_regexp_code_generated() { return total_regexp_code_generated_; }
1529  total_regexp_code_generated_ += size;
1530  }
1531 
1532  // Returns maximum GC pause.
1533  int get_max_gc_pause() { return max_gc_pause_; }
1534 
1535  // Returns maximum size of objects alive after GC.
1536  intptr_t get_max_alive_after_gc() { return max_alive_after_gc_; }
1537 
1538  // Returns minimal interval between two subsequent collections.
1539  int get_min_in_mutator() { return min_in_mutator_; }
1540 
1542  return &mark_compact_collector_;
1543  }
1544 
1546  return &store_buffer_;
1547  }
1548 
1550  return &marking_;
1551  }
1552 
1554  return &incremental_marking_;
1555  }
1556 
1558  return old_data_space()->IsSweepingComplete() &&
1560  }
1561 
1562  bool AdvanceSweepers(int step_size) {
1563  bool sweeping_complete = old_data_space()->AdvanceSweeper(step_size);
1564  sweeping_complete &= old_pointer_space()->AdvanceSweeper(step_size);
1565  return sweeping_complete;
1566  }
1567 
1569  return &external_string_table_;
1570  }
1571 
1572  // Returns the current sweep generation.
1574  return sweep_generation_;
1575  }
1576 
1577  inline Isolate* isolate();
1578 
1580  if (global_gc_prologue_callback_ != NULL) global_gc_prologue_callback_();
1581  }
1582 
1584  if (global_gc_epilogue_callback_ != NULL) global_gc_epilogue_callback_();
1585  }
1586 
1588 
 // Scavenges |obj| by dispatching to the visitor registered for |map| in
 // the scavenging visitors table, passing along the slot holding the
 // reference.
 1589  inline void DoScavengeObject(Map* map, HeapObject** slot, HeapObject* obj) {
 1590  scavenging_visitors_table_.GetVisitor(map)(map, slot, obj);
 1591  }
1592 
1593  void QueueMemoryChunkForFree(MemoryChunk* chunk);
1594  void FreeQueuedChunks();
1595 
1596  // Completely clear the Instanceof cache (to stop it keeping objects alive
1597  // around a GC).
1598  inline void CompletelyClearInstanceofCache();
1599 
1600  // The roots that have an index less than this are always in old space.
1601  static const int kOldSpaceRoots = 0x20;
1602 
1603  uint32_t HashSeed() {
1604  uint32_t seed = static_cast<uint32_t>(hash_seed()->value());
1605  ASSERT(FLAG_randomize_hashes || seed == 0);
1606  return seed;
1607  }
1608 
 // One-shot setters for the deoptimization PC offsets stored in the
 // roots: each ASSERT checks the current value is still Smi 0
 // (presumably the initial value — confirm against root setup), so every
 // offset may be recorded at most once.
 1609  void SetArgumentsAdaptorDeoptPCOffset(int pc_offset) {
 1610  ASSERT(arguments_adaptor_deopt_pc_offset() == Smi::FromInt(0));
 1611  set_arguments_adaptor_deopt_pc_offset(Smi::FromInt(pc_offset));
 1612  }
 1613 
 1614  void SetConstructStubDeoptPCOffset(int pc_offset) {
 1615  ASSERT(construct_stub_deopt_pc_offset() == Smi::FromInt(0));
 1616  set_construct_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
 1617  }
 1618 
 1619  void SetGetterStubDeoptPCOffset(int pc_offset) {
 1620  ASSERT(getter_stub_deopt_pc_offset() == Smi::FromInt(0));
 1621  set_getter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
 1622  }
 1623 
 1624  void SetSetterStubDeoptPCOffset(int pc_offset) {
 1625  ASSERT(setter_stub_deopt_pc_offset() == Smi::FromInt(0));
 1626  set_setter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
 1627  }
1628 
1629  // For post mortem debugging.
1630  void RememberUnmappedPage(Address page, bool compacted);
1631 
1632  // Global inline caching age: it is incremented on some GCs after context
1633  // disposal. We use it to flush inline caches.
1635  return global_ic_age_;
1636  }
1637 
1639  global_ic_age_ = (global_ic_age_ + 1) & SharedFunctionInfo::ICAgeBits::kMax;
1640  }
1641 
1642  bool flush_monomorphic_ics() { return flush_monomorphic_ics_; }
1643 
1645  return amount_of_external_allocated_memory_;
1646  }
1647 
1648  // ObjectStats are kept in two arrays, counts and sizes. Related stats are
1649  // stored in a contiguous linear buffer. Stats groups are stored one after
1650  // another.
1651  enum {
1657  };
1658 
1659  void RecordObjectStats(InstanceType type, int sub_type, size_t size) {
1660  ASSERT(type <= LAST_TYPE);
1661  if (sub_type < 0) {
1662  object_counts_[type]++;
1663  object_sizes_[type] += size;
1664  } else {
1665  if (type == CODE_TYPE) {
1666  ASSERT(sub_type <= Code::LAST_CODE_KIND);
1667  object_counts_[FIRST_CODE_KIND_SUB_TYPE + sub_type]++;
1668  object_sizes_[FIRST_CODE_KIND_SUB_TYPE + sub_type] += size;
1669  } else if (type == FIXED_ARRAY_TYPE) {
1670  ASSERT(sub_type <= LAST_FIXED_ARRAY_SUB_TYPE);
1671  object_counts_[FIRST_FIXED_ARRAY_SUB_TYPE + sub_type]++;
1672  object_sizes_[FIRST_FIXED_ARRAY_SUB_TYPE + sub_type] += size;
1673  }
1674  }
1675  }
1676 
1677  void CheckpointObjectStats();
1678 
1679  // We don't use a ScopedLock here since we want to lock the heap
1680  // only when FLAG_parallel_recompilation is true.
1682  public:
1683  explicit RelocationLock(Heap* heap) : heap_(heap) {
1684  if (FLAG_parallel_recompilation) {
1685  heap_->relocation_mutex_->Lock();
1686  }
1687  }
1689  if (FLAG_parallel_recompilation) {
1690  heap_->relocation_mutex_->Unlock();
1691  }
1692  }
1693 
1694  private:
1695  Heap* heap_;
1696  };
1697 
1698  private:
1699  Heap();
1700 
1701  // This can be calculated directly from a pointer to the heap; however, it is
1702  // more expedient to get at the isolate directly from within Heap methods.
1703  Isolate* isolate_;
1704 
1705  Object* roots_[kRootListLength];
1706 
1707  intptr_t code_range_size_;
1708  int reserved_semispace_size_;
1709  int max_semispace_size_;
1710  int initial_semispace_size_;
1711  intptr_t max_old_generation_size_;
1712  intptr_t max_executable_size_;
1713 
1714  // For keeping track of how much data has survived
1715  // scavenge since last new space expansion.
1716  int survived_since_last_expansion_;
1717 
1718  // For keeping track on when to flush RegExp code.
1719  int sweep_generation_;
1720 
1721  int always_allocate_scope_depth_;
1722  int linear_allocation_scope_depth_;
1723 
1724  // For keeping track of context disposals.
1725  int contexts_disposed_;
1726 
1727  int global_ic_age_;
1728 
1729  bool flush_monomorphic_ics_;
1730 
1731  int scan_on_scavenge_pages_;
1732 
1733 #if defined(V8_TARGET_ARCH_X64)
1734  static const int kMaxObjectSizeInNewSpace = 1024*KB;
1735 #else
1736  static const int kMaxObjectSizeInNewSpace = 512*KB;
1737 #endif
1738 
1739  NewSpace new_space_;
1740  OldSpace* old_pointer_space_;
1741  OldSpace* old_data_space_;
1742  OldSpace* code_space_;
1743  MapSpace* map_space_;
1744  CellSpace* cell_space_;
1745  LargeObjectSpace* lo_space_;
1746  HeapState gc_state_;
1747  int gc_post_processing_depth_;
1748 
1749  // Returns the amount of external memory registered since last global gc.
1750  intptr_t PromotedExternalMemorySize();
1751 
1752  unsigned int ms_count_; // how many mark-sweep collections happened
1753  unsigned int gc_count_; // how many gc happened
1754 
1755  // For post mortem debugging.
1756  static const int kRememberedUnmappedPages = 128;
1757  int remembered_unmapped_pages_index_;
1758  Address remembered_unmapped_pages_[kRememberedUnmappedPages];
1759 
1760  // Total length of the strings we failed to flatten since the last GC.
1761  int unflattened_strings_length_;
1762 
1763 #define ROOT_ACCESSOR(type, name, camel_name) \
1764  inline void set_##name(type* value) { \
1765  /* The deserializer makes use of the fact that these common roots are */ \
1766  /* never in new space and never on a page that is being compacted. */ \
1767  ASSERT(k##camel_name##RootIndex >= kOldSpaceRoots || !InNewSpace(value)); \
1768  roots_[k##camel_name##RootIndex] = value; \
1769  }
1771 #undef ROOT_ACCESSOR
1772 
1773 #ifdef DEBUG
1774  bool allocation_allowed_;
1775 
1776  // If the --gc-interval flag is set to a positive value, this
1777  // variable holds the value indicating the number of allocations
1778  // remain until the next failure and garbage collection.
1779  int allocation_timeout_;
1780 
1781  // Do we expect to be able to handle allocation failure at this
1782  // time?
1783  bool disallow_allocation_failure_;
1784 
1785  HeapDebugUtils* debug_utils_;
1786 #endif // DEBUG
1787 
1788  // Indicates that the new space should be kept small due to high promotion
1789  // rates caused by the mutator allocating a lot of long-lived objects.
1790  bool new_space_high_promotion_mode_active_;
1791 
1792  // Limit that triggers a global GC on the next (normally caused) GC. This
1793  // is checked when we have already decided to do a GC to help determine
1794  // which collector to invoke.
1795  intptr_t old_gen_promotion_limit_;
1796 
1797  // Limit that triggers a global GC as soon as is reasonable. This is
1798  // checked before expanding a paged space in the old generation and on
1799  // every allocation in large object space.
1800  intptr_t old_gen_allocation_limit_;
1801 
1802  // Sometimes the heuristics dictate that those limits are increased. This
1803  // variable records that fact.
1804  int old_gen_limit_factor_;
1805 
1806  // Used to adjust the limits that control the timing of the next GC.
1807  intptr_t size_of_old_gen_at_last_old_space_gc_;
1808 
1809  // Limit on the amount of externally allocated memory allowed
1810  // between global GCs. If reached a global GC is forced.
1811  intptr_t external_allocation_limit_;
1812 
1813  // The amount of external memory registered through the API kept alive
1814  // by global handles
1815  intptr_t amount_of_external_allocated_memory_;
1816 
1817  // Caches the amount of external memory registered at the last global gc.
1818  intptr_t amount_of_external_allocated_memory_at_last_global_gc_;
1819 
1820  // Indicates that an allocation has failed in the old generation since the
1821  // last GC.
1822  int old_gen_exhausted_;
1823 
1824  Object* native_contexts_list_;
1825 
1826  StoreBufferRebuilder store_buffer_rebuilder_;
1827 
1828  struct StringTypeTable {
1829  InstanceType type;
1830  int size;
1831  RootListIndex index;
1832  };
1833 
1834  struct ConstantSymbolTable {
1835  const char* contents;
1836  RootListIndex index;
1837  };
1838 
1839  struct StructTable {
1840  InstanceType type;
1841  int size;
1842  RootListIndex index;
1843  };
1844 
1845  static const StringTypeTable string_type_table[];
1846  static const ConstantSymbolTable constant_symbol_table[];
1847  static const StructTable struct_table[];
1848 
1849  // The special hidden symbol which is an empty string, but does not match
1850  // any string when looked up in properties.
1851  String* hidden_symbol_;
1852 
1853  // GC callback function, called before and after mark-compact GC.
1854  // Allocations in the callback function are disallowed.
1855  struct GCPrologueCallbackPair {
1856  GCPrologueCallbackPair(GCPrologueCallback callback, GCType gc_type)
1857  : callback(callback), gc_type(gc_type) {
1858  }
1859  bool operator==(const GCPrologueCallbackPair& pair) const {
1860  return pair.callback == callback;
1861  }
1862  GCPrologueCallback callback;
1863  GCType gc_type;
1864  };
1865  List<GCPrologueCallbackPair> gc_prologue_callbacks_;
1866 
1867  struct GCEpilogueCallbackPair {
1868  GCEpilogueCallbackPair(GCEpilogueCallback callback, GCType gc_type)
1869  : callback(callback), gc_type(gc_type) {
1870  }
1871  bool operator==(const GCEpilogueCallbackPair& pair) const {
1872  return pair.callback == callback;
1873  }
1874  GCEpilogueCallback callback;
1875  GCType gc_type;
1876  };
1877  List<GCEpilogueCallbackPair> gc_epilogue_callbacks_;
1878 
1879  GCCallback global_gc_prologue_callback_;
1880  GCCallback global_gc_epilogue_callback_;
1881 
1882  // Support for computing object sizes during GC.
1883  HeapObjectCallback gc_safe_size_of_old_object_;
1884  static int GcSafeSizeOfOldObject(HeapObject* object);
1885 
1886  // Update the GC state. Called from the mark-compact collector.
1887  void MarkMapPointersAsEncoded(bool encoded) {
1888  ASSERT(!encoded);
1889  gc_safe_size_of_old_object_ = &GcSafeSizeOfOldObject;
1890  }
1891 
1892  // Checks whether a global GC is necessary
1893  GarbageCollector SelectGarbageCollector(AllocationSpace space,
1894  const char** reason);
1895 
1896  // Performs garbage collection
1897  // Returns whether there is a chance another major GC could
1898  // collect more garbage.
1899  bool PerformGarbageCollection(GarbageCollector collector,
1900  GCTracer* tracer);
1901 
1902 
1903  inline void UpdateOldSpaceLimits();
1904 
1905  // Allocate an uninitialized object in map space. The behavior is identical
1906  // to Heap::AllocateRaw(size_in_bytes, MAP_SPACE), except that (a) it doesn't
1907  // have to test the allocation space argument and (b) can reduce code size
1908  // (since both AllocateRaw and AllocateRawMap are inlined).
1909  MUST_USE_RESULT inline MaybeObject* AllocateRawMap();
1910 
1911  // Allocate an uninitialized object in the global property cell space.
1912  MUST_USE_RESULT inline MaybeObject* AllocateRawCell();
1913 
1914  // Initializes a JSObject based on its map.
1915  void InitializeJSObjectFromMap(JSObject* obj,
1916  FixedArray* properties,
1917  Map* map);
1918 
1919  bool CreateInitialMaps();
1920  bool CreateInitialObjects();
1921 
1922  // These five Create*EntryStub functions are here and forced to not be inlined
1923  // because of a gcc-4.4 bug that assigns wrong vtable entries.
1924  NO_INLINE(void CreateJSEntryStub());
1925  NO_INLINE(void CreateJSConstructEntryStub());
1926 
1927  void CreateFixedStubs();
1928 
1929  MaybeObject* CreateOddball(const char* to_string,
1930  Object* to_number,
1931  byte kind);
1932 
1933  // Allocate a JSArray with no elements
1934  MUST_USE_RESULT MaybeObject* AllocateJSArray(
1935  ElementsKind elements_kind,
1936  PretenureFlag pretenure = NOT_TENURED);
1937 
1938  // Allocate empty fixed array.
1939  MUST_USE_RESULT MaybeObject* AllocateEmptyFixedArray();
1940 
1941  // Allocate empty fixed double array.
1942  MUST_USE_RESULT MaybeObject* AllocateEmptyFixedDoubleArray();
1943 
1944  // Performs a minor collection in new generation.
1945  void Scavenge();
1946 
1947  static String* UpdateNewSpaceReferenceInExternalStringTableEntry(
1948  Heap* heap,
1949  Object** pointer);
1950 
1951  Address DoScavenge(ObjectVisitor* scavenge_visitor, Address new_space_front);
1952  static void ScavengeStoreBufferCallback(Heap* heap,
1953  MemoryChunk* page,
1954  StoreBufferEvent event);
1955 
1956  // Performs a major collection in the whole heap.
1957  void MarkCompact(GCTracer* tracer);
1958 
1959  // Code to be run before and after mark-compact.
1960  void MarkCompactPrologue();
1961 
1962  // Record statistics before and after garbage collection.
1963  void ReportStatisticsBeforeGC();
1964  void ReportStatisticsAfterGC();
1965 
1966  // Slow part of scavenge object.
1967  static void ScavengeObjectSlow(HeapObject** p, HeapObject* object);
1968 
1969  // Initializes a function with a shared part and prototype.
1970  // Note: this code was factored out of AllocateFunction such that
1971  // other parts of the VM could use it. Specifically, a function that creates
1972  // instances of type JS_FUNCTION_TYPE benefit from the use of this function.
1973  // Please note this does not perform a garbage collection.
1974  inline void InitializeFunction(
1975  JSFunction* function,
1976  SharedFunctionInfo* shared,
1977  Object* prototype);
1978 
1979  // Total RegExp code ever generated
1980  double total_regexp_code_generated_;
1981 
1982  GCTracer* tracer_;
1983 
1984 
1985  // Allocates a small number to string cache.
1986  MUST_USE_RESULT MaybeObject* AllocateInitialNumberStringCache();
1987  // Creates and installs the full-sized number string cache.
1988  void AllocateFullSizeNumberStringCache();
1989  // Get the length of the number to string cache based on the max semispace
1990  // size.
1991  int FullSizeNumberStringCacheLength();
1992  // Flush the number to string cache.
1993  void FlushNumberStringCache();
1994 
1995  void UpdateSurvivalRateTrend(int start_new_space_size);
1996 
1997  enum SurvivalRateTrend { INCREASING, STABLE, DECREASING, FLUCTUATING };
1998 
1999  static const int kYoungSurvivalRateHighThreshold = 90;
2000  static const int kYoungSurvivalRateLowThreshold = 10;
2001  static const int kYoungSurvivalRateAllowedDeviation = 15;
2002 
2003  int young_survivors_after_last_gc_;
2004  int high_survival_rate_period_length_;
2005  int low_survival_rate_period_length_;
2006  double survival_rate_;
2007  SurvivalRateTrend previous_survival_rate_trend_;
2008  SurvivalRateTrend survival_rate_trend_;
2009 
2010  void set_survival_rate_trend(SurvivalRateTrend survival_rate_trend) {
2011  ASSERT(survival_rate_trend != FLUCTUATING);
2012  previous_survival_rate_trend_ = survival_rate_trend_;
2013  survival_rate_trend_ = survival_rate_trend;
2014  }
2015 
2016  SurvivalRateTrend survival_rate_trend() {
2017  if (survival_rate_trend_ == STABLE) {
2018  return STABLE;
2019  } else if (previous_survival_rate_trend_ == STABLE) {
2020  return survival_rate_trend_;
2021  } else if (survival_rate_trend_ != previous_survival_rate_trend_) {
2022  return FLUCTUATING;
2023  } else {
2024  return survival_rate_trend_;
2025  }
2026  }
2027 
2028  bool IsStableOrIncreasingSurvivalTrend() {
2029  switch (survival_rate_trend()) {
2030  case STABLE:
2031  case INCREASING:
2032  return true;
2033  default:
2034  return false;
2035  }
2036  }
2037 
2038  bool IsStableOrDecreasingSurvivalTrend() {
2039  switch (survival_rate_trend()) {
2040  case STABLE:
2041  case DECREASING:
2042  return true;
2043  default:
2044  return false;
2045  }
2046  }
2047 
2048  bool IsIncreasingSurvivalTrend() {
2049  return survival_rate_trend() == INCREASING;
2050  }
2051 
2052  bool IsHighSurvivalRate() {
2053  return high_survival_rate_period_length_ > 0;
2054  }
2055 
2056  bool IsLowSurvivalRate() {
2057  return low_survival_rate_period_length_ > 0;
2058  }
2059 
2060  void SelectScavengingVisitorsTable();
2061 
2062  void StartIdleRound() {
2063  mark_sweeps_since_idle_round_started_ = 0;
2064  ms_count_at_last_idle_notification_ = ms_count_;
2065  }
2066 
2067  void FinishIdleRound() {
2068  mark_sweeps_since_idle_round_started_ = kMaxMarkSweepsInIdleRound;
2069  scavenges_since_last_idle_round_ = 0;
2070  }
2071 
2072  bool EnoughGarbageSinceLastIdleRound() {
2073  return (scavenges_since_last_idle_round_ >= kIdleScavengeThreshold);
2074  }
2075 
2076  // Estimates how many milliseconds a Mark-Sweep would take to complete.
2077  // In idle notification handler we assume that this function will return:
2078  // - a number less than 10 for small heaps, which are less than 8Mb.
2079  // - a number greater than 10 for large heaps, which are greater than 32Mb.
2080  int TimeMarkSweepWouldTakeInMs() {
2081  // Rough estimate of how many megabytes of heap can be processed in 1 ms.
2082  static const int kMbPerMs = 2;
2083 
2084  int heap_size_mb = static_cast<int>(SizeOfObjects() / MB);
2085  return heap_size_mb / kMbPerMs;
2086  }
2087 
2088  // Returns true if no more GC work is left.
2089  bool IdleGlobalGC();
2090 
2091  void AdvanceIdleIncrementalMarking(intptr_t step_size);
2092 
2093  void ClearObjectStats(bool clear_last_time_stats = false);
2094 
2095  static const int kInitialSymbolTableSize = 2048;
2096  static const int kInitialEvalCacheSize = 64;
2097  static const int kInitialNumberStringCacheSize = 256;
2098 
2099  // Object counts and used memory by InstanceType
2100  size_t object_counts_[OBJECT_STATS_COUNT];
2101  size_t object_counts_last_time_[OBJECT_STATS_COUNT];
2102  size_t object_sizes_[OBJECT_STATS_COUNT];
2103  size_t object_sizes_last_time_[OBJECT_STATS_COUNT];
2104 
2105  // Maximum GC pause.
2106  int max_gc_pause_;
2107 
2108  // Total time spent in GC.
2109  int total_gc_time_ms_;
2110 
2111  // Maximum size of objects alive after GC.
2112  intptr_t max_alive_after_gc_;
2113 
2114  // Minimal interval between two subsequent collections.
2115  int min_in_mutator_;
2116 
2117  // Size of objects alive after last GC.
2118  intptr_t alive_after_last_gc_;
2119 
2120  double last_gc_end_timestamp_;
2121 
2122  MarkCompactCollector mark_compact_collector_;
2123 
2124  StoreBuffer store_buffer_;
2125 
2126  Marking marking_;
2127 
2128  IncrementalMarking incremental_marking_;
2129 
2130  int number_idle_notifications_;
2131  unsigned int last_idle_notification_gc_count_;
2132  bool last_idle_notification_gc_count_init_;
2133 
2134  int mark_sweeps_since_idle_round_started_;
2135  int ms_count_at_last_idle_notification_;
2136  unsigned int gc_count_at_last_idle_gc_;
2137  int scavenges_since_last_idle_round_;
2138 
2139  static const int kMaxMarkSweepsInIdleRound = 7;
2140  static const int kIdleScavengeThreshold = 5;
2141 
2142  // Shared state read by the scavenge collector and set by ScavengeObject.
2143  PromotionQueue promotion_queue_;
2144 
2145  // Flag is set when the heap has been configured. The heap can be repeatedly
2146  // configured through the API until it is set up.
2147  bool configured_;
2148 
2149  ExternalStringTable external_string_table_;
2150 
2151  VisitorDispatchTable<ScavengingCallback> scavenging_visitors_table_;
2152 
2153  MemoryChunk* chunks_queued_for_free_;
2154 
2155  Mutex* relocation_mutex_;
2156 
2157  friend class Factory;
2158  friend class GCTracer;
2160  friend class AlwaysAllocateScope;
2161  friend class Page;
2162  friend class Isolate;
2163  friend class MarkCompactCollector;
2165  friend class MapCompact;
2166 
2168 };
2169 
2170 
2171 class HeapStats {
2172  public:
2173  static const int kStartMarker = 0xDECADE00;
2174  static const int kEndMarker = 0xDECADE01;
2175 
2176  int* start_marker; // 0
2177  int* new_space_size; // 1
2179  intptr_t* old_pointer_space_size; // 3
2181  intptr_t* old_data_space_size; // 5
2182  intptr_t* old_data_space_capacity; // 6
2183  intptr_t* code_space_size; // 7
2184  intptr_t* code_space_capacity; // 8
2185  intptr_t* map_space_size; // 9
2186  intptr_t* map_space_capacity; // 10
2187  intptr_t* cell_space_size; // 11
2188  intptr_t* cell_space_capacity; // 12
2189  intptr_t* lo_space_size; // 13
2195  intptr_t* memory_allocator_size; // 19
2196  intptr_t* memory_allocator_capacity; // 20
2197  int* objects_per_type; // 21
2198  int* size_per_type; // 22
2199  int* os_error; // 23
2200  int* end_marker; // 24
2201 };
2202 
2203 
2205  public:
2206  inline DisallowAllocationFailure();
2207  inline ~DisallowAllocationFailure();
2208 
2209 #ifdef DEBUG
2210  private:
2211  bool old_state_;
2212 #endif
2213 };
2214 
2215 
2217  public:
2218  inline AlwaysAllocateScope();
2219  inline ~AlwaysAllocateScope();
2220 
2221  private:
2222  // Implicitly disable artificial allocation failures.
2223  DisallowAllocationFailure disallow_allocation_failure_;
2224 };
2225 
2226 
2227 // Visitor class to verify interior pointers in spaces that do not contain
2228 // or care about intergenerational references. All heap object pointers have to
2229 // point into the heap to a location that has a map pointer at its first word.
2230 // Caveat: Heap::Contains is an approximation because it can return true for
2231 // objects in a heap space but above the allocation pointer.
2232 class VerifyPointersVisitor: public ObjectVisitor {
2233  public:
2234  inline void VisitPointers(Object** start, Object** end);
2235 };
2236 
2237 
2238 // Space iterator for iterating over all spaces of the heap.
2239 // Returns each space in turn, and null when it is done.
2240 class AllSpaces BASE_EMBEDDED {
2241  public:
2242  Space* next();
2243  AllSpaces() { counter_ = FIRST_SPACE; }
2244  private:
2245  int counter_;
2246 };
2247 
2248 
2249 // Space iterator for iterating over all old spaces of the heap: Old pointer
2250 // space, old data space and code space.
2251 // Returns each space in turn, and null when it is done.
2252 class OldSpaces BASE_EMBEDDED {
2253  public:
2254  OldSpace* next();
2255  OldSpaces() { counter_ = OLD_POINTER_SPACE; }
2256  private:
2257  int counter_;
2258 };
2259 
2260 
2261 // Space iterator for iterating over all the paged spaces of the heap:
2262 // Map space, old pointer space, old data space, code space and cell space.
2263 // Returns each space in turn, and null when it is done.
2264 class PagedSpaces BASE_EMBEDDED {
2265  public:
2266  PagedSpace* next();
2267  PagedSpaces() { counter_ = OLD_POINTER_SPACE; }
2268  private:
2269  int counter_;
2270 };
2271 
2272 
2273 // Space iterator for iterating over all spaces of the heap.
2274 // For each space an object iterator is provided. The deallocation of the
2275 // returned object iterators is handled by the space iterator.
2276 class SpaceIterator : public Malloced {
2277  public:
2278  SpaceIterator();
2279  explicit SpaceIterator(HeapObjectCallback size_func);
2280  virtual ~SpaceIterator();
2281 
2282  bool has_next();
2283  ObjectIterator* next();
2284 
2285  private:
2286  ObjectIterator* CreateIterator();
2287 
2288  int current_space_; // from enum AllocationSpace.
2289  ObjectIterator* iterator_; // object iterator for the current space.
2290  HeapObjectCallback size_func_;
2291 };
2292 
2293 
2294 // A HeapIterator provides iteration over the whole heap. It
2295 // aggregates the specific iterators for the different spaces as
2296 // these can only iterate over one space only.
2297 //
2298 // HeapIterator can skip free list nodes (that is, de-allocated heap
2299 // objects that still remain in the heap). As implementation of free
2300 // nodes filtering uses GC marks, it can't be used during MS/MC GC
2301 // phases. Also, it is forbidden to interrupt iteration in this mode,
2302 // as this will leave heap objects marked (and thus, unusable).
2303 class HeapObjectsFilter;
2304 
2305 class HeapIterator BASE_EMBEDDED {
2306  public:
2309  kFilterUnreachable
2310  };
2311 
2312  HeapIterator();
2313  explicit HeapIterator(HeapObjectsFiltering filtering);
2314  ~HeapIterator();
2315 
2316  HeapObject* next();
2317  void reset();
2318 
2319  private:
2320  // Perform the initialization.
2321  void Init();
2322  // Perform all necessary shutdown (destruction) work.
2323  void Shutdown();
2324  HeapObject* NextObject();
2325 
2326  HeapObjectsFiltering filtering_;
2327  HeapObjectsFilter* filter_;
2328  // Space iterator for iterating all the spaces.
2329  SpaceIterator* space_iterator_;
2330  // Object iterator for the space currently being iterated.
2331  ObjectIterator* object_iterator_;
2332 };
2333 
2334 
2335 // Cache for mapping (map, property name) into field offset.
2336 // Cleared at startup and prior to mark sweep collection.
2338  public:
2339  // Lookup field offset for (map, name). If absent, -1 is returned.
2340  int Lookup(Map* map, String* name);
2341 
2342  // Update an element in the cache.
2343  void Update(Map* map, String* name, int field_offset);
2344 
2345  // Clear the cache.
2346  void Clear();
2347 
2348  static const int kLength = 256;
2349  static const int kCapacityMask = kLength - 1;
2350  static const int kMapHashShift = 5;
2351  static const int kHashMask = -4; // Zero the last two bits.
2352  static const int kEntriesPerBucket = 4;
2353  static const int kNotFound = -1;
2354 
2355  // kEntriesPerBucket should be a power of 2.
2358 
2359  private:
2360  KeyedLookupCache() {
2361  for (int i = 0; i < kLength; ++i) {
2362  keys_[i].map = NULL;
2363  keys_[i].name = NULL;
2364  field_offsets_[i] = kNotFound;
2365  }
2366  }
2367 
2368  static inline int Hash(Map* map, String* name);
2369 
2370  // Get the address of the keys and field_offsets arrays. Used in
2371  // generated code to perform cache lookups.
2372  Address keys_address() {
2373  return reinterpret_cast<Address>(&keys_);
2374  }
2375 
2376  Address field_offsets_address() {
2377  return reinterpret_cast<Address>(&field_offsets_);
2378  }
2379 
2380  struct Key {
2381  Map* map;
2382  String* name;
2383  };
2384 
2385  Key keys_[kLength];
2386  int field_offsets_[kLength];
2387 
2388  friend class ExternalReference;
2389  friend class Isolate;
2391 };
2392 
2393 
2394 // Cache for mapping (map, property name) into descriptor index.
2395 // The cache contains both positive and negative results.
2396 // Descriptor index equals kNotFound means the property is absent.
2397 // Cleared at startup and prior to any gc.
2399  public:
2400  // Lookup descriptor index for (map, name).
2401  // If absent, kAbsent is returned.
2402  int Lookup(Map* source, String* name) {
2403  if (!StringShape(name).IsSymbol()) return kAbsent;
2404  int index = Hash(source, name);
2405  Key& key = keys_[index];
2406  if ((key.source == source) && (key.name == name)) return results_[index];
2407  return kAbsent;
2408  }
2409 
2410  // Update an element in the cache.
2411  void Update(Map* source, String* name, int result) {
2412  ASSERT(result != kAbsent);
2413  if (StringShape(name).IsSymbol()) {
2414  int index = Hash(source, name);
2415  Key& key = keys_[index];
2416  key.source = source;
2417  key.name = name;
2418  results_[index] = result;
2419  }
2420  }
2421 
2422  // Clear the cache.
2423  void Clear();
2424 
2425  static const int kAbsent = -2;
2426 
2427  private:
2429  for (int i = 0; i < kLength; ++i) {
2430  keys_[i].source = NULL;
2431  keys_[i].name = NULL;
2432  results_[i] = kAbsent;
2433  }
2434  }
2435 
2436  static int Hash(Object* source, String* name) {
2437  // Uses only lower 32 bits if pointers are larger.
2438  uint32_t source_hash =
2439  static_cast<uint32_t>(reinterpret_cast<uintptr_t>(source))
2440  >> kPointerSizeLog2;
2441  uint32_t name_hash =
2442  static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name))
2443  >> kPointerSizeLog2;
2444  return (source_hash ^ name_hash) % kLength;
2445  }
2446 
2447  static const int kLength = 64;
2448  struct Key {
2449  Map* source;
2450  String* name;
2451  };
2452 
2453  Key keys_[kLength];
2454  int results_[kLength];
2455 
2456  friend class Isolate;
2458 };
2459 
2460 
2461 // A helper class to document/test C++ scopes where we do not
2462 // expect a GC. Usage:
2463 //
2464 // /* Allocation not allowed: we cannot handle a GC in this scope. */
2465 // { AssertNoAllocation nogc;
2466 // ...
2467 // }
2469  public:
2470  inline AssertNoAllocation();
2471  inline ~AssertNoAllocation();
2472 
2473 #ifdef DEBUG
2474  private:
2475  bool old_state_;
2476  bool active_;
2477 #endif
2478 };
2479 
2480 
2482  public:
2483  inline DisableAssertNoAllocation();
2484  inline ~DisableAssertNoAllocation();
2485 
2486 #ifdef DEBUG
2487  private:
2488  bool old_state_;
2489  bool active_;
2490 #endif
2491 };
2492 
2493 // GCTracer collects and prints ONE line after each garbage collector
2494 // invocation IFF --trace_gc is used.
2495 
2496 class GCTracer BASE_EMBEDDED {
2497  public:
2498  class Scope BASE_EMBEDDED {
2499  public:
2500  enum ScopeId {
2501  EXTERNAL,
2502  MC_MARK,
2503  MC_SWEEP,
2504  MC_SWEEP_NEWSPACE,
2505  MC_EVACUATE_PAGES,
2506  MC_UPDATE_NEW_TO_NEW_POINTERS,
2507  MC_UPDATE_ROOT_TO_NEW_POINTERS,
2508  MC_UPDATE_OLD_TO_NEW_POINTERS,
2509  MC_UPDATE_POINTERS_TO_EVACUATED,
2510  MC_UPDATE_POINTERS_BETWEEN_EVACUATED,
2511  MC_UPDATE_MISC_POINTERS,
2512  MC_FLUSH_CODE,
2513  kNumberOfScopes
2514  };
2515 
2516  Scope(GCTracer* tracer, ScopeId scope)
2517  : tracer_(tracer),
2518  scope_(scope) {
2519  start_time_ = OS::TimeCurrentMillis();
2520  }
2521 
2522  ~Scope() {
2523  ASSERT(scope_ < kNumberOfScopes); // scope_ is unsigned.
2524  tracer_->scopes_[scope_] += OS::TimeCurrentMillis() - start_time_;
2525  }
2526 
2527  private:
2528  GCTracer* tracer_;
2529  ScopeId scope_;
2530  double start_time_;
2531  };
2532 
2533  explicit GCTracer(Heap* heap,
2534  const char* gc_reason,
2535  const char* collector_reason);
2536  ~GCTracer();
2537 
2538  // Sets the collector.
2539  void set_collector(GarbageCollector collector) { collector_ = collector; }
2540 
2541  // Sets the GC count.
2542  void set_gc_count(unsigned int count) { gc_count_ = count; }
2543 
2544  // Sets the full GC count.
2545  void set_full_gc_count(int count) { full_gc_count_ = count; }
2546 
2547  void increment_promoted_objects_size(int object_size) {
2548  promoted_objects_size_ += object_size;
2549  }
2550 
2551  private:
2552  // Returns a string matching the collector.
2553  const char* CollectorString();
2554 
2555  // Returns size of object in heap (in MB).
2556  inline double SizeOfHeapObjects();
2557 
2558  // Timestamp set in the constructor.
2559  double start_time_;
2560 
2561  // Size of objects in heap set in constructor.
2562  intptr_t start_object_size_;
2563 
2564  // Size of memory allocated from OS set in constructor.
2565  intptr_t start_memory_size_;
2566 
2567  // Type of collector.
2568  GarbageCollector collector_;
2569 
2570  // A count (including this one, e.g. the first collection is 1) of the
2571  // number of garbage collections.
2572  unsigned int gc_count_;
2573 
2574  // A count (including this one) of the number of full garbage collections.
2575  int full_gc_count_;
2576 
2577  // Amounts of time spent in different scopes during GC.
2578  double scopes_[Scope::kNumberOfScopes];
2579 
2580  // Total amount of space either wasted or contained in one of free lists
2581  // before the current GC.
2582  intptr_t in_free_list_or_wasted_before_gc_;
2583 
2584  // Difference between space used in the heap at the beginning of the current
2585  // collection and the end of the previous collection.
2586  intptr_t allocated_since_last_gc_;
2587 
2588  // Amount of time spent in mutator that is time elapsed between end of the
2589  // previous collection and the beginning of the current one.
2590  double spent_in_mutator_;
2591 
2592  // Size of objects promoted during the current collection.
2593  intptr_t promoted_objects_size_;
2594 
2595  // Incremental marking steps counters.
2596  int steps_count_;
2597  double steps_took_;
2598  double longest_step_;
2599  int steps_count_since_last_gc_;
2600  double steps_took_since_last_gc_;
2601 
2602  Heap* heap_;
2603 
2604  const char* gc_reason_;
2605  const char* collector_reason_;
2606 };
2607 
2608 
2610  public:
2612 
2613  // Attempt to retrieve a cached result. On failure, 0 is returned as a Smi.
2614  // On success, the returned result is guaranteed to be a COW-array.
2615  static Object* Lookup(Heap* heap,
2616  String* key_string,
2617  Object* key_pattern,
2618  ResultsCacheType type);
2619  // Attempt to add value_array to the cache specified by type. On success,
2620  // value_array is turned into a COW-array.
2621  static void Enter(Heap* heap,
2622  String* key_string,
2623  Object* key_pattern,
2624  FixedArray* value_array,
2625  ResultsCacheType type);
2626  static void Clear(FixedArray* cache);
2627  static const int kRegExpResultsCacheSize = 0x100;
2628 
2629  private:
2630  static const int kArrayEntriesPerCacheEntry = 4;
2631  static const int kStringOffset = 0;
2632  static const int kPatternOffset = 1;
2633  static const int kArrayOffset = 2;
2634 };
2635 
2636 
2638  public:
2640  static const int kTranscendentalTypeBits = 3;
2642 
2643  // Returns a heap number with f(input), where f is a math function specified
2644  // by the 'type' argument.
2645  MUST_USE_RESULT inline MaybeObject* Get(Type type, double input);
2646 
2647  // The cache contains raw Object pointers. This method disposes of
2648  // them before a garbage collection.
2649  void Clear();
2650 
2651  private:
2652  class SubCache {
2653  static const int kCacheSize = 512;
2654 
2655  explicit SubCache(Type t);
2656 
2657  MUST_USE_RESULT inline MaybeObject* Get(double input);
2658 
2659  inline double Calculate(double input);
2660 
2661  struct Element {
2662  uint32_t in[2];
2663  Object* output;
2664  };
2665 
2666  union Converter {
2667  double dbl;
2668  uint32_t integers[2];
2669  };
2670 
2671  inline static int Hash(const Converter& c) {
2672  uint32_t hash = (c.integers[0] ^ c.integers[1]);
2673  hash ^= static_cast<int32_t>(hash) >> 16;
2674  hash ^= static_cast<int32_t>(hash) >> 8;
2675  return (hash & (kCacheSize - 1));
2676  }
2677 
2678  Element elements_[kCacheSize];
2679  Type type_;
2680  Isolate* isolate_;
2681 
2682  // Allow access to the caches_ array as an ExternalReference.
2683  friend class ExternalReference;
2684  // Inline implementation of the cache.
2685  friend class TranscendentalCacheStub;
2686  // For evaluating value.
2687  friend class TranscendentalCache;
2688 
2689  DISALLOW_COPY_AND_ASSIGN(SubCache);
2690  };
2691 
2692  TranscendentalCache() {
2693  for (int i = 0; i < kNumberOfCaches; ++i) caches_[i] = NULL;
2694  }
2695 
2696  // Used to create an external reference.
2697  inline Address cache_array_address();
2698 
2699  // Instantiation
2700  friend class Isolate;
2701  // Inline implementation of the caching.
2703  // Allow access to the caches_ array as an ExternalReference.
2704  friend class ExternalReference;
2705 
2706  SubCache* caches_[kNumberOfCaches];
2708 };
2709 
2710 
2711 // Abstract base class for checking whether a weak object should be retained.
2713  public:
2714  virtual ~WeakObjectRetainer() {}
2715 
2716  // Return whether this object should be retained. If NULL is returned the
2717  // object has no references. Otherwise the address of the retained object
2718  // should be returned as in some GC situations the object has been moved.
2719  virtual Object* RetainAs(Object* object) = 0;
2720 };
2721 
2722 
2723 // Intrusive object marking uses least significant bit of
2724 // heap object's map word to mark objects.
2725 // Normally all map words have least significant bit set
2726 // because they contain tagged map pointer.
2727 // If the bit is not set object is marked.
2728 // All objects should be unmarked before resuming
2729 // JavaScript execution.
2731  public:
2732  static bool IsMarked(HeapObject* object) {
2733  return (object->map_word().ToRawValue() & kNotMarkedBit) == 0;
2734  }
2735 
2736  static void ClearMark(HeapObject* object) {
2737  uintptr_t map_word = object->map_word().ToRawValue();
2738  object->set_map_word(MapWord::FromRawValue(map_word | kNotMarkedBit));
2739  ASSERT(!IsMarked(object));
2740  }
2741 
2742  static void SetMark(HeapObject* object) {
2743  uintptr_t map_word = object->map_word().ToRawValue();
2744  object->set_map_word(MapWord::FromRawValue(map_word & ~kNotMarkedBit));
2745  ASSERT(IsMarked(object));
2746  }
2747 
2748  static Map* MapOfMarkedObject(HeapObject* object) {
2749  uintptr_t map_word = object->map_word().ToRawValue();
2750  return MapWord::FromRawValue(map_word | kNotMarkedBit).ToMap();
2751  }
2752 
2753  static int SizeOfMarkedObject(HeapObject* object) {
2754  return object->SizeFromMap(MapOfMarkedObject(object));
2755  }
2756 
2757  private:
2758  static const uintptr_t kNotMarkedBit = 0x1;
2759  STATIC_ASSERT((kHeapObjectTag & kNotMarkedBit) != 0);
2760 };
2761 
2762 
2763 #if defined(DEBUG) || defined(LIVE_OBJECT_LIST)
2764 // Helper class for tracing paths to a search target Object from all roots.
2765 // The TracePathFrom() method can be used to trace paths from a specific
2766 // object to the search target object.
2767 class PathTracer : public ObjectVisitor {
2768  public:
2769  enum WhatToFind {
2770  FIND_ALL, // Will find all matches.
2771  FIND_FIRST // Will stop the search after first match.
2772  };
2773 
2774  // For the WhatToFind arg, if FIND_FIRST is specified, tracing will stop
2775  // after the first match. If FIND_ALL is specified, then tracing will be
2776  // done for all matches.
2777  PathTracer(Object* search_target,
2778  WhatToFind what_to_find,
2779  VisitMode visit_mode)
2780  : search_target_(search_target),
2781  found_target_(false),
2782  found_target_in_trace_(false),
2783  what_to_find_(what_to_find),
2784  visit_mode_(visit_mode),
2785  object_stack_(20),
2786  no_alloc() {}
2787 
2788  virtual void VisitPointers(Object** start, Object** end);
2789 
2790  void Reset();
2791  void TracePathFrom(Object** root);
2792 
2793  bool found() const { return found_target_; }
2794 
2795  static Object* const kAnyGlobalObject;
2796 
2797  protected:
2798  class MarkVisitor;
2799  class UnmarkVisitor;
2800 
2801  void MarkRecursively(Object** p, MarkVisitor* mark_visitor);
2802  void UnmarkRecursively(Object** p, UnmarkVisitor* unmark_visitor);
2803  virtual void ProcessResults();
2804 
2805  // Tags 0, 1, and 3 are used. Use 2 for marking visited HeapObject.
2806  static const int kMarkTag = 2;
2807 
2808  Object* search_target_;
2809  bool found_target_;
2810  bool found_target_in_trace_;
2811  WhatToFind what_to_find_;
2812  VisitMode visit_mode_;
2813  List<Object*> object_stack_;
2814 
2815  AssertNoAllocation no_alloc; // i.e. no gc allowed.
2816 
2817  private:
2818  DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer);
2819 };
2820 #endif // DEBUG || LIVE_OBJECT_LIST
2821 
2822 } } // namespace v8::internal
2823 
2824 #endif // V8_HEAP_H_
static int SizeOfMarkedObject(HeapObject *object)
Definition: heap.h:2753
byte * Address
Definition: globals.h:157
Object ** roots_array_start()
Definition: heap.h:1257
Address NewSpaceStart()
Definition: heap.h:501
MUST_USE_RESULT MaybeObject * AllocateJSModule(Context *context, ScopeInfo *scope_info)
Definition: heap.cc:4064
intptr_t OldGenPromotionLimit(intptr_t old_gen_size)
Definition: heap.h:1402
void GarbageCollectionEpilogue()
Definition: heap.cc:449
MUST_USE_RESULT MaybeObject * CopyCode(Code *code)
Definition: heap.cc:3667
static void Clear(FixedArray *cache)
Definition: heap.cc:2904
void(* GCCallback)()
Definition: v8.h:2762
MUST_USE_RESULT MaybeObject * AllocateStringFromUtf8Slow(Vector< const char > str, int non_ascii_start, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:4430
double total_regexp_code_generated()
Definition: heap.h:1527
MUST_USE_RESULT MaybeObject * AllocateFixedArray(int length, PretenureFlag pretenure)
Definition: heap.cc:4819
void TearDown()
Definition: heap.cc:6270
void SetStackLimits()
Definition: heap.cc:6253
void CallGlobalGCEpilogueCallback()
Definition: heap.h:1583
PromotionQueue(Heap *heap)
Definition: heap.h:299
void SetNewLimit(Address limit)
Definition: heap.h:320
bool NextGCIsLikelyToBeFull()
Definition: heap.h:1479
MUST_USE_RESULT MaybeObject * AllocateExternalStringFromAscii(const ExternalAsciiString::Resource *resource)
Definition: heap.cc:3438
MUST_USE_RESULT MaybeObject * AllocateSymbol(Vector< const char > str, int chars, uint32_t hash_field)
Definition: heap-inl.h:101
void set_full_gc_count(int count)
Definition: heap.h:2545
intptr_t OldGenerationCapacityAvailable()
Definition: heap.h:1394
void Callback(MemoryChunk *page, StoreBufferEvent event)
Definition: heap.cc:1122
MUST_USE_RESULT MaybeObject * CopyFixedDoubleArray(FixedDoubleArray *src)
Definition: heap-inl.h:181
intptr_t * old_pointer_space_size
Definition: heap.h:2179
MUST_USE_RESULT MaybeObject * AllocateFunctionPrototype(JSFunction *function)
Definition: heap.cc:3782
Object ** native_contexts_list_address()
Definition: heap.h:1264
void RecordObjectStats(InstanceType type, int sub_type, size_t size)
Definition: heap.h:1659
intptr_t * cell_space_size
Definition: heap.h:2187
static const int kMapHashShift
Definition: heap.h:2350
int ReservedSemiSpaceSize()
Definition: heap.h:475
void RecordWrite(Address address, int offset)
Definition: heap-inl.h:331
void PrintF(const char *format,...)
Definition: v8utils.cc:40
bool OldGenerationPromotionLimitReached()
Definition: heap.h:1386
bool InNewSpace(Object *object)
Definition: heap-inl.h:288
static const int kArgumentsObjectSize
Definition: heap.h:895
bool IsHeapIterable()
Definition: heap.cc:5103
MUST_USE_RESULT MaybeObject * AllocateFunctionContext(int length, JSFunction *function)
Definition: heap.cc:4999
MUST_USE_RESULT MaybeObject * Allocate(Map *map, AllocationSpace space)
Definition: heap.cc:3749
MUST_USE_RESULT MaybeObject * AllocateSubString(String *buffer, int start, int end, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:3344
int * new_space_capacity
Definition: heap.h:2178
void(* ObjectSlotCallback)(HeapObject **from, HeapObject *to)
Definition: store-buffer.h:42
void SetConstructStubDeoptPCOffset(int pc_offset)
Definition: heap.h:1614
void IterateWeakRoots(ObjectVisitor *v, VisitMode mode)
Definition: heap.cc:5746
uint32_t HashSeed()
Definition: heap.h:1603
Object * ToBoolean(bool condition)
Definition: heap-inl.h:652
Isolate * isolate()
Definition: heap-inl.h:503
MUST_USE_RESULT MaybeObject * ReinitializeJSGlobalProxy(JSFunction *constructor, JSGlobalProxy *global)
Definition: heap.cc:4386
MUST_USE_RESULT MaybeObject * CopyFixedDoubleArrayWithMap(FixedDoubleArray *src, Map *map)
Definition: heap.cc:4737
MUST_USE_RESULT MaybeObject * AllocateGlobalObject(JSFunction *constructor)
Definition: heap.cc:4186
static Smi * FromInt(int value)
Definition: objects-inl.h:981
MUST_USE_RESULT MaybeObject * AllocateJSFunctionProxy(Object *handler, Object *call_trap, Object *construct_trap, Object *prototype)
Definition: heap.cc:4160
const int KB
Definition: globals.h:207
bool flush_monomorphic_ics()
Definition: heap.h:1642
void FinalizeExternalString(String *string)
Definition: heap-inl.h:250
int sweep_generation()
Definition: heap.h:1573
MUST_USE_RESULT MaybeObject * AllocateJSObject(JSFunction *constructor, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:4041
intptr_t MaxReserved()
Definition: heap.h:471
void CompletelyClearInstanceofCache()
Definition: heap-inl.h:657
void CollectAllGarbage(int flags, const char *gc_reason=NULL)
Definition: heap.cc:538
Map * MapForExternalArrayType(ExternalArrayType array_type)
Definition: heap.cc:3048
void SetNumberStringCache(Object *number, String *str)
Definition: heap.cc:2989
static const int kNullValueRootIndex
Definition: v8.h:4091
void AgeInlineCaches()
Definition: heap.h:1638
MUST_USE_RESULT MaybeObject * AllocateModuleContext(ScopeInfo *scope_info)
Definition: heap.cc:4985
MUST_USE_RESULT MaybeObject * AllocateCodeCache()
Definition: heap.cc:2117
MUST_USE_RESULT MaybeObject * AllocateTwoByteSymbol(Vector< const uc16 > str, uint32_t hash_field)
Definition: heap-inl.h:144
void AddString(String *string)
Definition: heap-inl.h:580
T Max(T a, T b)
Definition: utils.h:222
MUST_USE_RESULT MaybeObject * AllocateNativeContext()
Definition: heap.cc:4951
void AddGCPrologueCallback(GCPrologueCallback callback, GCType gc_type_filter)
Definition: heap.cc:6356
RootListIndex RootIndexForExternalArrayType(ExternalArrayType array_type)
Definition: heap.cc:3053
MUST_USE_RESULT MaybeObject * LookupAsciiSymbol(Vector< const char > str)
Definition: heap.cc:5449
static const int kOldSpaceRoots
Definition: heap.h:1601
void VisitExternalResources(v8::ExternalResourceVisitor *visitor)
Definition: heap.cc:1548
intptr_t OldGenAllocationLimit(intptr_t old_gen_size)
Definition: heap.h:1412
MUST_USE_RESULT MaybeObject * AllocateByteArray(int length, PretenureFlag pretenure)
Definition: heap.cc:3517
Address * allocation_top_address()
Definition: spaces.h:2208
void DoScavengeObject(Map *map, HeapObject **slot, HeapObject *obj)
Definition: heap.h:1589
void SetGlobalGCPrologueCallback(GCCallback callback)
Definition: heap.h:1145
int int32_t
Definition: unicode.cc:47
void ClearInstanceofCache()
Definition: heap-inl.h:647
HeapObjectCallback GcSafeSizeOfOldObjectFunction()
Definition: heap.h:1236
bool InFromSpace(Object *object)
Definition: heap-inl.h:302
MUST_USE_RESULT MaybeObject * Uint32ToString(uint32_t value, bool check_number_string_cache=true)
Definition: heap.cc:3039
PromotionQueue * promotion_queue()
Definition: heap.h:1130
void SetGetterStubDeoptPCOffset(int pc_offset)
Definition: heap.h:1619
Map * SymbolMapForString(String *str)
Definition: heap.cc:4490
Marking * marking()
Definition: heap.h:1549
intptr_t * code_space_size
Definition: heap.h:2183
void AddGCEpilogueCallback(GCEpilogueCallback callback, GCType gc_type_filter)
Definition: heap.cc:6376
MUST_USE_RESULT MaybeObject * AllocateRawAsciiString(int length, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:4579
static void MoveBlock(Address dst, Address src, int byte_size)
Definition: heap-inl.h:386
virtual int Unlock()=0
bool linear_allocation()
Definition: heap.h:535
void IterateStrongRoots(ObjectVisitor *v, VisitMode mode)
Definition: heap.cc:5758
#define ASSERT(condition)
Definition: checks.h:270
bool InSpace(Address addr, AllocationSpace space)
Definition: heap.cc:5385
void(* GCPrologueCallback)(GCType type, GCCallbackFlags flags)
Definition: v8.h:2759
v8::Handle< v8::Value > Print(const v8::Arguments &args)
MUST_USE_RESULT MaybeObject * AllocateGlobalContext(JSFunction *function, ScopeInfo *scope_info)
Definition: heap.cc:4966
void public_set_code_stubs(UnseededNumberDictionary *value)
Definition: heap.h:1229
static const int kReduceMemoryFootprintMask
Definition: heap.h:1083
OldSpace * TargetSpace(HeapObject *object)
Definition: heap-inl.h:345
void set_collector(GarbageCollector collector)
Definition: heap.h:2539
const int kPointerSizeLog2
Definition: globals.h:232
MUST_USE_RESULT MaybeObject * LookupTwoByteSymbol(Vector< const uc16 > str)
Definition: heap.cc:5484
ExternalArrayType
Definition: v8.h:1431
unsigned short uint16_t
Definition: unicode.cc:46
Address * NewSpaceAllocationLimitAddress()
Definition: heap.h:542
#define STRONG_ROOT_LIST(V)
Definition: heap.h:49
MUST_USE_RESULT MaybeObject * LookupSymbol(Vector< const char > str)
Definition: heap.cc:5434
bool SetUp(bool create_heap_objects)
Definition: heap.cc:6139
MUST_USE_RESULT MaybeObject * AllocateBlockContext(JSFunction *function, Context *previous, ScopeInfo *info)
Definition: heap.cc:5053
int global_ic_age()
Definition: heap.h:1634
void(* ScavengingCallback)(Map *map, HeapObject **slot, HeapObject *object)
Definition: heap.h:391
intptr_t CommittedMemoryExecutable()
Definition: heap.cc:215
ObjectIterator * next()
Definition: heap.cc:6498
friend class ExternalReference
Definition: heap.h:2388
#define SYMBOL_ACCESSOR(name, str)
Definition: heap.h:1176
MUST_USE_RESULT MaybeObject * CopyJSObject(JSObject *source)
Definition: heap.cc:4254
static const int kPageSize
Definition: spaces.h:711
int * pending_global_handle_count
Definition: heap.h:2192
Address * store_buffer_top_address()
Definition: heap.h:1259
void CallGlobalGCPrologueCallback()
Definition: heap.h:1579
friend class GCTracer
Definition: heap.h:2158
MUST_USE_RESULT MaybeObject * AllocateInternalSymbol(unibrow::CharacterStream *buffer, int chars, uint32_t hash_field)
Definition: heap.cc:4514
Address always_allocate_scope_depth_address()
Definition: heap.h:532
void IterateAndMarkPointersToFromSpace(Address start, Address end, ObjectSlotCallback callback)
Definition: heap.cc:5537
MUST_USE_RESULT MaybeObject * AllocateJSArrayWithElements(FixedArrayBase *array_base, ElementsKind elements_kind, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:4125
ArrayStorageAllocationMode
Definition: heap.h:436
STATIC_CHECK(kUndefinedValueRootIndex==Internals::kUndefinedValueRootIndex)
virtual Object * RetainAs(Object *object)=0
StoreBuffer * store_buffer()
Definition: heap.h:1545
MUST_USE_RESULT MaybeObject * AllocateHeapNumber(double value, PretenureFlag pretenure)
Definition: heap.cc:2483
unsigned int seed
Definition: test-strings.cc:18
bool CollectGarbage(AllocationSpace space, GarbageCollector collector, const char *gc_reason, const char *collector_reason)
Definition: heap.cc:577
MUST_USE_RESULT MaybeObject * AllocateWithContext(JSFunction *function, Context *previous, JSObject *extension)
Definition: heap.cc:5036
intptr_t * lo_space_size
Definition: heap.h:2189
MUST_USE_RESULT MaybeObject * CopyFixedArrayWithMap(FixedArray *src, Map *map)
Definition: heap.cc:4711
uint8_t byte
Definition: globals.h:156
int NotifyContextDisposed()
Definition: heap.h:1107
MUST_USE_RESULT MaybeObject * AllocateConsString(String *first, String *second)
Definition: heap.cc:3225
#define STRUCT_MAP_ACCESSOR(NAME, Name, name)
Definition: heap.h:1169
void RepairFreeListsAfterBoot()
Definition: heap.cc:439
MUST_USE_RESULT MaybeObject * NumberToString(Object *number, bool check_number_string_cache=true)
Definition: heap.cc:3009
void public_set_empty_script(Script *script)
Definition: heap.h:1245
int * near_death_global_handle_count
Definition: heap.h:2193
STATIC_ASSERT((kEntriesPerBucket &(kEntriesPerBucket-1))==0)
String *(* ExternalStringTableUpdaterCallback)(Heap *heap, Object **pointer)
Definition: heap.h:262
static const intptr_t kMinimumPromotionLimit
Definition: heap.h:1398
void SetArgumentsAdaptorDeoptPCOffset(int pc_offset)
Definition: heap.h:1609
static void ClearMark(HeapObject *object)
Definition: heap.h:2736
unsigned int ms_count()
Definition: heap.h:1192
static const int kEndMarker
Definition: heap.h:2174
bool IdleNotification(int hint)
Definition: heap.cc:5139
MUST_USE_RESULT MaybeObject * AllocateStringFromAscii(Vector< const char > str, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:4412
intptr_t MaxOldGenerationSize()
Definition: heap.h:477
#define UNREACHABLE()
Definition: checks.h:50
void IncreaseTotalRegexpCodeGenerated(int size)
Definition: heap.h:1528
friend class MarkCompactCollector
Definition: heap.h:2163
friend class MapCompact
Definition: heap.h:2165
void EnsureHeapIsIterable()
Definition: heap.cc:5109
static const int kArgumentsObjectSizeStrict
Definition: heap.h:898
MUST_USE_RESULT MaybeObject * AllocateUninitializedFixedDoubleArray(int length, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:4866
int(* HeapObjectCallback)(HeapObject *obj)
Definition: v8globals.h:238
bool always_allocate()
Definition: heap.h:531
void SetLastScriptId(Object *last_script_id)
Definition: heap-inl.h:498
MUST_USE_RESULT MaybeObject * AllocateStringFromUtf8(Vector< const char > str, PretenureFlag pretenure=NOT_TENURED)
Definition: heap-inl.h:85
bool AdvanceSweeper(intptr_t bytes_to_sweep)
Definition: spaces.cc:2309
static bool IsMarked(HeapObject *object)
Definition: heap.h:2732
#define MUST_USE_RESULT
Definition: globals.h:346
void RemoveGCEpilogueCallback(GCEpilogueCallback callback)
Definition: heap.cc:6384
int MaxObjectSizeInNewSpace()
Definition: heap.h:1516
static NewSpacePage * FromAddress(Address address_in_page)
Definition: spaces.h:1796
void CollectAllAvailableGarbage(const char *gc_reason=NULL)
Definition: heap.cc:548
bool ConfigureHeapDefault()
Definition: heap.cc:5903
PagedSpace * paged_space(int idx)
Definition: heap.h:512
#define ROOT_LIST(V)
Definition: heap.h:159
static const int kNoGCFlags
Definition: heap.h:1081
MUST_USE_RESULT MaybeObject * AllocateFixedArrayWithHoles(int length, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:4827
const int kPointerSize
Definition: globals.h:220
bool LookupTwoCharsSymbolIfExists(String *str, String **symbol)
MUST_USE_RESULT MaybeObject * AllocateInitialMap(JSFunction *fun)
Definition: heap.cc:3901
friend class ExternalReference
Definition: heap.h:2704
int * global_handle_count
Definition: heap.h:2190
MUST_USE_RESULT MaybeObject * AllocateForeign(Address address, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:3102
void QueueMemoryChunkForFree(MemoryChunk *chunk)
Definition: heap.cc:7220
void CheckpointObjectStats()
Definition: heap.cc:7302
MUST_USE_RESULT MaybeObject * AllocateExternalArray(int length, ExternalArrayType array_type, void *external_pointer, PretenureFlag pretenure)
Definition: heap.cc:3572
const int kHeapObjectTag
Definition: v8.h:4009
static void ScavengePointer(HeapObject **p)
Definition: heap-inl.h:405
intptr_t * cell_space_capacity
Definition: heap.h:2188
intptr_t * memory_allocator_size
Definition: heap.h:2195
#define DISALLOW_IMPLICIT_CONSTRUCTORS(TypeName)
Definition: globals.h:318
void VisitPointers(Object **start, Object **end)
Definition: heap-inl.h:742
void decrement_scan_on_scavenge_pages()
Definition: heap.h:1123
void IncrementYoungSurvivorsCounter(int survived)
Definition: heap.h:1473
MUST_USE_RESULT MaybeObject * AllocatePolymorphicCodeCache()
Definition: heap.cc:2128
intptr_t * code_space_capacity
Definition: heap.h:2184
static void Enter(Heap *heap, String *key_string, Object *key_pattern, FixedArray *value_array, ResultsCacheType type)
Definition: heap.cc:2847
#define DISALLOW_COPY_AND_ASSIGN(TypeName)
Definition: globals.h:307
void Update(Map *map, String *name, int field_offset)
Definition: heap.cc:7107
void ReserveSpace(int *sizes, Address *addresses)
Definition: heap.cc:696
static Map * MapOfMarkedObject(HeapObject *object)
Definition: heap.h:2748
OldSpace * old_pointer_space()
Definition: heap.h:506
bool ConfigureHeap(int max_semispace_size, intptr_t max_old_gen_size, intptr_t max_executable_size)
Definition: heap.cc:5833
intptr_t * map_space_size
Definition: heap.h:2185
static double TimeCurrentMillis()
MUST_USE_RESULT MaybeObject * AllocateTypeFeedbackInfo()
Definition: heap.cc:2144
bool CreateApiObjects()
Definition: heap.cc:2542
GCType
Definition: v8.h:2748
HeapState gc_state()
Definition: heap.h:1326
#define SYMBOL_LIST(V)
Definition: heap.h:163
OldSpace * code_space()
Definition: heap.h:508
static const int kMakeHeapIterableMask
Definition: heap.h:1088
void public_set_store_buffer_top(Address *top)
Definition: heap.h:1249
intptr_t AdjustAmountOfExternalAllocatedMemory(intptr_t change_in_bytes)
Definition: heap-inl.h:459
MUST_USE_RESULT MaybeObject * AllocateJSArrayAndStorage(ElementsKind elements_kind, int length, int capacity, ArrayStorageAllocationMode mode=DONT_INITIALIZE_ARRAY_ELEMENTS, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:4079
static const int kUndefinedValueRootIndex
Definition: v8.h:4090
void Iterate(ObjectVisitor *v)
Definition: heap-inl.h:590
bool InToSpace(Object *object)
Definition: heap-inl.h:307
bool UncommitFromSpace()
Definition: heap.h:547
void GarbageCollectionPrologue()
Definition: heap.cc:403
#define ROOT_ACCESSOR(type, name, camel_name)
Definition: heap.h:1763
bool HasBeenSetUp()
Definition: heap.cc:234
LargeObjectSpace * lo_space()
Definition: heap.h:511
#define BASE_EMBEDDED
Definition: allocation.h:68
MUST_USE_RESULT MaybeObject * AllocateJSMessageObject(String *type, JSArray *arguments, int start_position, int end_position, Object *script, Object *stack_trace, Object *stack_frames)
Definition: heap.cc:3156
static const int kFalseValueRootIndex
Definition: v8.h:4093
MUST_USE_RESULT MaybeObject * AllocateExternalStringFromTwoByte(const ExternalTwoByteString::Resource *resource)
Definition: heap.cc:3463
MUST_USE_RESULT MaybeObject * AllocatePartialMap(InstanceType instance_type, int instance_size)
Definition: heap.cc:2061
int * free_global_handle_count
Definition: heap.h:2194
MUST_USE_RESULT MaybeObject * CreateCode(const CodeDesc &desc, Code::Flags flags, Handle< Object > self_reference, bool immovable=false)
Definition: heap.cc:3594
void PerformScavenge()
Definition: heap.cc:652
static Object * Lookup(Heap *heap, String *key_string, Object *key_pattern, ResultsCacheType type)
Definition: heap.cc:2814
STATIC_ASSERT((1<< kTranscendentalTypeBits) >=kNumberOfCaches)
MUST_USE_RESULT MaybeObject * AllocateAsciiSymbol(Vector< const char > str, uint32_t hash_field)
Definition: heap-inl.h:110
activate correct semantics for inheriting readonliness false
Definition: flags.cc:141
void Update(Map *source, String *name, int result)
Definition: heap.h:2411
void set_gc_count(unsigned int count)
Definition: heap.h:2542
static const int kAbortIncrementalMarkingMask
Definition: heap.h:1084
void RemoveGCPrologueCallback(GCPrologueCallback callback)
Definition: heap.cc:6364
Vector< const char > CStrVector(const char *data)
Definition: utils.h:526
void FreeQueuedChunks()
Definition: heap.cc:7226
CellSpace * cell_space()
Definition: heap.h:510
intptr_t CommittedMemory()
Definition: heap.cc:203
void increment_promoted_objects_size(int object_size)
Definition: heap.h:2547
Object * GetNumberStringCache(Object *number)
Definition: heap.cc:2969
intptr_t SizeOfObjects()
Definition: heap.cc:429
MUST_USE_RESULT MaybeObject * AllocateRawFixedDoubleArray(int length, PretenureFlag pretenure)
Definition: heap.cc:4904
MUST_USE_RESULT MaybeObject * AllocateEmptyJSArray(ElementsKind elements_kind, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.h:561
Address * NewSpaceAllocationTopAddress()
Definition: heap.h:539
void RecordWrites(Address address, int start, int len)
Definition: heap-inl.h:336
void(* GCEpilogueCallback)(GCType type, GCCallbackFlags flags)
Definition: v8.h:2760
intptr_t get_max_alive_after_gc()
Definition: heap.h:1536
void UpdateReferencesInExternalStringTable(ExternalStringTableUpdaterCallback updater_func)
Definition: heap.cc:1404
MUST_USE_RESULT MaybeObject * AllocateJSProxy(Object *handler, Object *prototype)
Definition: heap.cc:4140
void ProcessWeakReferences(WeakObjectRetainer *retainer)
Definition: heap.cc:1467
void ClearNormalizedMapCaches()
Definition: heap.cc:781
MUST_USE_RESULT MaybeObject * NumberFromDouble(double value, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:3081
static const int kLength
Definition: heap.h:2348
MUST_USE_RESULT MaybeObject * AllocateExternalSymbol(Vector< const char > str, int chars)
intptr_t * old_data_space_capacity
Definition: heap.h:2182
intptr_t Available()
Definition: heap.cc:222
MUST_USE_RESULT MaybeObject * AllocateArgumentsObject(Object *callee, int length)
Definition: heap.cc:3827
int Lookup(Map *map, String *name)
Definition: heap.cc:7095
int InitialSemiSpaceSize()
Definition: heap.h:476
void SetSetterStubDeoptPCOffset(int pc_offset)
Definition: heap.h:1624
int get_min_in_mutator()
Definition: heap.h:1539
intptr_t Capacity()
Definition: spaces.h:2150
int get_max_gc_pause()
Definition: heap.h:1533
MUST_USE_RESULT MaybeObject * CopyFixedArray(FixedArray *src)
Definition: heap-inl.h:176
void IterateRoots(ObjectVisitor *v, VisitMode mode)
Definition: heap.cc:5740
Address NewSpaceTop()
Definition: heap.h:503
MUST_USE_RESULT MaybeObject * ReinitializeJSReceiver(JSReceiver *object, InstanceType type, int size)
Definition: heap.cc:4326
MUST_USE_RESULT MaybeObject * AllocateAccessorPair()
Definition: heap.cc:2133
int * weak_global_handle_count
Definition: heap.h:2191
MUST_USE_RESULT MaybeObject * AllocateCatchContext(JSFunction *function, Context *previous, String *name, Object *thrown_object)
Definition: heap.cc:5015
MUST_USE_RESULT MaybeObject * Get(Type type, double input)
Definition: heap-inl.h:663
void SetGlobalGCEpilogueCallback(GCCallback callback)
Definition: heap.h:1149
static const int kEmptySymbolRootIndex
Definition: v8.h:4094
#define STRUCT_LIST(V)
Definition: objects.h:448
static const int kArgumentsLengthIndex
Definition: heap.h:901
bool UncommitFromSpace()
Definition: spaces.h:2295
MUST_USE_RESULT MaybeObject * LookupAsciiSymbol(const char *str)
Definition: heap.h:1035
void CheckNewSpaceExpansionCriteria()
Definition: heap.cc:1095
MUST_USE_RESULT MaybeObject * AllocateScopeInfo(int length)
Definition: heap.cc:5071
Object * native_contexts_list()
Definition: heap.h:1189
bool LookupSymbolIfExists(String *str, String **symbol)
Definition: heap.cc:5515
ExternalStringTable * external_string_table()
Definition: heap.h:1568
MUST_USE_RESULT MaybeObject * NumberFromUint32(uint32_t value, PretenureFlag pretenure=NOT_TENURED)
Definition: heap-inl.h:240
IncrementalMarking * incremental_marking()
Definition: heap.h:1553
bool Contains(Address addr)
Definition: heap.cc:5367
bool InNewSpacePage(Address addr)
void insert(HeapObject *target, int size)
Definition: heap-inl.h:43
String * hidden_symbol()
Definition: heap.h:1184
MUST_USE_RESULT MaybeObject * AllocateUninitializedFixedArray(int length)
Definition: heap.cc:4836
static const int kStartMarker
Definition: heap.h:2173
bool ShouldBePromoted(Address old_address, int object_size)
Definition: heap-inl.h:318
uintptr_t NewSpaceMask()
Definition: heap.h:502
static const intptr_t kMinimumAllocationLimit
Definition: heap.h:1399
int MaxSemiSpaceSize()
Definition: heap.h:474
MUST_USE_RESULT MaybeObject * AllocateJSObjectFromMap(Map *map, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:4002
void RememberUnmappedPage(Address page, bool compacted)
Definition: heap.cc:7274
static const int kNotFound
Definition: heap.h:2353
static const int kRegExpResultsCacheSize
Definition: heap.h:2627
friend class TranscendentalCacheStub
Definition: heap.h:2702
MUST_USE_RESULT MaybeObject * LookupSingleCharacterStringFromCode(uint16_t code)
Definition: heap.cc:3492
intptr_t PromotedTotalSize()
Definition: heap.h:1380
static void CopyBlock(Address dst, Address src, int byte_size)
Definition: heap-inl.h:379
MUST_USE_RESULT MaybeObject * AllocateJSGlobalPropertyCell(Object *value)
Definition: heap.cc:2519
static bool ShouldZapGarbage()
Definition: heap.h:1287
MUST_USE_RESULT MaybeObject * CreateSymbol(const char *str, int length, int hash)
MUST_USE_RESULT MaybeObject * AllocateFixedDoubleArrayWithHoles(int length, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:4883
MUST_USE_RESULT MaybeObject * AllocateRawFixedArray(int length)
Definition: heap.cc:4696
static const int kArgumentsCalleeIndex
Definition: heap.h:903
MUST_USE_RESULT MaybeObject * AllocateHashTable(int length, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:4939
void public_set_non_monomorphic_cache(UnseededNumberDictionary *value)
Definition: heap.h:1241
static const int kHeaderSize
Definition: objects.h:2173
Object * FindCodeObject(Address a)
Definition: heap.cc:1029
#define DECLARE_STRUCT_MAP(NAME, Name, name)
Definition: heap.h:1436
MapSpace * map_space()
Definition: heap.h:509
intptr_t PromotedSpaceSizeOfObjects()
Definition: heap.cc:5947
intptr_t * old_pointer_space_capacity
Definition: heap.h:2180
bool OldGenerationAllocationLimitReached()
Definition: heap-inl.h:312
#define ROOT_INDEX_DECLARATION(type, name, camel_name)
Definition: heap.h:1427
MUST_USE_RESULT MaybeObject * NumberFromInt32(int32_t value, PretenureFlag pretenure=NOT_TENURED)
Definition: heap-inl.h:232
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 enable use of VFP2 instructions if available enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of MIPS FPU instructions if NULL
Definition: flags.cc:301
void ClearJSFunctionResultCaches()
Definition: heap.cc:757
#define SYMBOL_INDEX_DECLARATION(name, str)
Definition: heap.h:1431
void RecordStats(HeapStats *stats, bool take_snapshot=false)
Definition: heap.cc:5910
void ZapFromSpace()
Definition: heap.cc:5523
NewSpacePage * prev_page() const
Definition: spaces.h:1768
bool IsInGCPostProcessing()
Definition: heap.h:1328
void CreateFillerObjectAt(Address addr, int size)
Definition: heap.cc:3558
MUST_USE_RESULT MaybeObject * AllocateSharedFunctionInfo(Object *name)
Definition: heap.cc:3114
bool AdvanceSweepers(int step_size)
Definition: heap.h:1562
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra code(assertions) for debugging") DEFINE_bool(code_comments
void increment_scan_on_scavenge_pages()
Definition: heap.h:1116
Address * allocation_limit_address()
Definition: spaces.h:2209
intptr_t * map_space_capacity
Definition: heap.h:2186
static const int kTrueValueRootIndex
Definition: v8.h:4092
static const int kCapacityMask
Definition: heap.h:2349
static void ScavengeObject(HeapObject **p, HeapObject *object)
Definition: heap-inl.h:410
bool IsSweepingComplete()
Definition: heap.h:1557
static const int kTranscendentalTypeBits
Definition: heap.h:2640
MUST_USE_RESULT MaybeObject * PrepareForCompare(String *str)
Definition: heap-inl.h:440
intptr_t amount_of_external_allocated_memory()
Definition: heap.h:1644
static bool IsAtStart(Address addr)
Definition: spaces.h:1782
T Min(T a, T b)
Definition: utils.h:229
intptr_t * memory_allocator_capacity
Definition: heap.h:2196
virtual int Lock()=0
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 enable use of VFP2 instructions if available enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of MIPS FPU instructions if expose natives in global object expose gc extension number of stack frames to capture disable builtin 
natives files print a stack trace if an assertion failure occurs use random jit cookie to mask large constants trace lazy optimization use adaptive optimizations prepare for turning on always opt minimum length for automatic enable preparsing maximum number of optimization attempts before giving up cache prototype transitions automatically set the debug break flag when debugger commands are in the queue always cause a debug break before aborting maximum length of function source code printed in a stack trace max size of the new max size of the old max size of executable always perform global GCs print one trace line following each garbage collection do not print trace line after scavenger collection print more details following each garbage collection print amount of external allocated memory after each time it is adjusted flush code that we expect not to use again before full gc do incremental marking steps track object counts and memory usage use caching Perform compaction on every full GC Never perform compaction on full GC testing only Compact code space on full incremental collections Default seed for initializing random allows verbose printing trace parsing and preparsing Check icache flushes in ARM and MIPS simulator Stack alingment in bytes in print stack trace when throwing exceptions randomize hashes to avoid predictable hash Fixed seed to use to hash property activate a timer that switches between V8 threads testing_bool_flag float flag Seed used for threading test randomness A filename with extra code to be included in the Print usage including flags
Definition: flags.cc:495
static const int kSweepPreciselyMask
Definition: heap.h:1082
intptr_t Capacity()
Definition: heap.cc:191
MUST_USE_RESULT MaybeObject * AllocateStruct(InstanceType type)
Definition: heap.cc:5080
intptr_t * old_data_space_size
Definition: heap.h:2181
intptr_t OldGenerationSpaceAvailable()
Definition: heap.h:1390
GCTracer * tracer()
Definition: heap.h:1522
NewSpace * new_space()
Definition: heap.h:505
MUST_USE_RESULT MaybeObject * AllocateMap(InstanceType instance_type, int instance_size, ElementsKind elements_kind=TERMINAL_FAST_ELEMENTS_KIND)
Definition: heap.cc:2085
int Lookup(Map *source, String *name)
Definition: heap.h:2402
intptr_t MaxExecutableSize()
Definition: heap.h:478
MUST_USE_RESULT MaybeObject * AllocateRaw(int size_in_bytes, AllocationSpace space, AllocationSpace retry_space)
Definition: heap-inl.h:186
uintptr_t mask()
Definition: spaces.h:2194
void PrintShortHeapStatistics()
Definition: heap.cc:327
static const int kHashMask
Definition: heap.h:2351
AllocationSpace TargetSpaceId(InstanceType type)
Definition: heap-inl.h:354
OldSpace * old_data_space()
Definition: heap.h:507
MUST_USE_RESULT MaybeObject * AllocateRawTwoByteString(int length, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:4626
static void AssertValidRange(Address from, Address to)
Definition: spaces.h:1948
static void SetMark(HeapObject *object)
Definition: heap.h:2742
MarkCompactCollector * mark_compact_collector()
Definition: heap.h:1541
MUST_USE_RESULT MaybeObject * AllocateFunction(Map *function_map, SharedFunctionInfo *shared, Object *prototype, PretenureFlag pretenure=TENURED)
Definition: heap.cc:3812
void UpdateNewSpaceReferencesInExternalStringTable(ExternalStringTableUpdaterCallback updater_func)
Definition: heap.cc:1367
StoreBufferRebuilder(StoreBuffer *store_buffer)
Definition: heap.h:267
MUST_USE_RESULT MaybeObject * AllocateStringFromTwoByte(Vector< const uc16 > str, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:4470
static const int kEntriesPerBucket
Definition: heap.h:2352
void EnsureFromSpaceIsCommitted()
Definition: heap.cc:743
MUST_USE_RESULT MaybeObject * AllocateAliasedArgumentsEntry(int slot)
Definition: heap.cc:2156
void set_native_contexts_list(Object *object)
Definition: heap.h:1186
const int MB
Definition: globals.h:208