V8 3.11.10 (Node 0.8.26)
V8 is Google's open source JavaScript engine
 All Data Structures Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
heap.h
Go to the documentation of this file.
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #ifndef V8_HEAP_H_
29 #define V8_HEAP_H_
30 
31 #include <math.h>
32 
33 #include "allocation.h"
34 #include "globals.h"
35 #include "incremental-marking.h"
36 #include "list.h"
37 #include "mark-compact.h"
38 #include "objects-visiting.h"
39 #include "spaces.h"
40 #include "splay-tree-inl.h"
41 #include "store-buffer.h"
42 #include "v8-counters.h"
43 #include "v8globals.h"
44 
45 namespace v8 {
46 namespace internal {
47 
48 // Defines all the strong roots in the Heap.  Each entry is expanded as
// V(type, accessor_name, CamelCaseName); the list is consumed by the root
// accessor/index machinery elsewhere in the heap implementation.
49 #define STRONG_ROOT_LIST(V) \
50  V(Map, byte_array_map, ByteArrayMap) \
51  V(Map, free_space_map, FreeSpaceMap) \
52  V(Map, one_pointer_filler_map, OnePointerFillerMap) \
53  V(Map, two_pointer_filler_map, TwoPointerFillerMap) \
54  /* Cluster the most popular ones in a few cache lines here at the top. */ \
55  V(Smi, store_buffer_top, StoreBufferTop) \
56  V(Oddball, undefined_value, UndefinedValue) \
57  V(Oddball, the_hole_value, TheHoleValue) \
58  V(Oddball, null_value, NullValue) \
59  V(Oddball, true_value, TrueValue) \
60  V(Oddball, false_value, FalseValue) \
61  V(Map, global_property_cell_map, GlobalPropertyCellMap) \
62  V(Map, shared_function_info_map, SharedFunctionInfoMap) \
63  V(Map, meta_map, MetaMap) \
64  V(Map, ascii_symbol_map, AsciiSymbolMap) \
65  V(Map, ascii_string_map, AsciiStringMap) \
66  V(Map, heap_number_map, HeapNumberMap) \
67  V(Map, global_context_map, GlobalContextMap) \
68  V(Map, fixed_array_map, FixedArrayMap) \
69  V(Map, code_map, CodeMap) \
70  V(Map, scope_info_map, ScopeInfoMap) \
71  V(Map, fixed_cow_array_map, FixedCOWArrayMap) \
72  V(Map, fixed_double_array_map, FixedDoubleArrayMap) \
73  V(Object, no_interceptor_result_sentinel, NoInterceptorResultSentinel) \
74  V(Map, hash_table_map, HashTableMap) \
75  V(FixedArray, empty_fixed_array, EmptyFixedArray) \
76  V(ByteArray, empty_byte_array, EmptyByteArray) \
77  V(String, empty_string, EmptyString) \
78  V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray) \
79  V(Smi, stack_limit, StackLimit) \
80  V(Oddball, arguments_marker, ArgumentsMarker) \
81  /* The first 32 roots above this line should be boring from a GC point of */ \
82  /* view. This means they are never in new space and never on a page that */ \
83  /* is being compacted. */ \
84  V(FixedArray, number_string_cache, NumberStringCache) \
85  V(Object, instanceof_cache_function, InstanceofCacheFunction) \
86  V(Object, instanceof_cache_map, InstanceofCacheMap) \
87  V(Object, instanceof_cache_answer, InstanceofCacheAnswer) \
88  V(FixedArray, single_character_string_cache, SingleCharacterStringCache) \
89  V(FixedArray, string_split_cache, StringSplitCache) \
90  V(Object, termination_exception, TerminationException) \
91  V(Smi, hash_seed, HashSeed) \
92  V(Map, string_map, StringMap) \
93  V(Map, symbol_map, SymbolMap) \
94  V(Map, cons_string_map, ConsStringMap) \
95  V(Map, cons_ascii_string_map, ConsAsciiStringMap) \
96  V(Map, sliced_string_map, SlicedStringMap) \
97  V(Map, sliced_ascii_string_map, SlicedAsciiStringMap) \
98  V(Map, cons_symbol_map, ConsSymbolMap) \
99  V(Map, cons_ascii_symbol_map, ConsAsciiSymbolMap) \
100  V(Map, external_symbol_map, ExternalSymbolMap) \
101  V(Map, external_symbol_with_ascii_data_map, ExternalSymbolWithAsciiDataMap) \
102  V(Map, external_ascii_symbol_map, ExternalAsciiSymbolMap) \
103  V(Map, external_string_map, ExternalStringMap) \
104  V(Map, external_string_with_ascii_data_map, ExternalStringWithAsciiDataMap) \
105  V(Map, external_ascii_string_map, ExternalAsciiStringMap) \
106  V(Map, short_external_symbol_map, ShortExternalSymbolMap) \
107  V(Map, \
108  short_external_symbol_with_ascii_data_map, \
109  ShortExternalSymbolWithAsciiDataMap) \
110  V(Map, short_external_ascii_symbol_map, ShortExternalAsciiSymbolMap) \
111  V(Map, short_external_string_map, ShortExternalStringMap) \
112  V(Map, \
113  short_external_string_with_ascii_data_map, \
114  ShortExternalStringWithAsciiDataMap) \
115  V(Map, short_external_ascii_string_map, ShortExternalAsciiStringMap) \
116  V(Map, undetectable_string_map, UndetectableStringMap) \
117  V(Map, undetectable_ascii_string_map, UndetectableAsciiStringMap) \
118  V(Map, external_pixel_array_map, ExternalPixelArrayMap) \
119  V(Map, external_byte_array_map, ExternalByteArrayMap) \
120  V(Map, external_unsigned_byte_array_map, ExternalUnsignedByteArrayMap) \
121  V(Map, external_short_array_map, ExternalShortArrayMap) \
122  V(Map, external_unsigned_short_array_map, ExternalUnsignedShortArrayMap) \
123  V(Map, external_int_array_map, ExternalIntArrayMap) \
124  V(Map, external_unsigned_int_array_map, ExternalUnsignedIntArrayMap) \
125  V(Map, external_float_array_map, ExternalFloatArrayMap) \
126  V(Map, external_double_array_map, ExternalDoubleArrayMap) \
127  V(Map, non_strict_arguments_elements_map, NonStrictArgumentsElementsMap) \
128  V(Map, function_context_map, FunctionContextMap) \
129  V(Map, catch_context_map, CatchContextMap) \
130  V(Map, with_context_map, WithContextMap) \
131  V(Map, block_context_map, BlockContextMap) \
132  V(Map, module_context_map, ModuleContextMap) \
133  V(Map, oddball_map, OddballMap) \
134  V(Map, message_object_map, JSMessageObjectMap) \
135  V(Map, foreign_map, ForeignMap) \
136  V(HeapNumber, nan_value, NanValue) \
137  V(HeapNumber, infinity_value, InfinityValue) \
138  V(HeapNumber, minus_zero_value, MinusZeroValue) \
139  V(Map, neander_map, NeanderMap) \
140  V(JSObject, message_listeners, MessageListeners) \
141  V(Foreign, prototype_accessors, PrototypeAccessors) \
142  V(UnseededNumberDictionary, code_stubs, CodeStubs) \
143  V(UnseededNumberDictionary, non_monomorphic_cache, NonMonomorphicCache) \
144  V(PolymorphicCodeCache, polymorphic_code_cache, PolymorphicCodeCache) \
145  V(Code, js_entry_code, JsEntryCode) \
146  V(Code, js_construct_entry_code, JsConstructEntryCode) \
147  V(FixedArray, natives_source_cache, NativesSourceCache) \
148  V(Object, last_script_id, LastScriptId) \
149  V(Script, empty_script, EmptyScript) \
150  V(Smi, real_stack_limit, RealStackLimit) \
151  V(StringDictionary, intrinsic_function_names, IntrinsicFunctionNames) \
152  V(Smi, arguments_adaptor_deopt_pc_offset, ArgumentsAdaptorDeoptPCOffset) \
153  V(Smi, construct_stub_deopt_pc_offset, ConstructStubDeoptPCOffset)
154 
// The full root list: all strong roots plus the symbol table, which the
// macro lists separately from STRONG_ROOT_LIST.
155 #define ROOT_LIST(V) \
156  STRONG_ROOT_LIST(V) \
157  V(SymbolTable, symbol_table, SymbolTable)
158 
// Predefined symbols.  Each entry expands as V(accessor_name, "contents");
// presumably each string is interned once and reused -- verify against the
// heap implementation.
159 #define SYMBOL_LIST(V) \
160  V(Array_symbol, "Array") \
161  V(Object_symbol, "Object") \
162  V(Proto_symbol, "__proto__") \
163  V(StringImpl_symbol, "StringImpl") \
164  V(arguments_symbol, "arguments") \
165  V(Arguments_symbol, "Arguments") \
166  V(call_symbol, "call") \
167  V(apply_symbol, "apply") \
168  V(caller_symbol, "caller") \
169  V(boolean_symbol, "boolean") \
170  V(Boolean_symbol, "Boolean") \
171  V(callee_symbol, "callee") \
172  V(constructor_symbol, "constructor") \
173  V(code_symbol, ".code") \
174  V(result_symbol, ".result") \
175  V(catch_var_symbol, ".catch-var") \
176  V(empty_symbol, "") \
177  V(eval_symbol, "eval") \
178  V(function_symbol, "function") \
179  V(length_symbol, "length") \
180  V(module_symbol, "module") \
181  V(name_symbol, "name") \
182  V(native_symbol, "native") \
183  V(null_symbol, "null") \
184  V(number_symbol, "number") \
185  V(Number_symbol, "Number") \
186  V(nan_symbol, "NaN") \
187  V(RegExp_symbol, "RegExp") \
188  V(source_symbol, "source") \
189  V(global_symbol, "global") \
190  V(ignore_case_symbol, "ignoreCase") \
191  V(multiline_symbol, "multiline") \
192  V(input_symbol, "input") \
193  V(index_symbol, "index") \
194  V(last_index_symbol, "lastIndex") \
195  V(object_symbol, "object") \
196  V(prototype_symbol, "prototype") \
197  V(string_symbol, "string") \
198  V(String_symbol, "String") \
199  V(Date_symbol, "Date") \
200  V(this_symbol, "this") \
201  V(to_string_symbol, "toString") \
202  V(char_at_symbol, "CharAt") \
203  V(undefined_symbol, "undefined") \
204  V(value_of_symbol, "valueOf") \
205  V(InitializeVarGlobal_symbol, "InitializeVarGlobal") \
206  V(InitializeConstGlobal_symbol, "InitializeConstGlobal") \
207  V(KeyedLoadElementMonomorphic_symbol, \
208  "KeyedLoadElementMonomorphic") \
209  V(KeyedStoreElementMonomorphic_symbol, \
210  "KeyedStoreElementMonomorphic") \
211  V(KeyedStoreAndGrowElementMonomorphic_symbol, \
212  "KeyedStoreAndGrowElementMonomorphic") \
213  V(stack_overflow_symbol, "kStackOverflowBoilerplate") \
214  V(illegal_access_symbol, "illegal access") \
215  V(out_of_memory_symbol, "out-of-memory") \
216  V(illegal_execution_state_symbol, "illegal execution state") \
217  V(get_symbol, "get") \
218  V(set_symbol, "set") \
219  V(function_class_symbol, "Function") \
220  V(illegal_argument_symbol, "illegal argument") \
221  V(MakeReferenceError_symbol, "MakeReferenceError") \
222  V(MakeSyntaxError_symbol, "MakeSyntaxError") \
223  V(MakeTypeError_symbol, "MakeTypeError") \
224  V(invalid_lhs_in_assignment_symbol, "invalid_lhs_in_assignment") \
225  V(invalid_lhs_in_for_in_symbol, "invalid_lhs_in_for_in") \
226  V(invalid_lhs_in_postfix_op_symbol, "invalid_lhs_in_postfix_op") \
227  V(invalid_lhs_in_prefix_op_symbol, "invalid_lhs_in_prefix_op") \
228  V(illegal_return_symbol, "illegal_return") \
229  V(illegal_break_symbol, "illegal_break") \
230  V(illegal_continue_symbol, "illegal_continue") \
231  V(unknown_label_symbol, "unknown_label") \
232  V(redeclaration_symbol, "redeclaration") \
233  V(failure_symbol, "<failure>") \
234  V(space_symbol, " ") \
235  V(exec_symbol, "exec") \
236  V(zero_symbol, "0") \
237  V(global_eval_symbol, "GlobalEval") \
238  V(identity_hash_symbol, "v8::IdentityHash") \
239  V(closure_symbol, "(closure)") \
240  V(use_strict, "use strict") \
241  V(dot_symbol, ".") \
242  V(anonymous_function_symbol, "(anonymous function)") \
243  V(compare_ic_symbol, ".compare_ic") \
244  V(infinity_symbol, "Infinity") \
245  V(minus_infinity_symbol, "-Infinity") \
246  V(hidden_stack_trace_symbol, "v8::hidden_stack_trace") \
247  V(query_colon_symbol, "(?:)")
248 
249 // Forward declarations.
250 class GCTracer;
251 class HeapStats;
252 class Isolate;
253 class WeakObjectRetainer;
254 
255 
// Callback type: given the heap and a slot holding a string pointer, it
// returns the String* the slot should refer to.  The name suggests it is
// used to update entries of the external string table -- confirm against
// callers, which are not visible in this chunk.
256 typedef String* (*ExternalStringTableUpdaterCallback)(Heap* heap,
257  Object** pointer);
258 
260  public:
261  explicit StoreBufferRebuilder(StoreBuffer* store_buffer)
262  : store_buffer_(store_buffer) {
263  }
264 
265  void Callback(MemoryChunk* page, StoreBufferEvent event);
266 
267  private:
268  StoreBuffer* store_buffer_;
269 
270  // We record in this variable how full the store buffer was when we started
271  // iterating over the current page, finding pointers to new space. If the
272  // store buffer overflows again we can exempt the page from the store buffer
273  // by rewinding to this point instead of having to search the store buffer.
274  Object*** start_of_current_page_;
275  // The current page we are scanning in the store buffer iterator.
276  MemoryChunk* current_page_;
277 };
278 
279 
280 
281 // The all static Heap captures the interface to the global object heap.
282 // All JavaScript contexts by this process share the same object heap.
283 
284 #ifdef DEBUG
285 class HeapDebugUtils;
286 #endif
287 
288 
289 // A queue of objects promoted during scavenge. Each object is accompanied
290 // by its size to avoid dereferencing a map pointer for scanning.
292  public:
293  explicit PromotionQueue(Heap* heap)
294  : front_(NULL),
295  rear_(NULL),
296  limit_(NULL),
297  emergency_stack_(0),
298  heap_(heap) { }
299 
300  void Initialize();
301 
302  void Destroy() {
303  ASSERT(is_empty());
304  delete emergency_stack_;
305  emergency_stack_ = NULL;
306  }
307 
308  inline void ActivateGuardIfOnTheSamePage();
309 
311  return Page::FromAllocationTop(reinterpret_cast<Address>(rear_));
312  }
313 
314  void SetNewLimit(Address limit) {
315  if (!guard_) {
316  return;
317  }
318 
319  ASSERT(GetHeadPage() == Page::FromAllocationTop(limit));
320  limit_ = reinterpret_cast<intptr_t*>(limit);
321 
322  if (limit_ <= rear_) {
323  return;
324  }
325 
326  RelocateQueueHead();
327  }
328 
329  bool is_empty() {
330  return (front_ == rear_) &&
331  (emergency_stack_ == NULL || emergency_stack_->length() == 0);
332  }
333 
334  inline void insert(HeapObject* target, int size);
335 
336  void remove(HeapObject** target, int* size) {
337  ASSERT(!is_empty());
338  if (front_ == rear_) {
339  Entry e = emergency_stack_->RemoveLast();
340  *target = e.obj_;
341  *size = e.size_;
342  return;
343  }
344 
345  if (NewSpacePage::IsAtStart(reinterpret_cast<Address>(front_))) {
346  NewSpacePage* front_page =
347  NewSpacePage::FromAddress(reinterpret_cast<Address>(front_));
348  ASSERT(!front_page->prev_page()->is_anchor());
349  front_ =
350  reinterpret_cast<intptr_t*>(front_page->prev_page()->area_end());
351  }
352  *target = reinterpret_cast<HeapObject*>(*(--front_));
353  *size = static_cast<int>(*(--front_));
354  // Assert no underflow.
355  SemiSpace::AssertValidRange(reinterpret_cast<Address>(rear_),
356  reinterpret_cast<Address>(front_));
357  }
358 
359  private:
360  // The front of the queue is higher in the memory page chain than the rear.
361  intptr_t* front_;
362  intptr_t* rear_;
363  intptr_t* limit_;
364 
365  bool guard_;
366 
367  static const int kEntrySizeInWords = 2;
368 
369  struct Entry {
370  Entry(HeapObject* obj, int size) : obj_(obj), size_(size) { }
371 
372  HeapObject* obj_;
373  int size_;
374  };
375  List<Entry>* emergency_stack_;
376 
377  Heap* heap_;
378 
379  void RelocateQueueHead();
380 
381  DISALLOW_COPY_AND_ASSIGN(PromotionQueue);
382 };
383 
384 
385 typedef void (*ScavengingCallback)(Map* map,
386  HeapObject** slot,
387  HeapObject* object);
388 
389 
390 // External strings table is a place where all external strings are
391 // registered. We need to keep track of such strings to properly
392 // finalize them.
394  public:
395  // Registers an external string.
396  inline void AddString(String* string);
397 
398  inline void Iterate(ObjectVisitor* v);
399 
400  // Restores internal invariant and gets rid of collected strings.
401  // Must be called after each Iterate() that modified the strings.
402  void CleanUp();
403 
404  // Destroys all allocated memory.
405  void TearDown();
406 
407  private:
408  ExternalStringTable() { }
409 
410  friend class Heap;
411 
412  inline void Verify();
413 
414  inline void AddOldString(String* string);
415 
416  // Notifies the table that only a prefix of the new list is valid.
417  inline void ShrinkNewStrings(int position);
418 
419 // To speed up scavenge collections new space strings are kept
420  // separate from old space strings.
421  List<Object*> new_space_strings_;
422  List<Object*> old_space_strings_;
423 
424  Heap* heap_;
425 
427 };
428 
429 
433 };
434 
435 class Heap {
436  public:
437  // Configure heap size before setup. Return false if the heap has been
438  // set up already.
439  bool ConfigureHeap(int max_semispace_size,
440  intptr_t max_old_gen_size,
441  intptr_t max_executable_size);
442  bool ConfigureHeapDefault();
443 
444  // Initializes the global object heap. If create_heap_objects is true,
445  // also creates the basic non-mutable objects.
446  // Returns whether it succeeded.
447  bool SetUp(bool create_heap_objects);
448 
449  // Destroys all memory allocated by the heap.
450  void TearDown();
451 
452  // Set the stack limit in the roots_ array. Some architectures generate
453  // code that looks here, because it is faster than loading from the static
454  // jslimit_/real_jslimit_ variable in the StackGuard.
455  void SetStackLimits();
456 
457  // Returns whether SetUp has been called.
458  bool HasBeenSetUp();
459 
460  // Returns the maximum amount of memory reserved for the heap. For
461  // the young generation, we reserve 4 times the amount needed for a
462  // semi space. The young generation consists of two semi spaces and
463  // we reserve twice the amount needed for those in order to ensure
464  // that new space can be aligned to its size.
465  intptr_t MaxReserved() {
466  return 4 * reserved_semispace_size_ + max_old_generation_size_;
467  }
468  int MaxSemiSpaceSize() { return max_semispace_size_; }
469  int ReservedSemiSpaceSize() { return reserved_semispace_size_; }
470  int InitialSemiSpaceSize() { return initial_semispace_size_; }
471  intptr_t MaxOldGenerationSize() { return max_old_generation_size_; }
472  intptr_t MaxExecutableSize() { return max_executable_size_; }
473 
474  // Returns the capacity of the heap in bytes w/o growing. Heap grows when
475  // more spaces are needed until it reaches the limit.
476  intptr_t Capacity();
477 
478  // Returns the amount of memory currently committed for the heap.
479  intptr_t CommittedMemory();
480 
481  // Returns the amount of executable memory currently committed for the heap.
482  intptr_t CommittedMemoryExecutable();
483 
484  // Returns the available bytes in space w/o growing.
485  // Heap doesn't guarantee that it can allocate an object that requires
486  // all available bytes. Check MaxHeapObjectSize() instead.
487  intptr_t Available();
488 
489 // Returns the size of all objects residing in the heap.
490  intptr_t SizeOfObjects();
491 
492  // Return the starting address and a mask for the new space. And-masking an
493  // address with the mask will result in the start address of the new space
494  // for all addresses in either semispace.
495  Address NewSpaceStart() { return new_space_.start(); }
496  uintptr_t NewSpaceMask() { return new_space_.mask(); }
497  Address NewSpaceTop() { return new_space_.top(); }
498 
499  NewSpace* new_space() { return &new_space_; }
500  OldSpace* old_pointer_space() { return old_pointer_space_; }
501  OldSpace* old_data_space() { return old_data_space_; }
502  OldSpace* code_space() { return code_space_; }
503  MapSpace* map_space() { return map_space_; }
504  CellSpace* cell_space() { return cell_space_; }
505  LargeObjectSpace* lo_space() { return lo_space_; }
506 
507  bool always_allocate() { return always_allocate_scope_depth_ != 0; }
509  return reinterpret_cast<Address>(&always_allocate_scope_depth_);
510  }
512  return linear_allocation_scope_depth_ != 0;
513  }
514 
516  return new_space_.allocation_top_address();
517  }
519  return new_space_.allocation_limit_address();
520  }
521 
522  // Uncommit unused semi space.
523  bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); }
524 
525  // Allocates and initializes a new JavaScript object based on a
526  // constructor.
527  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
528  // failed.
529  // Please note this does not perform a garbage collection.
530  MUST_USE_RESULT MaybeObject* AllocateJSObject(
531  JSFunction* constructor, PretenureFlag pretenure = NOT_TENURED);
532 
533  MUST_USE_RESULT MaybeObject* AllocateJSModule();
534 
535  // Allocate a JSArray with no elements
537  ElementsKind elements_kind,
538  PretenureFlag pretenure = NOT_TENURED) {
539  return AllocateJSArrayAndStorage(elements_kind, 0, 0,
541  pretenure);
542  }
543 
544  // Allocate a JSArray with a specified length but elements that are left
545  // uninitialized.
547  ElementsKind elements_kind,
548  int length,
549  int capacity,
551  PretenureFlag pretenure = NOT_TENURED);
552 
553  // Allocate a JSArray with no elements
555  FixedArrayBase* array_base,
556  ElementsKind elements_kind,
557  PretenureFlag pretenure = NOT_TENURED);
558 
559  // Allocates and initializes a new global object based on a constructor.
560  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
561  // failed.
562  // Please note this does not perform a garbage collection.
563  MUST_USE_RESULT MaybeObject* AllocateGlobalObject(JSFunction* constructor);
564 
565  // Returns a deep copy of the JavaScript object.
566  // Properties and elements are copied too.
567  // Returns failure if allocation failed.
568  MUST_USE_RESULT MaybeObject* CopyJSObject(JSObject* source);
569 
570  // Allocates the function prototype.
571  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
572  // failed.
573  // Please note this does not perform a garbage collection.
574  MUST_USE_RESULT MaybeObject* AllocateFunctionPrototype(JSFunction* function);
575 
576  // Allocates a Harmony proxy or function proxy.
577  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
578  // failed.
579  // Please note this does not perform a garbage collection.
580  MUST_USE_RESULT MaybeObject* AllocateJSProxy(Object* handler,
581  Object* prototype);
582 
583  MUST_USE_RESULT MaybeObject* AllocateJSFunctionProxy(Object* handler,
584  Object* call_trap,
585  Object* construct_trap,
586  Object* prototype);
587 
588  // Reinitialize a JSReceiver into an (empty) JS object of respective type and
589  // size, but keeping the original prototype. The receiver must have at least
590  // the size of the new object. The object is reinitialized and behaves as an
591  // object that has been freshly allocated.
592 // Returns failure if an error occurred, otherwise object.
595  int size);
596 
597  // Reinitialize an JSGlobalProxy based on a constructor. The object
598  // must have the same size as objects allocated using the
599  // constructor. The object is reinitialized and behaves as an
600  // object that has been freshly allocated using the constructor.
602  JSFunction* constructor, JSGlobalProxy* global);
603 
604  // Allocates and initializes a new JavaScript object based on a map.
605  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
606  // failed.
607  // Please note this does not perform a garbage collection.
609  Map* map, PretenureFlag pretenure = NOT_TENURED);
610 
611  // Allocates a heap object based on the map.
612  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
613  // failed.
614  // Please note this function does not perform a garbage collection.
615  MUST_USE_RESULT MaybeObject* Allocate(Map* map, AllocationSpace space);
616 
617  // Allocates a JS Map in the heap.
618  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
619  // failed.
620  // Please note this function does not perform a garbage collection.
621  MUST_USE_RESULT MaybeObject* AllocateMap(
622  InstanceType instance_type,
623  int instance_size,
624  ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND);
625 
626  // Allocates a partial map for bootstrapping.
627  MUST_USE_RESULT MaybeObject* AllocatePartialMap(InstanceType instance_type,
628  int instance_size);
629 
630  // Allocate a map for the specified function
631  MUST_USE_RESULT MaybeObject* AllocateInitialMap(JSFunction* fun);
632 
633  // Allocates an empty code cache.
634  MUST_USE_RESULT MaybeObject* AllocateCodeCache();
635 
636  // Allocates a serialized scope info.
637  MUST_USE_RESULT MaybeObject* AllocateScopeInfo(int length);
638 
639  // Allocates an empty PolymorphicCodeCache.
641 
642  // Allocates a pre-tenured empty AccessorPair.
643  MUST_USE_RESULT MaybeObject* AllocateAccessorPair();
644 
645  // Allocates an empty TypeFeedbackInfo.
647 
648  // Allocates an AliasedArgumentsEntry.
649  MUST_USE_RESULT MaybeObject* AllocateAliasedArgumentsEntry(int slot);
650 
651  // Clear the Instanceof cache (used when a prototype changes).
652  inline void ClearInstanceofCache();
653 
654  // Allocates and fully initializes a String. There are two String
655  // encodings: ASCII and two byte. One should choose between the three string
656  // allocation functions based on the encoding of the string buffer used to
657  // initialized the string.
658  // - ...FromAscii initializes the string from a buffer that is ASCII
659  // encoded (it does not check that the buffer is ASCII encoded) and the
660  // result will be ASCII encoded.
661  // - ...FromUTF8 initializes the string from a buffer that is UTF-8
662  // encoded. If the characters are all single-byte characters, the
663  // result will be ASCII encoded, otherwise it will converted to two
664  // byte.
665  // - ...FromTwoByte initializes the string from a buffer that is two-byte
666  // encoded. If the characters are all single-byte characters, the
667  // result will be converted to ASCII, otherwise it will be left as
668  // two-byte.
669  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
670  // failed.
671  // Please note this does not perform a garbage collection.
673  Vector<const char> str,
674  PretenureFlag pretenure = NOT_TENURED);
675  MUST_USE_RESULT inline MaybeObject* AllocateStringFromUtf8(
676  Vector<const char> str,
677  PretenureFlag pretenure = NOT_TENURED);
679  Vector<const char> str,
680  PretenureFlag pretenure = NOT_TENURED);
682  Vector<const uc16> str,
683  PretenureFlag pretenure = NOT_TENURED);
684 
685  // Allocates a symbol in old space based on the character stream.
686  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
687  // failed.
688  // Please note this function does not perform a garbage collection.
689  MUST_USE_RESULT inline MaybeObject* AllocateSymbol(Vector<const char> str,
690  int chars,
691  uint32_t hash_field);
692 
693  MUST_USE_RESULT inline MaybeObject* AllocateAsciiSymbol(
694  Vector<const char> str,
695  uint32_t hash_field);
696 
697  MUST_USE_RESULT inline MaybeObject* AllocateTwoByteSymbol(
698  Vector<const uc16> str,
699  uint32_t hash_field);
700 
702  unibrow::CharacterStream* buffer, int chars, uint32_t hash_field);
703 
705  Vector<const char> str,
706  int chars);
707 
708  // Allocates and partially initializes a String. There are two String
709  // encodings: ASCII and two byte. These functions allocate a string of the
710  // given length and set its map and length fields. The characters of the
711  // string are uninitialized.
712  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
713  // failed.
714  // Please note this does not perform a garbage collection.
716  int length,
717  PretenureFlag pretenure = NOT_TENURED);
719  int length,
720  PretenureFlag pretenure = NOT_TENURED);
721 
722  // Computes a single character string where the character has code.
723  // A cache is used for ASCII codes.
724  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
725  // failed. Please note this does not perform a garbage collection.
727  uint16_t code);
728 
729  // Allocate a byte array of the specified length
730  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
731  // failed.
732  // Please note this does not perform a garbage collection.
733  MUST_USE_RESULT MaybeObject* AllocateByteArray(int length,
734  PretenureFlag pretenure);
735 
736  // Allocate a non-tenured byte array of the specified length
737  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
738  // failed.
739  // Please note this does not perform a garbage collection.
740  MUST_USE_RESULT MaybeObject* AllocateByteArray(int length);
741 
742  // Allocates an external array of the specified length and type.
743  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
744  // failed.
745  // Please note this does not perform a garbage collection.
747  int length,
748  ExternalArrayType array_type,
749  void* external_pointer,
750  PretenureFlag pretenure);
751 
752  // Allocate a tenured JS global property cell.
753  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
754  // failed.
755  // Please note this does not perform a garbage collection.
757 
758  // Allocates a fixed array initialized with undefined values
759  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
760  // failed.
761  // Please note this does not perform a garbage collection.
762  MUST_USE_RESULT MaybeObject* AllocateFixedArray(int length,
763  PretenureFlag pretenure);
764  // Allocates a fixed array initialized with undefined values
765  MUST_USE_RESULT MaybeObject* AllocateFixedArray(int length);
766 
767  // Allocates an uninitialized fixed array. It must be filled by the caller.
768  //
769  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
770  // failed.
771  // Please note this does not perform a garbage collection.
772  MUST_USE_RESULT MaybeObject* AllocateUninitializedFixedArray(int length);
773 
774  // Make a copy of src and return it. Returns
775  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
776  MUST_USE_RESULT inline MaybeObject* CopyFixedArray(FixedArray* src);
777 
778  // Make a copy of src, set the map, and return the copy. Returns
779  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
780  MUST_USE_RESULT MaybeObject* CopyFixedArrayWithMap(FixedArray* src, Map* map);
781 
782  // Make a copy of src and return it. Returns
783  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
784  MUST_USE_RESULT inline MaybeObject* CopyFixedDoubleArray(
785  FixedDoubleArray* src);
786 
787  // Make a copy of src, set the map, and return the copy. Returns
788  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
790  FixedDoubleArray* src, Map* map);
791 
792  // Allocates a fixed array initialized with the hole values.
793  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
794  // failed.
795  // Please note this does not perform a garbage collection.
797  int length,
798  PretenureFlag pretenure = NOT_TENURED);
799 
801  int length,
802  PretenureFlag pretenure);
803 
804  // Allocates a fixed double array with uninitialized values. Returns
805  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
806  // Please note this does not perform a garbage collection.
808  int length,
809  PretenureFlag pretenure = NOT_TENURED);
810 
811  // Allocates a fixed double array with hole values. Returns
812  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
813  // Please note this does not perform a garbage collection.
815  int length,
816  PretenureFlag pretenure = NOT_TENURED);
817 
818  // AllocateHashTable is identical to AllocateFixedArray except
819  // that the resulting object has hash_table_map as map.
820  MUST_USE_RESULT MaybeObject* AllocateHashTable(
821  int length, PretenureFlag pretenure = NOT_TENURED);
822 
823  // Allocate a global (but otherwise uninitialized) context.
824  MUST_USE_RESULT MaybeObject* AllocateGlobalContext();
825 
826  // Allocate a module context.
827  MUST_USE_RESULT MaybeObject* AllocateModuleContext(Context* previous,
828  ScopeInfo* scope_info);
829 
830  // Allocate a function context.
831  MUST_USE_RESULT MaybeObject* AllocateFunctionContext(int length,
832  JSFunction* function);
833 
834  // Allocate a catch context.
835  MUST_USE_RESULT MaybeObject* AllocateCatchContext(JSFunction* function,
836  Context* previous,
837  String* name,
838  Object* thrown_object);
839  // Allocate a 'with' context.
840  MUST_USE_RESULT MaybeObject* AllocateWithContext(JSFunction* function,
841  Context* previous,
842  JSObject* extension);
843 
844  // Allocate a block context.
845  MUST_USE_RESULT MaybeObject* AllocateBlockContext(JSFunction* function,
846  Context* previous,
847  ScopeInfo* info);
848 
849  // Allocates a new utility object in the old generation.
851 
852  // Allocates a function initialized with a shared part.
853  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
854  // failed.
855  // Please note this does not perform a garbage collection.
856  MUST_USE_RESULT MaybeObject* AllocateFunction(
857  Map* function_map,
858  SharedFunctionInfo* shared,
859  Object* prototype,
860  PretenureFlag pretenure = TENURED);
861 
862  // Arguments object size.
863  static const int kArgumentsObjectSize =
865  // Strict mode arguments has no callee so it is smaller.
866  static const int kArgumentsObjectSizeStrict =
868  // Indices for direct access into argument objects.
869  static const int kArgumentsLengthIndex = 0;
870  // callee is only valid in non-strict mode.
871  static const int kArgumentsCalleeIndex = 1;
872 
873  // Allocates an arguments object - optionally with an elements array.
874  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
875  // failed.
876  // Please note this does not perform a garbage collection.
878  Object* callee, int length);
879 
880  // Same as NewNumberFromDouble, but may return a preallocated/immutable
881  // number object (e.g., minus_zero_value_, nan_value_)
882  MUST_USE_RESULT MaybeObject* NumberFromDouble(
883  double value, PretenureFlag pretenure = NOT_TENURED);
884 
885  // Allocates a HeapNumber from value.
886  MUST_USE_RESULT MaybeObject* AllocateHeapNumber(
887  double value,
888  PretenureFlag pretenure);
889  // pretenure = NOT_TENURED
890  MUST_USE_RESULT MaybeObject* AllocateHeapNumber(double value);
891 
892  // Converts an int into either a Smi or a HeapNumber object.
893  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
894  // failed.
895  // Please note this does not perform a garbage collection.
896  MUST_USE_RESULT inline MaybeObject* NumberFromInt32(
897  int32_t value, PretenureFlag pretenure = NOT_TENURED);
898 
899  // Converts an int into either a Smi or a HeapNumber object.
900  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
901  // failed.
902  // Please note this does not perform a garbage collection.
903  MUST_USE_RESULT inline MaybeObject* NumberFromUint32(
904  uint32_t value, PretenureFlag pretenure = NOT_TENURED);
905 
906  // Allocates a new foreign object.
907  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
908  // failed.
909  // Please note this does not perform a garbage collection.
910  MUST_USE_RESULT MaybeObject* AllocateForeign(
911  Address address, PretenureFlag pretenure = NOT_TENURED);
912 
913  // Allocates a new SharedFunctionInfo object.
914  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
915  // failed.
916  // Please note this does not perform a garbage collection.
918 
919  // Allocates a new JSMessageObject object.
920  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
921  // failed.
922  // Please note that this does not perform a garbage collection.
924  String* type,
925  JSArray* arguments,
926  int start_position,
927  int end_position,
928  Object* script,
929  Object* stack_trace,
930  Object* stack_frames);
931 
932  // Allocates a new cons string object.
933  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
934  // failed.
935  // Please note this does not perform a garbage collection.
936  MUST_USE_RESULT MaybeObject* AllocateConsString(String* first,
937  String* second);
938 
939  // Allocates a new sub string object which is a substring of an underlying
940  // string buffer stretching from the index start (inclusive) to the index
941  // end (exclusive).
942  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
943  // failed.
944  // Please note this does not perform a garbage collection.
945  MUST_USE_RESULT MaybeObject* AllocateSubString(
946  String* buffer,
947  int start,
948  int end,
949  PretenureFlag pretenure = NOT_TENURED);
950 
951  // Allocate a new external string object, which is backed by a string
952  // resource that resides outside the V8 heap.
953  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
954  // failed.
955  // Please note this does not perform a garbage collection.
957  const ExternalAsciiString::Resource* resource);
959  const ExternalTwoByteString::Resource* resource);
960 
961  // Finalizes an external string by deleting the associated external
962  // data and clearing the resource pointer.
963  inline void FinalizeExternalString(String* string);
964 
965  // Allocates an uninitialized object. The memory is non-executable if the
966  // hardware and OS allow.
967  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
968  // failed.
969  // Please note this function does not perform a garbage collection.
970  MUST_USE_RESULT inline MaybeObject* AllocateRaw(int size_in_bytes,
971  AllocationSpace space,
972  AllocationSpace retry_space);
973 
974  // Initialize a filler object to keep the ability to iterate over the heap
975  // when shortening objects.
976  void CreateFillerObjectAt(Address addr, int size);
977 
978  // Makes a new native code object
979  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
980  // failed. On success, the pointer to the Code object is stored in the
981  // self_reference. This allows generated code to reference its own Code
982  // object by containing this pointer.
983  // Please note this function does not perform a garbage collection.
984  MUST_USE_RESULT MaybeObject* CreateCode(const CodeDesc& desc,
986  Handle<Object> self_reference,
987  bool immovable = false);
988 
989  MUST_USE_RESULT MaybeObject* CopyCode(Code* code);
990 
991  // Copy the code and scope info part of the code object, but insert
992  // the provided data as the relocation information.
993  MUST_USE_RESULT MaybeObject* CopyCode(Code* code, Vector<byte> reloc_info);
994 
995  // Finds the symbol for string in the symbol table.
996  // If not found, a new symbol is added to the table and returned.
997  // Returns Failure::RetryAfterGC(requested_bytes, space) if allocation
998  // failed.
999  // Please note this function does not perform a garbage collection.
1003  MUST_USE_RESULT MaybeObject* LookupAsciiSymbol(const char* str) {
1004  return LookupSymbol(CStrVector(str));
1005  }
1006  MUST_USE_RESULT MaybeObject* LookupSymbol(String* str);
1008  int from,
1009  int length);
1010 
1011  bool LookupSymbolIfExists(String* str, String** symbol);
1012  bool LookupTwoCharsSymbolIfExists(String* str, String** symbol);
1013 
1014  // Compute the matching symbol map for a string if possible.
1015  // NULL is returned if string is in new space or not flattened.
1016  Map* SymbolMapForString(String* str);
1017 
1018  // Tries to flatten a string before compare operation.
1019  //
1020  // Returns a failure in case it was decided that flattening was
1021  // necessary and failed. Note, if flattening is not necessary the
1022  // string might stay non-flat even when no failure is returned.
1023  //
1024  // Please note this function does not perform a garbage collection.
1025  MUST_USE_RESULT inline MaybeObject* PrepareForCompare(String* str);
1026 
1027  // Converts the given boolean condition to JavaScript boolean value.
1028  inline Object* ToBoolean(bool condition);
1029 
1030  // Code that should be run before and after each GC. Includes some
1031  // reporting/verification activities when compiled with DEBUG set.
1034 
1035  // Performs garbage collection operation.
1036  // Returns whether there is a chance that another major GC could
1037  // collect more garbage.
1038  bool CollectGarbage(AllocationSpace space,
1039  GarbageCollector collector,
1040  const char* gc_reason,
1041  const char* collector_reason);
1042 
1043  // Performs garbage collection operation.
1044  // Returns whether there is a chance that another major GC could
1045  // collect more garbage.
1046  inline bool CollectGarbage(AllocationSpace space,
1047  const char* gc_reason = NULL);
1048 
1049  static const int kNoGCFlags = 0;
1050  static const int kSweepPreciselyMask = 1;
1051  static const int kReduceMemoryFootprintMask = 2;
1052  static const int kAbortIncrementalMarkingMask = 4;
1053 
1054  // Making the heap iterable requires us to sweep precisely and abort any
1055  // incremental marking as well.
1056  static const int kMakeHeapIterableMask =
1058 
1059  // Performs a full garbage collection. If (flags & kMakeHeapIterableMask) is
1060  // non-zero, then the slower precise sweeper is used, which leaves the heap
1061  // in a state where we can iterate over the heap visiting all objects.
1062  void CollectAllGarbage(int flags, const char* gc_reason = NULL);
1063 
1064  // Last hope GC, should try to squeeze as much as possible.
1065  void CollectAllAvailableGarbage(const char* gc_reason = NULL);
1066 
1067  // Check whether the heap is currently iterable.
1068  bool IsHeapIterable();
1069 
1070  // Ensure that we have swept all spaces in such a way that we can iterate
1071  // over all objects. May cause a GC.
1072  void EnsureHeapIsIterable();
1073 
1074  // Notify the heap that a context has been disposed.
1075  int NotifyContextDisposed() { return ++contexts_disposed_; }
1076 
1077  // Utility to invoke the scavenger. This is needed in test code to
1078  // ensure correct callback for weak global handles.
1079  void PerformScavenge();
1080 
1082  scan_on_scavenge_pages_++;
1083  if (FLAG_gc_verbose) {
1084  PrintF("Scan-on-scavenge pages: %d\n", scan_on_scavenge_pages_);
1085  }
1086  }
1087 
1089  scan_on_scavenge_pages_--;
1090  if (FLAG_gc_verbose) {
1091  PrintF("Scan-on-scavenge pages: %d\n", scan_on_scavenge_pages_);
1092  }
1093  }
1094 
1095  PromotionQueue* promotion_queue() { return &promotion_queue_; }
1096 
1097 #ifdef DEBUG
1098  // Utility used with flag gc-greedy.
1099  void GarbageCollectionGreedyCheck();
1100 #endif
1101 
1102  void AddGCPrologueCallback(
1103  GCEpilogueCallback callback, GCType gc_type_filter);
1105 
1106  void AddGCEpilogueCallback(
1107  GCEpilogueCallback callback, GCType gc_type_filter);
1109 
1111  ASSERT((callback == NULL) ^ (global_gc_prologue_callback_ == NULL));
1112  global_gc_prologue_callback_ = callback;
1113  }
1115  ASSERT((callback == NULL) ^ (global_gc_epilogue_callback_ == NULL));
1116  global_gc_epilogue_callback_ = callback;
1117  }
1118 
1119  // Heap root getters. We have versions with and without type::cast() here.
1120  // You can't use type::cast during GC because the assert fails.
1121  // TODO(1490): Try removing the unchecked accessors, now that GC marking does
1122  // not corrupt the map.
1123 #define ROOT_ACCESSOR(type, name, camel_name) \
1124  type* name() { \
1125  return type::cast(roots_[k##camel_name##RootIndex]); \
1126  } \
1127  type* raw_unchecked_##name() { \
1128  return reinterpret_cast<type*>(roots_[k##camel_name##RootIndex]); \
1129  }
1131 #undef ROOT_ACCESSOR
1132 
1133 // Utility type maps
1134 #define STRUCT_MAP_ACCESSOR(NAME, Name, name) \
1135  Map* name##_map() { \
1136  return Map::cast(roots_[k##Name##MapRootIndex]); \
1137  }
1139 #undef STRUCT_MAP_ACCESSOR
1140 
1141 #define SYMBOL_ACCESSOR(name, str) String* name() { \
1142  return String::cast(roots_[k##name##RootIndex]); \
1143  }
1145 #undef SYMBOL_ACCESSOR
1146 
1147  // The hidden_symbol is special because it is the empty string, but does
1148  // not match the empty string.
1149  String* hidden_symbol() { return hidden_symbol_; }
1150 
1152  global_contexts_list_ = object;
1153  }
1154  Object* global_contexts_list() { return global_contexts_list_; }
1155 
1156  // Number of mark-sweeps.
1157  int ms_count() { return ms_count_; }
1158 
1159  // Iterates over all roots in the heap.
1160  void IterateRoots(ObjectVisitor* v, VisitMode mode);
1161  // Iterates over all strong roots in the heap.
1162  void IterateStrongRoots(ObjectVisitor* v, VisitMode mode);
1163  // Iterates over all the other roots in the heap.
1164  void IterateWeakRoots(ObjectVisitor* v, VisitMode mode);
1165 
1166  // Iterate pointers to from semispace of new space found in memory interval
1167  // from start to end.
1169  Address end,
1170  ObjectSlotCallback callback);
1171 
1172  // Returns whether the object resides in new space.
1173  inline bool InNewSpace(Object* object);
1174  inline bool InNewSpace(Address addr);
1175  inline bool InNewSpacePage(Address addr);
1176  inline bool InFromSpace(Object* object);
1177  inline bool InToSpace(Object* object);
1178 
1179  // Checks whether an address/object in the heap (including auxiliary
1180  // area and unused area).
1181  bool Contains(Address addr);
1182  bool Contains(HeapObject* value);
1183 
1184  // Checks whether an address/object in a space.
1185  // Currently used by tests, serialization and heap verification only.
1186  bool InSpace(Address addr, AllocationSpace space);
1187  bool InSpace(HeapObject* value, AllocationSpace space);
1188 
1189  // Finds out which space an object should get promoted to based on its type.
1190  inline OldSpace* TargetSpace(HeapObject* object);
1192 
1193  // Sets the stub_cache_ (only used when expanding the dictionary).
1195  roots_[kCodeStubsRootIndex] = value;
1196  }
1197 
1198  // Support for computing object sizes for old objects during GCs. Returns
1199  // a function that is guaranteed to be safe for computing object sizes in
1200  // the current GC phase.
1202  return gc_safe_size_of_old_object_;
1203  }
1204 
1205  // Sets the non_monomorphic_cache_ (only used when expanding the dictionary).
1207  roots_[kNonMonomorphicCacheRootIndex] = value;
1208  }
1209 
1211  roots_[kEmptyScriptRootIndex] = script;
1212  }
1213 
1215  roots_[kStoreBufferTopRootIndex] = reinterpret_cast<Smi*>(top);
1216  }
1217 
1218  // Update the next script id.
1219  inline void SetLastScriptId(Object* last_script_id);
1220 
1221  // Generated code can embed this address to get access to the roots.
1222  Object** roots_array_start() { return roots_; }
1223 
1225  return reinterpret_cast<Address*>(&roots_[kStoreBufferTopRootIndex]);
1226  }
1227 
1228  // Get address of global contexts list for serialization support.
1230  return &global_contexts_list_;
1231  }
1232 
1233 #ifdef DEBUG
1234  void Print();
1235  void PrintHandles();
1236 
1237  // Verify the heap is in its normal state before or after a GC.
1238  void Verify();
1239 
1240  // Verify that AccessorPairs are not shared, i.e. make sure that they have
1241  // exactly one pointer to them.
1242  void VerifyNoAccessorPairSharing();
1243 
1244  void OldPointerSpaceCheckStoreBuffer();
1245  void MapSpaceCheckStoreBuffer();
1246  void LargeObjectSpaceCheckStoreBuffer();
1247 
1248  // Report heap statistics.
1249  void ReportHeapStatistics(const char* title);
1250  void ReportCodeStatistics(const char* title);
1251 
1252  // Fill in bogus values in from space
1253  void ZapFromSpace();
1254 #endif
1255 
1256  // Print short heap statistics.
1257  void PrintShortHeapStatistics();
1258 
1259  // Makes a new symbol object
1260  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
1261  // failed.
1262  // Please note this function does not perform a garbage collection.
1263  MUST_USE_RESULT MaybeObject* CreateSymbol(
1264  const char* str, int length, int hash);
1265  MUST_USE_RESULT MaybeObject* CreateSymbol(String* str);
1266 
1267  // Write barrier support for address[offset] = o.
1268  inline void RecordWrite(Address address, int offset);
1269 
1270  // Write barrier support for address[start : start + len[ = o.
1271  inline void RecordWrites(Address address, int start, int len);
1272 
1273  // Given an address occupied by a live code object, return that object.
1275 
1276  // Invoke Shrink on shrinkable spaces.
1277  void Shrink();
1278 
1280  inline HeapState gc_state() { return gc_state_; }
1281 
1282  inline bool IsInGCPostProcessing() { return gc_post_processing_depth_ > 0; }
1283 
1284 #ifdef DEBUG
1285  bool IsAllocationAllowed() { return allocation_allowed_; }
1286  inline bool allow_allocation(bool enable);
1287 
1288  bool disallow_allocation_failure() {
1289  return disallow_allocation_failure_;
1290  }
1291 
1292  void TracePathToObject(Object* target);
1293  void TracePathToGlobal();
1294 #endif
1295 
1296  // Callback function passed to Heap::Iterate etc. Copies an object if
1297  // necessary, the object might be promoted to an old space. The caller must
1298  // ensure the precondition that the object is (a) a heap object and (b) in
1299  // the heap's from space.
1300  static inline void ScavengePointer(HeapObject** p);
1301  static inline void ScavengeObject(HeapObject** p, HeapObject* object);
1302 
1303  // Commits from space if it is uncommitted.
1305 
1306  // Support for partial snapshots. After calling this we can allocate a
1307  // certain number of bytes using only linear allocation (with a
1308  // LinearAllocationScope and an AlwaysAllocateScope) without using freelists
1309  // or causing a GC. It returns true if space was reserved or false if a GC is
1310  // needed. For paged spaces the space requested must include the space wasted
1311  // at the end of each page when allocating linearly.
1312  void ReserveSpace(
1313  int new_space_size,
1314  int pointer_space_size,
1315  int data_space_size,
1316  int code_space_size,
1317  int map_space_size,
1318  int cell_space_size,
1319  int large_object_size);
1320 
1321  //
1322  // Support for the API.
1323  //
1324 
1325  bool CreateApiObjects();
1326 
1327  // Attempt to find the number in a small cache. If we find it, return
1328  // the string representation of the number. Otherwise return undefined.
1330 
1331  // Update the cache with a new number-string pair.
1332  void SetNumberStringCache(Object* number, String* str);
1333 
1334  // Adjusts the amount of registered external memory.
1335  // Returns the adjusted value.
1336  inline intptr_t AdjustAmountOfExternalAllocatedMemory(
1337  intptr_t change_in_bytes);
1338 
1339  // Allocate uninitialized fixed array.
1340  MUST_USE_RESULT MaybeObject* AllocateRawFixedArray(int length);
1341  MUST_USE_RESULT MaybeObject* AllocateRawFixedArray(int length,
1342  PretenureFlag pretenure);
1343 
1344  inline intptr_t PromotedTotalSize() {
1345  return PromotedSpaceSizeOfObjects() + PromotedExternalMemorySize();
1346  }
1347 
1348  // True if we have reached the allocation limit in the old generation that
1349  // should force the next GC (caused normally) to be a full one.
1351  return PromotedTotalSize() > old_gen_promotion_limit_;
1352  }
1353 
1354  inline intptr_t OldGenerationSpaceAvailable() {
1355  return old_gen_allocation_limit_ - PromotedTotalSize();
1356  }
1357 
1359  return max_old_generation_size_ - PromotedTotalSize();
1360  }
1361 
1362  static const intptr_t kMinimumPromotionLimit = 5 * Page::kPageSize;
1363  static const intptr_t kMinimumAllocationLimit =
1364  8 * (Page::kPageSize > MB ? Page::kPageSize : MB);
1365 
1366  intptr_t OldGenPromotionLimit(intptr_t old_gen_size) {
1367  const int divisor = FLAG_stress_compaction ? 10 : 3;
1368  intptr_t limit =
1369  Max(old_gen_size + old_gen_size / divisor, kMinimumPromotionLimit);
1370  limit += new_space_.Capacity();
1371  limit *= old_gen_limit_factor_;
1372  intptr_t halfway_to_the_max = (old_gen_size + max_old_generation_size_) / 2;
1373  return Min(limit, halfway_to_the_max);
1374  }
1375 
1376  intptr_t OldGenAllocationLimit(intptr_t old_gen_size) {
1377  const int divisor = FLAG_stress_compaction ? 8 : 2;
1378  intptr_t limit =
1379  Max(old_gen_size + old_gen_size / divisor, kMinimumAllocationLimit);
1380  limit += new_space_.Capacity();
1381  limit *= old_gen_limit_factor_;
1382  intptr_t halfway_to_the_max = (old_gen_size + max_old_generation_size_) / 2;
1383  return Min(limit, halfway_to_the_max);
1384  }
1385 
1386  // Implements the corresponding V8 API function.
1387  bool IdleNotification(int hint);
1388 
1389  // Declare all the root indices.
1391 #define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
1393 #undef ROOT_INDEX_DECLARATION
1394 
1395 // Utility type maps
1396 #define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
1398 #undef DECLARE_STRUCT_MAP
1399 
1400 #define SYMBOL_INDEX_DECLARATION(name, str) k##name##RootIndex,
1402 #undef SYMBOL_DECLARATION
1403 
1407  };
1408 
1409  STATIC_CHECK(kUndefinedValueRootIndex == Internals::kUndefinedValueRootIndex);
1410  STATIC_CHECK(kNullValueRootIndex == Internals::kNullValueRootIndex);
1411  STATIC_CHECK(kTrueValueRootIndex == Internals::kTrueValueRootIndex);
1412  STATIC_CHECK(kFalseValueRootIndex == Internals::kFalseValueRootIndex);
1413  STATIC_CHECK(kempty_symbolRootIndex == Internals::kEmptySymbolRootIndex);
1414 
1415  MUST_USE_RESULT MaybeObject* NumberToString(
1416  Object* number, bool check_number_string_cache = true);
1417  MUST_USE_RESULT MaybeObject* Uint32ToString(
1418  uint32_t value, bool check_number_string_cache = true);
1419 
1422  ExternalArrayType array_type);
1423 
1424  void RecordStats(HeapStats* stats, bool take_snapshot = false);
1425 
1426  // Copy block of memory from src to dst. Size of block should be aligned
1427  // by pointer size.
1428  static inline void CopyBlock(Address dst, Address src, int byte_size);
1429 
1430  // Optimized version of memmove for blocks with pointer size aligned sizes and
1431  // pointer size aligned addresses.
1432  static inline void MoveBlock(Address dst, Address src, int byte_size);
1433 
1434  // Check new space expansion criteria and expand semispaces if it was hit.
1436 
// Records how much data survived the most recent scavenge: overwrites the
// per-GC counter and accumulates the running total used by the new-space
// expansion heuristic (see survived_since_last_expansion_).
1437  inline void IncrementYoungSurvivorsCounter(int survived) {
1438  ASSERT(survived >= 0);
1439  young_survivors_after_last_gc_ = survived;
1440  survived_since_last_expansion_ += survived;
1441  }
1442 
1443  inline bool NextGCIsLikelyToBeFull() {
1444  if (FLAG_gc_global) return true;
1445 
1446  if (FLAG_stress_compaction && (gc_count_ & 1) != 0) return true;
1447 
1448  intptr_t total_promoted = PromotedTotalSize();
1449 
1450  intptr_t adjusted_promotion_limit =
1451  old_gen_promotion_limit_ - new_space_.Capacity();
1452 
1453  if (total_promoted >= adjusted_promotion_limit) return true;
1454 
1455  intptr_t adjusted_allocation_limit =
1456  old_gen_allocation_limit_ - new_space_.Capacity() / 5;
1457 
1458  if (PromotedSpaceSizeOfObjects() >= adjusted_allocation_limit) return true;
1459 
1460  return false;
1461  }
1462 
1463 
1465  ExternalStringTableUpdaterCallback updater_func);
1466 
1468  ExternalStringTableUpdaterCallback updater_func);
1469 
1470  void ProcessWeakReferences(WeakObjectRetainer* retainer);
1471 
1473 
1474  // Helper function that governs the promotion policy from new space to
1475  // old. If the object's old address lies below the new space's age
1476  // mark or if we've already filled the bottom 1/16th of the to space,
1477  // we try to promote this object.
1478  inline bool ShouldBePromoted(Address old_address, int object_size);
1479 
1480  int MaxObjectSizeInNewSpace() { return kMaxObjectSizeInNewSpace; }
1481 
1483 
1484  void ClearNormalizedMapCaches();
1485 
1486  // Clears the cache of ICs related to this map.
1487  void ClearCacheOnMap(Map* map) {
1488  if (FLAG_cleanup_code_caches_at_gc) {
1489  map->ClearCodeCache(this);
1490  }
1491  }
1492 
1493  GCTracer* tracer() { return tracer_; }
1494 
1495  // Returns the size of objects residing in non new spaces.
1496  intptr_t PromotedSpaceSizeOfObjects();
1497 
1498  double total_regexp_code_generated() { return total_regexp_code_generated_; }
1500  total_regexp_code_generated_ += size;
1501  }
1502 
1503  // Returns maximum GC pause.
1504  int get_max_gc_pause() { return max_gc_pause_; }
1505 
1506  // Returns maximum size of objects alive after GC.
1507  intptr_t get_max_alive_after_gc() { return max_alive_after_gc_; }
1508 
1509  // Returns minimal interval between two subsequent collections.
1510  int get_min_in_mutator() { return min_in_mutator_; }
1511 
1513  return &mark_compact_collector_;
1514  }
1515 
1517  return &store_buffer_;
1518  }
1519 
1521  return &marking_;
1522  }
1523 
1525  return &incremental_marking_;
1526  }
1527 
1529  return old_data_space()->IsSweepingComplete() &&
1531  }
1532 
1533  bool AdvanceSweepers(int step_size) {
1534  bool sweeping_complete = old_data_space()->AdvanceSweeper(step_size);
1535  sweeping_complete &= old_pointer_space()->AdvanceSweeper(step_size);
1536  return sweeping_complete;
1537  }
1538 
1540  return &external_string_table_;
1541  }
1542 
1543  // Returns the current sweep generation.
1545  return sweep_generation_;
1546  }
1547 
1548  inline Isolate* isolate();
1549 
1551  if (global_gc_prologue_callback_ != NULL) global_gc_prologue_callback_();
1552  }
1553 
1555  if (global_gc_epilogue_callback_ != NULL) global_gc_epilogue_callback_();
1556  }
1557 
1559 
1560  inline void DoScavengeObject(Map* map, HeapObject** slot, HeapObject* obj) {
1561  scavenging_visitors_table_.GetVisitor(map)(map, slot, obj);
1562  }
1563 
1564  void QueueMemoryChunkForFree(MemoryChunk* chunk);
1565  void FreeQueuedChunks();
1566 
1567  // Completely clear the Instanceof cache (to stop it keeping objects alive
1568  // around a GC).
1569  inline void CompletelyClearInstanceofCache();
1570 
1571  // The roots that have an index less than this are always in old space.
1572  static const int kOldSpaceRoots = 0x20;
1573 
// Returns the seed used for string hashing, read from the hash_seed root.
// Unless --randomize-hashes is enabled the seed must be zero (asserted).
1574  uint32_t HashSeed() {
1575  uint32_t seed = static_cast<uint32_t>(hash_seed()->value());
1576  ASSERT(FLAG_randomize_hashes || seed == 0);
1577  return seed;
1578  }
1579 
// Records the PC offset used when deoptimizing through the arguments
// adaptor frame. Write-once: the slot must still hold its initial zero.
1580  void SetArgumentsAdaptorDeoptPCOffset(int pc_offset) {
1581  ASSERT(arguments_adaptor_deopt_pc_offset() == Smi::FromInt(0));
1582  set_arguments_adaptor_deopt_pc_offset(Smi::FromInt(pc_offset));
1583  }
1584 
// Records the PC offset used when deoptimizing through the construct
// stub. Write-once: the slot must still hold its initial zero.
1585  void SetConstructStubDeoptPCOffset(int pc_offset) {
1586  ASSERT(construct_stub_deopt_pc_offset() == Smi::FromInt(0));
1587  set_construct_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
1588  }
1589 
1590  // For post mortem debugging.
1591  void RememberUnmappedPage(Address page, bool compacted);
1592 
1593  // Global inline caching age: it is incremented on some GCs after context
1594  // disposal. We use it to flush inline caches.
1596  return global_ic_age_;
1597  }
1598 
1600  global_ic_age_ = (global_ic_age_ + 1) & SharedFunctionInfo::ICAgeBits::kMax;
1601  }
1602 
1603  private:
1604  Heap();
1605 
1606  // This can be calculated directly from a pointer to the heap; however, it is
1607  // more expedient to get at the isolate directly from within Heap methods.
1608  Isolate* isolate_;
1609 
1610  Object* roots_[kRootListLength];
1611 
1612  intptr_t code_range_size_;
1613  int reserved_semispace_size_;
1614  int max_semispace_size_;
1615  int initial_semispace_size_;
1616  intptr_t max_old_generation_size_;
1617  intptr_t max_executable_size_;
1618 
1619  // For keeping track of how much data has survived
1620  // scavenge since last new space expansion.
1621  int survived_since_last_expansion_;
1622 
1623  // For keeping track on when to flush RegExp code.
1624  int sweep_generation_;
1625 
1626  int always_allocate_scope_depth_;
1627  int linear_allocation_scope_depth_;
1628 
1629  // For keeping track of context disposals.
1630  int contexts_disposed_;
1631 
1632  int global_ic_age_;
1633 
1634  int scan_on_scavenge_pages_;
1635 
1636 #if defined(V8_TARGET_ARCH_X64)
1637  static const int kMaxObjectSizeInNewSpace = 1024*KB;
1638 #else
1639  static const int kMaxObjectSizeInNewSpace = 512*KB;
1640 #endif
1641 
1642  NewSpace new_space_;
1643  OldSpace* old_pointer_space_;
1644  OldSpace* old_data_space_;
1645  OldSpace* code_space_;
1646  MapSpace* map_space_;
1647  CellSpace* cell_space_;
1648  LargeObjectSpace* lo_space_;
1649  HeapState gc_state_;
1650  int gc_post_processing_depth_;
1651 
1652  // Returns the amount of external memory registered since last global gc.
1653  intptr_t PromotedExternalMemorySize();
1654 
1655  int ms_count_; // how many mark-sweep collections happened
1656  unsigned int gc_count_; // how many gc happened
1657 
1658  // For post mortem debugging.
1659  static const int kRememberedUnmappedPages = 128;
1660  int remembered_unmapped_pages_index_;
1661  Address remembered_unmapped_pages_[kRememberedUnmappedPages];
1662 
1663  // Total length of the strings we failed to flatten since the last GC.
1664  int unflattened_strings_length_;
1665 
1666 #define ROOT_ACCESSOR(type, name, camel_name) \
1667  inline void set_##name(type* value) { \
1668  /* The deserializer makes use of the fact that these common roots are */ \
1669  /* never in new space and never on a page that is being compacted. */ \
1670  ASSERT(k##camel_name##RootIndex >= kOldSpaceRoots || !InNewSpace(value)); \
1671  roots_[k##camel_name##RootIndex] = value; \
1672  }
1674 #undef ROOT_ACCESSOR
1675 
1676 #ifdef DEBUG
1677  bool allocation_allowed_;
1678 
1679  // If the --gc-interval flag is set to a positive value, this
1680  // variable holds the value indicating the number of allocations
1681  // remain until the next failure and garbage collection.
1682  int allocation_timeout_;
1683 
1684  // Do we expect to be able to handle allocation failure at this
1685  // time?
1686  bool disallow_allocation_failure_;
1687 
1688  HeapDebugUtils* debug_utils_;
1689 #endif // DEBUG
1690 
1691  // Indicates that the new space should be kept small due to high promotion
1692  // rates caused by the mutator allocating a lot of long-lived objects.
1693  bool new_space_high_promotion_mode_active_;
1694 
1695  // Limit that triggers a global GC on the next (normally caused) GC. This
1696  // is checked when we have already decided to do a GC to help determine
1697  // which collector to invoke.
1698  intptr_t old_gen_promotion_limit_;
1699 
1700  // Limit that triggers a global GC as soon as is reasonable. This is
1701  // checked before expanding a paged space in the old generation and on
1702  // every allocation in large object space.
1703  intptr_t old_gen_allocation_limit_;
1704 
1705  // Sometimes the heuristics dictate that those limits are increased. This
1706  // variable records that fact.
1707  int old_gen_limit_factor_;
1708 
1709  // Used to adjust the limits that control the timing of the next GC.
1710  intptr_t size_of_old_gen_at_last_old_space_gc_;
1711 
1712  // Limit on the amount of externally allocated memory allowed
1713  // between global GCs. If reached a global GC is forced.
1714  intptr_t external_allocation_limit_;
1715 
1716  // The amount of external memory registered through the API kept alive
1717  // by global handles
1718  intptr_t amount_of_external_allocated_memory_;
1719 
1720  // Caches the amount of external memory registered at the last global gc.
1721  intptr_t amount_of_external_allocated_memory_at_last_global_gc_;
1722 
1723  // Indicates that an allocation has failed in the old generation since the
1724  // last GC.
1725  int old_gen_exhausted_;
1726 
1727  Object* global_contexts_list_;
1728 
1729  StoreBufferRebuilder store_buffer_rebuilder_;
1730 
1731  struct StringTypeTable {
1733  int size;
1734  RootListIndex index;
1735  };
1736 
1737  struct ConstantSymbolTable {
1738  const char* contents;
1739  RootListIndex index;
1740  };
1741 
1742  struct StructTable {
1744  int size;
1745  RootListIndex index;
1746  };
1747 
1748  static const StringTypeTable string_type_table[];
1749  static const ConstantSymbolTable constant_symbol_table[];
1750  static const StructTable struct_table[];
1751 
1752  // The special hidden symbol which is an empty string, but does not match
1753  // any string when looked up in properties.
1754  String* hidden_symbol_;
1755 
1756  // GC callback function, called before and after mark-compact GC.
1757  // Allocations in the callback function are disallowed.
1758  struct GCPrologueCallbackPair {
1759  GCPrologueCallbackPair(GCPrologueCallback callback, GCType gc_type)
1760  : callback(callback), gc_type(gc_type) {
1761  }
1762  bool operator==(const GCPrologueCallbackPair& pair) const {
1763  return pair.callback == callback;
1764  }
1765  GCPrologueCallback callback;
1766  GCType gc_type;
1767  };
1768  List<GCPrologueCallbackPair> gc_prologue_callbacks_;
1769 
1770  struct GCEpilogueCallbackPair {
1771  GCEpilogueCallbackPair(GCEpilogueCallback callback, GCType gc_type)
1772  : callback(callback), gc_type(gc_type) {
1773  }
1774  bool operator==(const GCEpilogueCallbackPair& pair) const {
1775  return pair.callback == callback;
1776  }
1777  GCEpilogueCallback callback;
1778  GCType gc_type;
1779  };
1780  List<GCEpilogueCallbackPair> gc_epilogue_callbacks_;
1781 
1782  GCCallback global_gc_prologue_callback_;
1783  GCCallback global_gc_epilogue_callback_;
1784 
1785  // Support for computing object sizes during GC.
1786  HeapObjectCallback gc_safe_size_of_old_object_;
1787  static int GcSafeSizeOfOldObject(HeapObject* object);
1788 
1789  // Update the GC state. Called from the mark-compact collector.
 // Only the non-encoded state is supported: callers must pass false, and
 // the plain GC-safe size function is (re)installed unconditionally.
1790  void MarkMapPointersAsEncoded(bool encoded) {
1791  ASSERT(!encoded);
1792  gc_safe_size_of_old_object_ = &GcSafeSizeOfOldObject;
1793  }
1794 
1795  // Checks whether a global GC is necessary
1796  GarbageCollector SelectGarbageCollector(AllocationSpace space,
1797  const char** reason);
1798 
1799  // Performs garbage collection
1800  // Returns whether there is a chance another major GC could
1801  // collect more garbage.
1802  bool PerformGarbageCollection(GarbageCollector collector,
1803  GCTracer* tracer);
1804 
1805 
1806  inline void UpdateOldSpaceLimits();
1807 
1808  // Allocate an uninitialized object in map space. The behavior is identical
1809  // to Heap::AllocateRaw(size_in_bytes, MAP_SPACE), except that (a) it doesn't
1810  // have to test the allocation space argument and (b) can reduce code size
1811  // (since both AllocateRaw and AllocateRawMap are inlined).
1812  MUST_USE_RESULT inline MaybeObject* AllocateRawMap();
1813 
1814  // Allocate an uninitialized object in the global property cell space.
1815  MUST_USE_RESULT inline MaybeObject* AllocateRawCell();
1816 
1817  // Initializes a JSObject based on its map.
1818  void InitializeJSObjectFromMap(JSObject* obj,
1819  FixedArray* properties,
1820  Map* map);
1821 
1822  bool CreateInitialMaps();
1823  bool CreateInitialObjects();
1824 
1825  // These five Create*EntryStub functions are here and forced to not be inlined
1826  // because of a gcc-4.4 bug that assigns wrong vtable entries.
1827  NO_INLINE(void CreateJSEntryStub());
1828  NO_INLINE(void CreateJSConstructEntryStub());
1829 
1830  void CreateFixedStubs();
1831 
1832  MaybeObject* CreateOddball(const char* to_string,
1833  Object* to_number,
1834  byte kind);
1835 
1836  // Allocate a JSArray with no elements
1837  MUST_USE_RESULT MaybeObject* AllocateJSArray(
1838  ElementsKind elements_kind,
1839  PretenureFlag pretenure = NOT_TENURED);
1840 
1841  // Allocate empty fixed array.
1842  MUST_USE_RESULT MaybeObject* AllocateEmptyFixedArray();
1843 
1844  // Allocate empty fixed double array.
1845  MUST_USE_RESULT MaybeObject* AllocateEmptyFixedDoubleArray();
1846 
1847  // Performs a minor collection in new generation.
1848  void Scavenge();
1849 
1850  static String* UpdateNewSpaceReferenceInExternalStringTableEntry(
1851  Heap* heap,
1852  Object** pointer);
1853 
1854  Address DoScavenge(ObjectVisitor* scavenge_visitor, Address new_space_front);
1855  static void ScavengeStoreBufferCallback(Heap* heap,
1856  MemoryChunk* page,
1857  StoreBufferEvent event);
1858 
1859  // Performs a major collection in the whole heap.
1860  void MarkCompact(GCTracer* tracer);
1861 
1862  // Code to be run before and after mark-compact.
1863  void MarkCompactPrologue();
1864 
1865  // Record statistics before and after garbage collection.
1866  void ReportStatisticsBeforeGC();
1867  void ReportStatisticsAfterGC();
1868 
1869  // Slow part of scavenge object.
1870  static void ScavengeObjectSlow(HeapObject** p, HeapObject* object);
1871 
1872  // Initializes a function with a shared part and prototype.
1873  // Note: this code was factored out of AllocateFunction such that
1874  // other parts of the VM could use it. Specifically, a function that creates
1875  // instances of type JS_FUNCTION_TYPE benefit from the use of this function.
1876  // Please note this does not perform a garbage collection.
1877  inline void InitializeFunction(
1878  JSFunction* function,
1879  SharedFunctionInfo* shared,
1880  Object* prototype);
1881 
1882  // Total RegExp code ever generated
1883  double total_regexp_code_generated_;
1884 
1885  GCTracer* tracer_;
1886 
1887 
1888  // Allocates a small number to string cache.
1889  MUST_USE_RESULT MaybeObject* AllocateInitialNumberStringCache();
1890  // Creates and installs the full-sized number string cache.
1891  void AllocateFullSizeNumberStringCache();
1892  // Get the length of the number to string cache based on the max semispace
1893  // size.
1894  int FullSizeNumberStringCacheLength();
1895  // Flush the number to string cache.
1896  void FlushNumberStringCache();
1897 
1898  void UpdateSurvivalRateTrend(int start_new_space_size);
1899 
1900  enum SurvivalRateTrend { INCREASING, STABLE, DECREASING, FLUCTUATING };
1901 
1902  static const int kYoungSurvivalRateHighThreshold = 90;
1903  static const int kYoungSurvivalRateLowThreshold = 10;
1904  static const int kYoungSurvivalRateAllowedDeviation = 15;
1905 
1906  int young_survivors_after_last_gc_;
1907  int high_survival_rate_period_length_;
1908  int low_survival_rate_period_length_;
1909  double survival_rate_;
1910  SurvivalRateTrend previous_survival_rate_trend_;
1911  SurvivalRateTrend survival_rate_trend_;
1912 
 // Records a newly measured trend and remembers the previous one.
 // FLUCTUATING is a derived state (see survival_rate_trend() below) and
 // must never be stored directly.
1913  void set_survival_rate_trend(SurvivalRateTrend survival_rate_trend) {
1914  ASSERT(survival_rate_trend != FLUCTUATING);
1915  previous_survival_rate_trend_ = survival_rate_trend_;
1916  survival_rate_trend_ = survival_rate_trend;
1917  }
1918 
 // Returns the effective trend. STABLE wins outright; a measurement that
 // follows a STABLE period is reported as-is; two differing consecutive
 // non-STABLE measurements read as FLUCTUATING.
1919  SurvivalRateTrend survival_rate_trend() {
1920  if (survival_rate_trend_ == STABLE) {
1921  return STABLE;
1922  } else if (previous_survival_rate_trend_ == STABLE) {
1923  return survival_rate_trend_;
1924  } else if (survival_rate_trend_ != previous_survival_rate_trend_) {
1925  return FLUCTUATING;
1926  } else {
1927  return survival_rate_trend_;
1928  }
1929  }
1930 
 // True when the effective trend is STABLE or INCREASING (i.e. objects
 // keep surviving scavenges at a steady or growing rate).
1931  bool IsStableOrIncreasingSurvivalTrend() {
1932  switch (survival_rate_trend()) {
1933  case STABLE:
1934  case INCREASING:
1935  return true;
1936  default:
1937  return false;
1938  }
1939  }
1940 
 // True when the effective trend is STABLE or DECREASING.
1941  bool IsStableOrDecreasingSurvivalTrend() {
1942  switch (survival_rate_trend()) {
1943  case STABLE:
1944  case DECREASING:
1945  return true;
1946  default:
1947  return false;
1948  }
1949  }
1950 
 // True only for a strictly INCREASING effective trend.
1951  bool IsIncreasingSurvivalTrend() {
1952  return survival_rate_trend() == INCREASING;
1953  }
1954 
 // True while we are inside a run of GCs with high young-generation
 // survival (see high_survival_rate_period_length_).
1955  bool IsHighSurvivalRate() {
1956  return high_survival_rate_period_length_ > 0;
1957  }
1958 
 // True while we are inside a run of GCs with low young-generation
 // survival (see low_survival_rate_period_length_).
1959  bool IsLowSurvivalRate() {
1960  return low_survival_rate_period_length_ > 0;
1961  }
1962 
1963  void SelectScavengingVisitorsTable();
1964 
 // Begins a new idle-notification round: resets the per-round mark-sweep
 // counter and remembers the current mark-sweep count.
1965  void StartIdleRound() {
1966  mark_sweeps_since_idle_round_started_ = 0;
1967  ms_count_at_last_idle_notification_ = ms_count_;
1968  }
1969 
 // Ends the idle round by saturating the mark-sweep counter at its
 // maximum and clearing the scavenge counter used to trigger a new round.
1970  void FinishIdleRound() {
1971  mark_sweeps_since_idle_round_started_ = kMaxMarkSweepsInIdleRound;
1972  scavenges_since_last_idle_round_ = 0;
1973  }
1974 
 // A new idle round is worthwhile once enough scavenges have happened
 // since the previous round finished.
1975  bool EnoughGarbageSinceLastIdleRound() {
1976  return (scavenges_since_last_idle_round_ >= kIdleScavengeThreshold);
1977  }
1978 
1979  // Estimates how many milliseconds a Mark-Sweep would take to complete.
1980  // In idle notification handler we assume that this function will return:
1981  // - a number less than 10 for small heaps, which are less than 8Mb.
1982  // - a number greater than 10 for large heaps, which are greater than 32Mb.
1983  int TimeMarkSweepWouldTakeInMs() {
1984  // Rough estimate of how many megabytes of heap can be processed in 1 ms.
1985  static const int kMbPerMs = 2;
1986 
1987  int heap_size_mb = static_cast<int>(SizeOfObjects() / MB);
1988  return heap_size_mb / kMbPerMs;
1989  }
1990 
1991  // Returns true if no more GC work is left.
1992  bool IdleGlobalGC();
1993 
1994  void AdvanceIdleIncrementalMarking(intptr_t step_size);
1995 
1996 
1997  static const int kInitialSymbolTableSize = 2048;
1998  static const int kInitialEvalCacheSize = 64;
1999  static const int kInitialNumberStringCacheSize = 256;
2000 
2001  // Maximum GC pause.
2002  int max_gc_pause_;
2003 
2004  // Maximum size of objects alive after GC.
2005  intptr_t max_alive_after_gc_;
2006 
2007  // Minimal interval between two subsequent collections.
2008  int min_in_mutator_;
2009 
2010  // Size of objects alive after last GC.
2011  intptr_t alive_after_last_gc_;
2012 
2013  double last_gc_end_timestamp_;
2014 
2015  MarkCompactCollector mark_compact_collector_;
2016 
2017  StoreBuffer store_buffer_;
2018 
2019  Marking marking_;
2020 
2021  IncrementalMarking incremental_marking_;
2022 
2023  int number_idle_notifications_;
2024  unsigned int last_idle_notification_gc_count_;
2025  bool last_idle_notification_gc_count_init_;
2026 
2027  int mark_sweeps_since_idle_round_started_;
2028  int ms_count_at_last_idle_notification_;
2029  unsigned int gc_count_at_last_idle_gc_;
2030  int scavenges_since_last_idle_round_;
2031 
2032  static const int kMaxMarkSweepsInIdleRound = 7;
2033  static const int kIdleScavengeThreshold = 5;
2034 
2035  // Shared state read by the scavenge collector and set by ScavengeObject.
2036  PromotionQueue promotion_queue_;
2037 
2038  // Flag is set when the heap has been configured. The heap can be repeatedly
2039  // configured through the API until it is set up.
2040  bool configured_;
2041 
2042  ExternalStringTable external_string_table_;
2043 
2044  VisitorDispatchTable<ScavengingCallback> scavenging_visitors_table_;
2045 
2046  MemoryChunk* chunks_queued_for_free_;
2047 
2048  friend class Factory;
2049  friend class GCTracer;
2051  friend class AlwaysAllocateScope;
2053  friend class Page;
2054  friend class Isolate;
2055  friend class MarkCompactCollector;
2056  friend class StaticMarkingVisitor;
2057  friend class MapCompact;
2058 
2060 };
2061 
2062 
 // Plain record of pointers to per-space size/capacity counters. The
 // trailing "// N" comments give each field's slot index, and the slots
 // are bracketed by the DECADE start/end markers so a raw dump of the
 // record can be recognized and validated.
 // NOTE(review): presumably filled in when reporting heap state on fatal
 // errors (e.g. OOM crash dumps) -- confirm against heap.cc.
2063 class HeapStats {
2064  public:
2065  static const int kStartMarker = 0xDECADE00;
2066  static const int kEndMarker = 0xDECADE01;
2067 
2068  int* start_marker; // 0
2069  int* new_space_size; // 1
2071  intptr_t* old_pointer_space_size; // 3
2073  intptr_t* old_data_space_size; // 5
2074  intptr_t* old_data_space_capacity; // 6
2075  intptr_t* code_space_size; // 7
2076  intptr_t* code_space_capacity; // 8
2077  intptr_t* map_space_size; // 9
2078  intptr_t* map_space_capacity; // 10
2079  intptr_t* cell_space_size; // 11
2080  intptr_t* cell_space_capacity; // 12
2081  intptr_t* lo_space_size; // 13
2087  intptr_t* memory_allocator_size; // 19
2088  intptr_t* memory_allocator_capacity; // 20
2089  int* objects_per_type; // 21
2090  int* size_per_type; // 22
2091  int* os_error; // 23
2092  int* end_marker; // 24
2093 };
2094 
2095 
2097  public:
2098  inline AlwaysAllocateScope();
2099  inline ~AlwaysAllocateScope();
2100 };
2101 
2102 
2104  public:
2105  inline LinearAllocationScope();
2106  inline ~LinearAllocationScope();
2107 };
2108 
2109 
2110 #ifdef DEBUG
2111 // Visitor class to verify interior pointers in spaces that do not contain
2112 // or care about intergenerational references. All heap object pointers have to
2113 // point into the heap to a location that has a map pointer at its first word.
2114 // Caveat: Heap::Contains is an approximation because it can return true for
2115 // objects in a heap space but above the allocation pointer.
2116 class VerifyPointersVisitor: public ObjectVisitor {
2117  public:
2118  inline void VisitPointers(Object** start, Object** end);
2119 };
2120 #endif
2121 
2122 
2123 // Space iterator for iterating over all spaces of the heap.
2124 // Returns each space in turn, and null when it is done.
2125 class AllSpaces BASE_EMBEDDED {
2126  public:
2127  Space* next();
2128  AllSpaces() { counter_ = FIRST_SPACE; }
2129  private:
2130  int counter_;
2131 };
2132 
2133 
2134 // Space iterator for iterating over all old spaces of the heap: Old pointer
2135 // space, old data space and code space.
2136 // Returns each space in turn, and null when it is done.
2137 class OldSpaces BASE_EMBEDDED {
2138  public:
2139  OldSpace* next();
2140  OldSpaces() { counter_ = OLD_POINTER_SPACE; }
2141  private:
2142  int counter_;
2143 };
2144 
2145 
2146 // Space iterator for iterating over all the paged spaces of the heap:
2147 // Map space, old pointer space, old data space, code space and cell space.
2148 // Returns each space in turn, and null when it is done.
2149 class PagedSpaces BASE_EMBEDDED {
2150  public:
2151  PagedSpace* next();
2152  PagedSpaces() { counter_ = OLD_POINTER_SPACE; }
2153  private:
2154  int counter_;
2155 };
2156 
2157 
2158 // Space iterator for iterating over all spaces of the heap.
2159 // For each space an object iterator is provided. The deallocation of the
2160 // returned object iterators is handled by the space iterator.
2161 class SpaceIterator : public Malloced {
2162  public:
 // The size_func overload forwards a per-object size callback to each
 // space's object iterator -- presumably to override object size
 // computation during GC; confirm against the iterator implementations.
2163  SpaceIterator();
2164  explicit SpaceIterator(HeapObjectCallback size_func);
2165  virtual ~SpaceIterator();
2166 
 // has_next()/next() yield one ObjectIterator per space; the iterators
 // are owned (created and destroyed) by this SpaceIterator.
2167  bool has_next();
2168  ObjectIterator* next();
2169 
2170  private:
 // Creates the object iterator for current_space_.
2171  ObjectIterator* CreateIterator();
2172 
2173  int current_space_; // from enum AllocationSpace.
2174  ObjectIterator* iterator_; // object iterator for the current space.
2175  HeapObjectCallback size_func_;
2176 };
2177 
2178 
2179 // A HeapIterator provides iteration over the whole heap. It
2180 // aggregates the specific iterators for the different spaces as
2181 // these can only iterate over one space only.
2182 //
2183 // HeapIterator can skip free list nodes (that is, de-allocated heap
2184 // objects that still remain in the heap). As implementation of free
2185 // nodes filtering uses GC marks, it can't be used during MS/MC GC
2186 // phases. Also, it is forbidden to interrupt iteration in this mode,
2187 // as this will leave heap objects marked (and thus, unusable).
2188 class HeapObjectsFilter;
2189 
2190 class HeapIterator BASE_EMBEDDED {
2191  public:
2194  kFilterUnreachable
2195  };
2196 
2197  HeapIterator();
2198  explicit HeapIterator(HeapObjectsFiltering filtering);
2199  ~HeapIterator();
2200 
2201  HeapObject* next();
2202  void reset();
2203 
2204  private:
2205  // Perform the initialization.
2206  void Init();
2207  // Perform all necessary shutdown (destruction) work.
2208  void Shutdown();
2209  HeapObject* NextObject();
2210 
2211  HeapObjectsFiltering filtering_;
2212  HeapObjectsFilter* filter_;
2213  // Space iterator for iterating all the spaces.
2214  SpaceIterator* space_iterator_;
2215  // Object iterator for the space currently being iterated.
2216  ObjectIterator* object_iterator_;
2217 };
2218 
2219 
2220 // Cache for mapping (map, property name) into field offset.
2221 // Cleared at startup and prior to mark sweep collection.
2223  public:
2224  // Lookup field offset for (map, name). If absent, -1 is returned.
2225  int Lookup(Map* map, String* name);
2226 
2227  // Update an element in the cache.
2228  void Update(Map* map, String* name, int field_offset);
2229 
2230  // Clear the cache.
2231  void Clear();
2232 
2233  static const int kLength = 256;
2234  static const int kCapacityMask = kLength - 1;
2235  static const int kMapHashShift = 5;
2236  static const int kHashMask = -4; // Zero the last two bits.
2237  static const int kEntriesPerBucket = 4;
2238  static const int kNotFound = -1;
2239 
2240  // kEntriesPerBucket should be a power of 2.
2243 
2244  private:
 // All entries start out empty: NULL keys and kNotFound offsets, so a
 // fresh cache always misses.
2245  KeyedLookupCache() {
2246  for (int i = 0; i < kLength; ++i) {
2247  keys_[i].map = NULL;
2248  keys_[i].name = NULL;
2249  field_offsets_[i] = kNotFound;
2250  }
2251  }
2252 
 // Maps a (map, name) pair to a bucket index in keys_/field_offsets_.
2253  static inline int Hash(Map* map, String* name);
2254 
2255  // Get the address of the keys and field_offsets arrays. Used in
2256  // generated code to perform cache lookups.
2257  Address keys_address() {
2258  return reinterpret_cast<Address>(&keys_);
2259  }
2260 
2261  Address field_offsets_address() {
2262  return reinterpret_cast<Address>(&field_offsets_);
2263  }
2264 
2265  struct Key {
2266  Map* map;
2267  String* name;
2268  };
2269 
2270  Key keys_[kLength];
2271  int field_offsets_[kLength];
2272 
2273  friend class ExternalReference;
2274  friend class Isolate;
2276 };
2277 
2278 
2279 // Cache for mapping (array, property name) into descriptor index.
2280 // The cache contains both positive and negative results.
2281 // Descriptor index equals kNotFound means the property is absent.
2282 // Cleared at startup and prior to any gc.
2284  public:
2285  // Lookup descriptor index for (array, name).
2286  // If absent, kAbsent is returned.
 // Only symbol names are cached; any other name misses immediately.
2287  int Lookup(DescriptorArray* array, String* name) {
2288  if (!StringShape(name).IsSymbol()) return kAbsent;
2289  int index = Hash(array, name);
2290  Key& key = keys_[index];
2291  if ((key.array == array) && (key.name == name)) return results_[index];
2292  return kAbsent;
2293  }
2294 
2295  // Update an element in the cache.
 // Non-symbol names are silently ignored -- Lookup would never hit them
 // anyway -- and kAbsent is reserved as the miss sentinel, so it may not
 // be stored as a result.
2296  void Update(DescriptorArray* array, String* name, int result) {
2297  ASSERT(result != kAbsent);
2298  if (StringShape(name).IsSymbol()) {
2299  int index = Hash(array, name);
2300  Key& key = keys_[index];
2301  key.array = array;
2302  key.name = name;
2303  results_[index] = result;
2304  }
2305  }
2306 
2307  // Clear the cache.
2308  void Clear();
2309 
2310  static const int kAbsent = -2;
2311 
2312  private:
2314  for (int i = 0; i < kLength; ++i) {
2315  keys_[i].array = NULL;
2316  keys_[i].name = NULL;
2317  results_[i] = kAbsent;
2318  }
2319  }
2320 
 // Hashes the pair of pointer identities into a bucket index.
2321  static int Hash(DescriptorArray* array, String* name) {
2322  // Uses only lower 32 bits if pointers are larger.
 // The >> 2 shifts out the two low bits -- presumably always equal due
 // to pointer tagging/alignment -- before XOR-mixing the two pointers.
2323  uint32_t array_hash =
2324  static_cast<uint32_t>(reinterpret_cast<uintptr_t>(array)) >> 2;
2325  uint32_t name_hash =
2326  static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name)) >> 2;
2327  return (array_hash ^ name_hash) % kLength;
2328  }
2329 
2330  static const int kLength = 64;
2331  struct Key {
2332  DescriptorArray* array;
2333  String* name;
2334  };
2335 
2336  Key keys_[kLength];
2337  int results_[kLength];
2338 
2339  friend class Isolate;
2341 };
2342 
2343 
2344 #ifdef DEBUG
 // Debug-only scope object: while alive, the heap is expected to be able
 // to satisfy allocations without failing. The constructor saves and the
 // destructor restores the previous state (old_state_).
2345 class DisallowAllocationFailure {
2346  public:
2347  inline DisallowAllocationFailure();
2348  inline ~DisallowAllocationFailure();
2349 
2350  private:
2351  bool old_state_;
2352 };
2353 #endif
2354 
2355 
2356 // A helper class to document/test C++ scopes where we do not
2357 // expect a GC. Usage:
2358 //
2359 // /* Allocation not allowed: we cannot handle a GC in this scope. */
2360 // { AssertNoAllocation nogc;
2361 // ...
2362 // }
2364  public:
2365  inline AssertNoAllocation();
2366  inline ~AssertNoAllocation();
2367 
2368 #ifdef DEBUG
2369  private:
2370  bool old_state_;
2371 #endif
2372 };
2373 
2374 
2376  public:
2377  inline DisableAssertNoAllocation();
2378  inline ~DisableAssertNoAllocation();
2379 
2380 #ifdef DEBUG
2381  private:
2382  bool old_state_;
2383 #endif
2384 };
2385 
2386 // GCTracer collects and prints ONE line after each garbage collector
2387 // invocation IFF --trace_gc is used.
2388 
2389 class GCTracer BASE_EMBEDDED {
2390  public:
 // Times one GC sub-phase: the constructor records the current time and
 // the destructor accumulates the elapsed milliseconds into the owning
 // tracer's scopes_ bucket for the given ScopeId.
2391  class Scope BASE_EMBEDDED {
2392  public:
2393  enum ScopeId {
2394  EXTERNAL,
2395  MC_MARK,
2396  MC_SWEEP,
2397  MC_SWEEP_NEWSPACE,
2398  MC_EVACUATE_PAGES,
2399  MC_UPDATE_NEW_TO_NEW_POINTERS,
2400  MC_UPDATE_ROOT_TO_NEW_POINTERS,
2401  MC_UPDATE_OLD_TO_NEW_POINTERS,
2402  MC_UPDATE_POINTERS_TO_EVACUATED,
2403  MC_UPDATE_POINTERS_BETWEEN_EVACUATED,
2404  MC_UPDATE_MISC_POINTERS,
2405  MC_FLUSH_CODE,
2406  kNumberOfScopes
2407  };
2408 
2409  Scope(GCTracer* tracer, ScopeId scope)
2410  : tracer_(tracer),
2411  scope_(scope) {
2412  start_time_ = OS::TimeCurrentMillis();
2413  }
2414 
2415  ~Scope() {
2416  ASSERT(scope_ < kNumberOfScopes); // scope_ is unsigned.
2417  tracer_->scopes_[scope_] += OS::TimeCurrentMillis() - start_time_;
2418  }
2419 
2420  private:
2421  GCTracer* tracer_;
2422  ScopeId scope_;
2423  double start_time_;
2424  };
2425 
 // The reason strings are kept by pointer; callers must pass strings
 // that outlive the tracer (e.g. string literals).
2426  explicit GCTracer(Heap* heap,
2427  const char* gc_reason,
2428  const char* collector_reason);
2429  ~GCTracer();
2430 
2431  // Sets the collector.
2432  void set_collector(GarbageCollector collector) { collector_ = collector; }
2433 
2434  // Sets the GC count.
2435  void set_gc_count(unsigned int count) { gc_count_ = count; }
2436 
2437  // Sets the full GC count.
2438  void set_full_gc_count(int count) { full_gc_count_ = count; }
2439 
 // Accumulates the size of objects promoted during this collection.
2440  void increment_promoted_objects_size(int object_size) {
2441  promoted_objects_size_ += object_size;
2442  }
2443 
2444  private:
2445  // Returns a string matching the collector.
2446  const char* CollectorString();
2447 
2448  // Returns size of object in heap (in MB).
2449  inline double SizeOfHeapObjects();
2450 
2451  // Timestamp set in the constructor.
2452  double start_time_;
2453 
2454  // Size of objects in heap set in constructor.
2455  intptr_t start_object_size_;
2456 
2457  // Size of memory allocated from OS set in constructor.
2458  intptr_t start_memory_size_;
2459 
2460  // Type of collector.
2461  GarbageCollector collector_;
2462 
2463  // A count (including this one, e.g. the first collection is 1) of the
2464  // number of garbage collections.
2465  unsigned int gc_count_;
2466 
2467  // A count (including this one) of the number of full garbage collections.
2468  int full_gc_count_;
2469 
2470  // Amounts of time spent in different scopes during GC.
2471  double scopes_[Scope::kNumberOfScopes];
2472 
2473  // Total amount of space either wasted or contained in one of free lists
2474  // before the current GC.
2475  intptr_t in_free_list_or_wasted_before_gc_;
2476 
2477  // Difference between space used in the heap at the beginning of the current
2478  // collection and the end of the previous collection.
2479  intptr_t allocated_since_last_gc_;
2480 
2481  // Amount of time spent in mutator that is time elapsed between end of the
2482  // previous collection and the beginning of the current one.
2483  double spent_in_mutator_;
2484 
2485  // Size of objects promoted during the current collection.
2486  intptr_t promoted_objects_size_;
2487 
2488  // Incremental marking steps counters.
2489  int steps_count_;
2490  double steps_took_;
2491  double longest_step_;
2492  int steps_count_since_last_gc_;
2493  double steps_took_since_last_gc_;
2494 
 // The heap being traced (not owned).
2495  Heap* heap_;
2496 
 // Human-readable reasons for the GC and collector choice; may be used
 // in --trace-gc output.
2497  const char* gc_reason_;
2498  const char* collector_reason_;
2499 };
2500 
2501 
2503  public:
2504  static Object* Lookup(FixedArray* cache, String* string, String* pattern);
2505  static void Enter(Heap* heap,
2506  FixedArray* cache,
2507  String* string,
2508  String* pattern,
2509  FixedArray* array);
2510  static void Clear(FixedArray* cache);
2511  static const int kStringSplitCacheSize = 0x100;
2512 
2513  private:
2514  static const int kArrayEntriesPerCacheEntry = 4;
2515  static const int kStringOffset = 0;
2516  static const int kPatternOffset = 1;
2517  static const int kArrayOffset = 2;
2518 
2519  static MaybeObject* WrapFixedArrayInJSArray(Object* fixed_array);
2520 };
2521 
2522 
2524  public:
2526  static const int kTranscendentalTypeBits = 3;
2528 
2529  // Returns a heap number with f(input), where f is a math function specified
2530  // by the 'type' argument.
2531  MUST_USE_RESULT inline MaybeObject* Get(Type type, double input);
2532 
2533  // The cache contains raw Object pointers. This method disposes of
2534  // them before a garbage collection.
2535  void Clear();
2536 
2537  private:
 // One cache for a single transcendental function type. Maps the raw
 // 64-bit pattern of a double input to a previously computed heap-number
 // output.
2538  class SubCache {
2539  static const int kCacheSize = 512;
2540 
2541  explicit SubCache(Type t);
2542 
 // Returns the cached result for input, computing and caching it on miss.
2543  MUST_USE_RESULT inline MaybeObject* Get(double input);
2544 
 // Computes f(input) directly for this cache's function type.
2545  inline double Calculate(double input);
2546 
 // A cache line: the input's two 32-bit halves and the result object.
2547  struct Element {
2548  uint32_t in[2];
2549  Object* output;
2550  };
2551 
 // Type-puns a double into two 32-bit words via a union so the input
 // can be compared and hashed bitwise.
2552  union Converter {
2553  double dbl;
2554  uint32_t integers[2];
2555  };
2556 
 // Folds both halves of the input's bit pattern into a cache index.
2557  inline static int Hash(const Converter& c) {
2558  uint32_t hash = (c.integers[0] ^ c.integers[1]);
2559  hash ^= static_cast<int32_t>(hash) >> 16;
2560  hash ^= static_cast<int32_t>(hash) >> 8;
2561  return (hash & (kCacheSize - 1));
2562  }
2563 
2564  Element elements_[kCacheSize];
2565  Type type_;
2566  Isolate* isolate_;
2567 
2568  // Allow access to the caches_ array as an ExternalReference.
2569  friend class ExternalReference;
2570  // Inline implementation of the cache.
2571  friend class TranscendentalCacheStub;
2572  // For evaluating value.
2573  friend class TranscendentalCache;
2574 
2575  DISALLOW_COPY_AND_ASSIGN(SubCache);
2576  };
2577 
 // Sub-caches are created lazily; all slots start out empty.
2578  TranscendentalCache() {
2579  for (int i = 0; i < kNumberOfCaches; ++i) caches_[i] = NULL;
2580  }
2581 
2582  // Used to create an external reference.
2583  inline Address cache_array_address();
2584 
2585  // Instantiation
2586  friend class Isolate;
2587  // Inline implementation of the caching.
2589  // Allow access to the caches_ array as an ExternalReference.
2590  friend class ExternalReference;
2591 
2592  SubCache* caches_[kNumberOfCaches];
2594 };
2595 
2596 
2597 // Abstract base class for checking whether a weak object should be retained.
2599  public:
2600  virtual ~WeakObjectRetainer() {}
2601 
2602  // Return whether this object should be retained. If NULL is returned the
2603  // object has no references. Otherwise the address of the retained object
2604  // should be returned as in some GC situations the object has been moved.
2605  virtual Object* RetainAs(Object* object) = 0;
2606 };
2607 
2608 
2609 // Intrusive object marking uses least significant bit of
2610 // heap object's map word to mark objects.
2611 // Normally all map words have least significant bit set
2612 // because they contain tagged map pointer.
2613 // If the bit is not set object is marked.
2614 // All objects should be unmarked before resuming
2615 // JavaScript execution.
2617  public:
 // The mark is the *absence* of kNotMarkedBit in the map word: normal
 // (unmarked) objects carry a tagged map pointer whose low bit is set.
2618  static bool IsMarked(HeapObject* object) {
2619  return (object->map_word().ToRawValue() & kNotMarkedBit) == 0;
2620  }
2621 
 // Restores the tag bit, returning the object to the unmarked state.
2622  static void ClearMark(HeapObject* object) {
2623  uintptr_t map_word = object->map_word().ToRawValue();
2624  object->set_map_word(MapWord::FromRawValue(map_word | kNotMarkedBit));
2625  ASSERT(!IsMarked(object));
2626  }
2627 
 // Strips the tag bit, marking the object.
2628  static void SetMark(HeapObject* object) {
2629  uintptr_t map_word = object->map_word().ToRawValue();
2630  object->set_map_word(MapWord::FromRawValue(map_word & ~kNotMarkedBit));
2631  ASSERT(IsMarked(object));
2632  }
2633 
 // Re-adds the tag bit so the map pointer of a marked object can be
 // decoded without clearing the mark.
2634  static Map* MapOfMarkedObject(HeapObject* object) {
2635  uintptr_t map_word = object->map_word().ToRawValue();
2636  return MapWord::FromRawValue(map_word | kNotMarkedBit).ToMap();
2637  }
2638 
 // Size computation works on marked objects by recovering the map first.
2639  static int SizeOfMarkedObject(HeapObject* object) {
2640  return object->SizeFromMap(MapOfMarkedObject(object));
2641  }
2642 
2643  private:
2644  static const uintptr_t kNotMarkedBit = 0x1;
 // The heap-object tag must include this bit, so stripping it from a
 // tagged map pointer is always a visible change.
2645  STATIC_ASSERT((kHeapObjectTag & kNotMarkedBit) != 0);
2646 };
2647 
2648 
2649 #if defined(DEBUG) || defined(LIVE_OBJECT_LIST)
2650 // Helper class for tracing paths to a search target Object from all roots.
2651 // The TracePathFrom() method can be used to trace paths from a specific
2652 // object to the search target object.
2653 class PathTracer : public ObjectVisitor {
2654  public:
2655  enum WhatToFind {
2656  FIND_ALL, // Will find all matches.
2657  FIND_FIRST // Will stop the search after first match.
2658  };
2659 
2660  // For the WhatToFind arg, if FIND_FIRST is specified, tracing will stop
2661  // after the first match. If FIND_ALL is specified, then tracing will be
2662  // done for all matches.
 // search_target may also be kAnyGlobalObject to match any global object.
2663  PathTracer(Object* search_target,
2664  WhatToFind what_to_find,
2665  VisitMode visit_mode)
2666  : search_target_(search_target),
2667  found_target_(false),
2668  found_target_in_trace_(false),
2669  what_to_find_(what_to_find),
2670  visit_mode_(visit_mode),
2671  object_stack_(20),
2672  no_alloc() {}
2673 
2674  virtual void VisitPointers(Object** start, Object** end);
2675 
 // Reset() clears the found flags and the path stack so the tracer can
 // be reused; TracePathFrom() traces from a single root slot.
2676  void Reset();
2677  void TracePathFrom(Object** root);
2678 
2679  bool found() const { return found_target_; }
2680 
2681  static Object* const kAnyGlobalObject;
2682 
2683  protected:
2684  class MarkVisitor;
2685  class UnmarkVisitor;
2686 
 // Marking visits objects intrusively (see kMarkTag); UnmarkRecursively
 // must undo it before JavaScript can resume.
2687  void MarkRecursively(Object** p, MarkVisitor* mark_visitor);
2688  void UnmarkRecursively(Object** p, UnmarkVisitor* unmark_visitor);
2689  virtual void ProcessResults();
2690 
2691  // Tags 0, 1, and 3 are used. Use 2 for marking visited HeapObject.
2692  static const int kMarkTag = 2;
2693 
2694  Object* search_target_;
2695  bool found_target_;
2696  bool found_target_in_trace_;
2697  WhatToFind what_to_find_;
2698  VisitMode visit_mode_;
 // Current chain of objects from a root to the target; reported as the
 // path when the target is found.
2699  List<Object*> object_stack_;
2700 
2701  AssertNoAllocation no_alloc; // i.e. no gc allowed.
2702 
2703  private:
2704  DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer);
2705 };
2706 #endif // DEBUG || LIVE_OBJECT_LIST
2707 
2708 } } // namespace v8::internal
2709 
2710 #endif // V8_HEAP_H_
static int SizeOfMarkedObject(HeapObject *object)
Definition: heap.h:2639
byte * Address
Definition: globals.h:172
Object ** roots_array_start()
Definition: heap.h:1222
Address NewSpaceStart()
Definition: heap.h:495
intptr_t OldGenPromotionLimit(intptr_t old_gen_size)
Definition: heap.h:1366
void GarbageCollectionEpilogue()
Definition: heap.cc:419
MUST_USE_RESULT MaybeObject * CopyCode(Code *code)
Definition: heap.cc:3546
void(* GCCallback)()
Definition: v8.h:2732
void Reset()
Definition: flags.cc:1446
double total_regexp_code_generated()
Definition: heap.h:1498
MUST_USE_RESULT MaybeObject * AllocateFixedArray(int length, PretenureFlag pretenure)
Definition: heap.cc:4712
void TearDown()
Definition: heap.cc:6157
void SetStackLimits()
Definition: heap.cc:6140
void CallGlobalGCEpilogueCallback()
Definition: heap.h:1554
PromotionQueue(Heap *heap)
Definition: heap.h:293
void SetNewLimit(Address limit)
Definition: heap.h:314
bool NextGCIsLikelyToBeFull()
Definition: heap.h:1443
MUST_USE_RESULT MaybeObject * AllocateExternalStringFromAscii(const ExternalAsciiString::Resource *resource)
Definition: heap.cc:3327
MUST_USE_RESULT MaybeObject * AllocateSymbol(Vector< const char > str, int chars, uint32_t hash_field)
Definition: heap-inl.h:98
void set_full_gc_count(int count)
Definition: heap.h:2438
intptr_t OldGenerationCapacityAvailable()
Definition: heap.h:1358
void Callback(MemoryChunk *page, StoreBufferEvent event)
Definition: heap.cc:1054
MUST_USE_RESULT MaybeObject * CopyFixedDoubleArray(FixedDoubleArray *src)
Definition: heap-inl.h:178
intptr_t * old_pointer_space_size
Definition: heap.h:2071
MUST_USE_RESULT MaybeObject * AllocateFunctionPrototype(JSFunction *function)
Definition: heap.cc:3661
intptr_t * cell_space_size
Definition: heap.h:2079
static const int kMapHashShift
Definition: heap.h:2235
int ReservedSemiSpaceSize()
Definition: heap.h:469
void RecordWrite(Address address, int offset)
Definition: heap-inl.h:335
void PrintF(const char *format,...)
Definition: v8utils.cc:40
bool OldGenerationPromotionLimitReached()
Definition: heap.h:1350
bool InNewSpace(Object *object)
Definition: heap-inl.h:292
static const int kArgumentsObjectSize
Definition: heap.h:863
bool IsHeapIterable()
Definition: heap.cc:4979
int Lookup(DescriptorArray *array, String *name)
Definition: heap.h:2287
MUST_USE_RESULT MaybeObject * AllocateFunctionContext(int length, JSFunction *function)
Definition: heap.cc:4875
MUST_USE_RESULT MaybeObject * Allocate(Map *map, AllocationSpace space)
Definition: heap.cc:3628
MUST_USE_RESULT MaybeObject * AllocateSubString(String *buffer, int start, int end, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:3233
int * new_space_capacity
Definition: heap.h:2070
void(* ObjectSlotCallback)(HeapObject **from, HeapObject *to)
Definition: store-buffer.h:42
void SetConstructStubDeoptPCOffset(int pc_offset)
Definition: heap.h:1585
void IterateWeakRoots(ObjectVisitor *v, VisitMode mode)
Definition: heap.cc:5636
uint32_t HashSeed()
Definition: heap.h:1574
Object * ToBoolean(bool condition)
Definition: heap-inl.h:641
Isolate * isolate()
Definition: heap-inl.h:494
MUST_USE_RESULT MaybeObject * ReinitializeJSGlobalProxy(JSFunction *constructor, JSGlobalProxy *global)
Definition: heap.cc:4276
MUST_USE_RESULT MaybeObject * CopyFixedDoubleArrayWithMap(FixedDoubleArray *src, Map *map)
Definition: heap.cc:4630
MUST_USE_RESULT MaybeObject * AllocateGlobalObject(JSFunction *constructor)
Definition: heap.cc:4071
static Smi * FromInt(int value)
Definition: objects-inl.h:973
MUST_USE_RESULT MaybeObject * AllocateJSFunctionProxy(Object *handler, Object *call_trap, Object *construct_trap, Object *prototype)
Definition: heap.cc:4045
const int KB
Definition: globals.h:221
void FinalizeExternalString(String *string)
Definition: heap-inl.h:247
int sweep_generation()
Definition: heap.h:1544
MUST_USE_RESULT MaybeObject * AllocateJSObject(JSFunction *constructor, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:3931
intptr_t MaxReserved()
Definition: heap.h:465
void CompletelyClearInstanceofCache()
Definition: heap-inl.h:646
void CollectAllGarbage(int flags, const char *gc_reason=NULL)
Definition: heap.cc:452
value format" "after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false, "print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false, "print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false, "report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true, "garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true, "flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true, "use incremental marking") DEFINE_bool(incremental_marking_steps, true, "do incremental marking steps") DEFINE_bool(trace_incremental_marking, false, "trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true, "Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false, "Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true, "use inline caching") DEFINE_bool(native_code_counters, false, "generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false, "Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true, "Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false, "Never perform compaction on full GC-testing only") DEFINE_bool(compact_code_space, true, "Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true, "Flush inline caches prior to mark compact collection and" "flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0, "Default seed for initializing random generator" "(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true, "allows verbose printing") DEFINE_bool(allow_natives_syntax, false, "allow natives syntax") DEFINE_bool(trace_sim, false, "Trace simulator execution") DEFINE_bool(check_icache, false, "Check icache flushes in ARM and MIPS simulator") 
DEFINE_int(stop_sim_at, 0, "Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8, "Stack alingment in bytes in simulator(4 or 8, 8 is default)") DEFINE_bool(trace_exception, false, "print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false, "preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true, "randomize hashes to avoid predictable hash collisions" "(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0, "Fixed seed to use to hash property keys(0 means random)" "(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false, "activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true, "generate optimized regexp code") DEFINE_bool(testing_bool_flag, true, "testing_bool_flag") DEFINE_int(testing_int_flag, 13, "testing_int_flag") DEFINE_float(testing_float_flag, 2.5, "float-flag") DEFINE_string(testing_string_flag, "Hello, world!", "string-flag") DEFINE_int(testing_prng_seed, 42, "Seed used for threading test randomness") DEFINE_string(testing_serialization_file, "/tmp/serdes", "file in which to serialize heap") DEFINE_bool(help, false, "Print usage message, including flags, on console") DEFINE_bool(dump_counters, false, "Dump counters on exit") DEFINE_string(map_counters, "", "Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT, "Pass all remaining arguments to the script.Alias for\"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this 
substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") 
DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") 
DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#43"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2#define FLAG_MODE_DEFINE_DEFAULTS#1"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flag-definitions.h"1#define FLAG_FULL(ftype, ctype, nam, def, cmt)#define FLAG_READONLY(ftype, ctype, nam, def, cmt)#define DEFINE_implication(whenflag, thenflag)#define DEFINE_bool(nam, def, cmt)#define DEFINE_int(nam, def, cmt)#define DEFINE_float(nam, def, cmt)#define DEFINE_string(nam, def, cmt)#define DEFINE_args(nam, def, cmt)#define FLAG DEFINE_bool(use_strict, false,"enforce strict mode") DEFINE_bool(es5_readonly, false,"activate correct semantics for inheriting readonliness") DEFINE_bool(es52_globals, false,"activate new semantics for global var declarations") DEFINE_bool(harmony_typeof, false,"enable harmony semantics for typeof") DEFINE_bool(harmony_scoping, false,"enable harmony block scoping") DEFINE_bool(harmony_modules, false,"enable harmony modules (implies block scoping)") DEFINE_bool(harmony_proxies, false,"enable harmony proxies") DEFINE_bool(harmony_collections, false,"enable harmony collections (sets, maps, and weak maps)") DEFINE_bool(harmony, false,"enable all harmony features (except typeof)") DEFINE_implication(harmony, harmony_scoping) DEFINE_implication(harmony, harmony_modules) DEFINE_implication(harmony, harmony_proxies) DEFINE_implication(harmony, harmony_collections) DEFINE_implication(harmony_modules, harmony_scoping) DEFINE_bool(packed_arrays, false,"optimizes arrays that have no holes") DEFINE_bool(smi_only_arrays, true,"tracks arrays 
with only smi values") DEFINE_bool(clever_optimizations, true,"Optimize object size, Array shift, DOM strings and string +") DEFINE_bool(unbox_double_arrays, true,"automatically unbox arrays of doubles") DEFINE_bool(string_slices, true,"use string slices") DEFINE_bool(crankshaft, true,"use crankshaft") DEFINE_string(hydrogen_filter,"","optimization filter") DEFINE_bool(use_range, true,"use hydrogen range analysis") DEFINE_bool(eliminate_dead_phis, true,"eliminate dead phis") DEFINE_bool(use_gvn, true,"use hydrogen global value numbering") DEFINE_bool(use_canonicalizing, true,"use hydrogen instruction canonicalizing") DEFINE_bool(use_inlining, true,"use function inlining") DEFINE_int(max_inlined_source_size, 600,"maximum source size in bytes considered for a single inlining") DEFINE_int(max_inlined_nodes, 196,"maximum number of AST nodes considered for a single inlining") DEFINE_int(max_inlined_nodes_cumulative, 196,"maximum cumulative number of AST nodes considered for inlining") DEFINE_bool(loop_invariant_code_motion, true,"loop invariant code motion") DEFINE_bool(collect_megamorphic_maps_from_stub_cache, true,"crankshaft harvests type feedback from stub cache") DEFINE_bool(hydrogen_stats, false,"print statistics for hydrogen") DEFINE_bool(trace_hydrogen, false,"trace generated hydrogen to file") DEFINE_string(trace_phase,"Z","trace generated IR for specified phases") DEFINE_bool(trace_inlining, false,"trace inlining decisions") DEFINE_bool(trace_alloc, false,"trace register allocator") DEFINE_bool(trace_all_uses, false,"trace all use positions") DEFINE_bool(trace_range, false,"trace range analysis") DEFINE_bool(trace_gvn, false,"trace global value numbering") DEFINE_bool(trace_representation, false,"trace representation types") DEFINE_bool(stress_pointer_maps, false,"pointer map for every instruction") DEFINE_bool(stress_environments, false,"environment for every instruction") DEFINE_int(deopt_every_n_times, 0,"deoptimize every n times a deopt point is passed") 
DEFINE_bool(trap_on_deopt, false,"put a break point before deoptimizing") DEFINE_bool(deoptimize_uncommon_cases, true,"deoptimize uncommon cases") DEFINE_bool(polymorphic_inlining, true,"polymorphic inlining") DEFINE_bool(use_osr, true,"use on-stack replacement") DEFINE_bool(array_bounds_checks_elimination, false,"perform array bounds checks elimination") DEFINE_bool(array_index_dehoisting, false,"perform array index dehoisting") DEFINE_bool(trace_osr, false,"trace on-stack replacement") DEFINE_int(stress_runs, 0,"number of stress runs") DEFINE_bool(optimize_closures, true,"optimize closures") DEFINE_bool(inline_construct, true,"inline constructor calls") DEFINE_bool(inline_arguments, true,"inline functions with arguments object") DEFINE_int(loop_weight, 1,"loop weight for representation inference") DEFINE_bool(optimize_for_in, true,"optimize functions containing for-in loops") DEFINE_bool(experimental_profiler, true,"enable all profiler experiments") DEFINE_bool(watch_ic_patching, false,"profiler considers IC stability") DEFINE_int(frame_count, 1,"number of stack frames inspected by the profiler") DEFINE_bool(self_optimization, false,"primitive functions trigger their own optimization") DEFINE_bool(direct_self_opt, false,"call recompile stub directly when self-optimizing") DEFINE_bool(retry_self_opt, false,"re-try self-optimization if it failed") DEFINE_bool(count_based_interrupts, false,"trigger profiler ticks based on counting instead of timing") DEFINE_bool(interrupt_at_exit, false,"insert an interrupt check at function exit") DEFINE_bool(weighted_back_edges, false,"weight back edges by jump distance for interrupt triggering") DEFINE_int(interrupt_budget, 5900,"execution budget before interrupt is triggered") DEFINE_int(type_info_threshold, 15,"percentage of ICs that must have type info to allow optimization") DEFINE_int(self_opt_count, 130,"call count before self-optimization") DEFINE_implication(experimental_profiler, watch_ic_patching) 
DEFINE_implication(experimental_profiler, self_optimization) DEFINE_implication(experimental_profiler, retry_self_opt) DEFINE_implication(experimental_profiler, count_based_interrupts) DEFINE_implication(experimental_profiler, interrupt_at_exit) DEFINE_implication(experimental_profiler, weighted_back_edges) DEFINE_bool(trace_opt_verbose, false,"extra verbose compilation tracing") DEFINE_implication(trace_opt_verbose, trace_opt) DEFINE_bool(debug_code, false,"generate extra code (assertions) for debugging") DEFINE_bool(code_comments, false,"emit comments in code disassembly") DEFINE_bool(enable_sse2, true,"enable use of SSE2 instructions if available") DEFINE_bool(enable_sse3, true,"enable use of SSE3 instructions if available") DEFINE_bool(enable_sse4_1, true,"enable use of SSE4.1 instructions if available") DEFINE_bool(enable_cmov, true,"enable use of CMOV instruction if available") DEFINE_bool(enable_rdtsc, true,"enable use of RDTSC instruction if available") DEFINE_bool(enable_sahf, true,"enable use of SAHF instruction if available (X64 only)") DEFINE_bool(enable_vfp3, true,"enable use of VFP3 instructions if available - this implies ""enabling ARMv7 instructions (ARM only)") DEFINE_bool(enable_armv7, true,"enable use of ARMv7 instructions if available (ARM only)") DEFINE_bool(enable_fpu, true,"enable use of MIPS FPU instructions if available (MIPS only)") DEFINE_string(expose_natives_as, NULL,"expose natives in global object") DEFINE_string(expose_debug_as, NULL,"expose debug in global object") DEFINE_bool(expose_gc, false,"expose gc extension") DEFINE_bool(expose_externalize_string, false,"expose externalize string extension") DEFINE_int(stack_trace_limit, 10,"number of stack frames to capture") DEFINE_bool(builtins_in_stack_traces, false,"show built-in functions in stack traces") DEFINE_bool(disable_native_files, false,"disable builtin natives files") DEFINE_bool(inline_new, true,"use fast inline allocation") DEFINE_bool(stack_trace_on_abort, true,"print a 
stack trace if an assertion failure occurs") DEFINE_bool(trace, false,"trace function calls") DEFINE_bool(mask_constants_with_cookie, true,"use random jit cookie to mask large constants") DEFINE_bool(lazy, true,"use lazy compilation") DEFINE_bool(trace_opt, false,"trace lazy optimization") DEFINE_bool(trace_opt_stats, false,"trace lazy optimization statistics") DEFINE_bool(opt, true,"use adaptive optimizations") DEFINE_bool(always_opt, false,"always try to optimize functions") DEFINE_bool(prepare_always_opt, false,"prepare for turning on always opt") DEFINE_bool(trace_deopt, false,"trace deoptimization") DEFINE_int(min_preparse_length, 1024,"minimum length for automatic enable preparsing") DEFINE_bool(always_full_compiler, false,"try to use the dedicated run-once backend for all code") DEFINE_bool(trace_bailout, false,"print reasons for falling back to using the classic V8 backend") DEFINE_bool(compilation_cache, true,"enable compilation cache") DEFINE_bool(cache_prototype_transitions, true,"cache prototype transitions") DEFINE_bool(trace_debug_json, false,"trace debugging JSON request/response") DEFINE_bool(debugger_auto_break, true,"automatically set the debug break flag when debugger commands are ""in the queue") DEFINE_bool(enable_liveedit, true,"enable liveedit experimental feature") DEFINE_bool(break_on_abort, true,"always cause a debug break before aborting") DEFINE_int(stack_size, kPointerSize *123,"default size of stack region v8 is allowed to use (in kBytes)") DEFINE_int(max_stack_trace_source_length, 300,"maximum length of function source code printed in a stack trace.") DEFINE_bool(always_inline_smi_code, false,"always inline smi code in non-opt code") DEFINE_int(max_new_space_size, 0,"max size of the new generation (in kBytes)") DEFINE_int(max_old_space_size, 0,"max size of the old generation (in Mbytes)") DEFINE_int(max_executable_size, 0,"max size of executable memory (in Mbytes)") DEFINE_bool(gc_global, false,"always perform global GCs") 
DEFINE_int(gc_interval,-1,"garbage collect after <n> allocations") DEFINE_bool(trace_gc, false,"print one trace line following each garbage collection") DEFINE_bool(trace_gc_nvp, false,"print one detailed trace line in name=value format ""after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false,"print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false,"print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false,"report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true,"garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true,"flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true,"use incremental marking") DEFINE_bool(incremental_marking_steps, true,"do incremental marking steps") DEFINE_bool(trace_incremental_marking, false,"trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true,"Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false,"Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true,"use inline caching") DEFINE_bool(native_code_counters, false,"generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false,"Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true,"Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false,"Never perform compaction on full GC - testing only") DEFINE_bool(compact_code_space, true,"Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true,"Flush inline caches prior to mark compact collection and ""flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0,"Default seed for initializing random generator ""(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true,"allows verbose printing") 
DEFINE_bool(allow_natives_syntax, false,"allow natives syntax") DEFINE_bool(trace_sim, false,"Trace simulator execution") DEFINE_bool(check_icache, false,"Check icache flushes in ARM and MIPS simulator") DEFINE_int(stop_sim_at, 0,"Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8,"Stack alingment in bytes in simulator (4 or 8, 8 is default)") DEFINE_bool(trace_exception, false,"print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false,"preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true,"randomize hashes to avoid predictable hash collisions ""(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0,"Fixed seed to use to hash property keys (0 means random)""(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false,"activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true,"generate optimized regexp code") DEFINE_bool(testing_bool_flag, true,"testing_bool_flag") DEFINE_int(testing_int_flag, 13,"testing_int_flag") DEFINE_float(testing_float_flag, 2.5,"float-flag") DEFINE_string(testing_string_flag,"Hello, world!","string-flag") DEFINE_int(testing_prng_seed, 42,"Seed used for threading test randomness") DEFINE_string(testing_serialization_file,"/tmp/serdes","file in which to serialize heap") DEFINE_bool(help, false,"Print usage message, including flags, on console") DEFINE_bool(dump_counters, false,"Dump counters on exit") DEFINE_string(map_counters,"","Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT,"Pass all remaining arguments to the script. 
Alias for \"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") 
DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular 
expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#47"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2 namespace{struct Flag{enum FlagType{TYPE_BOOL, TYPE_INT, TYPE_FLOAT, TYPE_STRING, TYPE_ARGS} name
Definition: flags.cc:1349
Map * MapForExternalArrayType(ExternalArrayType array_type)
Definition: heap.cc:2938
void SetNumberStringCache(Object *number, String *str)
Definition: heap.cc:2879
static const int kNullValueRootIndex
Definition: v8.h:3928
void AgeInlineCaches()
Definition: heap.h:1599
MUST_USE_RESULT MaybeObject * AllocateCodeCache()
Definition: heap.cc:2047
MUST_USE_RESULT MaybeObject * AllocateTwoByteSymbol(Vector< const uc16 > str, uint32_t hash_field)
Definition: heap-inl.h:141
void AddString(String *string)
Definition: heap-inl.h:571
T Max(T a, T b)
Definition: utils.h:222
RootListIndex RootIndexForExternalArrayType(ExternalArrayType array_type)
Definition: heap.cc:2943
MUST_USE_RESULT MaybeObject * LookupAsciiSymbol(Vector< const char > str)
Definition: heap.cc:5336
static const int kOldSpaceRoots
Definition: heap.h:1572
void VisitExternalResources(v8::ExternalResourceVisitor *visitor)
Definition: heap.cc:1476
intptr_t OldGenAllocationLimit(intptr_t old_gen_size)
Definition: heap.h:1376
MUST_USE_RESULT MaybeObject * AllocateByteArray(int length, PretenureFlag pretenure)
Definition: heap.cc:3406
Address * allocation_top_address()
Definition: spaces.h:2183
void DoScavengeObject(Map *map, HeapObject **slot, HeapObject *obj)
Definition: heap.h:1560
Flag flags[]
Definition: flags.cc:1467
void SetGlobalGCPrologueCallback(GCCallback callback)
Definition: heap.h:1110
int int32_t
Definition: unicode.cc:47
static Object * Lookup(FixedArray *cache, String *string, String *pattern)
Definition: heap.cc:2732
void ClearInstanceofCache()
Definition: heap-inl.h:636
void RemoveGCPrologueCallback(GCEpilogueCallback callback)
Definition: heap.cc:6247
HeapObjectCallback GcSafeSizeOfOldObjectFunction()
Definition: heap.h:1201
bool InFromSpace(Object *object)
Definition: heap-inl.h:306
MUST_USE_RESULT MaybeObject * Uint32ToString(uint32_t value, bool check_number_string_cache=true)
Definition: heap.cc:2929
PromotionQueue * promotion_queue()
Definition: heap.h:1095
Map * SymbolMapForString(String *str)
Definition: heap.cc:4383
Marking * marking()
Definition: heap.h:1520
intptr_t * code_space_size
Definition: heap.h:2075
void AddGCEpilogueCallback(GCEpilogueCallback callback, GCType gc_type_filter)
Definition: heap.cc:6259
MUST_USE_RESULT MaybeObject * AllocateRawAsciiString(int length, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:4472
static void MoveBlock(Address dst, Address src, int byte_size)
Definition: heap-inl.h:390
FlagType type_
Definition: flags.cc:1351
bool linear_allocation()
Definition: heap.h:511
void IterateStrongRoots(ObjectVisitor *v, VisitMode mode)
Definition: heap.cc:5648
#define ASSERT(condition)
Definition: checks.h:270
bool InSpace(Address addr, AllocationSpace space)
Definition: heap.cc:5242
void(* GCPrologueCallback)(GCType type, GCCallbackFlags flags)
Definition: v8.h:2729
v8::Handle< v8::Value > Print(const v8::Arguments &args)
void public_set_code_stubs(UnseededNumberDictionary *value)
Definition: heap.h:1194
static const int kReduceMemoryFootprintMask
Definition: heap.h:1051
OldSpace * TargetSpace(HeapObject *object)
Definition: heap-inl.h:349
void AddGCPrologueCallback(GCEpilogueCallback callback, GCType gc_type_filter)
Definition: heap.cc:6239
void set_collector(GarbageCollector collector)
Definition: heap.h:2432
MUST_USE_RESULT MaybeObject * LookupTwoByteSymbol(Vector< const uc16 > str)
Definition: heap.cc:5371
ExternalArrayType
Definition: v8.h:1407
unsigned short uint16_t
Definition: unicode.cc:46
Address * NewSpaceAllocationLimitAddress()
Definition: heap.h:518
#define STRONG_ROOT_LIST(V)
Definition: heap.h:49
MUST_USE_RESULT MaybeObject * LookupSymbol(Vector< const char > str)
Definition: heap.cc:5321
bool SetUp(bool create_heap_objects)
Definition: heap.cc:6028
MUST_USE_RESULT MaybeObject * AllocateBlockContext(JSFunction *function, Context *previous, ScopeInfo *info)
Definition: heap.cc:4929
int global_ic_age()
Definition: heap.h:1595
void(* ScavengingCallback)(Map *map, HeapObject **slot, HeapObject *object)
Definition: heap.h:385
MUST_USE_RESULT MaybeObject * AllocateStringFromUtf8Slow(Vector< const char > str, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:4322
intptr_t CommittedMemoryExecutable()
Definition: heap.cc:209
ObjectIterator * next()
Definition: heap.cc:6381
friend class ExternalReference
Definition: heap.h:2273
#define SYMBOL_ACCESSOR(name, str)
Definition: heap.h:1141
MUST_USE_RESULT MaybeObject * CopyJSObject(JSObject *source)
Definition: heap.cc:4144
int ms_count()
Definition: heap.h:1157
static const int kPageSize
Definition: spaces.h:695
int * pending_global_handle_count
Definition: heap.h:2084
Address * store_buffer_top_address()
Definition: heap.h:1224
void CallGlobalGCPrologueCallback()
Definition: heap.h:1550
void ReserveSpace(int new_space_size, int pointer_space_size, int data_space_size, int code_space_size, int map_space_size, int cell_space_size, int large_object_size)
Definition: heap.cc:610
friend class GCTracer
Definition: heap.h:2049
MUST_USE_RESULT MaybeObject * AllocateInternalSymbol(unibrow::CharacterStream *buffer, int chars, uint32_t hash_field)
Definition: heap.cc:4407
Address always_allocate_scope_depth_address()
Definition: heap.h:508
void IterateAndMarkPointersToFromSpace(Address start, Address end, ObjectSlotCallback callback)
Definition: heap.cc:5427
MUST_USE_RESULT MaybeObject * AllocateJSArrayWithElements(FixedArrayBase *array_base, ElementsKind elements_kind, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:4010
ArrayStorageAllocationMode
Definition: heap.h:430
STATIC_CHECK(kUndefinedValueRootIndex==Internals::kUndefinedValueRootIndex)
virtual Object * RetainAs(Object *object)=0
StoreBuffer * store_buffer()
Definition: heap.h:1516
MUST_USE_RESULT MaybeObject * AllocateHeapNumber(double value, PretenureFlag pretenure)
Definition: heap.cc:2407
unsigned int seed
Definition: test-strings.cc:17
void ClearCodeCache(Heap *heap)
Definition: objects-inl.h:4963
bool CollectGarbage(AllocationSpace space, GarbageCollector collector, const char *gc_reason, const char *collector_reason)
Definition: heap.cc:491
MUST_USE_RESULT MaybeObject * AllocateWithContext(JSFunction *function, Context *previous, JSObject *extension)
Definition: heap.cc:4912
static const int kStringSplitCacheSize
Definition: heap.h:2511
intptr_t * lo_space_size
Definition: heap.h:2081
MUST_USE_RESULT MaybeObject * CopyFixedArrayWithMap(FixedArray *src, Map *map)
Definition: heap.cc:4604
uint8_t byte
Definition: globals.h:171
int NotifyContextDisposed()
Definition: heap.h:1075
MUST_USE_RESULT MaybeObject * AllocateConsString(String *first, String *second)
Definition: heap.cc:3114
#define STRUCT_MAP_ACCESSOR(NAME, Name, name)
Definition: heap.h:1134
MUST_USE_RESULT MaybeObject * NumberToString(Object *number, bool check_number_string_cache=true)
Definition: heap.cc:2899
void public_set_empty_script(Script *script)
Definition: heap.h:1210
int * near_death_global_handle_count
Definition: heap.h:2085
STATIC_ASSERT((kEntriesPerBucket &(kEntriesPerBucket-1))==0)
String *(* ExternalStringTableUpdaterCallback)(Heap *heap, Object **pointer)
Definition: heap.h:256
static const intptr_t kMinimumPromotionLimit
Definition: heap.h:1362
void SetArgumentsAdaptorDeoptPCOffset(int pc_offset)
Definition: heap.h:1580
static void ClearMark(HeapObject *object)
Definition: heap.h:2622
static const int kEndMarker
Definition: heap.h:2066
bool IdleNotification(int hint)
Definition: heap.cc:5015
MUST_USE_RESULT MaybeObject * AllocateStringFromAscii(Vector< const char > str, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:4302
intptr_t MaxOldGenerationSize()
Definition: heap.h:471
void IncreaseTotalRegexpCodeGenerated(int size)
Definition: heap.h:1499
friend class MarkCompactCollector
Definition: heap.h:2055
friend class MapCompact
Definition: heap.h:2057
void EnsureHeapIsIterable()
Definition: heap.cc:4985
static const int kArgumentsObjectSizeStrict
Definition: heap.h:866
MUST_USE_RESULT MaybeObject * AllocateUninitializedFixedDoubleArray(int length, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:4759
int(* HeapObjectCallback)(HeapObject *obj)
Definition: v8globals.h:245
bool always_allocate()
Definition: heap.h:507
void SetLastScriptId(Object *last_script_id)
Definition: heap-inl.h:489
MUST_USE_RESULT MaybeObject * AllocateStringFromUtf8(Vector< const char > str, PretenureFlag pretenure=NOT_TENURED)
Definition: heap-inl.h:85
bool AdvanceSweeper(intptr_t bytes_to_sweep)
Definition: spaces.cc:2274
static bool IsMarked(HeapObject *object)
Definition: heap.h:2618
#define MUST_USE_RESULT
Definition: globals.h:360
void ClearCacheOnMap(Map *map)
Definition: heap.h:1487
void RemoveGCEpilogueCallback(GCEpilogueCallback callback)
Definition: heap.cc:6267
int MaxObjectSizeInNewSpace()
Definition: heap.h:1480
static NewSpacePage * FromAddress(Address address_in_page)
Definition: spaces.h:1774
void CollectAllAvailableGarbage(const char *gc_reason=NULL)
Definition: heap.cc:462
bool ConfigureHeapDefault()
Definition: heap.cc:5792
#define ROOT_LIST(V)
Definition: heap.h:155
static const int kNoGCFlags
Definition: heap.h:1049
MUST_USE_RESULT MaybeObject * AllocateFixedArrayWithHoles(int length, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:4720
const int kPointerSize
Definition: globals.h:234
bool LookupTwoCharsSymbolIfExists(String *str, String **symbol)
MUST_USE_RESULT MaybeObject * AllocateInitialMap(JSFunction *fun)
Definition: heap.cc:3783
friend class ExternalReference
Definition: heap.h:2590
int * global_handle_count
Definition: heap.h:2082
MUST_USE_RESULT MaybeObject * AllocateForeign(Address address, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:2992
friend class DisallowAllocationFailure
Definition: heap.h:2050
MUST_USE_RESULT MaybeObject * AllocateModuleContext(Context *previous, ScopeInfo *scope_info)
Definition: heap.cc:4859
void QueueMemoryChunkForFree(MemoryChunk *chunk)
Definition: heap.cc:7093
MUST_USE_RESULT MaybeObject * AllocateGlobalContext()
Definition: heap.cc:4844
MUST_USE_RESULT MaybeObject * AllocateExternalArray(int length, ExternalArrayType array_type, void *external_pointer, PretenureFlag pretenure)
Definition: heap.cc:3461
const int kHeapObjectTag
Definition: v8.h:3848
static void ScavengePointer(HeapObject **p)
Definition: heap-inl.h:409
intptr_t * cell_space_capacity
Definition: heap.h:2080
intptr_t * memory_allocator_size
Definition: heap.h:2087
#define DISALLOW_IMPLICIT_CONSTRUCTORS(TypeName)
Definition: globals.h:332
void decrement_scan_on_scavenge_pages()
Definition: heap.h:1088
void IncrementYoungSurvivorsCounter(int survived)
Definition: heap.h:1437
MUST_USE_RESULT MaybeObject * AllocatePolymorphicCodeCache()
Definition: heap.cc:2058
intptr_t * code_space_capacity
Definition: heap.h:2076
#define DISALLOW_COPY_AND_ASSIGN(TypeName)
Definition: globals.h:321
void Update(Map *map, String *name, int field_offset)
Definition: heap.cc:6982
static Map * MapOfMarkedObject(HeapObject *object)
Definition: heap.h:2634
OldSpace * old_pointer_space()
Definition: heap.h:500
bool ConfigureHeap(int max_semispace_size, intptr_t max_old_gen_size, intptr_t max_executable_size)
Definition: heap.cc:5722
intptr_t * map_space_size
Definition: heap.h:2077
static double TimeCurrentMillis()
MUST_USE_RESULT MaybeObject * AllocateTypeFeedbackInfo()
Definition: heap.cc:2074
bool CreateApiObjects()
Definition: heap.cc:2466
GCType
Definition: v8.h:2718
HeapState gc_state()
Definition: heap.h:1280
#define SYMBOL_LIST(V)
Definition: heap.h:159
OldSpace * code_space()
Definition: heap.h:502
static const int kMakeHeapIterableMask
Definition: heap.h:1056
void public_set_store_buffer_top(Address *top)
Definition: heap.h:1214
intptr_t AdjustAmountOfExternalAllocatedMemory(intptr_t change_in_bytes)
Definition: heap-inl.h:463
MUST_USE_RESULT MaybeObject * AllocateJSArrayAndStorage(ElementsKind elements_kind, int length, int capacity, ArrayStorageAllocationMode mode=DONT_INITIALIZE_ARRAY_ELEMENTS, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:3964
Object ** global_contexts_list_address()
Definition: heap.h:1229
static const int kUndefinedValueRootIndex
Definition: v8.h:3927
void Iterate(ObjectVisitor *v)
Definition: heap-inl.h:581
bool InToSpace(Object *object)
Definition: heap-inl.h:311
void Update(DescriptorArray *array, String *name, int result)
Definition: heap.h:2296
bool UncommitFromSpace()
Definition: heap.h:523
void GarbageCollectionPrologue()
Definition: heap.cc:386
static void Clear(FixedArray *cache)
Definition: heap.cc:2794
#define ROOT_ACCESSOR(type, name, camel_name)
Definition: heap.h:1666
bool HasBeenSetUp()
Definition: heap.cc:228
LargeObjectSpace * lo_space()
Definition: heap.h:505
#define BASE_EMBEDDED
Definition: allocation.h:68
MUST_USE_RESULT MaybeObject * AllocateJSMessageObject(String *type, JSArray *arguments, int start_position, int end_position, Object *script, Object *stack_trace, Object *stack_frames)
Definition: heap.cc:3045
static const int kFalseValueRootIndex
Definition: v8.h:3930
MUST_USE_RESULT MaybeObject * AllocateExternalStringFromTwoByte(const ExternalTwoByteString::Resource *resource)
Definition: heap.cc:3352
MUST_USE_RESULT MaybeObject * AllocatePartialMap(InstanceType instance_type, int instance_size)
Definition: heap.cc:1989
int * free_global_handle_count
Definition: heap.h:2086
MUST_USE_RESULT MaybeObject * CreateCode(const CodeDesc &desc, Code::Flags flags, Handle< Object > self_reference, bool immovable=false)
Definition: heap.cc:3483
void PerformScavenge()
Definition: heap.cc:564
STATIC_ASSERT((1<< kTranscendentalTypeBits) >=kNumberOfCaches)
MUST_USE_RESULT MaybeObject * AllocateAsciiSymbol(Vector< const char > str, uint32_t hash_field)
Definition: heap-inl.h:107
void set_gc_count(unsigned int count)
Definition: heap.h:2435
static const int kAbortIncrementalMarkingMask
Definition: heap.h:1052
Vector< const char > CStrVector(const char *data)
Definition: utils.h:525
void FreeQueuedChunks()
Definition: heap.cc:7099
CellSpace * cell_space()
Definition: heap.h:504
intptr_t CommittedMemory()
Definition: heap.cc:197
void increment_promoted_objects_size(int object_size)
Definition: heap.h:2440
Object * GetNumberStringCache(Object *number)
Definition: heap.cc:2859
intptr_t SizeOfObjects()
Definition: heap.cc:410
MUST_USE_RESULT MaybeObject * AllocateRawFixedDoubleArray(int length, PretenureFlag pretenure)
Definition: heap.cc:4797
MUST_USE_RESULT MaybeObject * AllocateEmptyJSArray(ElementsKind elements_kind, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.h:536
Address * NewSpaceAllocationTopAddress()
Definition: heap.h:515
void RecordWrites(Address address, int start, int len)
Definition: heap-inl.h:340
void(* GCEpilogueCallback)(GCType type, GCCallbackFlags flags)
Definition: v8.h:2730
intptr_t get_max_alive_after_gc()
Definition: heap.h:1507
void UpdateReferencesInExternalStringTable(ExternalStringTableUpdaterCallback updater_func)
Definition: heap.cc:1332
MUST_USE_RESULT MaybeObject * AllocateJSProxy(Object *handler, Object *prototype)
Definition: heap.cc:4025
void ProcessWeakReferences(WeakObjectRetainer *retainer)
Definition: heap.cc:1395
void ClearNormalizedMapCaches()
Definition: heap.cc:720
MUST_USE_RESULT MaybeObject * NumberFromDouble(double value, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:2971
static const int kLength
Definition: heap.h:2233
MUST_USE_RESULT MaybeObject * AllocateExternalSymbol(Vector< const char > str, int chars)
intptr_t * old_data_space_capacity
Definition: heap.h:2074
void set_global_contexts_list(Object *object)
Definition: heap.h:1151
intptr_t Available()
Definition: heap.cc:216
MUST_USE_RESULT MaybeObject * AllocateArgumentsObject(Object *callee, int length)
Definition: heap.cc:3709
int Lookup(Map *map, String *name)
Definition: heap.cc:6970
int InitialSemiSpaceSize()
Definition: heap.h:470
int get_min_in_mutator()
Definition: heap.h:1510
intptr_t Capacity()
Definition: spaces.h:2125
int get_max_gc_pause()
Definition: heap.h:1504
MUST_USE_RESULT MaybeObject * CopyFixedArray(FixedArray *src)
Definition: heap-inl.h:173
void IterateRoots(ObjectVisitor *v, VisitMode mode)
Definition: heap.cc:5630
Address NewSpaceTop()
Definition: heap.h:497
MUST_USE_RESULT MaybeObject * ReinitializeJSReceiver(JSReceiver *object, InstanceType type, int size)
Definition: heap.cc:4216
MUST_USE_RESULT MaybeObject * AllocateAccessorPair()
Definition: heap.cc:2063
int * weak_global_handle_count
Definition: heap.h:2083
MUST_USE_RESULT MaybeObject * AllocateCatchContext(JSFunction *function, Context *previous, String *name, Object *thrown_object)
Definition: heap.cc:4891
MUST_USE_RESULT MaybeObject * Get(Type type, double input)
Definition: heap-inl.h:652
void SetGlobalGCEpilogueCallback(GCCallback callback)
Definition: heap.h:1114
static const int kEmptySymbolRootIndex
Definition: v8.h:3931
#define STRUCT_LIST(V)
Definition: objects.h:429
static const int kArgumentsLengthIndex
Definition: heap.h:869
bool UncommitFromSpace()
Definition: spaces.h:2267
MUST_USE_RESULT MaybeObject * LookupAsciiSymbol(const char *str)
Definition: heap.h:1003
void CheckNewSpaceExpansionCriteria()
Definition: heap.cc:1027
static void Enter(Heap *heap, FixedArray *cache, String *string, String *pattern, FixedArray *array)
Definition: heap.cc:2751
MUST_USE_RESULT MaybeObject * AllocateScopeInfo(int length)
Definition: heap.cc:4947
bool LookupSymbolIfExists(String *str, String **symbol)
Definition: heap.cc:5402
ExternalStringTable * external_string_table()
Definition: heap.h:1539
MUST_USE_RESULT MaybeObject * NumberFromUint32(uint32_t value, PretenureFlag pretenure=NOT_TENURED)
Definition: heap-inl.h:237
IncrementalMarking * incremental_marking()
Definition: heap.h:1524
bool Contains(Address addr)
Definition: heap.cc:5224
bool InNewSpacePage(Address addr)
void insert(HeapObject *target, int size)
Definition: heap-inl.h:43
String * hidden_symbol()
Definition: heap.h:1149
MUST_USE_RESULT MaybeObject * AllocateUninitializedFixedArray(int length)
Definition: heap.cc:4729
static const int kStartMarker
Definition: heap.h:2065
bool ShouldBePromoted(Address old_address, int object_size)
Definition: heap-inl.h:322
uintptr_t NewSpaceMask()
Definition: heap.h:496
static const intptr_t kMinimumAllocationLimit
Definition: heap.h:1363
int MaxSemiSpaceSize()
Definition: heap.h:468
MUST_USE_RESULT MaybeObject * AllocateJSObjectFromMap(Map *map, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:3892
void RememberUnmappedPage(Address page, bool compacted)
Definition: heap.cc:7147
Object * global_contexts_list()
Definition: heap.h:1154
static const int kNotFound
Definition: heap.h:2238
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination trace on stack replacement optimize closures functions with arguments object optimize functions containing for in loops profiler considers IC stability primitive functions trigger their own optimization re try self optimization if it failed insert an interrupt check at function exit execution budget before interrupt is triggered call count before self optimization self_optimization count_based_interrupts weighted_back_edges trace_opt emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 enable use of ARMv7 instructions if enable use of MIPS FPU instructions if NULL
Definition: flags.cc:274
friend class TranscendentalCacheStub
Definition: heap.h:2588
MUST_USE_RESULT MaybeObject * LookupSingleCharacterStringFromCode(uint16_t code)
Definition: heap.cc:3381
intptr_t PromotedTotalSize()
Definition: heap.h:1344
static void CopyBlock(Address dst, Address src, int byte_size)
Definition: heap-inl.h:383
MUST_USE_RESULT MaybeObject * AllocateJSGlobalPropertyCell(Object *value)
Definition: heap.cc:2443
MUST_USE_RESULT MaybeObject * CreateSymbol(const char *str, int length, int hash)
MUST_USE_RESULT MaybeObject * AllocateFixedDoubleArrayWithHoles(int length, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:4776
MUST_USE_RESULT MaybeObject * AllocateRawFixedArray(int length)
Definition: heap.cc:4589
static const int kArgumentsCalleeIndex
Definition: heap.h:871
MUST_USE_RESULT MaybeObject * AllocateHashTable(int length, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:4832
void public_set_non_monomorphic_cache(UnseededNumberDictionary *value)
Definition: heap.h:1206
static const int kHeaderSize
Definition: objects.h:2115
Object * FindCodeObject(Address a)
Definition: heap.cc:961
#define DECLARE_STRUCT_MAP(NAME, Name, name)
Definition: heap.h:1396
MapSpace * map_space()
Definition: heap.h:503
intptr_t PromotedSpaceSizeOfObjects()
Definition: heap.cc:5836
intptr_t * old_pointer_space_capacity
Definition: heap.h:2072
bool OldGenerationAllocationLimitReached()
Definition: heap-inl.h:316
#define ROOT_INDEX_DECLARATION(type, name, camel_name)
Definition: heap.h:1391
MUST_USE_RESULT MaybeObject * NumberFromInt32(int32_t value, PretenureFlag pretenure=NOT_TENURED)
Definition: heap-inl.h:229
void ClearJSFunctionResultCaches()
Definition: heap.cc:696
#define SYMBOL_INDEX_DECLARATION(name, str)
Definition: heap.h:1400
void RecordStats(HeapStats *stats, bool take_snapshot=false)
Definition: heap.cc:5799
NewSpacePage * prev_page() const
Definition: spaces.h:1746
bool IsInGCPostProcessing()
Definition: heap.h:1282
void CreateFillerObjectAt(Address addr, int size)
Definition: heap.cc:3447
MUST_USE_RESULT MaybeObject * AllocateSharedFunctionInfo(Object *name)
Definition: heap.cc:3004
bool AdvanceSweepers(int step_size)
Definition: heap.h:1533
void increment_scan_on_scavenge_pages()
Definition: heap.h:1081
Address * allocation_limit_address()
Definition: spaces.h:2184
intptr_t * map_space_capacity
Definition: heap.h:2078
static const int kTrueValueRootIndex
Definition: v8.h:3929
static const int kCapacityMask
Definition: heap.h:2234
static void ScavengeObject(HeapObject **p, HeapObject *object)
Definition: heap-inl.h:414
bool IsSweepingComplete()
Definition: heap.h:1528
static const int kTranscendentalTypeBits
Definition: heap.h:2526
MUST_USE_RESULT MaybeObject * PrepareForCompare(String *str)
Definition: heap-inl.h:444
static bool IsAtStart(Address addr)
Definition: spaces.h:1760
T Min(T a, T b)
Definition: utils.h:229
intptr_t * memory_allocator_capacity
Definition: heap.h:2088
static const int kSweepPreciselyMask
Definition: heap.h:1050
intptr_t Capacity()
Definition: heap.cc:185
MUST_USE_RESULT MaybeObject * AllocateStruct(InstanceType type)
Definition: heap.cc:4956
intptr_t * old_data_space_size
Definition: heap.h:2073
intptr_t OldGenerationSpaceAvailable()
Definition: heap.h:1354
MUST_USE_RESULT MaybeObject * AllocateJSModule()
Definition: heap.cc:3954
GCTracer * tracer()
Definition: heap.h:1493
NewSpace * new_space()
Definition: heap.h:499
MUST_USE_RESULT MaybeObject * AllocateMap(InstanceType instance_type, int instance_size, ElementsKind elements_kind=TERMINAL_FAST_ELEMENTS_KIND)
Definition: heap.cc:2011
intptr_t MaxExecutableSize()
Definition: heap.h:472
MUST_USE_RESULT MaybeObject * AllocateRaw(int size_in_bytes, AllocationSpace space, AllocationSpace retry_space)
Definition: heap-inl.h:183
uintptr_t mask()
Definition: spaces.h:2169
void PrintShortHeapStatistics()
Definition: heap.cc:321
static const int kHashMask
Definition: heap.h:2236
FlagType type() const
Definition: flags.cc:1358
AllocationSpace TargetSpaceId(InstanceType type)
Definition: heap-inl.h:358
OldSpace * old_data_space()
Definition: heap.h:501
MUST_USE_RESULT MaybeObject * AllocateRawTwoByteString(int length, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:4519
static void AssertValidRange(Address from, Address to)
Definition: spaces.h:1923
static void SetMark(HeapObject *object)
Definition: heap.h:2628
MarkCompactCollector * mark_compact_collector()
Definition: heap.h:1512
MUST_USE_RESULT MaybeObject * AllocateFunction(Map *function_map, SharedFunctionInfo *shared, Object *prototype, PretenureFlag pretenure=TENURED)
Definition: heap.cc:3694
void UpdateNewSpaceReferencesInExternalStringTable(ExternalStringTableUpdaterCallback updater_func)
Definition: heap.cc:1297
StoreBufferRebuilder(StoreBuffer *store_buffer)
Definition: heap.h:261
MUST_USE_RESULT MaybeObject * AllocateStringFromTwoByte(Vector< const uc16 > str, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:4361
static const int kEntriesPerBucket
Definition: heap.h:2237
void EnsureFromSpaceIsCommitted()
Definition: heap.cc:682
MUST_USE_RESULT MaybeObject * AllocateAliasedArgumentsEntry(int slot)
Definition: heap.cc:2087
const int MB
Definition: globals.h:222