v8 3.11.10 (node 0.8.26)
V8 is Google's open source JavaScript engine
heap.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #include "accessors.h"
31 #include "api.h"
32 #include "bootstrapper.h"
33 #include "codegen.h"
34 #include "compilation-cache.h"
35 #include "debug.h"
36 #include "deoptimizer.h"
37 #include "global-handles.h"
38 #include "heap-profiler.h"
39 #include "incremental-marking.h"
40 #include "liveobjectlist-inl.h"
41 #include "mark-compact.h"
42 #include "natives.h"
43 #include "objects-visiting.h"
44 #include "objects-visiting-inl.h"
45 #include "once.h"
46 #include "runtime-profiler.h"
47 #include "scopeinfo.h"
48 #include "snapshot.h"
49 #include "store-buffer.h"
50 #include "v8threads.h"
51 #include "vm-state-inl.h"
52 #if V8_TARGET_ARCH_ARM && !V8_INTERPRETED_REGEXP
53 #include "regexp-macro-assembler.h"
54 #include "arm/regexp-macro-assembler-arm.h"
55 #endif
56 #if V8_TARGET_ARCH_MIPS && !V8_INTERPRETED_REGEXP
57 #include "regexp-macro-assembler.h"
58 #include "mips/regexp-macro-assembler-mips.h"
59 #endif
60 
61 namespace v8 {
62 namespace internal {
63 
64 
65 Heap::Heap()
66  : isolate_(NULL),
67 // semispace_size_ should be a power of 2 and old_generation_size_ should be
68 // a multiple of Page::kPageSize.
69 #if defined(V8_TARGET_ARCH_X64)
70 #define LUMP_OF_MEMORY (2 * MB)
71  code_range_size_(512*MB),
72 #else
73 #define LUMP_OF_MEMORY MB
74  code_range_size_(0),
75 #endif
76 #if defined(ANDROID)
77  reserved_semispace_size_(4 * Max(LUMP_OF_MEMORY, Page::kPageSize)),
78  max_semispace_size_(4 * Max(LUMP_OF_MEMORY, Page::kPageSize)),
79  initial_semispace_size_(Page::kPageSize),
80  max_old_generation_size_(192*MB),
81  max_executable_size_(max_old_generation_size_),
82 #else
83  reserved_semispace_size_(8 * Max(LUMP_OF_MEMORY, Page::kPageSize)),
84  max_semispace_size_(8 * Max(LUMP_OF_MEMORY, Page::kPageSize)),
85  initial_semispace_size_(Page::kPageSize),
86  max_old_generation_size_(700ul * LUMP_OF_MEMORY),
87  max_executable_size_(256l * LUMP_OF_MEMORY),
88 #endif
89 
90 // Variables set based on semispace_size_ and old_generation_size_ in
91 // ConfigureHeap (survived_since_last_expansion_, external_allocation_limit_)
92 // Will be 4 * reserved_semispace_size_ to ensure that young
93 // generation can be aligned to its size.
94  survived_since_last_expansion_(0),
95  sweep_generation_(0),
96  always_allocate_scope_depth_(0),
97  linear_allocation_scope_depth_(0),
98  contexts_disposed_(0),
99  global_ic_age_(0),
100  scan_on_scavenge_pages_(0),
101  new_space_(this),
102  old_pointer_space_(NULL),
103  old_data_space_(NULL),
104  code_space_(NULL),
105  map_space_(NULL),
106  cell_space_(NULL),
107  lo_space_(NULL),
108  gc_state_(NOT_IN_GC),
109  gc_post_processing_depth_(0),
110  ms_count_(0),
111  gc_count_(0),
112  remembered_unmapped_pages_index_(0),
113  unflattened_strings_length_(0),
114 #ifdef DEBUG
115  allocation_allowed_(true),
116  allocation_timeout_(0),
117  disallow_allocation_failure_(false),
118  debug_utils_(NULL),
119 #endif // DEBUG
120  new_space_high_promotion_mode_active_(false),
121  old_gen_promotion_limit_(kMinimumPromotionLimit),
122  old_gen_allocation_limit_(kMinimumAllocationLimit),
123  old_gen_limit_factor_(1),
124  size_of_old_gen_at_last_old_space_gc_(0),
125  external_allocation_limit_(0),
126  amount_of_external_allocated_memory_(0),
127  amount_of_external_allocated_memory_at_last_global_gc_(0),
128  old_gen_exhausted_(false),
129  store_buffer_rebuilder_(store_buffer()),
130  hidden_symbol_(NULL),
131  global_gc_prologue_callback_(NULL),
132  global_gc_epilogue_callback_(NULL),
133  gc_safe_size_of_old_object_(NULL),
134  total_regexp_code_generated_(0),
135  tracer_(NULL),
136  young_survivors_after_last_gc_(0),
137  high_survival_rate_period_length_(0),
138  survival_rate_(0),
139  previous_survival_rate_trend_(Heap::STABLE),
140  survival_rate_trend_(Heap::STABLE),
141  max_gc_pause_(0),
142  max_alive_after_gc_(0),
143  min_in_mutator_(kMaxInt),
144  alive_after_last_gc_(0),
145  last_gc_end_timestamp_(0.0),
146  store_buffer_(this),
147  marking_(this),
148  incremental_marking_(this),
149  number_idle_notifications_(0),
150  last_idle_notification_gc_count_(0),
151  last_idle_notification_gc_count_init_(false),
152  mark_sweeps_since_idle_round_started_(0),
153  ms_count_at_last_idle_notification_(0),
154  gc_count_at_last_idle_gc_(0),
155  scavenges_since_last_idle_round_(kIdleScavengeThreshold),
156  promotion_queue_(this),
157  configured_(false),
158  chunks_queued_for_free_(NULL) {
159  // Allow build-time customization of the max semispace size. Building
160  // V8 with snapshots and a non-default max semispace size is much
161  // easier if you can define it as part of the build environment.
162 #if defined(V8_MAX_SEMISPACE_SIZE)
163  max_semispace_size_ = reserved_semispace_size_ = V8_MAX_SEMISPACE_SIZE;
164 #endif
165 
166  intptr_t max_virtual = OS::MaxVirtualMemory();
167 
168  if (max_virtual > 0) {
169  if (code_range_size_ > 0) {
170  // Reserve no more than 1/8 of the memory for the code range.
171  code_range_size_ = Min(code_range_size_, max_virtual >> 3);
172  }
173  }
174 
175  memset(roots_, 0, sizeof(roots_[0]) * kRootListLength);
176  global_contexts_list_ = NULL;
177  mark_compact_collector_.heap_ = this;
178  external_string_table_.heap_ = this;
179  // Put a dummy entry in the remembered pages so we can find the list in
180  // the minidump even if there are no real unmapped pages.
181  RememberUnmappedPage(NULL, false);
182 }
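// Illustrative arithmetic for the defaults above, assuming Page::kPageSize is
// no larger than LUMP_OF_MEMORY: a non-Android x64 build starts with
// reserved_semispace_size_ = max_semispace_size_ = 8 * 2 MB = 16 MB and a
// 512 MB code range, while a non-Android 32-bit build gets 8 * 1 MB = 8 MB
// semispaces and no dedicated code range.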
183 
184 
185 intptr_t Heap::Capacity() {
186  if (!HasBeenSetUp()) return 0;
187 
188  return new_space_.Capacity() +
189  old_pointer_space_->Capacity() +
190  old_data_space_->Capacity() +
191  code_space_->Capacity() +
192  map_space_->Capacity() +
193  cell_space_->Capacity();
194 }
195 
196 
197 intptr_t Heap::CommittedMemory() {
198  if (!HasBeenSetUp()) return 0;
199 
200  return new_space_.CommittedMemory() +
201  old_pointer_space_->CommittedMemory() +
202  old_data_space_->CommittedMemory() +
203  code_space_->CommittedMemory() +
204  map_space_->CommittedMemory() +
205  cell_space_->CommittedMemory() +
206  lo_space_->Size();
207 }
208 
209 intptr_t Heap::CommittedMemoryExecutable() {
210  if (!HasBeenSetUp()) return 0;
211 
212  return isolate()->memory_allocator()->SizeExecutable();
213 }
214 
215 
216 intptr_t Heap::Available() {
217  if (!HasBeenSetUp()) return 0;
218 
219  return new_space_.Available() +
220  old_pointer_space_->Available() +
221  old_data_space_->Available() +
222  code_space_->Available() +
223  map_space_->Available() +
224  cell_space_->Available();
225 }
226 
227 
228 bool Heap::HasBeenSetUp() {
229  return old_pointer_space_ != NULL &&
230  old_data_space_ != NULL &&
231  code_space_ != NULL &&
232  map_space_ != NULL &&
233  cell_space_ != NULL &&
234  lo_space_ != NULL;
235 }
236 
237 
238 int Heap::GcSafeSizeOfOldObject(HeapObject* object) {
239  if (IntrusiveMarking::IsMarked(object)) {
240  return IntrusiveMarking::SizeOfMarkedObject(object);
241  }
242  return object->SizeFromMap(object->map());
243 }
244 
245 
246 GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space,
247  const char** reason) {
248  // Is global GC requested?
249  if (space != NEW_SPACE) {
250  isolate_->counters()->gc_compactor_caused_by_request()->Increment();
251  *reason = "GC in old space requested";
252  return MARK_COMPACTOR;
253  }
254 
255  if (FLAG_gc_global || (FLAG_stress_compaction && (gc_count_ & 1) != 0)) {
256  *reason = "GC in old space forced by flags";
257  return MARK_COMPACTOR;
258  }
259 
260  // Is enough data promoted to justify a global GC?
261  if (OldGenerationPromotionLimitReached()) {
262  isolate_->counters()->gc_compactor_caused_by_promoted_data()->Increment();
263  *reason = "promotion limit reached";
264  return MARK_COMPACTOR;
265  }
266 
267  // Have allocation in OLD and LO failed?
268  if (old_gen_exhausted_) {
269  isolate_->counters()->
270  gc_compactor_caused_by_oldspace_exhaustion()->Increment();
271  *reason = "old generations exhausted";
272  return MARK_COMPACTOR;
273  }
274 
275  // Is there enough space left in OLD to guarantee that a scavenge can
276  // succeed?
277  //
278  // Note that MemoryAllocator->MaxAvailable() undercounts the memory available
279  // for object promotion. It counts only the bytes that the memory
280  // allocator has not yet allocated from the OS and assigned to any space,
281  // and does not count available bytes already in the old space or code
282  // space. Undercounting is safe---we may get an unrequested full GC when
283  // a scavenge would have succeeded.
284  if (isolate_->memory_allocator()->MaxAvailable() <= new_space_.Size()) {
285  isolate_->counters()->
286  gc_compactor_caused_by_oldspace_exhaustion()->Increment();
287  *reason = "scavenge might not succeed";
288  return MARK_COMPACTOR;
289  }
290 
291  // Default
292  *reason = NULL;
293  return SCAVENGER;
294 }
295 
296 
297 // TODO(1238405): Combine the infrastructure for --heap-stats and
298 // --log-gc to avoid the complicated preprocessor and flag testing.
299 void Heap::ReportStatisticsBeforeGC() {
300  // Heap::ReportHeapStatistics will also log NewSpace statistics when
301  // --log-gc is set. The following logic is used to avoid
302  // double logging.
303 #ifdef DEBUG
304  if (FLAG_heap_stats || FLAG_log_gc) new_space_.CollectStatistics();
305  if (FLAG_heap_stats) {
306  ReportHeapStatistics("Before GC");
307  } else if (FLAG_log_gc) {
308  new_space_.ReportStatistics();
309  }
310  if (FLAG_heap_stats || FLAG_log_gc) new_space_.ClearHistograms();
311 #else
312  if (FLAG_log_gc) {
313  new_space_.CollectStatistics();
314  new_space_.ReportStatistics();
315  new_space_.ClearHistograms();
316  }
317 #endif // DEBUG
318 }
319 
320 
321 void Heap::PrintShortHeapStatistics() {
322  if (!FLAG_trace_gc_verbose) return;
323  PrintF("Memory allocator, used: %8" V8_PTR_PREFIX "d"
324  ", available: %8" V8_PTR_PREFIX "d\n",
325  isolate_->memory_allocator()->Size(),
326  isolate_->memory_allocator()->Available());
327  PrintF("New space, used: %8" V8_PTR_PREFIX "d"
328  ", available: %8" V8_PTR_PREFIX "d\n",
329  Heap::new_space_.Size(),
330  new_space_.Available());
331  PrintF("Old pointers, used: %8" V8_PTR_PREFIX "d"
332  ", available: %8" V8_PTR_PREFIX "d"
333  ", waste: %8" V8_PTR_PREFIX "d\n",
334  old_pointer_space_->Size(),
335  old_pointer_space_->Available(),
336  old_pointer_space_->Waste());
337  PrintF("Old data space, used: %8" V8_PTR_PREFIX "d"
338  ", available: %8" V8_PTR_PREFIX "d"
339  ", waste: %8" V8_PTR_PREFIX "d\n",
340  old_data_space_->Size(),
341  old_data_space_->Available(),
342  old_data_space_->Waste());
343  PrintF("Code space, used: %8" V8_PTR_PREFIX "d"
344  ", available: %8" V8_PTR_PREFIX "d"
345  ", waste: %8" V8_PTR_PREFIX "d\n",
346  code_space_->Size(),
347  code_space_->Available(),
348  code_space_->Waste());
349  PrintF("Map space, used: %8" V8_PTR_PREFIX "d"
350  ", available: %8" V8_PTR_PREFIX "d"
351  ", waste: %8" V8_PTR_PREFIX "d\n",
352  map_space_->Size(),
353  map_space_->Available(),
354  map_space_->Waste());
355  PrintF("Cell space, used: %8" V8_PTR_PREFIX "d"
356  ", available: %8" V8_PTR_PREFIX "d"
357  ", waste: %8" V8_PTR_PREFIX "d\n",
358  cell_space_->Size(),
359  cell_space_->Available(),
360  cell_space_->Waste());
361  PrintF("Large object space, used: %8" V8_PTR_PREFIX "d"
362  ", available: %8" V8_PTR_PREFIX "d\n",
363  lo_space_->Size(),
364  lo_space_->Available());
365 }
366 
367 
368 // TODO(1238405): Combine the infrastructure for --heap-stats and
369 // --log-gc to avoid the complicated preprocessor and flag testing.
370 void Heap::ReportStatisticsAfterGC() {
371  // Similar to the before GC, we use some complicated logic to ensure that
372  // NewSpace statistics are logged exactly once when --log-gc is turned on.
373 #if defined(DEBUG)
374  if (FLAG_heap_stats) {
375  new_space_.CollectStatistics();
376  ReportHeapStatistics("After GC");
377  } else if (FLAG_log_gc) {
378  new_space_.ReportStatistics();
379  }
380 #else
381  if (FLAG_log_gc) new_space_.ReportStatistics();
382 #endif // DEBUG
383 }
384 
385 
386 void Heap::GarbageCollectionPrologue() {
387  isolate_->transcendental_cache()->Clear();
388  ClearJSFunctionResultCaches();
389  gc_count_++;
390  unflattened_strings_length_ = 0;
391 #ifdef DEBUG
392  ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
393  allow_allocation(false);
394 
395  if (FLAG_verify_heap) {
396  Verify();
397  }
398 
399  if (FLAG_gc_verbose) Print();
400 #endif // DEBUG
401 
402 #if defined(DEBUG)
403  ReportStatisticsBeforeGC();
404 #endif // DEBUG
405 
406  LiveObjectList::GCPrologue();
407  store_buffer()->GCPrologue();
408 }
409 
410 intptr_t Heap::SizeOfObjects() {
411  intptr_t total = 0;
412  AllSpaces spaces;
413  for (Space* space = spaces.next(); space != NULL; space = spaces.next()) {
414  total += space->SizeOfObjects();
415  }
416  return total;
417 }
418 
419 void Heap::GarbageCollectionEpilogue() {
422 #ifdef DEBUG
423  allow_allocation(true);
424  ZapFromSpace();
425 
426  if (FLAG_verify_heap) {
427  Verify();
428  }
429 
430  if (FLAG_print_global_handles) isolate_->global_handles()->Print();
431  if (FLAG_print_handles) PrintHandles();
432  if (FLAG_gc_verbose) Print();
433  if (FLAG_code_stats) ReportCodeStatistics("After GC");
434 #endif
435 
436  isolate_->counters()->alive_after_last_gc()->Set(
437  static_cast<int>(SizeOfObjects()));
438 
439  isolate_->counters()->symbol_table_capacity()->Set(
440  symbol_table()->Capacity());
441  isolate_->counters()->number_of_symbols()->Set(
442  symbol_table()->NumberOfElements());
443 #if defined(DEBUG)
444  ReportStatisticsAfterGC();
445 #endif // DEBUG
446 #ifdef ENABLE_DEBUGGER_SUPPORT
447  isolate_->debug()->AfterGarbageCollection();
448 #endif // ENABLE_DEBUGGER_SUPPORT
449 }
450 
451 
452 void Heap::CollectAllGarbage(int flags, const char* gc_reason) {
453  // Since we are ignoring the return value, the exact choice of space does
454  // not matter, so long as we do not specify NEW_SPACE, which would not
455  // cause a full GC.
456  mark_compact_collector_.SetFlags(flags);
457  CollectGarbage(OLD_POINTER_SPACE, gc_reason);
458  mark_compact_collector_.SetFlags(kNoGCFlags);
459 }
460 
461 
462 void Heap::CollectAllAvailableGarbage(const char* gc_reason) {
463  // Since we are ignoring the return value, the exact choice of space does
464  // not matter, so long as we do not specify NEW_SPACE, which would not
465  // cause a full GC.
466  // Major GC would invoke weak handle callbacks on weakly reachable
467  // handles, but won't collect weakly reachable objects until next
468  // major GC. Therefore if we collect aggressively and weak handle callback
469  // has been invoked, we rerun major GC to release objects which become
470  // garbage.
471  // Note: as weak callbacks can execute arbitrary code, we cannot
472  // hope that eventually there will be no weak callbacks invocations.
473  // Therefore stop recollecting after several attempts.
474  mark_compact_collector()->SetFlags(kMakeHeapIterableMask |
475  kReduceMemoryFootprintMask);
476  isolate_->compilation_cache()->Clear();
477  const int kMaxNumberOfAttempts = 7;
478  for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) {
479  if (!CollectGarbage(OLD_POINTER_SPACE, MARK_COMPACTOR, gc_reason, NULL)) {
480  break;
481  }
482  }
483  mark_compact_collector()->SetFlags(kNoGCFlags);
484  new_space_.Shrink();
486  Shrink();
488 }
489 
490 
491 bool Heap::CollectGarbage(AllocationSpace space,
492  GarbageCollector collector,
493  const char* gc_reason,
494  const char* collector_reason) {
495  // The VM is in the GC state until exiting this function.
496  VMState state(isolate_, GC);
497 
498 #ifdef DEBUG
499  // Reset the allocation timeout to the GC interval, but make sure to
500  // allow at least a few allocations after a collection. The reason
501  // for this is that we have a lot of allocation sequences and we
502  // assume that a garbage collection will allow the subsequent
503  // allocation attempts to go through.
504  allocation_timeout_ = Max(6, FLAG_gc_interval);
505 #endif
506 
507  if (collector == SCAVENGER && !incremental_marking()->IsStopped()) {
508  if (FLAG_trace_incremental_marking) {
509  PrintF("[IncrementalMarking] Scavenge during marking.\n");
510  }
511  }
512 
513  if (collector == MARK_COMPACTOR &&
514  !mark_compact_collector()->abort_incremental_marking_ &&
515  !incremental_marking()->IsStopped() &&
516  !incremental_marking()->should_hurry() &&
517  FLAG_incremental_marking_steps) {
518  // Make progress in incremental marking.
519  const intptr_t kStepSizeWhenDelayedByScavenge = 1 * MB;
520  incremental_marking()->Step(kStepSizeWhenDelayedByScavenge,
521  IncrementalMarking::NO_GC_VIA_STACK_GUARD);
522  if (!incremental_marking()->IsComplete()) {
523  if (FLAG_trace_incremental_marking) {
524  PrintF("[IncrementalMarking] Delaying MarkSweep.\n");
525  }
526  collector = SCAVENGER;
527  collector_reason = "incremental marking delaying mark-sweep";
528  }
529  }
530 
531  bool next_gc_likely_to_collect_more = false;
532 
533  { GCTracer tracer(this, gc_reason, collector_reason);
534  GarbageCollectionPrologue();
535  // The GC count was incremented in the prologue. Tell the tracer about
536  // it.
537  tracer.set_gc_count(gc_count_);
538 
539  // Tell the tracer which collector we've selected.
540  tracer.set_collector(collector);
541 
542  HistogramTimer* rate = (collector == SCAVENGER)
543  ? isolate_->counters()->gc_scavenger()
544  : isolate_->counters()->gc_compactor();
545  rate->Start();
546  next_gc_likely_to_collect_more =
547  PerformGarbageCollection(collector, &tracer);
548  rate->Stop();
549 
550  GarbageCollectionEpilogue();
551  }
552 
553  ASSERT(collector == SCAVENGER || incremental_marking()->IsStopped());
554  if (incremental_marking()->IsStopped()) {
555  if (incremental_marking()->WorthActivating() && NextGCIsLikelyToBeFull()) {
556  incremental_marking()->Start();
557  }
558  }
559 
560  return next_gc_likely_to_collect_more;
561 }
562 
563 
564 void Heap::PerformScavenge() {
565  GCTracer tracer(this, NULL, NULL);
566  if (incremental_marking()->IsStopped()) {
567  PerformGarbageCollection(SCAVENGER, &tracer);
568  } else {
569  PerformGarbageCollection(MARK_COMPACTOR, &tracer);
570  }
571 }
572 
573 
574 #ifdef DEBUG
575 // Helper class for verifying the symbol table.
576 class SymbolTableVerifier : public ObjectVisitor {
577  public:
578  void VisitPointers(Object** start, Object** end) {
579  // Visit all HeapObject pointers in [start, end).
580  for (Object** p = start; p < end; p++) {
581  if ((*p)->IsHeapObject()) {
582  // Check that the symbol is actually a symbol.
583  ASSERT((*p)->IsTheHole() || (*p)->IsUndefined() || (*p)->IsSymbol());
584  }
585  }
586  }
587 };
588 #endif // DEBUG
589 
590 
591 static void VerifySymbolTable() {
592 #ifdef DEBUG
593  SymbolTableVerifier verifier;
594  HEAP->symbol_table()->IterateElements(&verifier);
595 #endif // DEBUG
596 }
597 
598 
599 static bool AbortIncrementalMarkingAndCollectGarbage(
600  Heap* heap,
601  AllocationSpace space,
602  const char* gc_reason = NULL) {
603  heap->mark_compact_collector()->SetFlags(Heap::kAbortIncrementalMarkingMask);
604  bool result = heap->CollectGarbage(space, gc_reason);
605  heap->mark_compact_collector()->SetFlags(Heap::kNoGCFlags);
606  return result;
607 }
608 
609 
610 void Heap::ReserveSpace(
611  int new_space_size,
612  int pointer_space_size,
613  int data_space_size,
614  int code_space_size,
615  int map_space_size,
616  int cell_space_size,
617  int large_object_size) {
618  NewSpace* new_space = &new_space_;
619  PagedSpace* old_pointer_space = old_pointer_space_;
620  PagedSpace* old_data_space = old_data_space_;
621  PagedSpace* code_space = code_space_;
622  PagedSpace* map_space = map_space_;
623  PagedSpace* cell_space = cell_space_;
624  LargeObjectSpace* lo_space = lo_space_;
625  bool gc_performed = true;
626  int counter = 0;
627  static const int kThreshold = 20;
628  while (gc_performed && counter++ < kThreshold) {
629  gc_performed = false;
630  if (!new_space->ReserveSpace(new_space_size)) {
631  AbortIncrementalMarkingAndCollectGarbage(this, NEW_SPACE,
632  "failed to reserve space in the new space");
633  gc_performed = true;
634  }
635  if (!old_pointer_space->ReserveSpace(pointer_space_size)) {
636  AbortIncrementalMarkingAndCollectGarbage(this, OLD_POINTER_SPACE,
637  "failed to reserve space in the old pointer space");
638  gc_performed = true;
639  }
640  if (!(old_data_space->ReserveSpace(data_space_size))) {
641  AbortIncrementalMarkingAndCollectGarbage(this, OLD_DATA_SPACE,
642  "failed to reserve space in the old data space");
643  gc_performed = true;
644  }
645  if (!(code_space->ReserveSpace(code_space_size))) {
646  AbortIncrementalMarkingAndCollectGarbage(this, CODE_SPACE,
647  "failed to reserve space in the code space");
648  gc_performed = true;
649  }
650  if (!(map_space->ReserveSpace(map_space_size))) {
651  AbortIncrementalMarkingAndCollectGarbage(this, MAP_SPACE,
652  "failed to reserve space in the map space");
653  gc_performed = true;
654  }
655  if (!(cell_space->ReserveSpace(cell_space_size))) {
656  AbortIncrementalMarkingAndCollectGarbage(this, CELL_SPACE,
657  "failed to reserve space in the cell space");
658  gc_performed = true;
659  }
660  // We add a slack-factor of 2 in order to have space for a series of
661  // large-object allocations that are only just larger than the page size.
662  large_object_size *= 2;
663  // The ReserveSpace method on the large object space checks how much
664  // we can expand the old generation. This includes expansion caused by
665  // allocation in the other spaces.
666  large_object_size += cell_space_size + map_space_size + code_space_size +
667  data_space_size + pointer_space_size;
668  if (!(lo_space->ReserveSpace(large_object_size))) {
669  AbortIncrementalMarkingAndCollectGarbage(this, LO_SPACE,
670  "failed to reserve space in the large object space");
671  gc_performed = true;
672  }
673  }
674 
675  if (gc_performed) {
676  // Failed to reserve the space after several attempts.
677  V8::FatalProcessOutOfMemory("Heap::ReserveSpace");
678  }
679 }
680 
681 
682 void Heap::EnsureFromSpaceIsCommitted() {
683  if (new_space_.CommitFromSpaceIfNeeded()) return;
684 
685  // Committing memory to from space failed.
686  // Try shrinking and try again.
687  Shrink();
688  if (new_space_.CommitFromSpaceIfNeeded()) return;
689 
690  // Committing memory to from space failed again.
691  // Memory is exhausted and we will die.
692  V8::FatalProcessOutOfMemory("Committing semi space failed.");
693 }
694 
695 
696 void Heap::ClearJSFunctionResultCaches() {
697  if (isolate_->bootstrapper()->IsActive()) return;
698 
699  Object* context = global_contexts_list_;
700  while (!context->IsUndefined()) {
701  // Get the caches for this context. GC can happen when the context
702  // is not fully initialized, so the caches can be undefined.
703  Object* caches_or_undefined =
704  Context::cast(context)->get(Context::JSFUNCTION_RESULT_CACHES);
705  if (!caches_or_undefined->IsUndefined()) {
706  FixedArray* caches = FixedArray::cast(caches_or_undefined);
707  // Clear the caches:
708  int length = caches->length();
709  for (int i = 0; i < length; i++) {
710  JSFunctionResultCache::cast(caches->get(i))->Clear();
711  }
712  }
713  // Get the next context:
714  context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK);
715  }
716 }
717 
718 
719 
720 void Heap::ClearNormalizedMapCaches() {
721  if (isolate_->bootstrapper()->IsActive() &&
722  !incremental_marking()->IsMarking()) {
723  return;
724  }
725 
726  Object* context = global_contexts_list_;
727  while (!context->IsUndefined()) {
728  // GC can happen when the context is not fully initialized,
729  // so the cache can be undefined.
730  Object* cache =
731  Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX);
732  if (!cache->IsUndefined()) {
733  NormalizedMapCache::cast(cache)->Clear();
734  }
735  context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK);
736  }
737 }
738 
739 
740 void Heap::UpdateSurvivalRateTrend(int start_new_space_size) {
741  double survival_rate =
742  (static_cast<double>(young_survivors_after_last_gc_) * 100) /
743  start_new_space_size;
744 
745  if (survival_rate > kYoungSurvivalRateHighThreshold) {
746  high_survival_rate_period_length_++;
747  } else {
748  high_survival_rate_period_length_ = 0;
749  }
750 
751  if (survival_rate < kYoungSurvivalRateLowThreshold) {
752  low_survival_rate_period_length_++;
753  } else {
754  low_survival_rate_period_length_ = 0;
755  }
756 
757  double survival_rate_diff = survival_rate_ - survival_rate;
758 
759  if (survival_rate_diff > kYoungSurvivalRateAllowedDeviation) {
760  set_survival_rate_trend(DECREASING);
761  } else if (survival_rate_diff < -kYoungSurvivalRateAllowedDeviation) {
762  set_survival_rate_trend(INCREASING);
763  } else {
764  set_survival_rate_trend(STABLE);
765  }
766 
767  survival_rate_ = survival_rate;
768 }
769 
770 bool Heap::PerformGarbageCollection(GarbageCollector collector,
771  GCTracer* tracer) {
772  bool next_gc_likely_to_collect_more = false;
773 
774  if (collector != SCAVENGER) {
775  PROFILE(isolate_, CodeMovingGCEvent());
776  }
777 
778  if (FLAG_verify_heap) {
779  VerifySymbolTable();
780  }
781  if (collector == MARK_COMPACTOR && global_gc_prologue_callback_) {
782  ASSERT(!allocation_allowed_);
783  GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
784  global_gc_prologue_callback_();
785  }
786 
787  GCType gc_type =
788  collector == MARK_COMPACTOR ? kGCTypeMarkSweepCompact : kGCTypeScavenge;
789 
790  for (int i = 0; i < gc_prologue_callbacks_.length(); ++i) {
791  if (gc_type & gc_prologue_callbacks_[i].gc_type) {
792  gc_prologue_callbacks_[i].callback(gc_type, kNoGCCallbackFlags);
793  }
794  }
795 
796  EnsureFromSpaceIsCommitted();
797 
798  int start_new_space_size = Heap::new_space()->SizeAsInt();
799 
800  if (IsHighSurvivalRate()) {
801  // We speed up the incremental marker if it is running so that it
802  // does not fall behind the rate of promotion, which would cause a
803  // constantly growing old space.
804  incremental_marking()->NotifyOfHighPromotionRate();
805  }
806 
807  if (collector == MARK_COMPACTOR) {
808  // Perform mark-sweep with optional compaction.
809  MarkCompact(tracer);
810  sweep_generation_++;
811  bool high_survival_rate_during_scavenges = IsHighSurvivalRate() &&
812  IsStableOrIncreasingSurvivalTrend();
813 
814  UpdateSurvivalRateTrend(start_new_space_size);
815 
816  size_of_old_gen_at_last_old_space_gc_ = PromotedSpaceSizeOfObjects();
817 
818  if (high_survival_rate_during_scavenges &&
819  IsStableOrIncreasingSurvivalTrend()) {
820  // Stable high survival rates of young objects both during partial and
821  // full collection indicate that mutator is either building or modifying
822  // a structure with a long lifetime.
823  // In this case we aggressively raise old generation memory limits to
824  // postpone subsequent mark-sweep collection and thus trade memory
825  // space for the mutation speed.
826  old_gen_limit_factor_ = 2;
827  } else {
828  old_gen_limit_factor_ = 1;
829  }
830 
831  old_gen_promotion_limit_ =
832  OldGenPromotionLimit(size_of_old_gen_at_last_old_space_gc_);
833  old_gen_allocation_limit_ =
834  OldGenAllocationLimit(size_of_old_gen_at_last_old_space_gc_);
835 
836  old_gen_exhausted_ = false;
837  } else {
838  tracer_ = tracer;
839  Scavenge();
840  tracer_ = NULL;
841 
842  UpdateSurvivalRateTrend(start_new_space_size);
843  }
844 
845  if (!new_space_high_promotion_mode_active_ &&
846  new_space_.Capacity() == new_space_.MaximumCapacity() &&
847  IsStableOrIncreasingSurvivalTrend() &&
848  IsHighSurvivalRate()) {
849  // Stable high survival rates even though young generation is at
850  // maximum capacity indicate that most objects will be promoted.
851  // To decrease scavenger pauses and final mark-sweep pauses, we
852  // have to limit maximal capacity of the young generation.
853  new_space_high_promotion_mode_active_ = true;
854  if (FLAG_trace_gc) {
855  PrintF("Limited new space size due to high promotion rate: %d MB\n",
856  new_space_.InitialCapacity() / MB);
857  }
858  } else if (new_space_high_promotion_mode_active_ &&
859  IsStableOrDecreasingSurvivalTrend() &&
860  IsLowSurvivalRate()) {
861  // Decreasing low survival rates might indicate that the above high
862  // promotion mode is over and we should allow the young generation
863  // to grow again.
864  new_space_high_promotion_mode_active_ = false;
865  if (FLAG_trace_gc) {
866  PrintF("Unlimited new space size due to low promotion rate: %d MB\n",
867  new_space_.MaximumCapacity() / MB);
868  }
869  }
870 
871  if (new_space_high_promotion_mode_active_ &&
872  new_space_.Capacity() > new_space_.InitialCapacity()) {
873  new_space_.Shrink();
874  }
875 
876  isolate_->counters()->objs_since_last_young()->Set(0);
877 
878  gc_post_processing_depth_++;
879  { DisableAssertNoAllocation allow_allocation;
880  GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
881  next_gc_likely_to_collect_more =
882  isolate_->global_handles()->PostGarbageCollectionProcessing(collector);
883  }
884  gc_post_processing_depth_--;
885 
886  // Update relocatables.
887  Relocatable::PostGarbageCollectionProcessing();
888 
889  if (collector == MARK_COMPACTOR) {
890  // Register the amount of external allocated memory.
891  amount_of_external_allocated_memory_at_last_global_gc_ =
892  amount_of_external_allocated_memory_;
893  }
894 
895  GCCallbackFlags callback_flags = kNoGCCallbackFlags;
896  for (int i = 0; i < gc_epilogue_callbacks_.length(); ++i) {
897  if (gc_type & gc_epilogue_callbacks_[i].gc_type) {
898  gc_epilogue_callbacks_[i].callback(gc_type, callback_flags);
899  }
900  }
901 
902  if (collector == MARK_COMPACTOR && global_gc_epilogue_callback_) {
903  ASSERT(!allocation_allowed_);
904  GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
905  global_gc_epilogue_callback_();
906  }
907  if (FLAG_verify_heap) {
908  VerifySymbolTable();
909  }
910 
911  return next_gc_likely_to_collect_more;
912 }
913 
914 
915 void Heap::MarkCompact(GCTracer* tracer) {
916  gc_state_ = MARK_COMPACT;
917  LOG(isolate_, ResourceEvent("markcompact", "begin"));
918 
919  mark_compact_collector_.Prepare(tracer);
920 
921  ms_count_++;
922  tracer->set_full_gc_count(ms_count_);
923 
924  MarkCompactPrologue();
925 
926  mark_compact_collector_.CollectGarbage();
927 
928  LOG(isolate_, ResourceEvent("markcompact", "end"));
929 
930  gc_state_ = NOT_IN_GC;
931 
932  isolate_->counters()->objs_since_last_full()->Set(0);
933 
934  contexts_disposed_ = 0;
935 
936  isolate_->set_context_exit_happened(false);
937 }
938 
939 
940 void Heap::MarkCompactPrologue() {
941  // At any old GC clear the keyed lookup cache to enable collection of unused
942  // maps.
943  isolate_->keyed_lookup_cache()->Clear();
944  isolate_->context_slot_cache()->Clear();
945  isolate_->descriptor_lookup_cache()->Clear();
946  StringSplitCache::Clear(string_split_cache());
947 
948  isolate_->compilation_cache()->MarkCompactPrologue();
949 
950  CompletelyClearInstanceofCache();
951 
952  FlushNumberStringCache();
953  if (FLAG_cleanup_code_caches_at_gc) {
954  polymorphic_code_cache()->set_cache(undefined_value());
955  }
956 
957  ClearNormalizedMapCaches();
958 }
959 
960 
961 Object* Heap::FindCodeObject(Address a) {
962  return isolate()->inner_pointer_to_code_cache()->
963  GcSafeFindCodeForInnerPointer(a);
964 }
965 
966 
967 // Helper class for copying HeapObjects
968 class ScavengeVisitor: public ObjectVisitor {
969  public:
970  explicit ScavengeVisitor(Heap* heap) : heap_(heap) {}
971 
972  void VisitPointer(Object** p) { ScavengePointer(p); }
973 
974  void VisitPointers(Object** start, Object** end) {
975  // Copy all HeapObject pointers in [start, end)
976  for (Object** p = start; p < end; p++) ScavengePointer(p);
977  }
978 
979  private:
980  void ScavengePointer(Object** p) {
981  Object* object = *p;
982  if (!heap_->InNewSpace(object)) return;
983  Heap::ScavengeObject(reinterpret_cast<HeapObject**>(p),
984  reinterpret_cast<HeapObject*>(object));
985  }
986 
987  Heap* heap_;
988 };
989 
990 
991 #ifdef DEBUG
992 // Visitor class to verify pointers in code or data space do not point into
993 // new space.
994 class VerifyNonPointerSpacePointersVisitor: public ObjectVisitor {
995  public:
996  void VisitPointers(Object** start, Object**end) {
997  for (Object** current = start; current < end; current++) {
998  if ((*current)->IsHeapObject()) {
999  ASSERT(!HEAP->InNewSpace(HeapObject::cast(*current)));
1000  }
1001  }
1002  }
1003 };
1004 
1005 
1006 static void VerifyNonPointerSpacePointers() {
1007  // Verify that there are no pointers to new space in spaces where we
1008  // do not expect them.
1009  VerifyNonPointerSpacePointersVisitor v;
1010  HeapObjectIterator code_it(HEAP->code_space());
1011  for (HeapObject* object = code_it.Next();
1012  object != NULL; object = code_it.Next())
1013  object->Iterate(&v);
1014 
1015  // The old data space was normally swept conservatively so that the iterator
1016  // doesn't work, so we normally skip the next bit.
1017  if (!HEAP->old_data_space()->was_swept_conservatively()) {
1018  HeapObjectIterator data_it(HEAP->old_data_space());
1019  for (HeapObject* object = data_it.Next();
1020  object != NULL; object = data_it.Next())
1021  object->Iterate(&v);
1022  }
1023 }
1024 #endif
1025 
1026 
1027 void Heap::CheckNewSpaceExpansionCriteria() {
1028  if (new_space_.Capacity() < new_space_.MaximumCapacity() &&
1029  survived_since_last_expansion_ > new_space_.Capacity() &&
1030  !new_space_high_promotion_mode_active_) {
1031  // Grow the size of new space if there is room to grow, enough data
1032  // has survived scavenge since the last expansion and we are not in
1033  // high promotion mode.
1034  new_space_.Grow();
1035  survived_since_last_expansion_ = 0;
1036  }
1037 }
1038 
1039 
1040 static bool IsUnscavengedHeapObject(Heap* heap, Object** p) {
1041  return heap->InNewSpace(*p) &&
1042  !HeapObject::cast(*p)->map_word().IsForwardingAddress();
1043 }
1044 
1045 
1046 void Heap::ScavengeStoreBufferCallback(
1047  Heap* heap,
1048  MemoryChunk* page,
1049  StoreBufferEvent event) {
1050  heap->store_buffer_rebuilder_.Callback(page, event);
1051 }
1052 
1053 
1054 void StoreBufferRebuilder::Callback(MemoryChunk* page, StoreBufferEvent event) {
1055  if (event == kStoreBufferStartScanningPagesEvent) {
1056  start_of_current_page_ = NULL;
1057  current_page_ = NULL;
1058  } else if (event == kStoreBufferScanningPageEvent) {
1059  if (current_page_ != NULL) {
1060  // If this page already overflowed the store buffer during this iteration.
1061  if (current_page_->scan_on_scavenge()) {
1062  // Then we should wipe out the entries that have been added for it.
1063  store_buffer_->SetTop(start_of_current_page_);
1064  } else if (store_buffer_->Top() - start_of_current_page_ >=
1065  (store_buffer_->Limit() - store_buffer_->Top()) >> 2) {
1066  // Did we find too many pointers in the previous page? The heuristic is
1067  // that no page can take more than 1/5 the remaining slots in the store
1068  // buffer.
1069  current_page_->set_scan_on_scavenge(true);
1070  store_buffer_->SetTop(start_of_current_page_);
1071  } else {
1072  // In this case the page we scanned took a reasonable number of slots in
1073  // the store buffer. It has now been rehabilitated and is no longer
1074  // marked scan_on_scavenge.
1075  ASSERT(!current_page_->scan_on_scavenge());
1076  }
1077  }
1078  start_of_current_page_ = store_buffer_->Top();
1079  current_page_ = page;
1080  } else if (event == kStoreBufferFullEvent) {
1081  // The current page overflowed the store buffer again. Wipe out its entries
1082  // in the store buffer and mark it scan-on-scavenge again. This may happen
1083  // several times while scanning.
1084  if (current_page_ == NULL) {
1085  // Store Buffer overflowed while scanning promoted objects. These are not
1086  // in any particular page, though they are likely to be clustered by the
1087  // allocation routines.
1089  } else {
1090  // Store Buffer overflowed while scanning a particular old space page for
1091  // pointers to new space.
1092  ASSERT(current_page_ == page);
1093  ASSERT(page != NULL);
1094  current_page_->set_scan_on_scavenge(true);
1095  ASSERT(start_of_current_page_ != store_buffer_->Top());
1096  store_buffer_->SetTop(start_of_current_page_);
1097  }
1098  } else {
1099  UNREACHABLE();
1100  }
1101 }
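// Illustrative arithmetic for the 1/5 heuristic above: let p be the number of
// store buffer slots consumed by the page just scanned and r the slots still
// free after it. The check p >= r / 4 is equivalent to p >= (p + r) / 5, i.e.
// the page used at least a fifth of the slots that were available when its
// scan began, so it is marked scan-on-scavenge rather than keeping its
// entries.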
1102 
1103 
1104 void PromotionQueue::Initialize() {
1105  // Assumes that a NewSpacePage exactly fits a number of promotion queue
1106  // entries (where each is a pair of intptr_t). This allows us to simplify
1107  // the test for when to switch pages.
1108  ASSERT((Page::kPageSize - MemoryChunk::kBodyOffset) % (2 * kPointerSize)
1109  == 0);
1110  limit_ = reinterpret_cast<intptr_t*>(heap_->new_space()->ToSpaceStart());
1111  front_ = rear_ =
1112  reinterpret_cast<intptr_t*>(heap_->new_space()->ToSpaceEnd());
1113  emergency_stack_ = NULL;
1114  guard_ = false;
1115 }
1116 
1117 
1118 void PromotionQueue::RelocateQueueHead() {
1119  ASSERT(emergency_stack_ == NULL);
1120 
1121  Page* p = Page::FromAllocationTop(reinterpret_cast<Address>(rear_));
1122  intptr_t* head_start = rear_;
1123  intptr_t* head_end =
1124  Min(front_, reinterpret_cast<intptr_t*>(p->area_end()));
1125 
1126  int entries_count =
1127  static_cast<int>(head_end - head_start) / kEntrySizeInWords;
1128 
1129  emergency_stack_ = new List<Entry>(2 * entries_count);
1130 
1131  while (head_start != head_end) {
1132  int size = static_cast<int>(*(head_start++));
1133  HeapObject* obj = reinterpret_cast<HeapObject*>(*(head_start++));
1134  emergency_stack_->Add(Entry(obj, size));
1135  }
1136  rear_ = head_end;
1137 }
1138 
1139 
1140 class ScavengeWeakObjectRetainer : public WeakObjectRetainer {
1141  public:
1142  explicit ScavengeWeakObjectRetainer(Heap* heap) : heap_(heap) { }
1143 
1144  virtual Object* RetainAs(Object* object) {
1145  if (!heap_->InFromSpace(object)) {
1146  return object;
1147  }
1148 
1149  MapWord map_word = HeapObject::cast(object)->map_word();
1150  if (map_word.IsForwardingAddress()) {
1151  return map_word.ToForwardingAddress();
1152  }
1153  return NULL;
1154  }
1155 
1156  private:
1157  Heap* heap_;
1158 };
1159 
1160 
1161 void Heap::Scavenge() {
1162 #ifdef DEBUG
1163  if (FLAG_verify_heap) VerifyNonPointerSpacePointers();
1164 #endif
1165 
1166  gc_state_ = SCAVENGE;
1167 
1168  // Implements Cheney's copying algorithm
1169  LOG(isolate_, ResourceEvent("scavenge", "begin"));
1170 
1171  // Clear descriptor cache.
1172  isolate_->descriptor_lookup_cache()->Clear();
1173 
1174  // Used for updating survived_since_last_expansion_ at function end.
1175  intptr_t survived_watermark = PromotedSpaceSizeOfObjects();
1176 
1177  CheckNewSpaceExpansionCriteria();
1178 
1179  SelectScavengingVisitorsTable();
1180 
1181  incremental_marking()->PrepareForScavenge();
1182 
1183  AdvanceSweepers(static_cast<int>(new_space_.Size()));
1184 
1185  // Flip the semispaces. After flipping, to space is empty, from space has
1186  // live objects.
1187  new_space_.Flip();
1188  new_space_.ResetAllocationInfo();
1189 
1190  // We need to sweep newly copied objects which can be either in the
1191  // to space or promoted to the old generation. For to-space
1192  // objects, we treat the bottom of the to space as a queue. Newly
1193  // copied and unswept objects lie between a 'front' mark and the
1194  // allocation pointer.
1195  //
1196  // Promoted objects can go into various old-generation spaces, and
1197  // can be allocated internally in the spaces (from the free list).
1198  // We treat the top of the to space as a queue of addresses of
1199  // promoted objects. The addresses of newly promoted and unswept
1200  // objects lie between a 'front' mark and a 'rear' mark that is
1201  // updated as a side effect of promoting an object.
1202  //
1203  // There is guaranteed to be enough room at the top of the to space
1204  // for the addresses of promoted objects: every object promoted
1205  // frees up its size in bytes from the top of the new space, and
1206  // objects are at least one pointer in size.
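 // Rough picture (illustrative): copied objects fill the to space upwards
 // from ToSpaceStart, with new_space_front trailing the allocation top as
 // DoScavenge processes them, while the promotion queue pushes the addresses
 // of promoted objects downwards from ToSpaceEnd.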
1207  Address new_space_front = new_space_.ToSpaceStart();
1208  promotion_queue_.Initialize();
1209 
1210 #ifdef DEBUG
1211  store_buffer()->Clean();
1212 #endif
1213 
1214  ScavengeVisitor scavenge_visitor(this);
1215  // Copy roots.
1216  IterateRoots(&scavenge_visitor, VISIT_ALL_IN_SCAVENGE);
1217 
1218  // Copy objects reachable from the old generation.
1219  {
1220  StoreBufferRebuildScope scope(this,
1221  store_buffer(),
1222  &ScavengeStoreBufferCallback);
1223  store_buffer()->IteratePointersToNewSpace(&ScavengeObject);
1224  }
1225 
1226  // Copy objects reachable from cells by scavenging cell values directly.
1227  HeapObjectIterator cell_iterator(cell_space_);
1228  for (HeapObject* cell = cell_iterator.Next();
1229  cell != NULL; cell = cell_iterator.Next()) {
1230  if (cell->IsJSGlobalPropertyCell()) {
1231  Address value_address =
1232  reinterpret_cast<Address>(cell) +
1233  (JSGlobalPropertyCell::kValueOffset - kHeapObjectTag);
1234  scavenge_visitor.VisitPointer(reinterpret_cast<Object**>(value_address));
1235  }
1236  }
1237 
1238  // Scavenge object reachable from the global contexts list directly.
1239  scavenge_visitor.VisitPointer(BitCast<Object**>(&global_contexts_list_));
1240 
1241  new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
1242  isolate_->global_handles()->IdentifyNewSpaceWeakIndependentHandles(
1243  &IsUnscavengedHeapObject);
1244  isolate_->global_handles()->IterateNewSpaceWeakIndependentRoots(
1245  &scavenge_visitor);
1246  new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
1247 
1248  UpdateNewSpaceReferencesInExternalStringTable(
1249  &UpdateNewSpaceReferenceInExternalStringTableEntry);
1250 
1251  promotion_queue_.Destroy();
1252 
1254  if (!FLAG_watch_ic_patching) {
1255  isolate_->runtime_profiler()->UpdateSamplesAfterScavenge();
1256  }
1258 
1259  ScavengeWeakObjectRetainer weak_object_retainer(this);
1260  ProcessWeakReferences(&weak_object_retainer);
1261 
1262  ASSERT(new_space_front == new_space_.top());
1263 
1264  // Set age mark.
1265  new_space_.set_age_mark(new_space_.top());
1266 
1267  new_space_.LowerInlineAllocationLimit(
1268  new_space_.inline_allocation_limit_step());
1269 
1270  // Update how much has survived scavenge.
1271  IncrementYoungSurvivorsCounter(static_cast<int>(
1272  (PromotedSpaceSizeOfObjects() - survived_watermark) + new_space_.Size()));
1273 
1274  LOG(isolate_, ResourceEvent("scavenge", "end"));
1275 
1276  gc_state_ = NOT_IN_GC;
1277 
1278  scavenges_since_last_idle_round_++;
1279 }
1280 
1281 
1282 String* Heap::UpdateNewSpaceReferenceInExternalStringTableEntry(Heap* heap,
1283  Object** p) {
1284  MapWord first_word = HeapObject::cast(*p)->map_word();
1285 
1286  if (!first_word.IsForwardingAddress()) {
1287  // Unreachable external string can be finalized.
1288  heap->FinalizeExternalString(String::cast(*p));
1289  return NULL;
1290  }
1291 
1292  // String is still reachable.
1293  return String::cast(first_word.ToForwardingAddress());
1294 }
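// Illustrative walk-through of the entry above: while scavenging, a copied
// string's map word is overwritten with a forwarding address, so a table
// entry that still has a plain map was not copied and its external resource
// can be finalized, whereas an entry whose map word forwards is rewritten to
// point at the string's new location.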
1295 
1296 
1297 void Heap::UpdateNewSpaceReferencesInExternalStringTable(
1298  ExternalStringTableUpdaterCallback updater_func) {
1299  if (FLAG_verify_heap) {
1300  external_string_table_.Verify();
1301  }
1302 
1303  if (external_string_table_.new_space_strings_.is_empty()) return;
1304 
1305  Object** start = &external_string_table_.new_space_strings_[0];
1306  Object** end = start + external_string_table_.new_space_strings_.length();
1307  Object** last = start;
1308 
1309  for (Object** p = start; p < end; ++p) {
1310  ASSERT(InFromSpace(*p));
1311  String* target = updater_func(this, p);
1312 
1313  if (target == NULL) continue;
1314 
1315  ASSERT(target->IsExternalString());
1316 
1317  if (InNewSpace(target)) {
1318  // String is still in new space. Update the table entry.
1319  *last = target;
1320  ++last;
1321  } else {
1322  // String got promoted. Move it to the old string list.
1323  external_string_table_.AddOldString(target);
1324  }
1325  }
1326 
1327  ASSERT(last <= end);
1328  external_string_table_.ShrinkNewStrings(static_cast<int>(last - start));
1329 }
1330 
1331 
1332 void Heap::UpdateReferencesInExternalStringTable(
1333  ExternalStringTableUpdaterCallback updater_func) {
1334 
1335  // Update old space string references.
1336  if (external_string_table_.old_space_strings_.length() > 0) {
1337  Object** start = &external_string_table_.old_space_strings_[0];
1338  Object** end = start + external_string_table_.old_space_strings_.length();
1339  for (Object** p = start; p < end; ++p) *p = updater_func(this, p);
1340  }
1341 
1342  UpdateNewSpaceReferencesInExternalStringTable(updater_func);
1343 }
1344 
1345 
1346 static Object* ProcessFunctionWeakReferences(Heap* heap,
1347  Object* function,
1348  WeakObjectRetainer* retainer,
1349  bool record_slots) {
1350  Object* undefined = heap->undefined_value();
1351  Object* head = undefined;
1352  JSFunction* tail = NULL;
1353  Object* candidate = function;
1354  while (candidate != undefined) {
1355  // Check whether to keep the candidate in the list.
1356  JSFunction* candidate_function = reinterpret_cast<JSFunction*>(candidate);
1357  Object* retain = retainer->RetainAs(candidate);
1358  if (retain != NULL) {
1359  if (head == undefined) {
1360  // First element in the list.
1361  head = retain;
1362  } else {
1363  // Subsequent elements in the list.
1364  ASSERT(tail != NULL);
1365  tail->set_next_function_link(retain);
1366  if (record_slots) {
1367  Object** next_function =
1368  HeapObject::RawField(tail, JSFunction::kNextFunctionLinkOffset);
1369  heap->mark_compact_collector()->RecordSlot(
1370  next_function, next_function, retain);
1371  }
1372  }
1373  // Retained function is new tail.
1374  candidate_function = reinterpret_cast<JSFunction*>(retain);
1375  tail = candidate_function;
1376 
1377  ASSERT(retain->IsUndefined() || retain->IsJSFunction());
1378 
1379  if (retain == undefined) break;
1380  }
1381 
1382  // Move to next element in the list.
1383  candidate = candidate_function->next_function_link();
1384  }
1385 
1386  // Terminate the list if there is one or more elements.
1387  if (tail != NULL) {
1388  tail->set_next_function_link(undefined);
1389  }
1390 
1391  return head;
1392 }
1393 
1394 
1395 void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) {
1396  Object* undefined = undefined_value();
1397  Object* head = undefined;
1398  Context* tail = NULL;
1399  Object* candidate = global_contexts_list_;
1400 
1401  // We don't record weak slots during marking or scavenges.
1402  // Instead we do it once when we complete mark-compact cycle.
1403  // Note that write barrier has no effect if we are already in the middle of
1404  // compacting mark-sweep cycle and we have to record slots manually.
1405  bool record_slots =
1406  gc_state() == MARK_COMPACT &&
1407  mark_compact_collector()->is_compacting();
1408 
1409  while (candidate != undefined) {
1410  // Check whether to keep the candidate in the list.
1411  Context* candidate_context = reinterpret_cast<Context*>(candidate);
1412  Object* retain = retainer->RetainAs(candidate);
1413  if (retain != NULL) {
1414  if (head == undefined) {
1415  // First element in the list.
1416  head = retain;
1417  } else {
1418  // Subsequent elements in the list.
1419  ASSERT(tail != NULL);
1420  tail->set_unchecked(this,
1421  Context::NEXT_CONTEXT_LINK,
1422  retain,
1423  UPDATE_WRITE_BARRIER);
1424 
1425  if (record_slots) {
1426  Object** next_context =
1429  mark_compact_collector()->RecordSlot(
1430  next_context, next_context, retain);
1431  }
1432  }
1433  // Retained context is new tail.
1434  candidate_context = reinterpret_cast<Context*>(retain);
1435  tail = candidate_context;
1436 
1437  if (retain == undefined) break;
1438 
1439  // Process the weak list of optimized functions for the context.
1440  Object* function_list_head =
1441  ProcessFunctionWeakReferences(
1442  this,
1443  candidate_context->get(Context::OPTIMIZED_FUNCTIONS_LIST),
1444  retainer,
1445  record_slots);
1446  candidate_context->set_unchecked(this,
1447  Context::OPTIMIZED_FUNCTIONS_LIST,
1448  function_list_head,
1449  UPDATE_WRITE_BARRIER);
1450  if (record_slots) {
1451  Object** optimized_functions =
1454  mark_compact_collector()->RecordSlot(
1455  optimized_functions, optimized_functions, function_list_head);
1456  }
1457  }
1458 
1459  // Move to next element in the list.
1460  candidate = candidate_context->get(Context::NEXT_CONTEXT_LINK);
1461  }
1462 
1463  // Terminate the list if there is one or more elements.
1464  if (tail != NULL) {
1465  tail->set_unchecked(this,
1466  Context::NEXT_CONTEXT_LINK,
1467  Heap::undefined_value(),
1468  UPDATE_WRITE_BARRIER);
1469  }
1470 
1471  // Update the head of the list of contexts.
1472  global_contexts_list_ = head;
1473 }
1474 
1475 
1476 void Heap::VisitExternalResources(v8::ExternalResourceVisitor* visitor) {
1477  AssertNoAllocation no_allocation;
1478 
1479  class VisitorAdapter : public ObjectVisitor {
1480  public:
1481  explicit VisitorAdapter(v8::ExternalResourceVisitor* visitor)
1482  : visitor_(visitor) {}
1483  virtual void VisitPointers(Object** start, Object** end) {
1484  for (Object** p = start; p < end; p++) {
1485  if ((*p)->IsExternalString()) {
1486  visitor_->VisitExternalString(Utils::ToLocal(
1487  Handle<String>(String::cast(*p))));
1488  }
1489  }
1490  }
1491  private:
1492  v8::ExternalResourceVisitor* visitor_;
1493  } visitor_adapter(visitor);
1494  external_string_table_.Iterate(&visitor_adapter);
1495 }
1496 
1497 
1498 class NewSpaceScavenger : public StaticNewSpaceVisitor<NewSpaceScavenger> {
1499  public:
1500  static inline void VisitPointer(Heap* heap, Object** p) {
1501  Object* object = *p;
1502  if (!heap->InNewSpace(object)) return;
1503  Heap::ScavengeObject(reinterpret_cast<HeapObject**>(p),
1504  reinterpret_cast<HeapObject*>(object));
1505  }
1506 };
1507 
1508 
1509 Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
1510  Address new_space_front) {
1511  do {
1512  SemiSpace::AssertValidRange(new_space_front, new_space_.top());
1513  // The addresses new_space_front and new_space_.top() define a
1514  // queue of unprocessed copied objects. Process them until the
1515  // queue is empty.
1516  while (new_space_front != new_space_.top()) {
1517  if (!NewSpacePage::IsAtEnd(new_space_front)) {
1518  HeapObject* object = HeapObject::FromAddress(new_space_front);
1519  new_space_front +=
1520  NewSpaceScavenger::IterateBody(object->map(), object);
1521  } else {
1522  new_space_front =
1523  NewSpacePage::FromLimit(new_space_front)->next_page()->area_start();
1524  }
1525  }
1526 
1527  // Promote and process all the to-be-promoted objects.
1528  {
1529  StoreBufferRebuildScope scope(this,
1530  store_buffer(),
1531  &ScavengeStoreBufferCallback);
1532  while (!promotion_queue()->is_empty()) {
1533  HeapObject* target;
1534  int size;
1535  promotion_queue()->remove(&target, &size);
1536 
1537  // Promoted object might be already partially visited
1538  // during old space pointer iteration. Thus we search specifically
1539  // for pointers to from semispace instead of looking for pointers
1540  // to new space.
1541  ASSERT(!target->IsMap());
1542  IterateAndMarkPointersToFromSpace(target->address(),
1543  target->address() + size,
1544  &ScavengeObject);
1545  }
1546  }
1547 
1548  // Take another spin if there are now unswept objects in new space
1549  // (there are currently no more unswept promoted objects).
1550  } while (new_space_front != new_space_.top());
1551 
1552  return new_space_front;
1553 }
1554 
1555 
1557 
1558 
1559 INLINE(static HeapObject* EnsureDoubleAligned(Heap* heap,
1560  HeapObject* object,
1561  int size));
1562 
1563 static HeapObject* EnsureDoubleAligned(Heap* heap,
1564  HeapObject* object,
1565  int size) {
1566  if ((OffsetFrom(object->address()) & kDoubleAlignmentMask) != 0) {
1567  heap->CreateFillerObjectAt(object->address(), kPointerSize);
1568  return HeapObject::FromAddress(object->address() + kPointerSize);
1569  } else {
1570  heap->CreateFillerObjectAt(object->address() + size - kPointerSize,
1571  kPointerSize);
1572  return object;
1573  }
1574 }
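// Illustrative example, assuming 32-bit pointers (kPointerSize == 4,
// kDoubleAlignmentMask == 7): an allocation handed in at address 0x1004 is
// misaligned for doubles, so a one-word filler is written at 0x1004 and the
// object is rebased to 0x1008; if the address is already 8-byte aligned, the
// extra reserved word becomes a filler at the end of the allocation instead.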
1575 
1576 
1577 enum LoggingAndProfiling {
1578  LOGGING_AND_PROFILING_ENABLED,
1579  LOGGING_AND_PROFILING_DISABLED
1580 };
1581 
1582 
1583 enum MarksHandling { TRANSFER_MARKS, IGNORE_MARKS };
1584 
1585 
1586 template<MarksHandling marks_handling,
1587  LoggingAndProfiling logging_and_profiling_mode>
1588 class ScavengingVisitor : public StaticVisitorBase {
1589  public:
1590  static void Initialize() {
1591  table_.Register(kVisitSeqAsciiString, &EvacuateSeqAsciiString);
1592  table_.Register(kVisitSeqTwoByteString, &EvacuateSeqTwoByteString);
1593  table_.Register(kVisitShortcutCandidate, &EvacuateShortcutCandidate);
1594  table_.Register(kVisitByteArray, &EvacuateByteArray);
1595  table_.Register(kVisitFixedArray, &EvacuateFixedArray);
1596  table_.Register(kVisitFixedDoubleArray, &EvacuateFixedDoubleArray);
1597 
1598  table_.Register(kVisitGlobalContext,
1599  &ObjectEvacuationStrategy<POINTER_OBJECT>::
1600  template VisitSpecialized<Context::kSize>);
1601 
1602  table_.Register(kVisitConsString,
1603  &ObjectEvacuationStrategy<POINTER_OBJECT>::
1604  template VisitSpecialized<ConsString::kSize>);
1605 
1606  table_.Register(kVisitSlicedString,
1607  &ObjectEvacuationStrategy<POINTER_OBJECT>::
1608  template VisitSpecialized<SlicedString::kSize>);
1609 
1610  table_.Register(kVisitSharedFunctionInfo,
1611  &ObjectEvacuationStrategy<POINTER_OBJECT>::
1612  template VisitSpecialized<SharedFunctionInfo::kSize>);
1613 
1614  table_.Register(kVisitJSWeakMap,
1615  &ObjectEvacuationStrategy<POINTER_OBJECT>::
1616  Visit);
1617 
1618  table_.Register(kVisitJSRegExp,
1619  &ObjectEvacuationStrategy<POINTER_OBJECT>::
1620  Visit);
1621 
1622  if (marks_handling == IGNORE_MARKS) {
1623  table_.Register(kVisitJSFunction,
1624  &ObjectEvacuationStrategy<POINTER_OBJECT>::
1625  template VisitSpecialized<JSFunction::kSize>);
1626  } else {
1627  table_.Register(kVisitJSFunction, &EvacuateJSFunction);
1628  }
1629 
1630  table_.RegisterSpecializations<ObjectEvacuationStrategy<DATA_OBJECT>,
1631  kVisitDataObject,
1632  kVisitDataObjectGeneric>();
1633 
1634  table_.RegisterSpecializations<ObjectEvacuationStrategy<POINTER_OBJECT>,
1635  kVisitJSObject,
1636  kVisitJSObjectGeneric>();
1637 
1638  table_.RegisterSpecializations<ObjectEvacuationStrategy<POINTER_OBJECT>,
1639  kVisitStruct,
1640  kVisitStructGeneric>();
1641  }
1642 
1643  static VisitorDispatchTable<ScavengingCallback>* GetTable() {
1644  return &table_;
1645  }
1646 
1647  private:
1648  enum ObjectContents { DATA_OBJECT, POINTER_OBJECT };
1649  enum SizeRestriction { SMALL, UNKNOWN_SIZE };
1650 
1651  static void RecordCopiedObject(Heap* heap, HeapObject* obj) {
1652  bool should_record = false;
1653 #ifdef DEBUG
1654  should_record = FLAG_heap_stats;
1655 #endif
1656  should_record = should_record || FLAG_log_gc;
1657  if (should_record) {
1658  if (heap->new_space()->Contains(obj)) {
1659  heap->new_space()->RecordAllocation(obj);
1660  } else {
1661  heap->new_space()->RecordPromotion(obj);
1662  }
1663  }
1664  }
1665 
1666  // Helper function used by CopyObject to copy a source object to an
1667  // allocated target object and update the forwarding pointer in the source
1668  // object. Returns the target object.
1669  INLINE(static void MigrateObject(Heap* heap,
1670  HeapObject* source,
1671  HeapObject* target,
1672  int size)) {
1673  // Copy the content of source to target.
1674  heap->CopyBlock(target->address(), source->address(), size);
1675 
1676  // Set the forwarding address.
1677  source->set_map_word(MapWord::FromForwardingAddress(target));
1678 
1679  if (logging_and_profiling_mode == LOGGING_AND_PROFILING_ENABLED) {
1680  // Update NewSpace stats if necessary.
1681  RecordCopiedObject(heap, target);
1682  HEAP_PROFILE(heap, ObjectMoveEvent(source->address(), target->address()));
1683  Isolate* isolate = heap->isolate();
1684  if (isolate->logger()->is_logging() ||
1685  CpuProfiler::is_profiling(isolate)) {
1686  if (target->IsSharedFunctionInfo()) {
1687  PROFILE(isolate, SharedFunctionInfoMoveEvent(
1688  source->address(), target->address()));
1689  }
1690  }
1691  }
1692 
1693  if (marks_handling == TRANSFER_MARKS) {
1694  if (Marking::TransferColor(source, target)) {
1695  MemoryChunk::IncrementLiveBytesFromGC(target->address(), size);
1696  }
1697  }
1698  }
1699 
1700 
1701  template<ObjectContents object_contents,
1702  SizeRestriction size_restriction,
1703  int alignment>
1704  static inline void EvacuateObject(Map* map,
1705  HeapObject** slot,
1706  HeapObject* object,
1707  int object_size) {
1708  SLOW_ASSERT((size_restriction != SMALL) ||
1709  (object_size <= Page::kMaxNonCodeHeapObjectSize));
1710  SLOW_ASSERT(object->Size() == object_size);
1711 
1712  int allocation_size = object_size;
1713  if (alignment != kObjectAlignment) {
1714  ASSERT(alignment == kDoubleAlignment);
1715  allocation_size += kPointerSize;
1716  }
1717 
1718  Heap* heap = map->GetHeap();
1719  if (heap->ShouldBePromoted(object->address(), object_size)) {
1720  MaybeObject* maybe_result;
1721 
1722  if ((size_restriction != SMALL) &&
1723  (allocation_size > Page::kMaxNonCodeHeapObjectSize)) {
1724  maybe_result = heap->lo_space()->AllocateRaw(allocation_size,
1725  NOT_EXECUTABLE);
1726  } else {
1727  if (object_contents == DATA_OBJECT) {
1728  maybe_result = heap->old_data_space()->AllocateRaw(allocation_size);
1729  } else {
1730  maybe_result =
1731  heap->old_pointer_space()->AllocateRaw(allocation_size);
1732  }
1733  }
1734 
1735  Object* result = NULL; // Initialization to please compiler.
1736  if (maybe_result->ToObject(&result)) {
1737  HeapObject* target = HeapObject::cast(result);
1738 
1739  if (alignment != kObjectAlignment) {
1740  target = EnsureDoubleAligned(heap, target, allocation_size);
1741  }
1742 
1743  // Order is important: slot might be inside of the target if target
1744  // was allocated over a dead object and slot comes from the store
1745  // buffer.
1746  *slot = target;
1747  MigrateObject(heap, object, target, object_size);
1748 
1749  if (object_contents == POINTER_OBJECT) {
1750  if (map->instance_type() == JS_FUNCTION_TYPE) {
1751  heap->promotion_queue()->insert(
1752  target, JSFunction::kNonWeakFieldsEndOffset);
1753  } else {
1754  heap->promotion_queue()->insert(target, object_size);
1755  }
1756  }
1757 
1758  heap->tracer()->increment_promoted_objects_size(object_size);
1759  return;
1760  }
1761  }
1762  MaybeObject* allocation = heap->new_space()->AllocateRaw(allocation_size);
1763  heap->promotion_queue()->SetNewLimit(heap->new_space()->top());
1764  Object* result = allocation->ToObjectUnchecked();
1765  HeapObject* target = HeapObject::cast(result);
1766 
1767  if (alignment != kObjectAlignment) {
1768  target = EnsureDoubleAligned(heap, target, allocation_size);
1769  }
1770 
1771  // Order is important: slot might be inside of the target if target
1772  // was allocated over a dead object and slot comes from the store
1773  // buffer.
1774  *slot = target;
1775  MigrateObject(heap, object, target, object_size);
1776  return;
1777  }
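  // Roughly, the promotion decision above works like this: if the heap
  // decides the object should be promoted (ShouldBePromoted, typically
  // because it has already survived a scavenge), it is moved out of new
  // space -- into the large object space when it exceeds the per-page
  // limit, into old data space when it contains no pointers, and into old
  // pointer space otherwise. Promoted pointer objects are queued on the
  // promotion queue so their slots can be rescanned later. If promotion is
  // not taken or fails, the object is copied within new space instead; that
  // allocation is unchecked because to-space always has room for everything
  // that was live in from-space.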
1778 
1779 
1780  static inline void EvacuateJSFunction(Map* map,
1781  HeapObject** slot,
1782  HeapObject* object) {
1783  ObjectEvacuationStrategy<POINTER_OBJECT>::
1784  template VisitSpecialized<JSFunction::kSize>(map, slot, object);
1785 
1786  HeapObject* target = *slot;
1787  MarkBit mark_bit = Marking::MarkBitFrom(target);
1788  if (Marking::IsBlack(mark_bit)) {
1789  // This object is black and it might not be rescanned by the marker.
1790  // We explicitly record the code entry slot for compaction because
1791  // promotion queue processing (IterateAndMarkPointersToFromSpace) will
1792  // miss it, as the code entry is not HeapObject-tagged.
1793  Address code_entry_slot =
1794  target->address() + JSFunction::kCodeEntryOffset;
1795  Code* code = Code::cast(Code::GetObjectFromEntryAddress(code_entry_slot));
1796  map->GetHeap()->mark_compact_collector()->
1797  RecordCodeEntrySlot(code_entry_slot, code);
1798  }
1799  }
1800 
1801 
1802  static inline void EvacuateFixedArray(Map* map,
1803  HeapObject** slot,
1804  HeapObject* object) {
1805  int object_size = FixedArray::BodyDescriptor::SizeOf(map, object);
1806  EvacuateObject<POINTER_OBJECT, UNKNOWN_SIZE, kObjectAlignment>(map,
1807  slot,
1808  object,
1809  object_size);
1810  }
1811 
1812 
1813  static inline void EvacuateFixedDoubleArray(Map* map,
1814  HeapObject** slot,
1815  HeapObject* object) {
1816  int length = reinterpret_cast<FixedDoubleArray*>(object)->length();
1817  int object_size = FixedDoubleArray::SizeFor(length);
1818  EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kDoubleAlignment>(
1819  map,
1820  slot,
1821  object,
1822  object_size);
1823  }
1824 
1825 
1826  static inline void EvacuateByteArray(Map* map,
1827  HeapObject** slot,
1828  HeapObject* object) {
1829  int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize();
1830  EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kObjectAlignment>(
1831  map, slot, object, object_size);
1832  }
1833 
1834 
1835  static inline void EvacuateSeqAsciiString(Map* map,
1836  HeapObject** slot,
1837  HeapObject* object) {
1838  int object_size = SeqAsciiString::cast(object)->
1839  SeqAsciiStringSize(map->instance_type());
1840  EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kObjectAlignment>(
1841  map, slot, object, object_size);
1842  }
1843 
1844 
1845  static inline void EvacuateSeqTwoByteString(Map* map,
1846  HeapObject** slot,
1847  HeapObject* object) {
1848  int object_size = SeqTwoByteString::cast(object)->
1849  SeqTwoByteStringSize(map->instance_type());
1850  EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kObjectAlignment>(
1851  map, slot, object, object_size);
1852  }
1853 
1854 
1855  static inline bool IsShortcutCandidate(int type) {
1856  return ((type & kShortcutTypeMask) == kShortcutTypeTag);
1857  }
1858 
1859  static inline void EvacuateShortcutCandidate(Map* map,
1860  HeapObject** slot,
1861  HeapObject* object) {
1862  ASSERT(IsShortcutCandidate(map->instance_type()));
1863 
1864  Heap* heap = map->GetHeap();
1865 
1866  if (marks_handling == IGNORE_MARKS &&
1867  ConsString::cast(object)->unchecked_second() ==
1868  heap->empty_string()) {
1869  HeapObject* first =
1870  HeapObject::cast(ConsString::cast(object)->unchecked_first());
1871 
1872  *slot = first;
1873 
1874  if (!heap->InNewSpace(first)) {
1875  object->set_map_word(MapWord::FromForwardingAddress(first));
1876  return;
1877  }
1878 
1879  MapWord first_word = first->map_word();
1880  if (first_word.IsForwardingAddress()) {
1881  HeapObject* target = first_word.ToForwardingAddress();
1882 
1883  *slot = target;
1884  object->set_map_word(MapWord::FromForwardingAddress(target));
1885  return;
1886  }
1887 
1888  heap->DoScavengeObject(first->map(), slot, first);
1889  object->set_map_word(MapWord::FromForwardingAddress(*slot));
1890  return;
1891  }
1892 
1893  int object_size = ConsString::kSize;
1894  EvacuateObject<POINTER_OBJECT, SMALL, kObjectAlignment>(
1895  map, slot, object, object_size);
1896  }
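  // Shortcutting: a cons string whose second part is the empty string is
  // semantically equal to its first part, so when marks do not need to be
  // transferred the scavenger forwards the cons wrapper directly to that
  // first part instead of copying the wrapper. The wrapper's map word is
  // still updated with a forwarding address so other references resolve to
  // the same target.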
1897 
1898  template<ObjectContents object_contents>
1899  class ObjectEvacuationStrategy {
1900  public:
1901  template<int object_size>
1902  static inline void VisitSpecialized(Map* map,
1903  HeapObject** slot,
1904  HeapObject* object) {
1905  EvacuateObject<object_contents, SMALL, kObjectAlignment>(
1906  map, slot, object, object_size);
1907  }
1908 
1909  static inline void Visit(Map* map,
1910  HeapObject** slot,
1911  HeapObject* object) {
1912  int object_size = map->instance_size();
1913  EvacuateObject<object_contents, SMALL, kObjectAlignment>(
1914  map, slot, object, object_size);
1915  }
1916  };
1917 
1918  static VisitorDispatchTable<ScavengingCallback> table_;
1919 };
1920 
1921 
1922 template<MarksHandling marks_handling,
1923  LoggingAndProfiling logging_and_profiling_mode>
1924 VisitorDispatchTable<ScavengingCallback>
1925  ScavengingVisitor<marks_handling, logging_and_profiling_mode>::table_;
1926 
1927 
1928 static void InitializeScavengingVisitorsTables() {
1929  ScavengingVisitor<TRANSFER_MARKS,
1930  LOGGING_AND_PROFILING_DISABLED>::Initialize();
1931  ScavengingVisitor<IGNORE_MARKS, LOGGING_AND_PROFILING_DISABLED>::Initialize();
1932  ScavengingVisitor<TRANSFER_MARKS,
1933  LOGGING_AND_PROFILING_ENABLED>::Initialize();
1934  ScavengingVisitor<IGNORE_MARKS, LOGGING_AND_PROFILING_ENABLED>::Initialize();
1935 }
1936 
1937 
1938 void Heap::SelectScavengingVisitorsTable() {
1939  bool logging_and_profiling =
1940  isolate()->logger()->is_logging() ||
1941  CpuProfiler::is_profiling(isolate()) ||
1942  (isolate()->heap_profiler() != NULL &&
1943  isolate()->heap_profiler()->is_profiling());
1944 
1945  if (!incremental_marking()->IsMarking()) {
1946  if (!logging_and_profiling) {
1947  scavenging_visitors_table_.CopyFrom(
1948  ScavengingVisitor<IGNORE_MARKS,
1949  LOGGING_AND_PROFILING_DISABLED>::GetTable());
1950  } else {
1951  scavenging_visitors_table_.CopyFrom(
1952  ScavengingVisitor<IGNORE_MARKS,
1953  LOGGING_AND_PROFILING_ENABLED>::GetTable());
1954  }
1955  } else {
1956  if (!logging_and_profiling) {
1957  scavenging_visitors_table_.CopyFrom(
1958  ScavengingVisitor<TRANSFER_MARKS,
1959  LOGGING_AND_PROFILING_DISABLED>::GetTable());
1960  } else {
1961  scavenging_visitors_table_.CopyFrom(
1962  ScavengingVisitor<TRANSFER_MARKS,
1963  LOGGING_AND_PROFILING_ENABLED>::GetTable());
1964  }
1965 
1966  if (incremental_marking()->IsCompacting()) {
1967  // When compacting, forbid short-circuiting of cons strings.
1968  // The scavenging code relies on the fact that a new space object
1969  // cannot be evacuated into an evacuation candidate, but
1970  // short-circuiting violates this assumption.
1971  scavenging_visitors_table_.Register(
1972  StaticVisitorBase::kVisitShortcutCandidate,
1973  scavenging_visitors_table_.GetVisitorById(
1974  StaticVisitorBase::kVisitConsString));
1975  }
1976  }
1977 }
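// In effect the heap picks one of four statically initialized dispatch
// tables: {IGNORE_MARKS, TRANSFER_MARKS} x {logging/profiling off, on}.
// Marks are transferred only while incremental marking is in progress, and
// the logging variants additionally report object moves to the profiler.
// While compacting, the shortcut-candidate entry is patched to the plain
// cons-string visitor, as the comment inside the function explains.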
1978 
1979 
1980 void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
1981  SLOW_ASSERT(HEAP->InFromSpace(object));
1982  MapWord first_word = object->map_word();
1983  SLOW_ASSERT(!first_word.IsForwardingAddress());
1984  Map* map = first_word.ToMap();
1985  map->GetHeap()->DoScavengeObject(map, p, object);
1986 }
1987 
1988 
1989 MaybeObject* Heap::AllocatePartialMap(InstanceType instance_type,
1990  int instance_size) {
1991  Object* result;
1992  { MaybeObject* maybe_result = AllocateRawMap();
1993  if (!maybe_result->ToObject(&result)) return maybe_result;
1994  }
1995 
1996  // Map::cast cannot be used due to uninitialized map field.
1997  reinterpret_cast<Map*>(result)->set_map(raw_unchecked_meta_map());
1998  reinterpret_cast<Map*>(result)->set_instance_type(instance_type);
1999  reinterpret_cast<Map*>(result)->set_instance_size(instance_size);
2000  reinterpret_cast<Map*>(result)->set_visitor_id(
2001  StaticVisitorBase::GetVisitorId(instance_type, instance_size));
2002  reinterpret_cast<Map*>(result)->set_inobject_properties(0);
2003  reinterpret_cast<Map*>(result)->set_pre_allocated_property_fields(0);
2004  reinterpret_cast<Map*>(result)->set_unused_property_fields(0);
2005  reinterpret_cast<Map*>(result)->set_bit_field(0);
2006  reinterpret_cast<Map*>(result)->set_bit_field2(0);
2007  return result;
2008 }
2009 
2010 
2011 MaybeObject* Heap::AllocateMap(InstanceType instance_type,
2012  int instance_size,
2013  ElementsKind elements_kind) {
2014  Object* result;
2015  { MaybeObject* maybe_result = AllocateRawMap();
2016  if (!maybe_result->ToObject(&result)) return maybe_result;
2017  }
2018 
2019  Map* map = reinterpret_cast<Map*>(result);
2020  map->set_map_no_write_barrier(meta_map());
2021  map->set_instance_type(instance_type);
2022  map->set_visitor_id(
2023  StaticVisitorBase::GetVisitorId(instance_type, instance_size));
2024  map->set_prototype(null_value(), SKIP_WRITE_BARRIER);
2025  map->set_constructor(null_value(), SKIP_WRITE_BARRIER);
2026  map->set_instance_size(instance_size);
2027  map->set_inobject_properties(0);
2030  map->set_code_cache(empty_fixed_array(), SKIP_WRITE_BARRIER);
2031  map->init_prototype_transitions(undefined_value());
2033  map->set_bit_field(0);
2035  map->set_elements_kind(elements_kind);
2036 
2037  // If the map object is aligned, fill the padding area with Smi 0 objects.
2038  if (Map::kPadStart < Map::kSize) {
2039  memset(reinterpret_cast<byte*>(map) + Map::kPadStart - kHeapObjectTag,
2040  0,
2041  Map::kSize - Map::kPadStart);
2042  }
2043  return map;
2044 }
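// AllocatePartialMap above is only used during bootstrapping, before the
// meta map, the empty fixed array and the oddballs exist; CreateInitialMaps
// later patches the fields that could not be set at that point. AllocateMap
// is the general version used once those roots are in place.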
2045 
2046 
2047 MaybeObject* Heap::AllocateCodeCache() {
2048  CodeCache* code_cache;
2049  { MaybeObject* maybe_code_cache = AllocateStruct(CODE_CACHE_TYPE);
2050  if (!maybe_code_cache->To(&code_cache)) return maybe_code_cache;
2051  }
2052  code_cache->set_default_cache(empty_fixed_array(), SKIP_WRITE_BARRIER);
2053  code_cache->set_normal_type_cache(undefined_value(), SKIP_WRITE_BARRIER);
2054  return code_cache;
2055 }
2056 
2057 
2058 MaybeObject* Heap::AllocatePolymorphicCodeCache() {
2059  return AllocateStruct(POLYMORPHIC_CODE_CACHE_TYPE);
2060 }
2061 
2062 
2063 MaybeObject* Heap::AllocateAccessorPair() {
2064  AccessorPair* accessors;
2065  { MaybeObject* maybe_accessors = AllocateStruct(ACCESSOR_PAIR_TYPE);
2066  if (!maybe_accessors->To(&accessors)) return maybe_accessors;
2067  }
2068  accessors->set_getter(the_hole_value(), SKIP_WRITE_BARRIER);
2069  accessors->set_setter(the_hole_value(), SKIP_WRITE_BARRIER);
2070  return accessors;
2071 }
2072 
2073 
2074 MaybeObject* Heap::AllocateTypeFeedbackInfo() {
2075  TypeFeedbackInfo* info;
2076  { MaybeObject* maybe_info = AllocateStruct(TYPE_FEEDBACK_INFO_TYPE);
2077  if (!maybe_info->To(&info)) return maybe_info;
2078  }
2079  info->set_ic_total_count(0);
2080  info->set_ic_with_type_info_count(0);
2081  info->set_type_feedback_cells(TypeFeedbackCells::cast(empty_fixed_array()),
2082  SKIP_WRITE_BARRIER);
2083  return info;
2084 }
2085 
2086 
2087 MaybeObject* Heap::AllocateAliasedArgumentsEntry(int aliased_context_slot) {
2088  AliasedArgumentsEntry* entry;
2089  { MaybeObject* maybe_entry = AllocateStruct(ALIASED_ARGUMENTS_ENTRY_TYPE);
2090  if (!maybe_entry->To(&entry)) return maybe_entry;
2091  }
2092  entry->set_aliased_context_slot(aliased_context_slot);
2093  return entry;
2094 }
2095 
2096 
2097 const Heap::StringTypeTable Heap::string_type_table[] = {
2098 #define STRING_TYPE_ELEMENT(type, size, name, camel_name) \
2099  {type, size, k##camel_name##MapRootIndex},
2100  STRING_TYPE_LIST(STRING_TYPE_ELEMENT)
2101 #undef STRING_TYPE_ELEMENT
2102 };
2103 
2104 
2105 const Heap::ConstantSymbolTable Heap::constant_symbol_table[] = {
2106 #define CONSTANT_SYMBOL_ELEMENT(name, contents) \
2107  {contents, k##name##RootIndex},
2108  SYMBOL_LIST(CONSTANT_SYMBOL_ELEMENT)
2109 #undef CONSTANT_SYMBOL_ELEMENT
2110 };
2111 
2112 
2113 const Heap::StructTable Heap::struct_table[] = {
2114 #define STRUCT_TABLE_ELEMENT(NAME, Name, name) \
2115  { NAME##_TYPE, Name::kSize, k##Name##MapRootIndex },
2116  STRUCT_LIST(STRUCT_TABLE_ELEMENT)
2117 #undef STRUCT_TABLE_ELEMENT
2118 };
2119 
2120 
2121 bool Heap::CreateInitialMaps() {
2122  Object* obj;
2123  { MaybeObject* maybe_obj = AllocatePartialMap(MAP_TYPE, Map::kSize);
2124  if (!maybe_obj->ToObject(&obj)) return false;
2125  }
2126  // Map::cast cannot be used due to uninitialized map field.
2127  Map* new_meta_map = reinterpret_cast<Map*>(obj);
2128  set_meta_map(new_meta_map);
2129  new_meta_map->set_map(new_meta_map);
2130 
2131  { MaybeObject* maybe_obj =
2132  AllocatePartialMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel);
2133  if (!maybe_obj->ToObject(&obj)) return false;
2134  }
2135  set_fixed_array_map(Map::cast(obj));
2136 
2137  { MaybeObject* maybe_obj = AllocatePartialMap(ODDBALL_TYPE, Oddball::kSize);
2138  if (!maybe_obj->ToObject(&obj)) return false;
2139  }
2140  set_oddball_map(Map::cast(obj));
2141 
2142  // Allocate the empty array.
2143  { MaybeObject* maybe_obj = AllocateEmptyFixedArray();
2144  if (!maybe_obj->ToObject(&obj)) return false;
2145  }
2146  set_empty_fixed_array(FixedArray::cast(obj));
2147 
2148  { MaybeObject* maybe_obj = Allocate(oddball_map(), OLD_POINTER_SPACE);
2149  if (!maybe_obj->ToObject(&obj)) return false;
2150  }
2151  set_null_value(Oddball::cast(obj));
2153 
2154  { MaybeObject* maybe_obj = Allocate(oddball_map(), OLD_POINTER_SPACE);
2155  if (!maybe_obj->ToObject(&obj)) return false;
2156  }
2157  set_undefined_value(Oddball::cast(obj));
2159  ASSERT(!InNewSpace(undefined_value()));
2160 
2161  // Allocate the empty descriptor array.
2162  { MaybeObject* maybe_obj = AllocateEmptyFixedArray();
2163  if (!maybe_obj->ToObject(&obj)) return false;
2164  }
2165  set_empty_descriptor_array(DescriptorArray::cast(obj));
2166 
2167  // Fix the instance_descriptors for the existing maps.
2168  meta_map()->init_instance_descriptors();
2169  meta_map()->set_code_cache(empty_fixed_array());
2170  meta_map()->init_prototype_transitions(undefined_value());
2171 
2172  fixed_array_map()->init_instance_descriptors();
2173  fixed_array_map()->set_code_cache(empty_fixed_array());
2174  fixed_array_map()->init_prototype_transitions(undefined_value());
2175 
2176  oddball_map()->init_instance_descriptors();
2177  oddball_map()->set_code_cache(empty_fixed_array());
2178  oddball_map()->init_prototype_transitions(undefined_value());
2179 
2180  // Fix prototype object for existing maps.
2181  meta_map()->set_prototype(null_value());
2182  meta_map()->set_constructor(null_value());
2183 
2184  fixed_array_map()->set_prototype(null_value());
2185  fixed_array_map()->set_constructor(null_value());
2186 
2187  oddball_map()->set_prototype(null_value());
2188  oddball_map()->set_constructor(null_value());
2189 
2190  { MaybeObject* maybe_obj =
2192  if (!maybe_obj->ToObject(&obj)) return false;
2193  }
2194  set_fixed_cow_array_map(Map::cast(obj));
2195  ASSERT(fixed_array_map() != fixed_cow_array_map());
2196 
2197  { MaybeObject* maybe_obj =
2199  if (!maybe_obj->ToObject(&obj)) return false;
2200  }
2201  set_scope_info_map(Map::cast(obj));
2202 
2203  { MaybeObject* maybe_obj = AllocateMap(HEAP_NUMBER_TYPE, HeapNumber::kSize);
2204  if (!maybe_obj->ToObject(&obj)) return false;
2205  }
2206  set_heap_number_map(Map::cast(obj));
2207 
2208  { MaybeObject* maybe_obj = AllocateMap(FOREIGN_TYPE, Foreign::kSize);
2209  if (!maybe_obj->ToObject(&obj)) return false;
2210  }
2211  set_foreign_map(Map::cast(obj));
2212 
2213  for (unsigned i = 0; i < ARRAY_SIZE(string_type_table); i++) {
2214  const StringTypeTable& entry = string_type_table[i];
2215  { MaybeObject* maybe_obj = AllocateMap(entry.type, entry.size);
2216  if (!maybe_obj->ToObject(&obj)) return false;
2217  }
2218  roots_[entry.index] = Map::cast(obj);
2219  }
2220 
2221  { MaybeObject* maybe_obj = AllocateMap(STRING_TYPE, kVariableSizeSentinel);
2222  if (!maybe_obj->ToObject(&obj)) return false;
2223  }
2224  set_undetectable_string_map(Map::cast(obj));
2226 
2227  { MaybeObject* maybe_obj =
2229  if (!maybe_obj->ToObject(&obj)) return false;
2230  }
2231  set_undetectable_ascii_string_map(Map::cast(obj));
2233 
2234  { MaybeObject* maybe_obj =
2236  if (!maybe_obj->ToObject(&obj)) return false;
2237  }
2238  set_fixed_double_array_map(Map::cast(obj));
2239 
2240  { MaybeObject* maybe_obj =
2242  if (!maybe_obj->ToObject(&obj)) return false;
2243  }
2244  set_byte_array_map(Map::cast(obj));
2245 
2246  { MaybeObject* maybe_obj =
2248  if (!maybe_obj->ToObject(&obj)) return false;
2249  }
2250  set_free_space_map(Map::cast(obj));
2251 
2252  { MaybeObject* maybe_obj = AllocateByteArray(0, TENURED);
2253  if (!maybe_obj->ToObject(&obj)) return false;
2254  }
2255  set_empty_byte_array(ByteArray::cast(obj));
2256 
2257  { MaybeObject* maybe_obj =
2259  if (!maybe_obj->ToObject(&obj)) return false;
2260  }
2261  set_external_pixel_array_map(Map::cast(obj));
2262 
2263  { MaybeObject* maybe_obj = AllocateMap(EXTERNAL_BYTE_ARRAY_TYPE,
2265  if (!maybe_obj->ToObject(&obj)) return false;
2266  }
2267  set_external_byte_array_map(Map::cast(obj));
2268 
2269  { MaybeObject* maybe_obj = AllocateMap(EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE,
2271  if (!maybe_obj->ToObject(&obj)) return false;
2272  }
2273  set_external_unsigned_byte_array_map(Map::cast(obj));
2274 
2275  { MaybeObject* maybe_obj = AllocateMap(EXTERNAL_SHORT_ARRAY_TYPE,
2277  if (!maybe_obj->ToObject(&obj)) return false;
2278  }
2279  set_external_short_array_map(Map::cast(obj));
2280 
2281  { MaybeObject* maybe_obj = AllocateMap(EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE,
2283  if (!maybe_obj->ToObject(&obj)) return false;
2284  }
2285  set_external_unsigned_short_array_map(Map::cast(obj));
2286 
2287  { MaybeObject* maybe_obj = AllocateMap(EXTERNAL_INT_ARRAY_TYPE,
2289  if (!maybe_obj->ToObject(&obj)) return false;
2290  }
2291  set_external_int_array_map(Map::cast(obj));
2292 
2293  { MaybeObject* maybe_obj = AllocateMap(EXTERNAL_UNSIGNED_INT_ARRAY_TYPE,
2295  if (!maybe_obj->ToObject(&obj)) return false;
2296  }
2297  set_external_unsigned_int_array_map(Map::cast(obj));
2298 
2299  { MaybeObject* maybe_obj = AllocateMap(EXTERNAL_FLOAT_ARRAY_TYPE,
2301  if (!maybe_obj->ToObject(&obj)) return false;
2302  }
2303  set_external_float_array_map(Map::cast(obj));
2304 
2305  { MaybeObject* maybe_obj =
2307  if (!maybe_obj->ToObject(&obj)) return false;
2308  }
2309  set_non_strict_arguments_elements_map(Map::cast(obj));
2310 
2311  { MaybeObject* maybe_obj = AllocateMap(EXTERNAL_DOUBLE_ARRAY_TYPE,
2313  if (!maybe_obj->ToObject(&obj)) return false;
2314  }
2315  set_external_double_array_map(Map::cast(obj));
2316 
2317  { MaybeObject* maybe_obj = AllocateMap(CODE_TYPE, kVariableSizeSentinel);
2318  if (!maybe_obj->ToObject(&obj)) return false;
2319  }
2320  set_code_map(Map::cast(obj));
2321 
2322  { MaybeObject* maybe_obj = AllocateMap(JS_GLOBAL_PROPERTY_CELL_TYPE,
2324  if (!maybe_obj->ToObject(&obj)) return false;
2325  }
2326  set_global_property_cell_map(Map::cast(obj));
2327 
2328  { MaybeObject* maybe_obj = AllocateMap(FILLER_TYPE, kPointerSize);
2329  if (!maybe_obj->ToObject(&obj)) return false;
2330  }
2331  set_one_pointer_filler_map(Map::cast(obj));
2332 
2333  { MaybeObject* maybe_obj = AllocateMap(FILLER_TYPE, 2 * kPointerSize);
2334  if (!maybe_obj->ToObject(&obj)) return false;
2335  }
2336  set_two_pointer_filler_map(Map::cast(obj));
2337 
2338  for (unsigned i = 0; i < ARRAY_SIZE(struct_table); i++) {
2339  const StructTable& entry = struct_table[i];
2340  { MaybeObject* maybe_obj = AllocateMap(entry.type, entry.size);
2341  if (!maybe_obj->ToObject(&obj)) return false;
2342  }
2343  roots_[entry.index] = Map::cast(obj);
2344  }
2345 
2346  { MaybeObject* maybe_obj =
2348  if (!maybe_obj->ToObject(&obj)) return false;
2349  }
2350  set_hash_table_map(Map::cast(obj));
2351 
2352  { MaybeObject* maybe_obj =
2354  if (!maybe_obj->ToObject(&obj)) return false;
2355  }
2356  set_function_context_map(Map::cast(obj));
2357 
2358  { MaybeObject* maybe_obj =
2360  if (!maybe_obj->ToObject(&obj)) return false;
2361  }
2362  set_catch_context_map(Map::cast(obj));
2363 
2364  { MaybeObject* maybe_obj =
2366  if (!maybe_obj->ToObject(&obj)) return false;
2367  }
2368  set_with_context_map(Map::cast(obj));
2369 
2370  { MaybeObject* maybe_obj =
2372  if (!maybe_obj->ToObject(&obj)) return false;
2373  }
2374  set_block_context_map(Map::cast(obj));
2375 
2376  { MaybeObject* maybe_obj =
2378  if (!maybe_obj->ToObject(&obj)) return false;
2379  }
2380  set_module_context_map(Map::cast(obj));
2381 
2382  { MaybeObject* maybe_obj =
2384  if (!maybe_obj->ToObject(&obj)) return false;
2385  }
2386  Map* global_context_map = Map::cast(obj);
2387  global_context_map->set_visitor_id(StaticVisitorBase::kVisitGlobalContext);
2388  set_global_context_map(global_context_map);
2389 
2390  { MaybeObject* maybe_obj = AllocateMap(SHARED_FUNCTION_INFO_TYPE,
2392  if (!maybe_obj->ToObject(&obj)) return false;
2393  }
2394  set_shared_function_info_map(Map::cast(obj));
2395 
2396  { MaybeObject* maybe_obj = AllocateMap(JS_MESSAGE_OBJECT_TYPE,
2398  if (!maybe_obj->ToObject(&obj)) return false;
2399  }
2400  set_message_object_map(Map::cast(obj));
2401 
2402  ASSERT(!InNewSpace(empty_fixed_array()));
2403  return true;
2404 }
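// Bootstrapping order matters here: the meta map, fixed array map and
// oddball map are created as partial maps first, then the empty fixed
// array and the null/undefined oddballs, and only then are the partial
// maps patched with descriptor arrays, code caches and prototypes. All
// remaining maps can be allocated with the regular AllocateMap.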
2405 
2406 
2407 MaybeObject* Heap::AllocateHeapNumber(double value, PretenureFlag pretenure) {
2408  // Statically ensure that it is safe to allocate heap numbers in paged
2409  // spaces.
2411  AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
2412 
2413  Object* result;
2414  { MaybeObject* maybe_result =
2415  AllocateRaw(HeapNumber::kSize, space, OLD_DATA_SPACE);
2416  if (!maybe_result->ToObject(&result)) return maybe_result;
2417  }
2418 
2419  HeapObject::cast(result)->set_map_no_write_barrier(heap_number_map());
2420  HeapNumber::cast(result)->set_value(value);
2421  return result;
2422 }
2423 
2424 
2425 MaybeObject* Heap::AllocateHeapNumber(double value) {
2426  // Use general version, if we're forced to always allocate.
2427  if (always_allocate()) return AllocateHeapNumber(value, TENURED);
2428 
2429  // This version of AllocateHeapNumber is optimized for
2430  // allocation in new space.
2432  ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
2433  Object* result;
2434  { MaybeObject* maybe_result = new_space_.AllocateRaw(HeapNumber::kSize);
2435  if (!maybe_result->ToObject(&result)) return maybe_result;
2436  }
2437  HeapObject::cast(result)->set_map_no_write_barrier(heap_number_map());
2438  HeapNumber::cast(result)->set_value(value);
2439  return result;
2440 }
2441 
2442 
2443 MaybeObject* Heap::AllocateJSGlobalPropertyCell(Object* value) {
2444  Object* result;
2445  { MaybeObject* maybe_result = AllocateRawCell();
2446  if (!maybe_result->ToObject(&result)) return maybe_result;
2447  }
2449  global_property_cell_map());
2450  JSGlobalPropertyCell::cast(result)->set_value(value);
2451  return result;
2452 }
2453 
2454 
2455 MaybeObject* Heap::CreateOddball(const char* to_string,
2456  Object* to_number,
2457  byte kind) {
2458  Object* result;
2459  { MaybeObject* maybe_result = Allocate(oddball_map(), OLD_POINTER_SPACE);
2460  if (!maybe_result->ToObject(&result)) return maybe_result;
2461  }
2462  return Oddball::cast(result)->Initialize(to_string, to_number, kind);
2463 }
2464 
2465 
2466 bool Heap::CreateApiObjects() {
2467  Object* obj;
2468 
2469  { MaybeObject* maybe_obj = AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
2470  if (!maybe_obj->ToObject(&obj)) return false;
2471  }
2472  // Don't use Smi-only elements optimizations for objects with the neander
2473  // map. There are too many cases where element values are set directly,
2474  // with no bottleneck to trap the Smi-only -> fast elements transition,
2475  // and there appears to be no benefit in optimizing this case.
2476  Map* new_neander_map = Map::cast(obj);
2478  set_neander_map(new_neander_map);
2479 
2480  { MaybeObject* maybe_obj = AllocateJSObjectFromMap(neander_map());
2481  if (!maybe_obj->ToObject(&obj)) return false;
2482  }
2483  Object* elements;
2484  { MaybeObject* maybe_elements = AllocateFixedArray(2);
2485  if (!maybe_elements->ToObject(&elements)) return false;
2486  }
2487  FixedArray::cast(elements)->set(0, Smi::FromInt(0));
2488  JSObject::cast(obj)->set_elements(FixedArray::cast(elements));
2489  set_message_listeners(JSObject::cast(obj));
2490 
2491  return true;
2492 }
2493 
2494 
2495 void Heap::CreateJSEntryStub() {
2496  JSEntryStub stub;
2497  set_js_entry_code(*stub.GetCode());
2498 }
2499 
2500 
2501 void Heap::CreateJSConstructEntryStub() {
2502  JSConstructEntryStub stub;
2503  set_js_construct_entry_code(*stub.GetCode());
2504 }
2505 
2506 
2507 void Heap::CreateFixedStubs() {
2508  // Here we create roots for fixed stubs. They are needed at GC
2509  // for cooking and uncooking (check out frames.cc).
2510  // This eliminates the need for doing a dictionary lookup in the
2511  // stub cache for these stubs.
2512  HandleScope scope;
2513  // gcc-4.4 has problems generating correct code for the following snippet:
2514  // { JSEntryStub stub;
2515  // js_entry_code_ = *stub.GetCode();
2516  // }
2517  // { JSConstructEntryStub stub;
2518  // js_construct_entry_code_ = *stub.GetCode();
2519  // }
2520  // To work around the problem, make separate functions without inlining.
2521  Heap::CreateJSEntryStub();
2522  Heap::CreateJSConstructEntryStub();
2523 
2524  // Create stubs that should be there, so we don't unexpectedly have to
2525  // create them if we need them during the creation of another stub.
2526  // Stub creation mixes raw pointers and handles in an unsafe manner so
2527  // we cannot create stubs while we are creating stubs.
2528  CodeStub::GenerateStubsAheadOfTime();
2529 }
2530 
2531 
2532 bool Heap::CreateInitialObjects() {
2533  Object* obj;
2534 
2535  // The -0 value must be set before NumberFromDouble works.
2536  { MaybeObject* maybe_obj = AllocateHeapNumber(-0.0, TENURED);
2537  if (!maybe_obj->ToObject(&obj)) return false;
2538  }
2539  set_minus_zero_value(HeapNumber::cast(obj));
2540  ASSERT(signbit(minus_zero_value()->Number()) != 0);
2541 
2542  { MaybeObject* maybe_obj = AllocateHeapNumber(OS::nan_value(), TENURED);
2543  if (!maybe_obj->ToObject(&obj)) return false;
2544  }
2545  set_nan_value(HeapNumber::cast(obj));
2546 
2547  { MaybeObject* maybe_obj = AllocateHeapNumber(V8_INFINITY, TENURED);
2548  if (!maybe_obj->ToObject(&obj)) return false;
2549  }
2550  set_infinity_value(HeapNumber::cast(obj));
2551 
2552  // The hole has not been created yet, but we want to put something
2553  // predictable in the gaps in the symbol table, so let's make that Smi zero.
2554  set_the_hole_value(reinterpret_cast<Oddball*>(Smi::FromInt(0)));
2555 
2556  // Allocate initial symbol table.
2557  { MaybeObject* maybe_obj = SymbolTable::Allocate(kInitialSymbolTableSize);
2558  if (!maybe_obj->ToObject(&obj)) return false;
2559  }
2560  // Don't use set_symbol_table() due to asserts.
2561  roots_[kSymbolTableRootIndex] = obj;
2562 
2563  // Finish initializing oddballs after creating symboltable.
2564  { MaybeObject* maybe_obj =
2565  undefined_value()->Initialize("undefined",
2566  nan_value(),
2567  Oddball::kUndefined);
2568  if (!maybe_obj->ToObject(&obj)) return false;
2569  }
2570 
2571  // Initialize the null_value.
2572  { MaybeObject* maybe_obj =
2573  null_value()->Initialize("null", Smi::FromInt(0), Oddball::kNull);
2574  if (!maybe_obj->ToObject(&obj)) return false;
2575  }
2576 
2577  { MaybeObject* maybe_obj = CreateOddball("true",
2578  Smi::FromInt(1),
2579  Oddball::kTrue);
2580  if (!maybe_obj->ToObject(&obj)) return false;
2581  }
2582  set_true_value(Oddball::cast(obj));
2583 
2584  { MaybeObject* maybe_obj = CreateOddball("false",
2585  Smi::FromInt(0),
2586  Oddball::kFalse);
2587  if (!maybe_obj->ToObject(&obj)) return false;
2588  }
2589  set_false_value(Oddball::cast(obj));
2590 
2591  { MaybeObject* maybe_obj = CreateOddball("hole",
2592  Smi::FromInt(-1),
2593  Oddball::kTheHole);
2594  if (!maybe_obj->ToObject(&obj)) return false;
2595  }
2596  set_the_hole_value(Oddball::cast(obj));
2597 
2598  { MaybeObject* maybe_obj = CreateOddball("arguments_marker",
2599  Smi::FromInt(-4),
2600  Oddball::kArgumentMarker);
2601  if (!maybe_obj->ToObject(&obj)) return false;
2602  }
2603  set_arguments_marker(Oddball::cast(obj));
2604 
2605  { MaybeObject* maybe_obj = CreateOddball("no_interceptor_result_sentinel",
2606  Smi::FromInt(-2),
2607  Oddball::kOther);
2608  if (!maybe_obj->ToObject(&obj)) return false;
2609  }
2610  set_no_interceptor_result_sentinel(obj);
2611 
2612  { MaybeObject* maybe_obj = CreateOddball("termination_exception",
2613  Smi::FromInt(-3),
2614  Oddball::kOther);
2615  if (!maybe_obj->ToObject(&obj)) return false;
2616  }
2617  set_termination_exception(obj);
2618 
2619  // Allocate the empty string.
2620  { MaybeObject* maybe_obj = AllocateRawAsciiString(0, TENURED);
2621  if (!maybe_obj->ToObject(&obj)) return false;
2622  }
2623  set_empty_string(String::cast(obj));
2624 
2625  for (unsigned i = 0; i < ARRAY_SIZE(constant_symbol_table); i++) {
2626  { MaybeObject* maybe_obj =
2627  LookupAsciiSymbol(constant_symbol_table[i].contents);
2628  if (!maybe_obj->ToObject(&obj)) return false;
2629  }
2630  roots_[constant_symbol_table[i].index] = String::cast(obj);
2631  }
2632 
2633  // Allocate the hidden symbol which is used to identify the hidden properties
2634  // in JSObjects. The hash code has a special value so that it will not match
2635  // the empty string when searching for the property. It cannot be part of the
2636  // loop above because it needs to be allocated manually with the special
2637  // hash code in place. The hash code for the hidden_symbol is zero to ensure
2638  // that it will always be at the first entry in property descriptors.
2639  { MaybeObject* maybe_obj =
2641  if (!maybe_obj->ToObject(&obj)) return false;
2642  }
2643  hidden_symbol_ = String::cast(obj);
2644 
2645  // Allocate the foreign for __proto__.
2646  { MaybeObject* maybe_obj =
2647  AllocateForeign((Address) &Accessors::ObjectPrototype);
2648  if (!maybe_obj->ToObject(&obj)) return false;
2649  }
2650  set_prototype_accessors(Foreign::cast(obj));
2651 
2652  // Allocate the code_stubs dictionary. The initial size is set to avoid
2653  // expanding the dictionary during bootstrapping.
2654  { MaybeObject* maybe_obj = UnseededNumberDictionary::Allocate(128);
2655  if (!maybe_obj->ToObject(&obj)) return false;
2656  }
2657  set_code_stubs(UnseededNumberDictionary::cast(obj));
2658 
2659 
2660  // Allocate the non_monomorphic_cache used in stub-cache.cc. The initial size
2661  // is set to avoid expanding the dictionary during bootstrapping.
2662  { MaybeObject* maybe_obj = UnseededNumberDictionary::Allocate(64);
2663  if (!maybe_obj->ToObject(&obj)) return false;
2664  }
2665  set_non_monomorphic_cache(UnseededNumberDictionary::cast(obj));
2666 
2667  { MaybeObject* maybe_obj = AllocatePolymorphicCodeCache();
2668  if (!maybe_obj->ToObject(&obj)) return false;
2669  }
2670  set_polymorphic_code_cache(PolymorphicCodeCache::cast(obj));
2671 
2672  set_instanceof_cache_function(Smi::FromInt(0));
2673  set_instanceof_cache_map(Smi::FromInt(0));
2674  set_instanceof_cache_answer(Smi::FromInt(0));
2675 
2676  CreateFixedStubs();
2677 
2678  // Allocate the dictionary of intrinsic function names.
2679  { MaybeObject* maybe_obj = StringDictionary::Allocate(Runtime::kNumFunctions);
2680  if (!maybe_obj->ToObject(&obj)) return false;
2681  }
2682  { MaybeObject* maybe_obj = Runtime::InitializeIntrinsicFunctionNames(this,
2683  obj);
2684  if (!maybe_obj->ToObject(&obj)) return false;
2685  }
2686  set_intrinsic_function_names(StringDictionary::cast(obj));
2687 
2688  { MaybeObject* maybe_obj = AllocateInitialNumberStringCache();
2689  if (!maybe_obj->ToObject(&obj)) return false;
2690  }
2691  set_number_string_cache(FixedArray::cast(obj));
2692 
2693  // Allocate cache for single character ASCII strings.
2694  { MaybeObject* maybe_obj =
2695  AllocateFixedArray(String::kMaxAsciiCharCode + 1, TENURED);
2696  if (!maybe_obj->ToObject(&obj)) return false;
2697  }
2698  set_single_character_string_cache(FixedArray::cast(obj));
2699 
2700  // Allocate cache for string split.
2701  { MaybeObject* maybe_obj =
2702  AllocateFixedArray(StringSplitCache::kStringSplitCacheSize, TENURED);
2703  if (!maybe_obj->ToObject(&obj)) return false;
2704  }
2705  set_string_split_cache(FixedArray::cast(obj));
2706 
2707  // Allocate cache for external strings pointing to native source code.
2708  { MaybeObject* maybe_obj = AllocateFixedArray(Natives::GetBuiltinsCount());
2709  if (!maybe_obj->ToObject(&obj)) return false;
2710  }
2711  set_natives_source_cache(FixedArray::cast(obj));
2712 
2713  // Handling of script id generation is in FACTORY->NewScript.
2714  set_last_script_id(undefined_value());
2715 
2716  // Initialize keyed lookup cache.
2717  isolate_->keyed_lookup_cache()->Clear();
2718 
2719  // Initialize context slot cache.
2720  isolate_->context_slot_cache()->Clear();
2721 
2722  // Initialize descriptor cache.
2723  isolate_->descriptor_lookup_cache()->Clear();
2724 
2725  // Initialize compilation cache.
2726  isolate_->compilation_cache()->Clear();
2727 
2728  return true;
2729 }
2730 
2731 
2732 Object* StringSplitCache::Lookup(
2733  FixedArray* cache, String* string, String* pattern) {
2734  if (!string->IsSymbol() || !pattern->IsSymbol()) return Smi::FromInt(0);
2735  uint32_t hash = string->Hash();
2736  uint32_t index = ((hash & (kStringSplitCacheSize - 1)) &
2737  ~(kArrayEntriesPerCacheEntry - 1));
2738  if (cache->get(index + kStringOffset) == string &&
2739  cache->get(index + kPatternOffset) == pattern) {
2740  return cache->get(index + kArrayOffset);
2741  }
2742  index = ((index + kArrayEntriesPerCacheEntry) & (kStringSplitCacheSize - 1));
2743  if (cache->get(index + kStringOffset) == string &&
2744  cache->get(index + kPatternOffset) == pattern) {
2745  return cache->get(index + kArrayOffset);
2746  }
2747  return Smi::FromInt(0);
2748 }
2749 
2750 
2751 void StringSplitCache::Enter(Heap* heap,
2752  FixedArray* cache,
2753  String* string,
2754  String* pattern,
2755  FixedArray* array) {
2756  if (!string->IsSymbol() || !pattern->IsSymbol()) return;
2757  uint32_t hash = string->Hash();
2758  uint32_t index = ((hash & (kStringSplitCacheSize - 1)) &
2759  ~(kArrayEntriesPerCacheEntry - 1));
2760  if (cache->get(index + kStringOffset) == Smi::FromInt(0)) {
2761  cache->set(index + kStringOffset, string);
2762  cache->set(index + kPatternOffset, pattern);
2763  cache->set(index + kArrayOffset, array);
2764  } else {
2765  uint32_t index2 =
2766  ((index + kArrayEntriesPerCacheEntry) & (kStringSplitCacheSize - 1));
2767  if (cache->get(index2 + kStringOffset) == Smi::FromInt(0)) {
2768  cache->set(index2 + kStringOffset, string);
2769  cache->set(index2 + kPatternOffset, pattern);
2770  cache->set(index2 + kArrayOffset, array);
2771  } else {
2772  cache->set(index2 + kStringOffset, Smi::FromInt(0));
2773  cache->set(index2 + kPatternOffset, Smi::FromInt(0));
2774  cache->set(index2 + kArrayOffset, Smi::FromInt(0));
2775  cache->set(index + kStringOffset, string);
2776  cache->set(index + kPatternOffset, pattern);
2777  cache->set(index + kArrayOffset, array);
2778  }
2779  }
2780  if (array->length() < 100) { // Limit how many new symbols we want to make.
2781  for (int i = 0; i < array->length(); i++) {
2782  String* str = String::cast(array->get(i));
2783  Object* symbol;
2784  MaybeObject* maybe_symbol = heap->LookupSymbol(str);
2785  if (maybe_symbol->ToObject(&symbol)) {
2786  array->set(i, symbol);
2787  }
2788  }
2789  }
2790  array->set_map_no_write_barrier(heap->fixed_cow_array_map());
2791 }
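  // The string split cache is effectively a small two-way set-associative
  // cache keyed by the subject string's hash: each entry occupies
  // kArrayEntriesPerCacheEntry consecutive slots (string, pattern, result
  // array). Lookup probes the primary bucket and then the one immediately
  // after it; Enter fills whichever of the two is empty and otherwise
  // clears the secondary bucket and overwrites the primary. Result elements
  // are symbolized (for arrays shorter than 100 elements) and the array is
  // marked copy-on-write before being shared through the cache.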
2792 
2793 
2794 void StringSplitCache::Clear(FixedArray* cache) {
2795  for (int i = 0; i < kStringSplitCacheSize; i++) {
2796  cache->set(i, Smi::FromInt(0));
2797  }
2798 }
2799 
2800 
2801 MaybeObject* Heap::AllocateInitialNumberStringCache() {
2802  MaybeObject* maybe_obj =
2803  AllocateFixedArray(kInitialNumberStringCacheSize * 2, TENURED);
2804  return maybe_obj;
2805 }
2806 
2807 
2808 int Heap::FullSizeNumberStringCacheLength() {
2809  // Compute the size of the number string cache based on the max newspace size.
2810  // The number string cache has a minimum size based on twice the initial cache
2811  // size to ensure that it is bigger after being made 'full size'.
2812  int number_string_cache_size = max_semispace_size_ / 512;
2813  number_string_cache_size = Max(kInitialNumberStringCacheSize * 2,
2814  Min(0x4000, number_string_cache_size));
2815  // There is a string and a number per entry so the length is twice the number
2816  // of entries.
2817  return number_string_cache_size * 2;
2818 }
2819 
2820 
2821 void Heap::AllocateFullSizeNumberStringCache() {
2822  // The idea is to have a small number string cache in the snapshot to keep
2823  // boot-time memory usage down. If we already have to expand the number
2824  // string cache while creating the snapshot, that plan did not work out.
2826  MaybeObject* maybe_obj =
2827  AllocateFixedArray(FullSizeNumberStringCacheLength(), TENURED);
2828  Object* new_cache;
2829  if (maybe_obj->ToObject(&new_cache)) {
2830  // We don't bother to repopulate the cache with entries from the old cache.
2831  // It will be repopulated soon enough with new strings.
2832  set_number_string_cache(FixedArray::cast(new_cache));
2833  }
2834  // If allocation fails then we just return without doing anything. It is only
2835  // a cache, so best effort is OK here.
2836 }
2837 
2838 
2839 void Heap::FlushNumberStringCache() {
2840  // Flush the number to string cache.
2841  int len = number_string_cache()->length();
2842  for (int i = 0; i < len; i++) {
2843  number_string_cache()->set_undefined(this, i);
2844  }
2845 }
2846 
2847 
2848 static inline int double_get_hash(double d) {
2849  DoubleRepresentation rep(d);
2850  return static_cast<int>(rep.bits) ^ static_cast<int>(rep.bits >> 32);
2851 }
2852 
2853 
2854 static inline int smi_get_hash(Smi* smi) {
2855  return smi->value();
2856 }
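// These two helpers feed the number-to-string cache below: Smis hash to
// their integer value, while doubles hash by XOR-ing the upper and lower
// 32 bits of their IEEE-754 bit pattern. The result is masked to the
// current cache size, which the mask arithmetic assumes is a power of two.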
2857 
2858 
2859 Object* Heap::GetNumberStringCache(Object* number) {
2860  int hash;
2861  int mask = (number_string_cache()->length() >> 1) - 1;
2862  if (number->IsSmi()) {
2863  hash = smi_get_hash(Smi::cast(number)) & mask;
2864  } else {
2865  hash = double_get_hash(number->Number()) & mask;
2866  }
2867  Object* key = number_string_cache()->get(hash * 2);
2868  if (key == number) {
2869  return String::cast(number_string_cache()->get(hash * 2 + 1));
2870  } else if (key->IsHeapNumber() &&
2871  number->IsHeapNumber() &&
2872  key->Number() == number->Number()) {
2873  return String::cast(number_string_cache()->get(hash * 2 + 1));
2874  }
2875  return undefined_value();
2876 }
2877 
2878 
2879 void Heap::SetNumberStringCache(Object* number, String* string) {
2880  int hash;
2881  int mask = (number_string_cache()->length() >> 1) - 1;
2882  if (number->IsSmi()) {
2883  hash = smi_get_hash(Smi::cast(number)) & mask;
2884  } else {
2885  hash = double_get_hash(number->Number()) & mask;
2886  }
2887  if (number_string_cache()->get(hash * 2) != undefined_value() &&
2888  number_string_cache()->length() != FullSizeNumberStringCacheLength()) {
2889  // The first time we have a hash collision, we move to the full sized
2890  // number string cache.
2891  AllocateFullSizeNumberStringCache();
2892  return;
2893  }
2894  number_string_cache()->set(hash * 2, number);
2895  number_string_cache()->set(hash * 2 + 1, string);
2896 }
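// Cache layout: entry i stores the number key at index 2*i and the cached
// string at index 2*i + 1. The first collision observed while the cache
// still has its small snapshot size triggers AllocateFullSizeNumberStringCache;
// after that, colliding entries simply overwrite each other.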
2897 
2898 
2899 MaybeObject* Heap::NumberToString(Object* number,
2900  bool check_number_string_cache) {
2901  isolate_->counters()->number_to_string_runtime()->Increment();
2902  if (check_number_string_cache) {
2903  Object* cached = GetNumberStringCache(number);
2904  if (cached != undefined_value()) {
2905  return cached;
2906  }
2907  }
2908 
2909  char arr[100];
2910  Vector<char> buffer(arr, ARRAY_SIZE(arr));
2911  const char* str;
2912  if (number->IsSmi()) {
2913  int num = Smi::cast(number)->value();
2914  str = IntToCString(num, buffer);
2915  } else {
2916  double num = HeapNumber::cast(number)->value();
2917  str = DoubleToCString(num, buffer);
2918  }
2919 
2920  Object* js_string;
2921  MaybeObject* maybe_js_string = AllocateStringFromAscii(CStrVector(str));
2922  if (maybe_js_string->ToObject(&js_string)) {
2923  SetNumberStringCache(number, String::cast(js_string));
2924  }
2925  return maybe_js_string;
2926 }
2927 
2928 
2929 MaybeObject* Heap::Uint32ToString(uint32_t value,
2930  bool check_number_string_cache) {
2931  Object* number;
2932  MaybeObject* maybe = NumberFromUint32(value);
2933  if (!maybe->To<Object>(&number)) return maybe;
2934  return NumberToString(number, check_number_string_cache);
2935 }
2936 
2937 
2938 Map* Heap::MapForExternalArrayType(ExternalArrayType array_type) {
2939  return Map::cast(roots_[RootIndexForExternalArrayType(array_type)]);
2940 }
2941 
2942 
2943 Heap::RootListIndex Heap::RootIndexForExternalArrayType(
2944  ExternalArrayType array_type) {
2945  switch (array_type) {
2946  case kExternalByteArray:
2947  return kExternalByteArrayMapRootIndex;
2948  case kExternalUnsignedByteArray:
2949  return kExternalUnsignedByteArrayMapRootIndex;
2950  case kExternalShortArray:
2951  return kExternalShortArrayMapRootIndex;
2952  case kExternalUnsignedShortArray:
2953  return kExternalUnsignedShortArrayMapRootIndex;
2954  case kExternalIntArray:
2955  return kExternalIntArrayMapRootIndex;
2956  case kExternalUnsignedIntArray:
2957  return kExternalUnsignedIntArrayMapRootIndex;
2958  case kExternalFloatArray:
2959  return kExternalFloatArrayMapRootIndex;
2960  case kExternalDoubleArray:
2961  return kExternalDoubleArrayMapRootIndex;
2962  case kExternalPixelArray:
2963  return kExternalPixelArrayMapRootIndex;
2964  default:
2965  UNREACHABLE();
2966  return kUndefinedValueRootIndex;
2967  }
2968 }
2969 
2970 
2971 MaybeObject* Heap::NumberFromDouble(double value, PretenureFlag pretenure) {
2972  // We need to distinguish the minus zero value and this cannot be
2973  // done after conversion to int. Doing this by comparing bit
2974  // patterns is faster than using fpclassify() et al.
2975  static const DoubleRepresentation minus_zero(-0.0);
2976 
2977  DoubleRepresentation rep(value);
2978  if (rep.bits == minus_zero.bits) {
2979  return AllocateHeapNumber(-0.0, pretenure);
2980  }
2981 
2982  int int_value = FastD2I(value);
2983  if (value == int_value && Smi::IsValid(int_value)) {
2984  return Smi::FromInt(int_value);
2985  }
2986 
2987  // Materialize the value in the heap.
2988  return AllocateHeapNumber(value, pretenure);
2989 }
2990 
2991 
2992 MaybeObject* Heap::AllocateForeign(Address address, PretenureFlag pretenure) {
2993  // Statically ensure that it is safe to allocate foreigns in paged spaces.
2995  AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
2996  Foreign* result;
2997  MaybeObject* maybe_result = Allocate(foreign_map(), space);
2998  if (!maybe_result->To(&result)) return maybe_result;
2999  result->set_foreign_address(address);
3000  return result;
3001 }
3002 
3003 
3004 MaybeObject* Heap::AllocateSharedFunctionInfo(Object* name) {
3005  SharedFunctionInfo* share;
3006  MaybeObject* maybe = Allocate(shared_function_info_map(), OLD_POINTER_SPACE);
3007  if (!maybe->To<SharedFunctionInfo>(&share)) return maybe;
3008 
3009  // Set pointer fields.
3010  share->set_name(name);
3011  Code* illegal = isolate_->builtins()->builtin(Builtins::kIllegal);
3012  share->set_code(illegal);
3013  share->set_scope_info(ScopeInfo::Empty());
3014  Code* construct_stub =
3015  isolate_->builtins()->builtin(Builtins::kJSConstructStubGeneric);
3016  share->set_construct_stub(construct_stub);
3017  share->set_instance_class_name(Object_symbol());
3018  share->set_function_data(undefined_value(), SKIP_WRITE_BARRIER);
3019  share->set_script(undefined_value(), SKIP_WRITE_BARRIER);
3020  share->set_debug_info(undefined_value(), SKIP_WRITE_BARRIER);
3021  share->set_inferred_name(empty_string(), SKIP_WRITE_BARRIER);
3022  share->set_initial_map(undefined_value(), SKIP_WRITE_BARRIER);
3023  share->set_this_property_assignments(undefined_value(), SKIP_WRITE_BARRIER);
3024  share->set_ast_node_count(0);
3025  share->set_stress_deopt_counter(FLAG_deopt_every_n_times);
3026  share->set_counters(0);
3027 
3028  // Set integer fields (smi or int, depending on the architecture).
3029  share->set_length(0);
3030  share->set_formal_parameter_count(0);
3031  share->set_expected_nof_properties(0);
3032  share->set_num_literals(0);
3033  share->set_start_position_and_type(0);
3034  share->set_end_position(0);
3035  share->set_function_token_position(0);
3036  // All compiler hints default to false or 0.
3037  share->set_compiler_hints(0);
3039  share->set_opt_count(0);
3040 
3041  return share;
3042 }
3043 
3044 
3045 MaybeObject* Heap::AllocateJSMessageObject(String* type,
3046  JSArray* arguments,
3047  int start_position,
3048  int end_position,
3049  Object* script,
3050  Object* stack_trace,
3051  Object* stack_frames) {
3052  Object* result;
3053  { MaybeObject* maybe_result = Allocate(message_object_map(), NEW_SPACE);
3054  if (!maybe_result->ToObject(&result)) return maybe_result;
3055  }
3056  JSMessageObject* message = JSMessageObject::cast(result);
3057  message->set_properties(Heap::empty_fixed_array(), SKIP_WRITE_BARRIER);
3058  message->initialize_elements();
3059  message->set_elements(Heap::empty_fixed_array(), SKIP_WRITE_BARRIER);
3060  message->set_type(type);
3061  message->set_arguments(arguments);
3062  message->set_start_position(start_position);
3063  message->set_end_position(end_position);
3064  message->set_script(script);
3065  message->set_stack_trace(stack_trace);
3066  message->set_stack_frames(stack_frames);
3067  return result;
3068 }
3069 
3070 
3071 
3072 // Returns true for a character in a range. Both limits are inclusive.
3073 static inline bool Between(uint32_t character, uint32_t from, uint32_t to) {
3074  // This makes use of the unsigned wraparound.
3075  return character - from <= to - from;
3076 }
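  // Worked example of the wraparound trick: for Between(c, '0', '9') the
  // subtraction c - '0' is unsigned, so any c below '0' wraps around to a
  // value far larger than '9' - '0', and the single comparison rejects
  // both "too small" and "too large" inputs.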
3077 
3078 
3079 MUST_USE_RESULT static inline MaybeObject* MakeOrFindTwoCharacterString(
3080  Heap* heap,
3081  uint32_t c1,
3082  uint32_t c2) {
3083  String* symbol;
3084  // Numeric strings have a different hash algorithm not known by
3085  // LookupTwoCharsSymbolIfExists, so we skip this step for such strings.
3086  if ((!Between(c1, '0', '9') || !Between(c2, '0', '9')) &&
3087  heap->symbol_table()->LookupTwoCharsSymbolIfExists(c1, c2, &symbol)) {
3088  return symbol;
3089  // Now we know the length is 2, we might as well make use of that fact
3090  // when building the new string.
3091  } else if ((c1 | c2) <= String::kMaxAsciiCharCodeU) { // We can do this
3092  ASSERT(IsPowerOf2(String::kMaxAsciiCharCodeU + 1)); // because of this.
3093  Object* result;
3094  { MaybeObject* maybe_result = heap->AllocateRawAsciiString(2);
3095  if (!maybe_result->ToObject(&result)) return maybe_result;
3096  }
3097  char* dest = SeqAsciiString::cast(result)->GetChars();
3098  dest[0] = c1;
3099  dest[1] = c2;
3100  return result;
3101  } else {
3102  Object* result;
3103  { MaybeObject* maybe_result = heap->AllocateRawTwoByteString(2);
3104  if (!maybe_result->ToObject(&result)) return maybe_result;
3105  }
3106  uc16* dest = SeqTwoByteString::cast(result)->GetChars();
3107  dest[0] = c1;
3108  dest[1] = c2;
3109  return result;
3110  }
3111 }
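  // Summary: two-character strings are first looked up in the symbol table
  // (except for digit pairs, whose numeric hash is not known to
  // LookupTwoCharsSymbolIfExists), and only if that fails is a fresh
  // sequential string allocated -- ASCII if both code units fit in the
  // ASCII range, two-byte otherwise.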
3112 
3113 
3114 MaybeObject* Heap::AllocateConsString(String* first, String* second) {
3115  int first_length = first->length();
3116  if (first_length == 0) {
3117  return second;
3118  }
3119 
3120  int second_length = second->length();
3121  if (second_length == 0) {
3122  return first;
3123  }
3124 
3125  int length = first_length + second_length;
3126 
3127  // Optimization for 2-byte strings often used as keys in a decompression
3128  // dictionary. Check whether we already have the string in the symbol
3129  // table to prevent creation of many unnecessary strings.
3130  if (length == 2) {
3131  unsigned c1 = first->Get(0);
3132  unsigned c2 = second->Get(0);
3133  return MakeOrFindTwoCharacterString(this, c1, c2);
3134  }
3135 
3136  bool first_is_ascii = first->IsAsciiRepresentation();
3137  bool second_is_ascii = second->IsAsciiRepresentation();
3138  bool is_ascii = first_is_ascii && second_is_ascii;
3139 
3140  // Make sure that an out of memory exception is thrown if the length
3141  // of the new cons string is too large.
3142  if (length > String::kMaxLength || length < 0) {
3143  isolate()->context()->mark_out_of_memory();
3144  return Failure::OutOfMemoryException();
3145  }
3146 
3147  bool is_ascii_data_in_two_byte_string = false;
3148  if (!is_ascii) {
3149  // At least one of the strings uses two-byte representation so we
3150  // can't use the fast case code for short ASCII strings below, but
3151  // we can try to save memory if all chars actually fit in ASCII.
3152  is_ascii_data_in_two_byte_string =
3153  first->HasOnlyAsciiChars() && second->HasOnlyAsciiChars();
3154  if (is_ascii_data_in_two_byte_string) {
3155  isolate_->counters()->string_add_runtime_ext_to_ascii()->Increment();
3156  }
3157  }
3158 
3159  // If the resulting string is small make a flat string.
3160  if (length < ConsString::kMinLength) {
3161  // Note that neither of the two inputs can be a slice because:
3162  STATIC_ASSERT(ConsString::kMinLength <= SlicedString::kMinLength);
3163  ASSERT(first->IsFlat());
3164  ASSERT(second->IsFlat());
3165  if (is_ascii) {
3166  Object* result;
3167  { MaybeObject* maybe_result = AllocateRawAsciiString(length);
3168  if (!maybe_result->ToObject(&result)) return maybe_result;
3169  }
3170  // Copy the characters into the new object.
3171  char* dest = SeqAsciiString::cast(result)->GetChars();
3172  // Copy first part.
3173  const char* src;
3174  if (first->IsExternalString()) {
3175  src = ExternalAsciiString::cast(first)->GetChars();
3176  } else {
3177  src = SeqAsciiString::cast(first)->GetChars();
3178  }
3179  for (int i = 0; i < first_length; i++) *dest++ = src[i];
3180  // Copy second part.
3181  if (second->IsExternalString()) {
3182  src = ExternalAsciiString::cast(second)->GetChars();
3183  } else {
3184  src = SeqAsciiString::cast(second)->GetChars();
3185  }
3186  for (int i = 0; i < second_length; i++) *dest++ = src[i];
3187  return result;
3188  } else {
3189  if (is_ascii_data_in_two_byte_string) {
3190  Object* result;
3191  { MaybeObject* maybe_result = AllocateRawAsciiString(length);
3192  if (!maybe_result->ToObject(&result)) return maybe_result;
3193  }
3194  // Copy the characters into the new object.
3195  char* dest = SeqAsciiString::cast(result)->GetChars();
3196  String::WriteToFlat(first, dest, 0, first_length);
3197  String::WriteToFlat(second, dest + first_length, 0, second_length);
3198  isolate_->counters()->string_add_runtime_ext_to_ascii()->Increment();
3199  return result;
3200  }
3201 
3202  Object* result;
3203  { MaybeObject* maybe_result = AllocateRawTwoByteString(length);
3204  if (!maybe_result->ToObject(&result)) return maybe_result;
3205  }
3206  // Copy the characters into the new object.
3207  uc16* dest = SeqTwoByteString::cast(result)->GetChars();
3208  String::WriteToFlat(first, dest, 0, first_length);
3209  String::WriteToFlat(second, dest + first_length, 0, second_length);
3210  return result;
3211  }
3212  }
3213 
3214  Map* map = (is_ascii || is_ascii_data_in_two_byte_string) ?
3215  cons_ascii_string_map() : cons_string_map();
3216 
3217  Object* result;
3218  { MaybeObject* maybe_result = Allocate(map, NEW_SPACE);
3219  if (!maybe_result->ToObject(&result)) return maybe_result;
3220  }
3221 
3222  AssertNoAllocation no_gc;
3223  ConsString* cons_string = ConsString::cast(result);
3224  WriteBarrierMode mode = cons_string->GetWriteBarrierMode(no_gc);
3225  cons_string->set_length(length);
3226  cons_string->set_hash_field(String::kEmptyHashField);
3227  cons_string->set_first(first, mode);
3228  cons_string->set_second(second, mode);
3229  return result;
3230 }
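  // Concatenation strategy: results shorter than ConsString::kMinLength are
  // flattened eagerly into a sequential string (ASCII when possible, even
  // for two-byte inputs whose characters all fit in ASCII); longer results
  // get a ConsString that merely points at both halves and is flattened
  // lazily later.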
3231 
3232 
3233 MaybeObject* Heap::AllocateSubString(String* buffer,
3234  int start,
3235  int end,
3236  PretenureFlag pretenure) {
3237  int length = end - start;
3238  if (length <= 0) {
3239  return empty_string();
3240  } else if (length == 1) {
3241  return LookupSingleCharacterStringFromCode(buffer->Get(start));
3242  } else if (length == 2) {
3243  // Optimization for 2-byte strings often used as keys in a decompression
3244  // dictionary. Check whether we already have the string in the symbol
3245  // table to prevent creation of many unnecessary strings.
3246  unsigned c1 = buffer->Get(start);
3247  unsigned c2 = buffer->Get(start + 1);
3248  return MakeOrFindTwoCharacterString(this, c1, c2);
3249  }
3250 
3251  // Make an attempt to flatten the buffer to reduce access time.
3252  buffer = buffer->TryFlattenGetString();
3253 
3254  if (!FLAG_string_slices ||
3255  !buffer->IsFlat() ||
3256  length < SlicedString::kMinLength ||
3257  pretenure == TENURED) {
3258  Object* result;
3259  // WriteToFlat takes care of the case when an indirect string has a
3260  // different encoding from its underlying string. These encodings may
3261  // differ because of externalization.
3262  bool is_ascii = buffer->IsAsciiRepresentation();
3263  { MaybeObject* maybe_result = is_ascii
3264  ? AllocateRawAsciiString(length, pretenure)
3265  : AllocateRawTwoByteString(length, pretenure);
3266  if (!maybe_result->ToObject(&result)) return maybe_result;
3267  }
3268  String* string_result = String::cast(result);
3269  // Copy the characters into the new object.
3270  if (is_ascii) {
3271  ASSERT(string_result->IsAsciiRepresentation());
3272  char* dest = SeqAsciiString::cast(string_result)->GetChars();
3273  String::WriteToFlat(buffer, dest, start, end);
3274  } else {
3275  ASSERT(string_result->IsTwoByteRepresentation());
3276  uc16* dest = SeqTwoByteString::cast(string_result)->GetChars();
3277  String::WriteToFlat(buffer, dest, start, end);
3278  }
3279  return result;
3280  }
3281 
3282  ASSERT(buffer->IsFlat());
3283 #if DEBUG
3284  if (FLAG_verify_heap) {
3285  buffer->StringVerify();
3286  }
3287 #endif
3288 
3289  Object* result;
3290  // When slicing an indirect string we use its encoding for a newly created
3291  // slice and don't check the encoding of the underlying string. This is safe
3292  // even if the encodings are different because of externalization. If an
3293  // indirect ASCII string is pointing to a two-byte string, the two-byte char
3294  // codes of the underlying string must still fit into ASCII (because
3295  // externalization must not change char codes).
3296  { Map* map = buffer->IsAsciiRepresentation()
3297  ? sliced_ascii_string_map()
3298  : sliced_string_map();
3299  MaybeObject* maybe_result = Allocate(map, NEW_SPACE);
3300  if (!maybe_result->ToObject(&result)) return maybe_result;
3301  }
3302 
3303  AssertNoAllocation no_gc;
3304  SlicedString* sliced_string = SlicedString::cast(result);
3305  sliced_string->set_length(length);
3306  sliced_string->set_hash_field(String::kEmptyHashField);
3307  if (buffer->IsConsString()) {
3308  ConsString* cons = ConsString::cast(buffer);
3309  ASSERT(cons->second()->length() == 0);
3310  sliced_string->set_parent(cons->first());
3311  sliced_string->set_offset(start);
3312  } else if (buffer->IsSlicedString()) {
3313  // Prevent nesting sliced strings.
3314  SlicedString* parent_slice = SlicedString::cast(buffer);
3315  sliced_string->set_parent(parent_slice->parent());
3316  sliced_string->set_offset(start + parent_slice->offset());
3317  } else {
3318  sliced_string->set_parent(buffer);
3319  sliced_string->set_offset(start);
3320  }
3321  ASSERT(sliced_string->parent()->IsSeqString() ||
3322  sliced_string->parent()->IsExternalString());
3323  return result;
3324 }
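  // Substrings only become SlicedStrings when string slices are enabled,
  // the buffer is flat, the slice is at least SlicedString::kMinLength long
  // and the result is not tenured; otherwise the characters are copied into
  // a fresh sequential string. A slice always points at a sequential or
  // external parent, never at another slice or a cons string, so slice
  // chains stay one level deep.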
3325 
3326 
3327 MaybeObject* Heap::AllocateExternalStringFromAscii(
3328  const ExternalAsciiString::Resource* resource) {
3329  size_t length = resource->length();
3330  if (length > static_cast<size_t>(String::kMaxLength)) {
3331  isolate()->context()->mark_out_of_memory();
3332  return Failure::OutOfMemoryException();
3333  }
3334 
3335  ASSERT(String::IsAscii(resource->data(), static_cast<int>(length)));
3336 
3337  Map* map = external_ascii_string_map();
3338  Object* result;
3339  { MaybeObject* maybe_result = Allocate(map, NEW_SPACE);
3340  if (!maybe_result->ToObject(&result)) return maybe_result;
3341  }
3342 
3343  ExternalAsciiString* external_string = ExternalAsciiString::cast(result);
3344  external_string->set_length(static_cast<int>(length));
3345  external_string->set_hash_field(String::kEmptyHashField);
3346  external_string->set_resource(resource);
3347 
3348  return result;
3349 }
3350 
3351 
3352 MaybeObject* Heap::AllocateExternalStringFromTwoByte(
3353  const ExternalTwoByteString::Resource* resource) {
3354  size_t length = resource->length();
3355  if (length > static_cast<size_t>(String::kMaxLength)) {
3356  isolate()->context()->mark_out_of_memory();
3357  return Failure::OutOfMemoryException();
3358  }
3359 
3360  // For small strings we check whether the resource contains only
3361  // ASCII characters. If yes, we use a different string map.
3362  static const size_t kAsciiCheckLengthLimit = 32;
3363  bool is_ascii = length <= kAsciiCheckLengthLimit &&
3364  String::IsAscii(resource->data(), static_cast<int>(length));
3365  Map* map = is_ascii ?
3366  external_string_with_ascii_data_map() : external_string_map();
3367  Object* result;
3368  { MaybeObject* maybe_result = Allocate(map, NEW_SPACE);
3369  if (!maybe_result->ToObject(&result)) return maybe_result;
3370  }
3371 
3372  ExternalTwoByteString* external_string = ExternalTwoByteString::cast(result);
3373  external_string->set_length(static_cast<int>(length));
3374  external_string->set_hash_field(String::kEmptyHashField);
3375  external_string->set_resource(resource);
3376 
3377  return result;
3378 }
3379 
3380 
3381 MaybeObject* Heap::LookupSingleCharacterStringFromCode(uint16_t code) {
3382  if (code <= String::kMaxAsciiCharCode) {
3383  Object* value = single_character_string_cache()->get(code);
3384  if (value != undefined_value()) return value;
3385 
3386  char buffer[1];
3387  buffer[0] = static_cast<char>(code);
3388  Object* result;
3389  MaybeObject* maybe_result = LookupSymbol(Vector<const char>(buffer, 1));
3390 
3391  if (!maybe_result->ToObject(&result)) return maybe_result;
3392  single_character_string_cache()->set(code, result);
3393  return result;
3394  }
3395 
3396  Object* result;
3397  { MaybeObject* maybe_result = AllocateRawTwoByteString(1);
3398  if (!maybe_result->ToObject(&result)) return maybe_result;
3399  }
3400  String* answer = String::cast(result);
3401  answer->Set(0, code);
3402  return answer;
3403 }
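A small sketch of the single-character cache idea used above (illustrative only; CharCache and Lookup are hypothetical names, not V8 API). Codes in the ASCII range get a cached one-character string, everything else is built fresh.

// Illustrative sketch, not V8 code.
#include <string>

class CharCache {
 public:
  // Returns a cached one-character string for ASCII codes, creating it on
  // first use; larger code units are built fresh each time.
  std::u16string Lookup(char16_t code) {
    if (code <= 0x7F) {
      std::u16string& slot = cache_[code];
      if (slot.empty()) slot = std::u16string(1, code);
      return slot;
    }
    return std::u16string(1, code);
  }

 private:
  std::u16string cache_[128];
};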
3404 
3405 
3406 MaybeObject* Heap::AllocateByteArray(int length, PretenureFlag pretenure) {
3407  if (length < 0 || length > ByteArray::kMaxLength) {
3408  return Failure::OutOfMemoryException();
3409  }
3410  if (pretenure == NOT_TENURED) {
3411  return AllocateByteArray(length);
3412  }
3413  int size = ByteArray::SizeFor(length);
3414  Object* result;
3415  { MaybeObject* maybe_result = (size <= Page::kMaxNonCodeHeapObjectSize)
3416  ? old_data_space_->AllocateRaw(size)
3417  : lo_space_->AllocateRaw(size, NOT_EXECUTABLE);
3418  if (!maybe_result->ToObject(&result)) return maybe_result;
3419  }
3420 
3421  reinterpret_cast<ByteArray*>(result)->set_map_no_write_barrier(
3422  byte_array_map());
3423  reinterpret_cast<ByteArray*>(result)->set_length(length);
3424  return result;
3425 }
3426 
3427 
3428 MaybeObject* Heap::AllocateByteArray(int length) {
3429  if (length < 0 || length > ByteArray::kMaxLength) {
3430  return Failure::OutOfMemoryException();
3431  }
3432  int size = ByteArray::SizeFor(length);
3433  AllocationSpace space =
3434  (size > Page::kMaxNonCodeHeapObjectSize) ? LO_SPACE : NEW_SPACE;
3435  Object* result;
3436  { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE);
3437  if (!maybe_result->ToObject(&result)) return maybe_result;
3438  }
3439 
3440  reinterpret_cast<ByteArray*>(result)->set_map_no_write_barrier(
3441  byte_array_map());
3442  reinterpret_cast<ByteArray*>(result)->set_length(length);
3443  return result;
3444 }
3445 
3446 
3447 void Heap::CreateFillerObjectAt(Address addr, int size) {
3448  if (size == 0) return;
3449  HeapObject* filler = HeapObject::FromAddress(addr);
3450  if (size == kPointerSize) {
3451  filler->set_map_no_write_barrier(one_pointer_filler_map());
3452  } else if (size == 2 * kPointerSize) {
3453  filler->set_map_no_write_barrier(two_pointer_filler_map());
3454  } else {
3455  filler->set_map_no_write_barrier(free_space_map());
3456  FreeSpace::cast(filler)->set_size(size);
3457  }
3458 }
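A sketch of the filler-selection rule in CreateFillerObjectAt (illustrative only; the FillerKind enum and choose_filler helper are assumed names). Gaps of exactly one or two words can use fixed-size filler maps; anything larger needs a free-space object that records its own size.

// Illustrative sketch, not V8 code.
#include <cstddef>

enum class FillerKind { kOnePointer, kTwoPointer, kFreeSpaceWithSize };

FillerKind choose_filler(size_t size_in_bytes, size_t pointer_size) {
  if (size_in_bytes == pointer_size) return FillerKind::kOnePointer;
  if (size_in_bytes == 2 * pointer_size) return FillerKind::kTwoPointer;
  return FillerKind::kFreeSpaceWithSize;  // stores its size explicitly
}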
3459 
3460 
3461 MaybeObject* Heap::AllocateExternalArray(int length,
3462  ExternalArrayType array_type,
3463  void* external_pointer,
3464  PretenureFlag pretenure) {
3465  AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
3466  Object* result;
3467  { MaybeObject* maybe_result = AllocateRaw(ExternalArray::kAlignedSize,
3468  space,
3469  OLD_DATA_SPACE);
3470  if (!maybe_result->ToObject(&result)) return maybe_result;
3471  }
3472 
3473  reinterpret_cast<ExternalArray*>(result)->set_map_no_write_barrier(
3474  MapForExternalArrayType(array_type));
3475  reinterpret_cast<ExternalArray*>(result)->set_length(length);
3476  reinterpret_cast<ExternalArray*>(result)->set_external_pointer(
3477  external_pointer);
3478 
3479  return result;
3480 }
3481 
3482 
3483 MaybeObject* Heap::CreateCode(const CodeDesc& desc,
3484  Code::Flags flags,
3485  Handle<Object> self_reference,
3486  bool immovable) {
3487  // Allocate ByteArray before the Code object, so that we do not risk
3488  // leaving an uninitialized Code object (and breaking the heap).
3489  ByteArray* reloc_info;
3490  MaybeObject* maybe_reloc_info = AllocateByteArray(desc.reloc_size, TENURED);
3491  if (!maybe_reloc_info->To(&reloc_info)) return maybe_reloc_info;
3492 
3493  // Compute size.
3494  int body_size = RoundUp(desc.instr_size, kObjectAlignment);
3495  int obj_size = Code::SizeFor(body_size);
3496  ASSERT(IsAligned(static_cast<intptr_t>(obj_size), kCodeAlignment));
3497  MaybeObject* maybe_result;
3498  // Large code objects and code objects which should stay at a fixed address
3499  // are allocated in large object space.
3500  if (obj_size > code_space()->AreaSize() || immovable) {
3501  maybe_result = lo_space_->AllocateRaw(obj_size, EXECUTABLE);
3502  } else {
3503  maybe_result = code_space_->AllocateRaw(obj_size);
3504  }
3505 
3506  Object* result;
3507  if (!maybe_result->ToObject(&result)) return maybe_result;
3508 
3509  // Initialize the object
3510  HeapObject::cast(result)->set_map_no_write_barrier(code_map());
3511  Code* code = Code::cast(result);
3512  ASSERT(!isolate_->code_range()->exists() ||
3513  isolate_->code_range()->contains(code->address()));
3514  code->set_instruction_size(desc.instr_size);
3515  code->set_relocation_info(reloc_info);
3516  code->set_flags(flags);
3517  if (code->is_call_stub() || code->is_keyed_call_stub()) {
3518  code->set_check_type(RECEIVER_MAP_CHECK);
3519  }
3520  code->set_deoptimization_data(empty_fixed_array(), SKIP_WRITE_BARRIER);
3521  code->set_type_feedback_info(undefined_value(), SKIP_WRITE_BARRIER);
3522  code->set_handler_table(empty_fixed_array(), SKIP_WRITE_BARRIER);
3523  code->set_gc_metadata(Smi::FromInt(0));
3524  code->set_ic_age(global_ic_age_);
3525  // Allow self references to created code object by patching the handle to
3526  // point to the newly allocated Code object.
3527  if (!self_reference.is_null()) {
3528  *(self_reference.location()) = code;
3529  }
3530  // Migrate generated code.
3531  // The generated code can contain Object** values (typically from handles)
3532  // that are dereferenced during the copy to point directly to the actual heap
3533  // objects. These pointers can include references to the code object itself,
3534  // through the self_reference parameter.
3535  code->CopyFrom(desc);
3536 
3537 #ifdef DEBUG
3538  if (FLAG_verify_heap) {
3539  code->Verify();
3540  }
3541 #endif
3542  return code;
3543 }
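A standalone sketch of the sizing and placement decision in CreateCode (illustrative only; kObjectAlignmentSketch, kHeaderSizeSketch and needs_large_space are assumed stand-ins, not V8 constants). The instruction body is rounded up to object alignment, and the object goes to large object space when it exceeds the regular code page area or must never move.

// Illustrative sketch, not V8 code.
#include <cassert>

constexpr int kObjectAlignmentSketch = 8;   // assumption for the sketch
constexpr int kHeaderSizeSketch = 64;       // stand-in for the Code header size

int RoundUpTo(int value, int alignment) {
  return (value + alignment - 1) & ~(alignment - 1);  // alignment is a power of two
}

bool needs_large_space(int instr_size, int code_area_size, bool immovable) {
  int body_size = RoundUpTo(instr_size, kObjectAlignmentSketch);
  int obj_size = kHeaderSizeSketch + body_size;
  assert(obj_size % kObjectAlignmentSketch == 0);
  return obj_size > code_area_size || immovable;
}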
3544 
3545 
3546 MaybeObject* Heap::CopyCode(Code* code) {
3547  // Allocate an object the same size as the code object.
3548  int obj_size = code->Size();
3549  MaybeObject* maybe_result;
3550  if (obj_size > code_space()->AreaSize()) {
3551  maybe_result = lo_space_->AllocateRaw(obj_size, EXECUTABLE);
3552  } else {
3553  maybe_result = code_space_->AllocateRaw(obj_size);
3554  }
3555 
3556  Object* result;
3557  if (!maybe_result->ToObject(&result)) return maybe_result;
3558 
3559  // Copy code object.
3560  Address old_addr = code->address();
3561  Address new_addr = reinterpret_cast<HeapObject*>(result)->address();
3562  CopyBlock(new_addr, old_addr, obj_size);
3563  // Relocate the copy.
3564  Code* new_code = Code::cast(result);
3565  ASSERT(!isolate_->code_range()->exists() ||
3566  isolate_->code_range()->contains(code->address()));
3567  new_code->Relocate(new_addr - old_addr);
3568  return new_code;
3569 }
3570 
3571 
3572 MaybeObject* Heap::CopyCode(Code* code, Vector<byte> reloc_info) {
3573  // Allocate ByteArray before the Code object, so that we do not risk
3574  // leaving an uninitialized Code object (and breaking the heap).
3575  Object* reloc_info_array;
3576  { MaybeObject* maybe_reloc_info_array =
3577  AllocateByteArray(reloc_info.length(), TENURED);
3578  if (!maybe_reloc_info_array->ToObject(&reloc_info_array)) {
3579  return maybe_reloc_info_array;
3580  }
3581  }
3582 
3583  int new_body_size = RoundUp(code->instruction_size(), kObjectAlignment);
3584 
3585  int new_obj_size = Code::SizeFor(new_body_size);
3586 
3587  Address old_addr = code->address();
3588 
3589  size_t relocation_offset =
3590  static_cast<size_t>(code->instruction_end() - old_addr);
3591 
3592  MaybeObject* maybe_result;
3593  if (new_obj_size > code_space()->AreaSize()) {
3594  maybe_result = lo_space_->AllocateRaw(new_obj_size, EXECUTABLE);
3595  } else {
3596  maybe_result = code_space_->AllocateRaw(new_obj_size);
3597  }
3598 
3599  Object* result;
3600  if (!maybe_result->ToObject(&result)) return maybe_result;
3601 
3602  // Copy code object.
3603  Address new_addr = reinterpret_cast<HeapObject*>(result)->address();
3604 
3605  // Copy header and instructions.
3606  memcpy(new_addr, old_addr, relocation_offset);
3607 
3608  Code* new_code = Code::cast(result);
3609  new_code->set_relocation_info(ByteArray::cast(reloc_info_array));
3610 
3611  // Copy patched rinfo.
3612  memcpy(new_code->relocation_start(), reloc_info.start(), reloc_info.length());
3613 
3614  // Relocate the copy.
3615  ASSERT(!isolate_->code_range()->exists() ||
3616  isolate_->code_range()->contains(code->address()));
3617  new_code->Relocate(new_addr - old_addr);
3618 
3619 #ifdef DEBUG
3620  if (FLAG_verify_heap) {
3621  code->Verify();
3622  }
3623 #endif
3624  return new_code;
3625 }
3626 
3627 
3628 MaybeObject* Heap::Allocate(Map* map, AllocationSpace space) {
3629  ASSERT(gc_state_ == NOT_IN_GC);
3630  ASSERT(map->instance_type() != MAP_TYPE);
3631  // If allocation failures are disallowed, we may allocate in a different
3632  // space when new space is full and the object is not a large object.
3633  AllocationSpace retry_space =
3634  (space != NEW_SPACE) ? space : TargetSpaceId(map->instance_type());
3635  Object* result;
3636  { MaybeObject* maybe_result =
3637  AllocateRaw(map->instance_size(), space, retry_space);
3638  if (!maybe_result->ToObject(&result)) return maybe_result;
3639  }
3640  // No need for write barrier since object is white and map is in old space.
3641  HeapObject::cast(result)->set_map_no_write_barrier(map);
3642  return result;
3643 }
3644 
3645 
3646 void Heap::InitializeFunction(JSFunction* function,
3647  SharedFunctionInfo* shared,
3648  Object* prototype) {
3649  ASSERT(!prototype->IsMap());
3650  function->initialize_properties();
3651  function->initialize_elements();
3652  function->set_shared(shared);
3653  function->set_code(shared->code());
3654  function->set_prototype_or_initial_map(prototype);
3655  function->set_context(undefined_value());
3656  function->set_literals_or_bindings(empty_fixed_array());
3657  function->set_next_function_link(undefined_value());
3658 }
3659 
3660 
3661 MaybeObject* Heap::AllocateFunctionPrototype(JSFunction* function) {
3662  // Allocate the prototype. Make sure to use the object function
3663  // from the function's context, since the function can be from a
3664  // different context.
3665  JSFunction* object_function =
3666  function->context()->global_context()->object_function();
3667 
3668  // Each function prototype gets a copy of the object function map.
3669  // This avoids unwanted sharing of maps between prototypes of different
3670  // constructors.
3671  Map* new_map;
3672  ASSERT(object_function->has_initial_map());
3673  { MaybeObject* maybe_map =
3674  object_function->initial_map()->CopyDropTransitions(
3675  DescriptorArray::MAY_BE_SHARED);
3676  if (!maybe_map->To<Map>(&new_map)) return maybe_map;
3677  }
3678  Object* prototype;
3679  { MaybeObject* maybe_prototype = AllocateJSObjectFromMap(new_map);
3680  if (!maybe_prototype->ToObject(&prototype)) return maybe_prototype;
3681  }
3682  // When creating the prototype for the function we must set its
3683  // constructor to the function.
3684  Object* result;
3685  { MaybeObject* maybe_result =
3686  JSObject::cast(prototype)->SetLocalPropertyIgnoreAttributes(
3687  constructor_symbol(), function, DONT_ENUM);
3688  if (!maybe_result->ToObject(&result)) return maybe_result;
3689  }
3690  return prototype;
3691 }
3692 
3693 
3694 MaybeObject* Heap::AllocateFunction(Map* function_map,
3695  SharedFunctionInfo* shared,
3696  Object* prototype,
3697  PretenureFlag pretenure) {
3698  AllocationSpace space =
3699  (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE;
3700  Object* result;
3701  { MaybeObject* maybe_result = Allocate(function_map, space);
3702  if (!maybe_result->ToObject(&result)) return maybe_result;
3703  }
3704  InitializeFunction(JSFunction::cast(result), shared, prototype);
3705  return result;
3706 }
3707 
3708 
3709 MaybeObject* Heap::AllocateArgumentsObject(Object* callee, int length) {
3710  // To get fast allocation and map sharing for arguments objects we
3711  // allocate them based on an arguments boilerplate.
3712 
3713  JSObject* boilerplate;
3714  int arguments_object_size;
3715  bool strict_mode_callee = callee->IsJSFunction() &&
3716  !JSFunction::cast(callee)->shared()->is_classic_mode();
3717  if (strict_mode_callee) {
3718  boilerplate =
3719  isolate()->context()->global_context()->
3720  strict_mode_arguments_boilerplate();
3721  arguments_object_size = kArgumentsObjectSizeStrict;
3722  } else {
3723  boilerplate =
3724  isolate()->context()->global_context()->arguments_boilerplate();
3725  arguments_object_size = kArgumentsObjectSize;
3726  }
3727 
3728  // This calls Copy directly rather than using Heap::AllocateRaw so we
3729  // duplicate the check here.
3730  ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
3731 
3732  // Check that the size of the boilerplate matches our
3733  // expectations. The ArgumentsAccessStub::GenerateNewObject relies
3734  // on the size being a known constant.
3735  ASSERT(arguments_object_size == boilerplate->map()->instance_size());
3736 
3737  // Do the allocation.
3738  Object* result;
3739  { MaybeObject* maybe_result =
3740  AllocateRaw(arguments_object_size, NEW_SPACE, OLD_POINTER_SPACE);
3741  if (!maybe_result->ToObject(&result)) return maybe_result;
3742  }
3743 
3744  // Copy the content. The arguments boilerplate doesn't have any
3745  // fields that point to new space so it's safe to skip the write
3746  // barrier here.
3747  CopyBlock(HeapObject::cast(result)->address(),
3748  boilerplate->address(),
3749  JSObject::kHeaderSize);
3750 
3751  // Set the length property.
3752  JSObject::cast(result)->InObjectPropertyAtPut(kArgumentsLengthIndex,
3753  Smi::FromInt(length),
3754  SKIP_WRITE_BARRIER);
3755  // Set the callee property for non-strict mode arguments object only.
3756  if (!strict_mode_callee) {
3757  JSObject::cast(result)->InObjectPropertyAtPut(kArgumentsCalleeIndex,
3758  callee);
3759  }
3760 
3761  // Check the state of the object
3762  ASSERT(JSObject::cast(result)->HasFastProperties());
3763  ASSERT(JSObject::cast(result)->HasFastObjectElements());
3764 
3765  return result;
3766 }
3767 
3768 
3769 static bool HasDuplicates(DescriptorArray* descriptors) {
3770  int count = descriptors->number_of_descriptors();
3771  if (count > 1) {
3772  String* prev_key = descriptors->GetKey(0);
3773  for (int i = 1; i != count; i++) {
3774  String* current_key = descriptors->GetKey(i);
3775  if (prev_key == current_key) return true;
3776  prev_key = current_key;
3777  }
3778  }
3779  return false;
3780 }
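A minimal sketch of the same check HasDuplicates performs (illustrative only; has_duplicates_sorted is an assumed name). Because the keys are sorted first, any duplicate must be adjacent, so one linear pass suffices.

// Illustrative sketch, not V8 code.
#include <string>
#include <vector>

bool has_duplicates_sorted(const std::vector<std::string>& keys) {
  for (size_t i = 1; i < keys.size(); ++i) {
    if (keys[i - 1] == keys[i]) return true;  // sorted => duplicates are adjacent
  }
  return false;
}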
3781 
3782 
3783 MaybeObject* Heap::AllocateInitialMap(JSFunction* fun) {
3784  ASSERT(!fun->has_initial_map());
3785 
3786  // First create a new map with the size and number of in-object properties
3787  // suggested by the function.
3788  int instance_size = fun->shared()->CalculateInstanceSize();
3789  int in_object_properties = fun->shared()->CalculateInObjectProperties();
3790  Object* map_obj;
3791  { MaybeObject* maybe_map_obj = AllocateMap(JS_OBJECT_TYPE, instance_size);
3792  if (!maybe_map_obj->ToObject(&map_obj)) return maybe_map_obj;
3793  }
3794 
3795  // Fetch or allocate prototype.
3796  Object* prototype;
3797  if (fun->has_instance_prototype()) {
3798  prototype = fun->instance_prototype();
3799  } else {
3800  { MaybeObject* maybe_prototype = AllocateFunctionPrototype(fun);
3801  if (!maybe_prototype->ToObject(&prototype)) return maybe_prototype;
3802  }
3803  }
3804  Map* map = Map::cast(map_obj);
3805  map->set_inobject_properties(in_object_properties);
3806  map->set_unused_property_fields(in_object_properties);
3807  map->set_prototype(prototype);
3809 
3810  // If the function has only simple this property assignments add
3811  // field descriptors for these to the initial map as the object
3812  // cannot be constructed without having these properties. Guard by
3813  // the inline_new flag so we only change the map if we generate a
3814  // specialized construct stub.
3815  ASSERT(in_object_properties <= Map::kMaxPreAllocatedPropertyFields);
3816  if (fun->shared()->CanGenerateInlineConstructor(prototype)) {
3817  int count = fun->shared()->this_property_assignments_count();
3818  if (count > in_object_properties) {
3819  // Inline constructor can only handle inobject properties.
3820  fun->shared()->ForbidInlineConstructor();
3821  } else {
3822  DescriptorArray* descriptors;
3823  { MaybeObject* maybe_descriptors_obj =
3824  DescriptorArray::Allocate(count, DescriptorArray::MAY_BE_SHARED);
3825  if (!maybe_descriptors_obj->To<DescriptorArray>(&descriptors)) {
3826  return maybe_descriptors_obj;
3827  }
3828  }
3829  DescriptorArray::WhitenessWitness witness(descriptors);
3830  for (int i = 0; i < count; i++) {
3831  String* name = fun->shared()->GetThisPropertyAssignmentName(i);
3832  ASSERT(name->IsSymbol());
3833  FieldDescriptor field(name, i, NONE);
3834  field.SetEnumerationIndex(i);
3835  descriptors->Set(i, &field, witness);
3836  }
3837  descriptors->SetNextEnumerationIndex(count);
3838  descriptors->SortUnchecked(witness);
3839 
3840  // The descriptors may contain duplicates because the compiler does not
3841  // guarantee the uniqueness of property names (it would have required
3842  // quadratic time). Once the descriptors are sorted we can check for
3843  // duplicates in linear time.
3844  if (HasDuplicates(descriptors)) {
3845  fun->shared()->ForbidInlineConstructor();
3846  } else {
3847  map->set_instance_descriptors(descriptors);
3848  map->set_pre_allocated_property_fields(count);
3849  map->set_unused_property_fields(in_object_properties - count);
3850  }
3851  }
3852  }
3853 
3854  fun->shared()->StartInobjectSlackTracking(map);
3855 
3856  return map;
3857 }
3858 
3859 
3860 void Heap::InitializeJSObjectFromMap(JSObject* obj,
3861  FixedArray* properties,
3862  Map* map) {
3863  obj->set_properties(properties);
3864  obj->initialize_elements();
3865  // TODO(1240798): Initialize the object's body using valid initial values
3866  // according to the object's initial map. For example, if the map's
3867  // instance type is JS_ARRAY_TYPE, the length field should be initialized
3868  // to a number (e.g. Smi::FromInt(0)) and the elements initialized to a
3869  // fixed array (e.g. Heap::empty_fixed_array()). Currently, the object
3870  // verification code has to cope with (temporarily) invalid objects. See
3871  // for example, JSArray::JSArrayVerify.
3872  Object* filler;
3873  // We cannot always fill with one_pointer_filler_map because objects
3874  // created from API functions expect their internal fields to be initialized
3875  // with undefined_value.
3876  // Pre-allocated fields need to be initialized with undefined_value as well
3877  // so that object accesses before the constructor completes (e.g. in the
3878  // debugger) will not cause a crash.
3879  if (map->constructor()->IsJSFunction() &&
3880  JSFunction::cast(map->constructor())->shared()->
3881  IsInobjectSlackTrackingInProgress()) {
3882  // We might want to shrink the object later.
3883  ASSERT(obj->GetInternalFieldCount() == 0);
3884  filler = Heap::one_pointer_filler_map();
3885  } else {
3886  filler = Heap::undefined_value();
3887  }
3888  obj->InitializeBody(map, Heap::undefined_value(), filler);
3889 }
3890 
3891 
3892 MaybeObject* Heap::AllocateJSObjectFromMap(Map* map, PretenureFlag pretenure) {
3893  // JSFunctions should be allocated using AllocateFunction to be
3894  // properly initialized.
3895  ASSERT(map->instance_type() != JS_FUNCTION_TYPE);
3896 
3897  // Both types of global objects should be allocated using
3898  // AllocateGlobalObject to be properly initialized.
3899  ASSERT(map->instance_type() != JS_GLOBAL_OBJECT_TYPE);
3900  ASSERT(map->instance_type() != JS_BUILTINS_OBJECT_TYPE);
3901 
3902  // Allocate the backing storage for the properties.
3903  int prop_size =
3904  map->pre_allocated_property_fields() +
3905  map->unused_property_fields() -
3906  map->inobject_properties();
3907  ASSERT(prop_size >= 0);
3908  Object* properties;
3909  { MaybeObject* maybe_properties = AllocateFixedArray(prop_size, pretenure);
3910  if (!maybe_properties->ToObject(&properties)) return maybe_properties;
3911  }
3912 
3913  // Allocate the JSObject.
3914  AllocationSpace space =
3915  (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE;
3916  if (map->instance_size() > Page::kMaxNonCodeHeapObjectSize) space = LO_SPACE;
3917  Object* obj;
3918  { MaybeObject* maybe_obj = Allocate(map, space);
3919  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
3920  }
3921 
3922  // Initialize the JSObject.
3923  InitializeJSObjectFromMap(JSObject::cast(obj),
3924  FixedArray::cast(properties),
3925  map);
3926  ASSERT(JSObject::cast(obj)->HasFastSmiOrObjectElements());
3927  return obj;
3928 }
3929 
3930 
3931 MaybeObject* Heap::AllocateJSObject(JSFunction* constructor,
3932  PretenureFlag pretenure) {
3933  // Allocate the initial map if absent.
3934  if (!constructor->has_initial_map()) {
3935  Object* initial_map;
3936  { MaybeObject* maybe_initial_map = AllocateInitialMap(constructor);
3937  if (!maybe_initial_map->ToObject(&initial_map)) return maybe_initial_map;
3938  }
3939  constructor->set_initial_map(Map::cast(initial_map));
3940  Map::cast(initial_map)->set_constructor(constructor);
3941  }
3942  // Allocate the object based on the constructor's initial map.
3943  MaybeObject* result = AllocateJSObjectFromMap(
3944  constructor->initial_map(), pretenure);
3945 #ifdef DEBUG
3946  // Make sure result is NOT a global object if valid.
3947  Object* non_failure;
3948  ASSERT(!result->ToObject(&non_failure) || !non_failure->IsGlobalObject());
3949 #endif
3950  return result;
3951 }
3952 
3953 
3954 MaybeObject* Heap::AllocateJSModule() {
3955  // Allocate a fresh map. Modules do not have a prototype.
3956  Map* map;
3957  MaybeObject* maybe_map = AllocateMap(JS_MODULE_TYPE, JSModule::kSize);
3958  if (!maybe_map->To(&map)) return maybe_map;
3959  // Allocate the object based on the map.
3960  return AllocateJSObjectFromMap(map, TENURED);
3961 }
3962 
3963 
3964 MaybeObject* Heap::AllocateJSArrayAndStorage(
3965  ElementsKind elements_kind,
3966  int length,
3967  int capacity,
3968  ArrayStorageAllocationMode mode,
3969  PretenureFlag pretenure) {
3970  ASSERT(capacity >= length);
3971  if (length != 0 && mode == INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE) {
3972  elements_kind = GetHoleyElementsKind(elements_kind);
3973  }
3974  MaybeObject* maybe_array = AllocateJSArray(elements_kind, pretenure);
3975  JSArray* array;
3976  if (!maybe_array->To(&array)) return maybe_array;
3977 
3978  if (capacity == 0) {
3979  array->set_length(Smi::FromInt(0));
3980  array->set_elements(empty_fixed_array());
3981  return array;
3982  }
3983 
3984  FixedArrayBase* elms;
3985  MaybeObject* maybe_elms = NULL;
3986  if (elements_kind == FAST_DOUBLE_ELEMENTS) {
3987  if (mode == DONT_INITIALIZE_ARRAY_ELEMENTS) {
3988  maybe_elms = AllocateUninitializedFixedDoubleArray(capacity);
3989  } else {
3990  ASSERT(mode == INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE);
3991  maybe_elms = AllocateFixedDoubleArrayWithHoles(capacity);
3992  }
3993  } else {
3994  ASSERT(IsFastSmiOrObjectElementsKind(elements_kind));
3995  if (mode == DONT_INITIALIZE_ARRAY_ELEMENTS) {
3996  maybe_elms = AllocateUninitializedFixedArray(capacity);
3997  } else {
3998  ASSERT(mode == INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE);
3999  maybe_elms = AllocateFixedArrayWithHoles(capacity);
4000  }
4001  }
4002  if (!maybe_elms->To(&elms)) return maybe_elms;
4003 
4004  array->set_elements(elms);
4005  array->set_length(Smi::FromInt(length));
4006  return array;
4007 }
4008 
4009 
4010 MaybeObject* Heap::AllocateJSArrayWithElements(
4011  FixedArrayBase* elements,
4012  ElementsKind elements_kind,
4013  PretenureFlag pretenure) {
4014  MaybeObject* maybe_array = AllocateJSArray(elements_kind, pretenure);
4015  JSArray* array;
4016  if (!maybe_array->To(&array)) return maybe_array;
4017 
4018  array->set_elements(elements);
4019  array->set_length(Smi::FromInt(elements->length()));
4020  array->ValidateElements();
4021  return array;
4022 }
4023 
4024 
4025 MaybeObject* Heap::AllocateJSProxy(Object* handler, Object* prototype) {
4026  // Allocate map.
4027  // TODO(rossberg): Once we optimize proxies, think about a scheme to share
4028  // maps. Will probably depend on the identity of the handler object, too.
4029  Map* map;
4030  MaybeObject* maybe_map_obj = AllocateMap(JS_PROXY_TYPE, JSProxy::kSize);
4031  if (!maybe_map_obj->To<Map>(&map)) return maybe_map_obj;
4032  map->set_prototype(prototype);
4033 
4034  // Allocate the proxy object.
4035  JSProxy* result;
4036  MaybeObject* maybe_result = Allocate(map, NEW_SPACE);
4037  if (!maybe_result->To<JSProxy>(&result)) return maybe_result;
4038  result->InitializeBody(map->instance_size(), Smi::FromInt(0));
4039  result->set_handler(handler);
4040  result->set_hash(undefined_value(), SKIP_WRITE_BARRIER);
4041  return result;
4042 }
4043 
4044 
4045 MaybeObject* Heap::AllocateJSFunctionProxy(Object* handler,
4046  Object* call_trap,
4047  Object* construct_trap,
4048  Object* prototype) {
4049  // Allocate map.
4050  // TODO(rossberg): Once we optimize proxies, think about a scheme to share
4051  // maps. Will probably depend on the identity of the handler object, too.
4052  Map* map;
4053  MaybeObject* maybe_map_obj =
4054  AllocateMap(JS_FUNCTION_PROXY_TYPE, JSFunctionProxy::kSize);
4055  if (!maybe_map_obj->To<Map>(&map)) return maybe_map_obj;
4056  map->set_prototype(prototype);
4057 
4058  // Allocate the proxy object.
4059  JSFunctionProxy* result;
4060  MaybeObject* maybe_result = Allocate(map, NEW_SPACE);
4061  if (!maybe_result->To<JSFunctionProxy>(&result)) return maybe_result;
4062  result->InitializeBody(map->instance_size(), Smi::FromInt(0));
4063  result->set_handler(handler);
4064  result->set_hash(undefined_value(), SKIP_WRITE_BARRIER);
4065  result->set_call_trap(call_trap);
4066  result->set_construct_trap(construct_trap);
4067  return result;
4068 }
4069 
4070 
4071 MaybeObject* Heap::AllocateGlobalObject(JSFunction* constructor) {
4072  ASSERT(constructor->has_initial_map());
4073  Map* map = constructor->initial_map();
4074 
4075  // Make sure no field properties are described in the initial map.
4076  // This guarantees us that normalizing the properties does not
4077  // require us to change property values to JSGlobalPropertyCells.
4078  ASSERT(map->NextFreePropertyIndex() == 0);
4079 
4080  // Make sure we don't have a ton of pre-allocated slots in the
4081  // global objects. They will be unused once we normalize the object.
4082  ASSERT(map->unused_property_fields() == 0);
4083  ASSERT(map->inobject_properties() == 0);
4084 
4085  // Initial size of the backing store to avoid resize of the storage during
4086  // bootstrapping. The size differs between the JS global object and the
4087  // builtins object.
4088  int initial_size = map->instance_type() == JS_GLOBAL_OBJECT_TYPE ? 64 : 512;
4089 
4090  // Allocate a dictionary object for backing storage.
4091  Object* obj;
4092  { MaybeObject* maybe_obj =
4093  StringDictionary::Allocate(
4094  map->NumberOfDescribedProperties() * 2 + initial_size);
4095  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
4096  }
4097  StringDictionary* dictionary = StringDictionary::cast(obj);
4098 
4099  // The global object might be created from an object template with accessors.
4100  // Fill these accessors into the dictionary.
4101  DescriptorArray* descs = map->instance_descriptors();
4102  for (int i = 0; i < descs->number_of_descriptors(); i++) {
4103  PropertyDetails details = descs->GetDetails(i);
4104  ASSERT(details.type() == CALLBACKS); // Only accessors are expected.
4105  PropertyDetails d =
4106  PropertyDetails(details.attributes(), CALLBACKS, details.index());
4107  Object* value = descs->GetCallbacksObject(i);
4108  { MaybeObject* maybe_value = AllocateJSGlobalPropertyCell(value);
4109  if (!maybe_value->ToObject(&value)) return maybe_value;
4110  }
4111 
4112  Object* result;
4113  { MaybeObject* maybe_result = dictionary->Add(descs->GetKey(i), value, d);
4114  if (!maybe_result->ToObject(&result)) return maybe_result;
4115  }
4116  dictionary = StringDictionary::cast(result);
4117  }
4118 
4119  // Allocate the global object and initialize it with the backing store.
4120  { MaybeObject* maybe_obj = Allocate(map, OLD_POINTER_SPACE);
4121  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
4122  }
4123  JSObject* global = JSObject::cast(obj);
4124  InitializeJSObjectFromMap(global, dictionary, map);
4125 
4126  // Create a new map for the global object.
4127  { MaybeObject* maybe_obj = map->CopyDropDescriptors();
4128  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
4129  }
4130  Map* new_map = Map::cast(obj);
4131 
4132  // Set up the global object as a normalized object.
4133  global->set_map(new_map);
4134  global->map()->clear_instance_descriptors();
4135  global->set_properties(dictionary);
4136 
4137  // Make sure result is a global object with properties in dictionary.
4138  ASSERT(global->IsGlobalObject());
4139  ASSERT(!global->HasFastProperties());
4140  return global;
4141 }
4142 
4143 
4144 MaybeObject* Heap::CopyJSObject(JSObject* source) {
4145  // Never used to copy functions. If functions need to be copied we
4146  // have to be careful to clear the literals array.
4147  SLOW_ASSERT(!source->IsJSFunction());
4148 
4149  // Make the clone.
4150  Map* map = source->map();
4151  int object_size = map->instance_size();
4152  Object* clone;
4153 
4154  WriteBarrierMode wb_mode = UPDATE_WRITE_BARRIER;
4155 
4156  // If we're forced to always allocate, we use the general allocation
4157  // functions which may leave us with an object in old space.
4158  if (always_allocate()) {
4159  { MaybeObject* maybe_clone =
4160  AllocateRaw(object_size, NEW_SPACE, OLD_POINTER_SPACE);
4161  if (!maybe_clone->ToObject(&clone)) return maybe_clone;
4162  }
4163  Address clone_address = HeapObject::cast(clone)->address();
4164  CopyBlock(clone_address,
4165  source->address(),
4166  object_size);
4167  // Update write barrier for all fields that lie beyond the header.
4168  RecordWrites(clone_address,
4169  JSObject::kHeaderSize,
4170  (object_size - JSObject::kHeaderSize) / kPointerSize);
4171  } else {
4172  wb_mode = SKIP_WRITE_BARRIER;
4173  { MaybeObject* maybe_clone = new_space_.AllocateRaw(object_size);
4174  if (!maybe_clone->ToObject(&clone)) return maybe_clone;
4175  }
4176  SLOW_ASSERT(InNewSpace(clone));
4177  // Since we know the clone is allocated in new space, we can copy
4178  // the contents without worrying about updating the write barrier.
4179  CopyBlock(HeapObject::cast(clone)->address(),
4180  source->address(),
4181  object_size);
4182  }
4183 
4184  SLOW_ASSERT(
4185  JSObject::cast(clone)->GetElementsKind() == source->GetElementsKind());
4186  FixedArrayBase* elements = FixedArrayBase::cast(source->elements());
4187  FixedArray* properties = FixedArray::cast(source->properties());
4188  // Update elements if necessary.
4189  if (elements->length() > 0) {
4190  Object* elem;
4191  { MaybeObject* maybe_elem;
4192  if (elements->map() == fixed_cow_array_map()) {
4193  maybe_elem = FixedArray::cast(elements);
4194  } else if (source->HasFastDoubleElements()) {
4195  maybe_elem = CopyFixedDoubleArray(FixedDoubleArray::cast(elements));
4196  } else {
4197  maybe_elem = CopyFixedArray(FixedArray::cast(elements));
4198  }
4199  if (!maybe_elem->ToObject(&elem)) return maybe_elem;
4200  }
4201  JSObject::cast(clone)->set_elements(FixedArrayBase::cast(elem), wb_mode);
4202  }
4203  // Update properties if necessary.
4204  if (properties->length() > 0) {
4205  Object* prop;
4206  { MaybeObject* maybe_prop = CopyFixedArray(properties);
4207  if (!maybe_prop->ToObject(&prop)) return maybe_prop;
4208  }
4209  JSObject::cast(clone)->set_properties(FixedArray::cast(prop), wb_mode);
4210  }
4211  // Return the new clone.
4212  return clone;
4213 }
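A sketch of the write-barrier decision made in CopyJSObject (illustrative only; BarrierMode and barrier_for_clone are assumed names). A clone placed in new space cannot create old-to-new pointers that the store buffer would miss, so its field copies may skip the barrier, while an old-space clone must record every pointer field beyond the header.

// Illustrative sketch, not V8 code.
enum class BarrierMode { kUpdate, kSkip };

BarrierMode barrier_for_clone(bool clone_in_new_space) {
  // New-space clone: the scavenger will find its pointers anyway.
  // Old-space clone: pointer fields must be recorded after the raw copy.
  return clone_in_new_space ? BarrierMode::kSkip : BarrierMode::kUpdate;
}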
4214 
4215 
4216 MaybeObject* Heap::ReinitializeJSReceiver(
4217  JSReceiver* object, InstanceType type, int size) {
4218  ASSERT(type >= FIRST_JS_OBJECT_TYPE);
4219 
4220  // Allocate fresh map.
4221  // TODO(rossberg): Once we optimize proxies, cache these maps.
4222  Map* map;
4223  MaybeObject* maybe = AllocateMap(type, size);
4224  if (!maybe->To<Map>(&map)) return maybe;
4225 
4226  // Check that the receiver has at least the size of the fresh object.
4227  int size_difference = object->map()->instance_size() - map->instance_size();
4228  ASSERT(size_difference >= 0);
4229 
4230  map->set_prototype(object->map()->prototype());
4231 
4232  // Allocate the backing storage for the properties.
4233  int prop_size = map->unused_property_fields() - map->inobject_properties();
4234  Object* properties;
4235  maybe = AllocateFixedArray(prop_size, TENURED);
4236  if (!maybe->ToObject(&properties)) return maybe;
4237 
4238  // Functions require some allocation, which might fail here.
4239  SharedFunctionInfo* shared = NULL;
4240  if (type == JS_FUNCTION_TYPE) {
4241  String* name;
4242  maybe = LookupAsciiSymbol("<freezing call trap>");
4243  if (!maybe->To<String>(&name)) return maybe;
4244  maybe = AllocateSharedFunctionInfo(name);
4245  if (!maybe->To<SharedFunctionInfo>(&shared)) return maybe;
4246  }
4247 
4248  // Because of possible retries of this function after failure,
4249  // we must NOT fail after this point, where we have changed the type!
4250 
4251  // Reset the map for the object.
4252  object->set_map(map);
4253  JSObject* jsobj = JSObject::cast(object);
4254 
4255  // Reinitialize the object from the constructor map.
4256  InitializeJSObjectFromMap(jsobj, FixedArray::cast(properties), map);
4257 
4258  // Functions require some minimal initialization.
4259  if (type == JS_FUNCTION_TYPE) {
4260  map->set_function_with_prototype(true);
4261  InitializeFunction(JSFunction::cast(object), shared, the_hole_value());
4262  JSFunction::cast(object)->set_context(
4263  isolate()->context()->global_context());
4264  }
4265 
4266  // Put in filler if the new object is smaller than the old.
4267  if (size_difference > 0) {
4268  CreateFillerObjectAt(
4269  object->address() + map->instance_size(), size_difference);
4270  }
4271 
4272  return object;
4273 }
4274 
4275 
4276 MaybeObject* Heap::ReinitializeJSGlobalProxy(JSFunction* constructor,
4277  JSGlobalProxy* object) {
4278  ASSERT(constructor->has_initial_map());
4279  Map* map = constructor->initial_map();
4280 
4281  // Check that the already allocated object has the same size and type as
4282  // objects allocated using the constructor.
4283  ASSERT(map->instance_size() == object->map()->instance_size());
4284  ASSERT(map->instance_type() == object->map()->instance_type());
4285 
4286  // Allocate the backing storage for the properties.
4287  int prop_size = map->unused_property_fields() - map->inobject_properties();
4288  Object* properties;
4289  { MaybeObject* maybe_properties = AllocateFixedArray(prop_size, TENURED);
4290  if (!maybe_properties->ToObject(&properties)) return maybe_properties;
4291  }
4292 
4293  // Reset the map for the object.
4294  object->set_map(constructor->initial_map());
4295 
4296  // Reinitialize the object from the constructor map.
4297  InitializeJSObjectFromMap(object, FixedArray::cast(properties), map);
4298  return object;
4299 }
4300 
4301 
4302 MaybeObject* Heap::AllocateStringFromAscii(Vector<const char> string,
4303  PretenureFlag pretenure) {
4304  if (string.length() == 1) {
4305  return Heap::LookupSingleCharacterStringFromCode(string[0]);
4306  }
4307  Object* result;
4308  { MaybeObject* maybe_result =
4309  AllocateRawAsciiString(string.length(), pretenure);
4310  if (!maybe_result->ToObject(&result)) return maybe_result;
4311  }
4312 
4313  // Copy the characters into the new object.
4314  SeqAsciiString* string_result = SeqAsciiString::cast(result);
4315  for (int i = 0; i < string.length(); i++) {
4316  string_result->SeqAsciiStringSet(i, string[i]);
4317  }
4318  return result;
4319 }
4320 
4321 
4322 MaybeObject* Heap::AllocateStringFromUtf8Slow(Vector<const char> string,
4323  PretenureFlag pretenure) {
4324  // Count the number of characters in the UTF-8 string and check if
4325  // it is an ASCII string.
4326  Access<UnicodeCache::Utf8Decoder>
4327  decoder(isolate_->unicode_cache()->utf8_decoder());
4328  decoder->Reset(string.start(), string.length());
4329  int chars = 0;
4330  while (decoder->has_more()) {
4331  uint32_t r = decoder->GetNext();
4332  if (r <= unibrow::Utf16::kMaxNonSurrogateCharCode) {
4333  chars++;
4334  } else {
4335  chars += 2;
4336  }
4337  }
4338 
4339  Object* result;
4340  { MaybeObject* maybe_result = AllocateRawTwoByteString(chars, pretenure);
4341  if (!maybe_result->ToObject(&result)) return maybe_result;
4342  }
4343 
4344  // Convert and copy the characters into the new object.
4345  String* string_result = String::cast(result);
4346  decoder->Reset(string.start(), string.length());
4347  int i = 0;
4348  while (i < chars) {
4349  uint32_t r = decoder->GetNext();
4350  if (r > unibrow::Utf16::kMaxNonSurrogateCharCode) {
4351  string_result->Set(i++, unibrow::Utf16::LeadSurrogate(r));
4352  string_result->Set(i++, unibrow::Utf16::TrailSurrogate(r));
4353  } else {
4354  string_result->Set(i++, r);
4355  }
4356  }
4357  return result;
4358 }
4359 
4360 
4361 MaybeObject* Heap::AllocateStringFromTwoByte(Vector<const uc16> string,
4362  PretenureFlag pretenure) {
4363  // Check if the string is an ASCII string.
4364  MaybeObject* maybe_result;
4365  if (String::IsAscii(string.start(), string.length())) {
4366  maybe_result = AllocateRawAsciiString(string.length(), pretenure);
4367  } else { // It's not an ASCII string.
4368  maybe_result = AllocateRawTwoByteString(string.length(), pretenure);
4369  }
4370  Object* result;
4371  if (!maybe_result->ToObject(&result)) return maybe_result;
4372 
4373  // Copy the characters into the new object, which may be either ASCII or
4374  // UTF-16.
4375  String* string_result = String::cast(result);
4376  for (int i = 0; i < string.length(); i++) {
4377  string_result->Set(i, string[i]);
4378  }
4379  return result;
4380 }
4381 
4382 
4383 Map* Heap::SymbolMapForString(String* string) {
4384  // If the string is in new space it cannot be used as a symbol.
4385  if (InNewSpace(string)) return NULL;
4386 
4387  // Find the corresponding symbol map for strings.
4388  switch (string->map()->instance_type()) {
4389  case STRING_TYPE: return symbol_map();
4390  case ASCII_STRING_TYPE: return ascii_symbol_map();
4391  case CONS_STRING_TYPE: return cons_symbol_map();
4392  case CONS_ASCII_STRING_TYPE: return cons_ascii_symbol_map();
4393  case EXTERNAL_STRING_TYPE: return external_symbol_map();
4394  case EXTERNAL_ASCII_STRING_TYPE: return external_ascii_symbol_map();
4395  case EXTERNAL_STRING_WITH_ASCII_DATA_TYPE:
4396  return external_symbol_with_ascii_data_map();
4397  case SHORT_EXTERNAL_STRING_TYPE: return short_external_symbol_map();
4398  case SHORT_EXTERNAL_ASCII_STRING_TYPE:
4399  return short_external_ascii_symbol_map();
4400  case SHORT_EXTERNAL_STRING_WITH_ASCII_DATA_TYPE:
4401  return short_external_symbol_with_ascii_data_map();
4402  default: return NULL; // No match found.
4403  }
4404 }
4405 
4406 
4407 MaybeObject* Heap::AllocateInternalSymbol(unibrow::CharacterStream* buffer,
4408  int chars,
4409  uint32_t hash_field) {
4410  ASSERT(chars >= 0);
4411  // Ensure the chars matches the number of characters in the buffer.
4412  ASSERT(static_cast<unsigned>(chars) == buffer->Utf16Length());
4413  // Determine whether the string is ASCII.
4414  bool is_ascii = true;
4415  while (buffer->has_more()) {
4416  if (buffer->GetNext() > unibrow::Utf8::kMaxOneByteChar) {
4417  is_ascii = false;
4418  break;
4419  }
4420  }
4421  buffer->Rewind();
4422 
4423  // Compute map and object size.
4424  int size;
4425  Map* map;
4426 
4427  if (is_ascii) {
4428  if (chars > SeqAsciiString::kMaxLength) {
4429  return Failure::OutOfMemoryException();
4430  }
4431  map = ascii_symbol_map();
4432  size = SeqAsciiString::SizeFor(chars);
4433  } else {
4434  if (chars > SeqTwoByteString::kMaxLength) {
4435  return Failure::OutOfMemoryException();
4436  }
4437  map = symbol_map();
4438  size = SeqTwoByteString::SizeFor(chars);
4439  }
4440 
4441  // Allocate string.
4442  Object* result;
4443  { MaybeObject* maybe_result = (size > Page::kMaxNonCodeHeapObjectSize)
4444  ? lo_space_->AllocateRaw(size, NOT_EXECUTABLE)
4445  : old_data_space_->AllocateRaw(size);
4446  if (!maybe_result->ToObject(&result)) return maybe_result;
4447  }
4448 
4449  reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier(map);
4450  // Set length and hash fields of the allocated string.
4451  String* answer = String::cast(result);
4452  answer->set_length(chars);
4453  answer->set_hash_field(hash_field);
4454 
4455  ASSERT_EQ(size, answer->Size());
4456 
4457  // Fill in the characters.
4458  int i = 0;
4459  while (i < chars) {
4460  uint32_t character = buffer->GetNext();
4461  if (character > unibrow::Utf16::kMaxNonSurrogateCharCode) {
4462  answer->Set(i++, unibrow::Utf16::LeadSurrogate(character));
4463  answer->Set(i++, unibrow::Utf16::TrailSurrogate(character));
4464  } else {
4465  answer->Set(i++, character);
4466  }
4467  }
4468  return answer;
4469 }
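A self-contained sketch of the surrogate-pair split used in the fill loop above (illustrative only; append_utf16 is an assumed helper, not a V8 API). Code points above 0xFFFF are stored as a lead/trail surrogate pair, exactly as the loop writes LeadSurrogate(r) followed by TrailSurrogate(r).

// Illustrative sketch, not V8 code.
#include <cstdint>
#include <vector>

void append_utf16(std::vector<uint16_t>* out, uint32_t code_point) {
  if (code_point > 0xFFFF) {
    uint32_t v = code_point - 0x10000;
    out->push_back(static_cast<uint16_t>(0xD800 + (v >> 10)));    // lead surrogate
    out->push_back(static_cast<uint16_t>(0xDC00 + (v & 0x3FF)));  // trail surrogate
  } else {
    out->push_back(static_cast<uint16_t>(code_point));
  }
}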
4470 
4471 
4472 MaybeObject* Heap::AllocateRawAsciiString(int length, PretenureFlag pretenure) {
4473  if (length < 0 || length > SeqAsciiString::kMaxLength) {
4474  return Failure::OutOfMemoryException();
4475  }
4476 
4477  int size = SeqAsciiString::SizeFor(length);
4478  ASSERT(size <= SeqAsciiString::kMaxSize);
4479 
4480  AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
4481  AllocationSpace retry_space = OLD_DATA_SPACE;
4482 
4483  if (space == NEW_SPACE) {
4484  if (size > kMaxObjectSizeInNewSpace) {
4485  // Allocate in large object space, retry space will be ignored.
4486  space = LO_SPACE;
4487  } else if (size > Page::kMaxNonCodeHeapObjectSize) {
4488  // Allocate in new space, retry in large object space.
4489  retry_space = LO_SPACE;
4490  }
4491  } else if (space == OLD_DATA_SPACE &&
4492  size > Page::kMaxNonCodeHeapObjectSize) {
4493  space = LO_SPACE;
4494  }
4495  Object* result;
4496  { MaybeObject* maybe_result = AllocateRaw(size, space, retry_space);
4497  if (!maybe_result->ToObject(&result)) return maybe_result;
4498  }
4499 
4500  // Partially initialize the object.
4501  HeapObject::cast(result)->set_map_no_write_barrier(ascii_string_map());
4502  String::cast(result)->set_length(length);
4503  String::cast(result)->set_hash_field(String::kEmptyHashField);
4504  ASSERT_EQ(size, HeapObject::cast(result)->Size());
4505 
4506 #ifdef DEBUG
4507  if (FLAG_verify_heap) {
4508  // Initialize string's content to ensure ASCII-ness (character range 0-127)
4509  // as required when verifying the heap.
4510  char* dest = SeqAsciiString::cast(result)->GetChars();
4511  memset(dest, 0x0F, length * kCharSize);
4512  }
4513 #endif // DEBUG
4514 
4515  return result;
4516 }
4517 
4518 
4519 MaybeObject* Heap::AllocateRawTwoByteString(int length,
4520  PretenureFlag pretenure) {
4521  if (length < 0 || length > SeqTwoByteString::kMaxLength) {
4522  return Failure::OutOfMemoryException();
4523  }
4524  int size = SeqTwoByteString::SizeFor(length);
4525  ASSERT(size <= SeqTwoByteString::kMaxSize);
4526  AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
4527  AllocationSpace retry_space = OLD_DATA_SPACE;
4528 
4529  if (space == NEW_SPACE) {
4530  if (size > kMaxObjectSizeInNewSpace) {
4531  // Allocate in large object space, retry space will be ignored.
4532  space = LO_SPACE;
4533  } else if (size > Page::kMaxNonCodeHeapObjectSize) {
4534  // Allocate in new space, retry in large object space.
4535  retry_space = LO_SPACE;
4536  }
4537  } else if (space == OLD_DATA_SPACE &&
4538  size > Page::kMaxNonCodeHeapObjectSize) {
4539  space = LO_SPACE;
4540  }
4541  Object* result;
4542  { MaybeObject* maybe_result = AllocateRaw(size, space, retry_space);
4543  if (!maybe_result->ToObject(&result)) return maybe_result;
4544  }
4545 
4546  // Partially initialize the object.
4547  HeapObject::cast(result)->set_map_no_write_barrier(string_map());
4548  String::cast(result)->set_length(length);
4549  String::cast(result)->set_hash_field(String::kEmptyHashField);
4550  ASSERT_EQ(size, HeapObject::cast(result)->Size());
4551  return result;
4552 }
4553 
4554 
4555 MaybeObject* Heap::AllocateJSArray(
4556  ElementsKind elements_kind,
4557  PretenureFlag pretenure) {
4558  Context* global_context = isolate()->context()->global_context();
4559  JSFunction* array_function = global_context->array_function();
4560  Map* map = array_function->initial_map();
4561  Object* maybe_map_array = global_context->js_array_maps();
4562  if (!maybe_map_array->IsUndefined()) {
4563  Object* maybe_transitioned_map =
4564  FixedArray::cast(maybe_map_array)->get(elements_kind);
4565  if (!maybe_transitioned_map->IsUndefined()) {
4566  map = Map::cast(maybe_transitioned_map);
4567  }
4568  }
4569 
4570  return AllocateJSObjectFromMap(map, pretenure);
4571 }
4572 
4573 
4574 MaybeObject* Heap::AllocateEmptyFixedArray() {
4575  int size = FixedArray::SizeFor(0);
4576  Object* result;
4577  { MaybeObject* maybe_result =
4578  AllocateRaw(size, OLD_DATA_SPACE, OLD_DATA_SPACE);
4579  if (!maybe_result->ToObject(&result)) return maybe_result;
4580  }
4581  // Initialize the object.
4582  reinterpret_cast<FixedArray*>(result)->set_map_no_write_barrier(
4583  fixed_array_map());
4584  reinterpret_cast<FixedArray*>(result)->set_length(0);
4585  return result;
4586 }
4587 
4588 
4589 MaybeObject* Heap::AllocateRawFixedArray(int length) {
4590  if (length < 0 || length > FixedArray::kMaxLength) {
4591  return Failure::OutOfMemoryException();
4592  }
4593  ASSERT(length > 0);
4594  // Use the general function if we're forced to always allocate.
4595  if (always_allocate()) return AllocateFixedArray(length, TENURED);
4596  // Allocate the raw data for a fixed array.
4597  int size = FixedArray::SizeFor(length);
4598  return size <= kMaxObjectSizeInNewSpace
4599  ? new_space_.AllocateRaw(size)
4600  : lo_space_->AllocateRaw(size, NOT_EXECUTABLE);
4601 }
4602 
4603 
4604 MaybeObject* Heap::CopyFixedArrayWithMap(FixedArray* src, Map* map) {
4605  int len = src->length();
4606  Object* obj;
4607  { MaybeObject* maybe_obj = AllocateRawFixedArray(len);
4608  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
4609  }
4610  if (InNewSpace(obj)) {
4611  HeapObject* dst = HeapObject::cast(obj);
4612  dst->set_map_no_write_barrier(map);
4613  CopyBlock(dst->address() + kPointerSize,
4614  src->address() + kPointerSize,
4615  FixedArray::SizeFor(len) - kPointerSize);
4616  return obj;
4617  }
4618  HeapObject::cast(obj)->set_map_no_write_barrier(map);
4619  FixedArray* result = FixedArray::cast(obj);
4620  result->set_length(len);
4621 
4622  // Copy the content
4623  AssertNoAllocation no_gc;
4624  WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc);
4625  for (int i = 0; i < len; i++) result->set(i, src->get(i), mode);
4626  return result;
4627 }
4628 
4629 
4630 MaybeObject* Heap::CopyFixedDoubleArrayWithMap(FixedDoubleArray* src,
4631  Map* map) {
4632  int len = src->length();
4633  Object* obj;
4634  { MaybeObject* maybe_obj = AllocateRawFixedDoubleArray(len, NOT_TENURED);
4635  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
4636  }
4637  HeapObject* dst = HeapObject::cast(obj);
4638  dst->set_map_no_write_barrier(map);
4639  CopyBlock(
4640  dst->address() + FixedDoubleArray::kLengthOffset,
4641  src->address() + FixedDoubleArray::kLengthOffset,
4642  FixedDoubleArray::SizeFor(len) - FixedDoubleArray::kLengthOffset);
4643  return obj;
4644 }
4645 
4646 
4647 MaybeObject* Heap::AllocateFixedArray(int length) {
4648  ASSERT(length >= 0);
4649  if (length == 0) return empty_fixed_array();
4650  Object* result;
4651  { MaybeObject* maybe_result = AllocateRawFixedArray(length);
4652  if (!maybe_result->ToObject(&result)) return maybe_result;
4653  }
4654  // Initialize header.
4655  FixedArray* array = reinterpret_cast<FixedArray*>(result);
4656  array->set_map_no_write_barrier(fixed_array_map());
4657  array->set_length(length);
4658  // Initialize body.
4659  ASSERT(!InNewSpace(undefined_value()));
4660  MemsetPointer(array->data_start(), undefined_value(), length);
4661  return result;
4662 }
4663 
4664 
4665 MaybeObject* Heap::AllocateRawFixedArray(int length, PretenureFlag pretenure) {
4666  if (length < 0 || length > FixedArray::kMaxLength) {
4667  return Failure::OutOfMemoryException();
4668  }
4669 
4670  AllocationSpace space =
4671  (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE;
4672  int size = FixedArray::SizeFor(length);
4673  if (space == NEW_SPACE && size > kMaxObjectSizeInNewSpace) {
4674  // Too big for new space.
4675  space = LO_SPACE;
4676  } else if (space == OLD_POINTER_SPACE &&
4677  size > Page::kMaxNonCodeHeapObjectSize) {
4678  // Too big for old pointer space.
4679  space = LO_SPACE;
4680  }
4681 
4682  AllocationSpace retry_space =
4683  (size <= Page::kMaxNonCodeHeapObjectSize) ? OLD_POINTER_SPACE : LO_SPACE;
4684 
4685  return AllocateRaw(size, space, retry_space);
4686 }
4687 
4688 
4689 MUST_USE_RESULT static MaybeObject* AllocateFixedArrayWithFiller(
4690  Heap* heap,
4691  int length,
4692  PretenureFlag pretenure,
4693  Object* filler) {
4694  ASSERT(length >= 0);
4695  ASSERT(heap->empty_fixed_array()->IsFixedArray());
4696  if (length == 0) return heap->empty_fixed_array();
4697 
4698  ASSERT(!heap->InNewSpace(filler));
4699  Object* result;
4700  { MaybeObject* maybe_result = heap->AllocateRawFixedArray(length, pretenure);
4701  if (!maybe_result->ToObject(&result)) return maybe_result;
4702  }
4703 
4704  HeapObject::cast(result)->set_map_no_write_barrier(heap->fixed_array_map());
4705  FixedArray* array = FixedArray::cast(result);
4706  array->set_length(length);
4707  MemsetPointer(array->data_start(), filler, length);
4708  return array;
4709 }
4710 
4711 
4712 MaybeObject* Heap::AllocateFixedArray(int length, PretenureFlag pretenure) {
4713  return AllocateFixedArrayWithFiller(this,
4714  length,
4715  pretenure,
4716  undefined_value());
4717 }
4718 
4719 
4720 MaybeObject* Heap::AllocateFixedArrayWithHoles(int length,
4721  PretenureFlag pretenure) {
4722  return AllocateFixedArrayWithFiller(this,
4723  length,
4724  pretenure,
4725  the_hole_value());
4726 }
4727 
4728 
4729 MaybeObject* Heap::AllocateUninitializedFixedArray(int length) {
4730  if (length == 0) return empty_fixed_array();
4731 
4732  Object* obj;
4733  { MaybeObject* maybe_obj = AllocateRawFixedArray(length);
4734  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
4735  }
4736 
4737  reinterpret_cast<FixedArray*>(obj)->set_map_no_write_barrier(
4738  fixed_array_map());
4739  FixedArray::cast(obj)->set_length(length);
4740  return obj;
4741 }
4742 
4743 
4744 MaybeObject* Heap::AllocateEmptyFixedDoubleArray() {
4745  int size = FixedDoubleArray::SizeFor(0);
4746  Object* result;
4747  { MaybeObject* maybe_result =
4748  AllocateRaw(size, OLD_DATA_SPACE, OLD_DATA_SPACE);
4749  if (!maybe_result->ToObject(&result)) return maybe_result;
4750  }
4751  // Initialize the object.
4752  reinterpret_cast<FixedDoubleArray*>(result)->set_map_no_write_barrier(
4753  fixed_double_array_map());
4754  reinterpret_cast<FixedDoubleArray*>(result)->set_length(0);
4755  return result;
4756 }
4757 
4758 
4759 MaybeObject* Heap::AllocateUninitializedFixedDoubleArray(
4760  int length,
4761  PretenureFlag pretenure) {
4762  if (length == 0) return empty_fixed_array();
4763 
4764  Object* elements_object;
4765  MaybeObject* maybe_obj = AllocateRawFixedDoubleArray(length, pretenure);
4766  if (!maybe_obj->ToObject(&elements_object)) return maybe_obj;
4767  FixedDoubleArray* elements =
4768  reinterpret_cast<FixedDoubleArray*>(elements_object);
4769 
4770  elements->set_map_no_write_barrier(fixed_double_array_map());
4771  elements->set_length(length);
4772  return elements;
4773 }
4774 
4775 
4776 MaybeObject* Heap::AllocateFixedDoubleArrayWithHoles(
4777  int length,
4778  PretenureFlag pretenure) {
4779  if (length == 0) return empty_fixed_array();
4780 
4781  Object* elements_object;
4782  MaybeObject* maybe_obj = AllocateRawFixedDoubleArray(length, pretenure);
4783  if (!maybe_obj->ToObject(&elements_object)) return maybe_obj;
4784  FixedDoubleArray* elements =
4785  reinterpret_cast<FixedDoubleArray*>(elements_object);
4786 
4787  for (int i = 0; i < length; ++i) {
4788  elements->set_the_hole(i);
4789  }
4790 
4791  elements->set_map_no_write_barrier(fixed_double_array_map());
4792  elements->set_length(length);
4793  return elements;
4794 }
4795 
4796 
4797 MaybeObject* Heap::AllocateRawFixedDoubleArray(int length,
4798  PretenureFlag pretenure) {
4799  if (length < 0 || length > FixedDoubleArray::kMaxLength) {
4800  return Failure::OutOfMemoryException();
4801  }
4802 
4803  AllocationSpace space =
4804  (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
4805  int size = FixedDoubleArray::SizeFor(length);
4806 
4807 #ifndef V8_HOST_ARCH_64_BIT
4808  size += kPointerSize;
4809 #endif
4810 
4811  if (space == NEW_SPACE && size > kMaxObjectSizeInNewSpace) {
4812  // Too big for new space.
4813  space = LO_SPACE;
4814  } else if (space == OLD_DATA_SPACE &&
4815  size > Page::kMaxNonCodeHeapObjectSize) {
4816  // Too big for old data space.
4817  space = LO_SPACE;
4818  }
4819 
4820  AllocationSpace retry_space =
4821  (size <= Page::kMaxNonCodeHeapObjectSize) ? OLD_DATA_SPACE : LO_SPACE;
4822 
4823  HeapObject* object;
4824  { MaybeObject* maybe_object = AllocateRaw(size, space, retry_space);
4825  if (!maybe_object->To<HeapObject>(&object)) return maybe_object;
4826  }
4827 
4828  return EnsureDoubleAligned(this, object, size);
4829 }
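A sketch of the padding rule applied above for double arrays (illustrative only; aligned_size is an assumed helper and the constants are stand-ins). On 32-bit hosts the raw allocation is grown by one word so that the double payload can later be shifted onto an 8-byte boundary by EnsureDoubleAligned.

// Illustrative sketch, not V8 code.
#include <cstddef>

size_t aligned_size(size_t raw_size, size_t pointer_size) {
  const size_t kDoubleAlignment = 8;
  // A 4-byte-pointer target needs one extra word of slack for realignment.
  return (pointer_size < kDoubleAlignment) ? raw_size + pointer_size : raw_size;
}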
4830 
4831 
4832 MaybeObject* Heap::AllocateHashTable(int length, PretenureFlag pretenure) {
4833  Object* result;
4834  { MaybeObject* maybe_result = AllocateFixedArray(length, pretenure);
4835  if (!maybe_result->ToObject(&result)) return maybe_result;
4836  }
4837  reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier(
4838  hash_table_map());
4839  ASSERT(result->IsHashTable());
4840  return result;
4841 }
4842 
4843 
4844 MaybeObject* Heap::AllocateGlobalContext() {
4845  Object* result;
4846  { MaybeObject* maybe_result =
4847  AllocateFixedArray(Context::GLOBAL_CONTEXT_SLOTS);
4848  if (!maybe_result->ToObject(&result)) return maybe_result;
4849  }
4850  Context* context = reinterpret_cast<Context*>(result);
4851  context->set_map_no_write_barrier(global_context_map());
4852  context->set_js_array_maps(undefined_value());
4853  ASSERT(context->IsGlobalContext());
4854  ASSERT(result->IsContext());
4855  return result;
4856 }
4857 
4858 
4859 MaybeObject* Heap::AllocateModuleContext(Context* previous,
4860  ScopeInfo* scope_info) {
4861  Object* result;
4862  { MaybeObject* maybe_result =
4863  AllocateFixedArrayWithHoles(scope_info->ContextLength(), TENURED);
4864  if (!maybe_result->ToObject(&result)) return maybe_result;
4865  }
4866  Context* context = reinterpret_cast<Context*>(result);
4867  context->set_map_no_write_barrier(module_context_map());
4868  context->set_previous(previous);
4869  context->set_extension(scope_info);
4870  context->set_global(previous->global());
4871  return context;
4872 }
4873 
4874 
4875 MaybeObject* Heap::AllocateFunctionContext(int length, JSFunction* function) {
4876  ASSERT(length >= Context::MIN_CONTEXT_SLOTS);
4877  Object* result;
4878  { MaybeObject* maybe_result = AllocateFixedArray(length);
4879  if (!maybe_result->ToObject(&result)) return maybe_result;
4880  }
4881  Context* context = reinterpret_cast<Context*>(result);
4882  context->set_map_no_write_barrier(function_context_map());
4883  context->set_closure(function);
4884  context->set_previous(function->context());
4885  context->set_extension(NULL);
4886  context->set_global(function->context()->global());
4887  return context;
4888 }
4889 
4890 
4891 MaybeObject* Heap::AllocateCatchContext(JSFunction* function,
4892  Context* previous,
4893  String* name,
4894  Object* thrown_object) {
4895  STATIC_ASSERT(Context::MIN_CONTEXT_SLOTS == Context::THROWN_OBJECT_INDEX);
4896  Object* result;
4897  { MaybeObject* maybe_result =
4898  AllocateFixedArray(Context::MIN_CONTEXT_SLOTS + 1);
4899  if (!maybe_result->ToObject(&result)) return maybe_result;
4900  }
4901  Context* context = reinterpret_cast<Context*>(result);
4902  context->set_map_no_write_barrier(catch_context_map());
4903  context->set_closure(function);
4904  context->set_previous(previous);
4905  context->set_extension(name);
4906  context->set_global(previous->global());
4907  context->set(Context::THROWN_OBJECT_INDEX, thrown_object);
4908  return context;
4909 }
4910 
4911 
4912 MaybeObject* Heap::AllocateWithContext(JSFunction* function,
4913  Context* previous,
4914  JSObject* extension) {
4915  Object* result;
4916  { MaybeObject* maybe_result = AllocateFixedArray(Context::MIN_CONTEXT_SLOTS);
4917  if (!maybe_result->ToObject(&result)) return maybe_result;
4918  }
4919  Context* context = reinterpret_cast<Context*>(result);
4920  context->set_map_no_write_barrier(with_context_map());
4921  context->set_closure(function);
4922  context->set_previous(previous);
4923  context->set_extension(extension);
4924  context->set_global(previous->global());
4925  return context;
4926 }
4927 
4928 
4930  Context* previous,
4931  ScopeInfo* scope_info) {
4932  Object* result;
4933  { MaybeObject* maybe_result =
4934  AllocateFixedArrayWithHoles(scope_info->ContextLength());
4935  if (!maybe_result->ToObject(&result)) return maybe_result;
4936  }
4937  Context* context = reinterpret_cast<Context*>(result);
4938  context->set_map_no_write_barrier(block_context_map());
4939  context->set_closure(function);
4940  context->set_previous(previous);
4941  context->set_extension(scope_info);
4942  context->set_global(previous->global());
4943  return context;
4944 }
4945 
4946 
4947 MaybeObject* Heap::AllocateScopeInfo(int length) {
4948  FixedArray* scope_info;
4949  MaybeObject* maybe_scope_info = AllocateFixedArray(length, TENURED);
4950  if (!maybe_scope_info->To(&scope_info)) return maybe_scope_info;
4951  scope_info->set_map_no_write_barrier(scope_info_map());
4952  return scope_info;
4953 }
4954 
4955 
4956 MaybeObject* Heap::AllocateStruct(InstanceType type) {
4957  Map* map;
4958  switch (type) {
4959 #define MAKE_CASE(NAME, Name, name) \
4960  case NAME##_TYPE: map = name##_map(); break;
4961  STRUCT_LIST(MAKE_CASE)
4962 #undef MAKE_CASE
4963  default:
4964  UNREACHABLE();
4965  return Failure::InternalError();
4966  }
4967  int size = map->instance_size();
4968  AllocationSpace space =
4969  (size > Page::kMaxNonCodeHeapObjectSize) ? LO_SPACE : OLD_POINTER_SPACE;
4970  Object* result;
4971  { MaybeObject* maybe_result = Allocate(map, space);
4972  if (!maybe_result->ToObject(&result)) return maybe_result;
4973  }
4974  Struct::cast(result)->InitializeBody(size);
4975  return result;
4976 }
4977 
4978 
4978 
4979 bool Heap::IsHeapIterable() {
4980  return (!old_pointer_space()->was_swept_conservatively() &&
4981  !old_data_space()->was_swept_conservatively());
4982 }
4983 
4984 
4985 void Heap::EnsureHeapIsIterable() {
4986  ASSERT(IsAllocationAllowed());
4987  if (!IsHeapIterable()) {
4988  CollectAllGarbage(kMakeHeapIterableMask, "Heap::EnsureHeapIsIterable");
4989  }
4990  ASSERT(IsHeapIterable());
4991 }
4992 
4993 
4994 void Heap::AdvanceIdleIncrementalMarking(intptr_t step_size) {
4995  incremental_marking()->Step(step_size,
4996  IncrementalMarking::NO_GC_VIA_STACK_GUARD);
4997 
4998  if (incremental_marking()->IsComplete()) {
4999  bool uncommit = false;
5000  if (gc_count_at_last_idle_gc_ == gc_count_) {
5001  // No GC since the last full GC, the mutator is probably not active.
5002  isolate_->compilation_cache()->Clear();
5003  uncommit = true;
5004  }
5005  CollectAllGarbage(kNoGCFlags, "idle notification: finalize incremental");
5006  gc_count_at_last_idle_gc_ = gc_count_;
5007  if (uncommit) {
5008  new_space_.Shrink();
5009  UncommitFromSpace();
5010  }
5011  }
5012 }
5013 
5014 
5015 bool Heap::IdleNotification(int hint) {
5016  const int kMaxHint = 1000;
5017  intptr_t size_factor = Min(Max(hint, 20), kMaxHint) / 4;
5018  // The size factor is in range [5..250]. The numbers here are chosen from
5019  // experiments. If you change them, make sure to test with
5020  // chrome/performance_ui_tests --gtest_filter="GeneralMixMemoryTest.*
5021  intptr_t step_size = size_factor * IncrementalMarking::kAllocatedThreshold;
5022 
5023  if (contexts_disposed_ > 0) {
5024  if (hint >= kMaxHint) {
5025  // The embedder is requesting a lot of GC work after context disposal,
5026  // we age inline caches so that they don't keep objects from
5027  // the old context alive.
5028  AgeInlineCaches();
5029  }
5030  int mark_sweep_time = Min(TimeMarkSweepWouldTakeInMs(), 1000);
5031  if (hint >= mark_sweep_time && !FLAG_expose_gc &&
5032  incremental_marking()->IsStopped()) {
5033  HistogramTimerScope scope(isolate_->counters()->gc_context());
5034  CollectAllGarbage(kReduceMemoryFootprintMask,
5035  "idle notification: contexts disposed");
5036  } else {
5037  AdvanceIdleIncrementalMarking(step_size);
5038  contexts_disposed_ = 0;
5039  }
5040  // Make sure that we have no pending context disposals.
5041  // Take into account that we might have decided to delay full collection
5042  // because incremental marking is in progress.
5043  ASSERT((contexts_disposed_ == 0) || !incremental_marking()->IsStopped());
5044  // After context disposal there is likely a lot of garbage remaining, reset
5045  // the idle notification counters in order to trigger more incremental GCs
5046  // on subsequent idle notifications.
5047  StartIdleRound();
5048  return false;
5049  }
5050 
5051  if (!FLAG_incremental_marking || FLAG_expose_gc || Serializer::enabled()) {
5052  return IdleGlobalGC();
5053  }
5054 
5055  // By doing small chunks of GC work in each IdleNotification, we perform
5056  // a round of incremental GCs and after that wait until the mutator
5057  // creates enough garbage to justify a new round.
5058  // An incremental GC progresses as follows:
5059  // 1. many incremental marking steps,
5060  // 2. one old space mark-sweep-compact,
5061  // 3. many lazy sweep steps.
5062  // Use mark-sweep-compact events to count incremental GCs in a round.
5063 
5064 
5065  if (incremental_marking()->IsStopped()) {
5066  if (!IsSweepingComplete() &&
5067  !AdvanceSweepers(static_cast<int>(step_size))) {
5068  return false;
5069  }
5070  }
5071 
5072  if (mark_sweeps_since_idle_round_started_ >= kMaxMarkSweepsInIdleRound) {
5073  if (EnoughGarbageSinceLastIdleRound()) {
5074  StartIdleRound();
5075  } else {
5076  return true;
5077  }
5078  }
5079 
5080  int new_mark_sweeps = ms_count_ - ms_count_at_last_idle_notification_;
5081  mark_sweeps_since_idle_round_started_ += new_mark_sweeps;
5082  ms_count_at_last_idle_notification_ = ms_count_;
5083 
5084  if (mark_sweeps_since_idle_round_started_ >= kMaxMarkSweepsInIdleRound) {
5085  FinishIdleRound();
5086  return true;
5087  }
5088 
5089  if (incremental_marking()->IsStopped()) {
5090  incremental_marking()->Start();
5091  }
5092 
5093  AdvanceIdleIncrementalMarking(step_size);
5094  return false;
5095 }
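// Illustrative sketch (not part of heap.cc): embedders drive the logic above
// through the public hook v8::V8::IdleNotification(hint), typically from an
// event loop when there is spare time. The loop shape, the helper
// EmbedderHasIdleTime() and the hint of 100 below are assumptions made for
// the example only.
//
//   bool done = false;
//   while (!done && EmbedderHasIdleTime()) {
//     // Returns true once V8 sees no further benefit in more idle GC work.
//     done = v8::V8::IdleNotification(100);
//   }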
5096 
5097 
5098 bool Heap::IdleGlobalGC() {
5099  static const int kIdlesBeforeScavenge = 4;
5100  static const int kIdlesBeforeMarkSweep = 7;
5101  static const int kIdlesBeforeMarkCompact = 8;
5102  static const int kMaxIdleCount = kIdlesBeforeMarkCompact + 1;
5103  static const unsigned int kGCsBetweenCleanup = 4;
5104 
5105  if (!last_idle_notification_gc_count_init_) {
5106  last_idle_notification_gc_count_ = gc_count_;
5107  last_idle_notification_gc_count_init_ = true;
5108  }
5109 
5110  bool uncommit = true;
5111  bool finished = false;
5112 
5113  // Reset the number of idle notifications received when a number of
5114  // GCs have taken place. This allows another round of cleanup based
5115  // on idle notifications if enough work has been carried out to
5116  // provoke a number of garbage collections.
5117  if (gc_count_ - last_idle_notification_gc_count_ < kGCsBetweenCleanup) {
5118  number_idle_notifications_ =
5119  Min(number_idle_notifications_ + 1, kMaxIdleCount);
5120  } else {
5121  number_idle_notifications_ = 0;
5122  last_idle_notification_gc_count_ = gc_count_;
5123  }
5124 
5125  if (number_idle_notifications_ == kIdlesBeforeScavenge) {
5126  CollectGarbage(NEW_SPACE, "idle notification");
5127  new_space_.Shrink();
5128  last_idle_notification_gc_count_ = gc_count_;
5129  } else if (number_idle_notifications_ == kIdlesBeforeMarkSweep) {
5130  // Before doing the mark-sweep collections we clear the
5131  // compilation cache to avoid hanging on to source code and
5132  // generated code for cached functions.
5133  isolate_->compilation_cache()->Clear();
5134 
5135  CollectAllGarbage(kReduceMemoryFootprintMask, "idle notification");
5136  new_space_.Shrink();
5137  last_idle_notification_gc_count_ = gc_count_;
5138 
5139  } else if (number_idle_notifications_ == kIdlesBeforeMarkCompact) {
5140  CollectAllGarbage(kReduceMemoryFootprintMask, "idle notification");
5141  new_space_.Shrink();
5142  last_idle_notification_gc_count_ = gc_count_;
5143  number_idle_notifications_ = 0;
5144  finished = true;
5145  } else if (number_idle_notifications_ > kIdlesBeforeMarkCompact) {
5146  // If we have received more than kIdlesBeforeMarkCompact idle
5147  // notifications we do not perform any cleanup because we don't
5148  // expect to gain much by doing so.
5149  finished = true;
5150  }
5151 
5152  if (uncommit) UncommitFromSpace();
5153 
5154  return finished;
5155 }
5156 
5157 
5158 #ifdef DEBUG
5159 
5160 void Heap::Print() {
5161  if (!HasBeenSetUp()) return;
5162  isolate()->PrintStack();
5163  AllSpaces spaces;
5164  for (Space* space = spaces.next(); space != NULL; space = spaces.next())
5165  space->Print();
5166 }
5167 
5168 
5169 void Heap::ReportCodeStatistics(const char* title) {
5170  PrintF(">>>>>> Code Stats (%s) >>>>>>\n", title);
5171  PagedSpace::ResetCodeStatistics();
5172  // We do not look for code in new space, map space, or old space. If code
5173  // somehow ends up in those spaces, we would miss it here.
5174  code_space_->CollectCodeStatistics();
5175  lo_space_->CollectCodeStatistics();
5176  PagedSpace::ReportCodeStatistics();
5177 }
5178 
5179 
5180 // This function expects that NewSpace's allocated objects histogram is
5181 // populated (via a call to CollectStatistics or else as a side effect of a
5182 // just-completed scavenge collection).
5183 void Heap::ReportHeapStatistics(const char* title) {
5184  USE(title);
5185  PrintF(">>>>>> =============== %s (%d) =============== >>>>>>\n",
5186  title, gc_count_);
5187  PrintF("old_gen_promotion_limit_ %" V8_PTR_PREFIX "d\n",
5188  old_gen_promotion_limit_);
5189  PrintF("old_gen_allocation_limit_ %" V8_PTR_PREFIX "d\n",
5190  old_gen_allocation_limit_);
5191  PrintF("old_gen_limit_factor_ %d\n", old_gen_limit_factor_);
5192 
5193  PrintF("\n");
5194  PrintF("Number of handles : %d\n", HandleScope::NumberOfHandles());
5195  isolate_->global_handles()->PrintStats();
5196  PrintF("\n");
5197 
5198  PrintF("Heap statistics : ");
5199  isolate_->memory_allocator()->ReportStatistics();
5200  PrintF("To space : ");
5201  new_space_.ReportStatistics();
5202  PrintF("Old pointer space : ");
5203  old_pointer_space_->ReportStatistics();
5204  PrintF("Old data space : ");
5205  old_data_space_->ReportStatistics();
5206  PrintF("Code space : ");
5207  code_space_->ReportStatistics();
5208  PrintF("Map space : ");
5209  map_space_->ReportStatistics();
5210  PrintF("Cell space : ");
5211  cell_space_->ReportStatistics();
5212  PrintF("Large object space : ");
5213  lo_space_->ReportStatistics();
5214  PrintF(">>>>>> ========================================= >>>>>>\n");
5215 }
5216 
5217 #endif // DEBUG
5218 
5219 bool Heap::Contains(HeapObject* value) {
5220  return Contains(value->address());
5221 }
5222 
5223 
5224 bool Heap::Contains(Address addr) {
5225  if (OS::IsOutsideAllocatedSpace(addr)) return false;
5226  return HasBeenSetUp() &&
5227  (new_space_.ToSpaceContains(addr) ||
5228  old_pointer_space_->Contains(addr) ||
5229  old_data_space_->Contains(addr) ||
5230  code_space_->Contains(addr) ||
5231  map_space_->Contains(addr) ||
5232  cell_space_->Contains(addr) ||
5233  lo_space_->SlowContains(addr));
5234 }
5235 
5236 
5237 bool Heap::InSpace(HeapObject* value, AllocationSpace space) {
5238  return InSpace(value->address(), space);
5239 }
5240 
5241 
5242 bool Heap::InSpace(Address addr, AllocationSpace space) {
5243  if (OS::IsOutsideAllocatedSpace(addr)) return false;
5244  if (!HasBeenSetUp()) return false;
5245 
5246  switch (space) {
5247  case NEW_SPACE:
5248  return new_space_.ToSpaceContains(addr);
5249  case OLD_POINTER_SPACE:
5250  return old_pointer_space_->Contains(addr);
5251  case OLD_DATA_SPACE:
5252  return old_data_space_->Contains(addr);
5253  case CODE_SPACE:
5254  return code_space_->Contains(addr);
5255  case MAP_SPACE:
5256  return map_space_->Contains(addr);
5257  case CELL_SPACE:
5258  return cell_space_->Contains(addr);
5259  case LO_SPACE:
5260  return lo_space_->SlowContains(addr);
5261  }
5262 
5263  return false;
5264 }
5265 
5266 
5267 #ifdef DEBUG
5268 void Heap::Verify() {
5269  ASSERT(HasBeenSetUp());
5270 
5271  store_buffer()->Verify();
5272 
5273  VerifyPointersVisitor visitor;
5274  IterateRoots(&visitor, VISIT_ONLY_STRONG);
5275 
5276  new_space_.Verify();
5277 
5278  old_pointer_space_->Verify(&visitor);
5279  map_space_->Verify(&visitor);
5280 
5281  VerifyPointersVisitor no_dirty_regions_visitor;
5282  old_data_space_->Verify(&no_dirty_regions_visitor);
5283  code_space_->Verify(&no_dirty_regions_visitor);
5284  cell_space_->Verify(&no_dirty_regions_visitor);
5285 
5286  lo_space_->Verify();
5287 
5288  VerifyNoAccessorPairSharing();
5289 }
5290 
5291 
5292 void Heap::VerifyNoAccessorPairSharing() {
5293  // Verification is done in 2 phases: First we mark all AccessorPairs, checking
5294  // that we mark only unmarked pairs, then we clear all marks, restoring the
5295  // initial state. We use the Smi tag of the AccessorPair's getter as the
5296  // marking bit, because we can never see a Smi as the getter.
5297  for (int phase = 0; phase < 2; phase++) {
5298  HeapObjectIterator iter(map_space());
5299  for (HeapObject* obj = iter.Next(); obj != NULL; obj = iter.Next()) {
5300  if (obj->IsMap()) {
5301  DescriptorArray* descs = Map::cast(obj)->instance_descriptors();
5302  for (int i = 0; i < descs->number_of_descriptors(); i++) {
5303  if (descs->GetType(i) == CALLBACKS &&
5304  descs->GetValue(i)->IsAccessorPair()) {
5305  AccessorPair* accessors = AccessorPair::cast(descs->GetValue(i));
5306  uintptr_t before = reinterpret_cast<intptr_t>(accessors->getter());
5307  uintptr_t after = (phase == 0) ?
5308  ((before & ~kSmiTagMask) | kSmiTag) :
5309  ((before & ~kHeapObjectTag) | kHeapObjectTag);
5310  CHECK(before != after);
5311  accessors->set_getter(reinterpret_cast<Object*>(after));
5312  }
5313  }
5314  }
5315  }
5316  }
5317 }
5318 #endif // DEBUG
5319 
5320 
5321 MaybeObject* Heap::LookupSymbol(Vector<const char> string) {
5322  Object* symbol = NULL;
5323  Object* new_table;
5324  { MaybeObject* maybe_new_table =
5325  symbol_table()->LookupSymbol(string, &symbol);
5326  if (!maybe_new_table->ToObject(&new_table)) return maybe_new_table;
5327  }
5328  // Can't use set_symbol_table because SymbolTable::cast knows that
5329  // SymbolTable is a singleton and checks for identity.
5330  roots_[kSymbolTableRootIndex] = new_table;
5331  ASSERT(symbol != NULL);
5332  return symbol;
5333 }
5334 
5335 
5336 MaybeObject* Heap::LookupAsciiSymbol(Vector<const char> string) {
5337  Object* symbol = NULL;
5338  Object* new_table;
5339  { MaybeObject* maybe_new_table =
5340  symbol_table()->LookupAsciiSymbol(string, &symbol);
5341  if (!maybe_new_table->ToObject(&new_table)) return maybe_new_table;
5342  }
5343  // Can't use set_symbol_table because SymbolTable::cast knows that
5344  // SymbolTable is a singleton and checks for identity.
5345  roots_[kSymbolTableRootIndex] = new_table;
5346  ASSERT(symbol != NULL);
5347  return symbol;
5348 }
5349 
5350 
5351 MaybeObject* Heap::LookupAsciiSymbol(Handle<SeqAsciiString> string,
5352  int from,
5353  int length) {
5354  Object* symbol = NULL;
5355  Object* new_table;
5356  { MaybeObject* maybe_new_table =
5357  symbol_table()->LookupSubStringAsciiSymbol(string,
5358  from,
5359  length,
5360  &symbol);
5361  if (!maybe_new_table->ToObject(&new_table)) return maybe_new_table;
5362  }
5363  // Can't use set_symbol_table because SymbolTable::cast knows that
5364  // SymbolTable is a singleton and checks for identity.
5365  roots_[kSymbolTableRootIndex] = new_table;
5366  ASSERT(symbol != NULL);
5367  return symbol;
5368 }
5369 
5370 
5371 MaybeObject* Heap::LookupTwoByteSymbol(Vector<const uc16> string) {
5372  Object* symbol = NULL;
5373  Object* new_table;
5374  { MaybeObject* maybe_new_table =
5375  symbol_table()->LookupTwoByteSymbol(string, &symbol);
5376  if (!maybe_new_table->ToObject(&new_table)) return maybe_new_table;
5377  }
5378  // Can't use set_symbol_table because SymbolTable::cast knows that
5379  // SymbolTable is a singleton and checks for identity.
5380  roots_[kSymbolTableRootIndex] = new_table;
5381  ASSERT(symbol != NULL);
5382  return symbol;
5383 }
5384 
5385 
5386 MaybeObject* Heap::LookupSymbol(String* string) {
5387  if (string->IsSymbol()) return string;
5388  Object* symbol = NULL;
5389  Object* new_table;
5390  { MaybeObject* maybe_new_table =
5391  symbol_table()->LookupString(string, &symbol);
5392  if (!maybe_new_table->ToObject(&new_table)) return maybe_new_table;
5393  }
5394  // Can't use set_symbol_table because SymbolTable::cast knows that
5395  // SymbolTable is a singleton and checks for identity.
5396  roots_[kSymbolTableRootIndex] = new_table;
5397  ASSERT(symbol != NULL);
5398  return symbol;
5399 }
5400 
5401 
5402 bool Heap::LookupSymbolIfExists(String* string, String** symbol) {
5403  if (string->IsSymbol()) {
5404  *symbol = string;
5405  return true;
5406  }
5407  return symbol_table()->LookupSymbolIfExists(string, symbol);
5408 }
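// Illustrative sketch (not part of heap.cc): the overloads above back symbol
// interning as seen from the embedder, e.g. through v8::String::NewSymbol.
// Looking up equal strings yields the same interned symbol object.
//
//   v8::HandleScope scope;
//   v8::Local<v8::String> a = v8::String::NewSymbol("length");
//   v8::Local<v8::String> b = v8::String::NewSymbol("length");
//   // a->Equals(b) is true; both handles refer to the single symbol stored
//   // in the symbol table maintained by the lookups above.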
5409 
5410 
5411 #ifdef DEBUG
5412 void Heap::ZapFromSpace() {
5413  NewSpacePageIterator it(new_space_.FromSpaceStart(),
5414  new_space_.FromSpaceEnd());
5415  while (it.has_next()) {
5416  NewSpacePage* page = it.next();
5417  for (Address cursor = page->area_start(), limit = page->area_end();
5418  cursor < limit;
5419  cursor += kPointerSize) {
5420  Memory::Address_at(cursor) = kFromSpaceZapValue;
5421  }
5422  }
5423 }
5424 #endif // DEBUG
5425 
5426 
5427 void Heap::IterateAndMarkPointersToFromSpace(Address start,
5428  Address end,
5429  ObjectSlotCallback callback) {
5430  Address slot_address = start;
5431 
5432  // We are not collecting slots on new space objects during mutation
5433  // thus we have to scan for pointers to evacuation candidates when we
5434  // promote objects. But we should not record any slots in non-black
5435  // objects. A grey object's slots will be rescanned anyway.
5436  // A white object might not survive until the end of the collection, so
5437  // recording its slots would violate the invariant.
5438  bool record_slots = false;
5439  if (incremental_marking()->IsCompacting()) {
5440  MarkBit mark_bit = Marking::MarkBitFrom(HeapObject::FromAddress(start));
5441  record_slots = Marking::IsBlack(mark_bit);
5442  }
5443 
5444  while (slot_address < end) {
5445  Object** slot = reinterpret_cast<Object**>(slot_address);
5446  Object* object = *slot;
5447  // If the store buffer becomes overfull we mark pages as being exempt from
5448  // the store buffer. These pages are scanned to find pointers that point
5449  // to the new space. In that case we may hit newly promoted objects and
5450  // fix the pointers before the promotion queue gets to them. Thus the 'if'.
5451  if (object->IsHeapObject()) {
5452  if (Heap::InFromSpace(object)) {
5453  callback(reinterpret_cast<HeapObject**>(slot),
5454  HeapObject::cast(object));
5455  Object* new_object = *slot;
5456  if (InNewSpace(new_object)) {
5457  SLOW_ASSERT(Heap::InToSpace(new_object));
5458  SLOW_ASSERT(new_object->IsHeapObject());
5459  store_buffer_.EnterDirectlyIntoStoreBuffer(
5460  reinterpret_cast<Address>(slot));
5461  }
5462  SLOW_ASSERT(!MarkCompactCollector::IsOnEvacuationCandidate(new_object));
5463  } else if (record_slots &&
5464  MarkCompactCollector::IsOnEvacuationCandidate(object)) {
5465  mark_compact_collector()->RecordSlot(slot, slot, object);
5466  }
5467  }
5468  slot_address += kPointerSize;
5469  }
5470 }
5471 
5472 
5473 #ifdef DEBUG
5474 typedef bool (*CheckStoreBufferFilter)(Object** addr);
5475 
5476 
5477 bool IsAMapPointerAddress(Object** addr) {
5478  uintptr_t a = reinterpret_cast<uintptr_t>(addr);
5479  int mod = a % Map::kSize;
5480  return mod >= Map::kPointerFieldsBeginOffset &&
5481  mod < Map::kPointerFieldsEndOffset;
5482 }
5483 
5484 
5485 bool EverythingsAPointer(Object** addr) {
5486  return true;
5487 }
5488 
5489 
5490 static void CheckStoreBuffer(Heap* heap,
5491  Object** current,
5492  Object** limit,
5493  Object**** store_buffer_position,
5494  Object*** store_buffer_top,
5495  CheckStoreBufferFilter filter,
5496  Address special_garbage_start,
5497  Address special_garbage_end) {
5498  Map* free_space_map = heap->free_space_map();
5499  for ( ; current < limit; current++) {
5500  Object* o = *current;
5501  Address current_address = reinterpret_cast<Address>(current);
5502  // Skip free space.
5503  if (o == free_space_map) {
5504  Address current_address = reinterpret_cast<Address>(current);
5505  FreeSpace* free_space =
5506  FreeSpace::cast(HeapObject::FromAddress(current_address));
5507  int skip = free_space->Size();
5508  ASSERT(current_address + skip <= reinterpret_cast<Address>(limit));
5509  ASSERT(skip > 0);
5510  current_address += skip - kPointerSize;
5511  current = reinterpret_cast<Object**>(current_address);
5512  continue;
5513  }
5514  // Skip the current linear allocation space between top and limit, which
5515  // is not marked with the free space map but can contain junk.
5516  if (current_address == special_garbage_start &&
5517  special_garbage_end != special_garbage_start) {
5518  current_address = special_garbage_end - kPointerSize;
5519  current = reinterpret_cast<Object**>(current_address);
5520  continue;
5521  }
5522  if (!(*filter)(current)) continue;
5523  ASSERT(current_address < special_garbage_start ||
5524  current_address >= special_garbage_end);
5525  ASSERT(reinterpret_cast<uintptr_t>(o) != kFreeListZapValue);
5526  // We have to check that the pointer does not point into new space
5527  // without trying to cast it to a heap object since the hash field of
5528  // a string can contain values like 1 and 3 which are tagged null
5529  // pointers.
5530  if (!heap->InNewSpace(o)) continue;
5531  while (**store_buffer_position < current &&
5532  *store_buffer_position < store_buffer_top) {
5533  (*store_buffer_position)++;
5534  }
5535  if (**store_buffer_position != current ||
5536  *store_buffer_position == store_buffer_top) {
5537  Object** obj_start = current;
5538  while (!(*obj_start)->IsMap()) obj_start--;
5539  UNREACHABLE();
5540  }
5541  }
5542 }
5543 
5544 
5545 // Check that the store buffer contains all intergenerational pointers by
5546 // scanning a page and ensuring that all pointers to young space are in the
5547 // store buffer.
5548 void Heap::OldPointerSpaceCheckStoreBuffer() {
5549  OldSpace* space = old_pointer_space();
5550  PageIterator pages(space);
5551 
5552  store_buffer()->SortUniq();
5553 
5554  while (pages.has_next()) {
5555  Page* page = pages.next();
5556  Object** current = reinterpret_cast<Object**>(page->area_start());
5557 
5558  Address end = page->area_end();
5559 
5560  Object*** store_buffer_position = store_buffer()->Start();
5561  Object*** store_buffer_top = store_buffer()->Top();
5562 
5563  Object** limit = reinterpret_cast<Object**>(end);
5564  CheckStoreBuffer(this,
5565  current,
5566  limit,
5567  &store_buffer_position,
5568  store_buffer_top,
5569  &EverythingsAPointer,
5570  space->top(),
5571  space->limit());
5572  }
5573 }
5574 
5575 
5576 void Heap::MapSpaceCheckStoreBuffer() {
5577  MapSpace* space = map_space();
5578  PageIterator pages(space);
5579 
5580  store_buffer()->SortUniq();
5581 
5582  while (pages.has_next()) {
5583  Page* page = pages.next();
5584  Object** current = reinterpret_cast<Object**>(page->area_start());
5585 
5586  Address end = page->area_end();
5587 
5588  Object*** store_buffer_position = store_buffer()->Start();
5589  Object*** store_buffer_top = store_buffer()->Top();
5590 
5591  Object** limit = reinterpret_cast<Object**>(end);
5592  CheckStoreBuffer(this,
5593  current,
5594  limit,
5595  &store_buffer_position,
5596  store_buffer_top,
5597  &IsAMapPointerAddress,
5598  space->top(),
5599  space->limit());
5600  }
5601 }
5602 
5603 
5604 void Heap::LargeObjectSpaceCheckStoreBuffer() {
5605  LargeObjectIterator it(lo_space());
5606  for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) {
5607  // We only have code, sequential strings, or fixed arrays in large
5608  // object space, and only fixed arrays can possibly contain pointers to
5609  // the young generation.
5610  if (object->IsFixedArray()) {
5611  Object*** store_buffer_position = store_buffer()->Start();
5612  Object*** store_buffer_top = store_buffer()->Top();
5613  Object** current = reinterpret_cast<Object**>(object->address());
5614  Object** limit =
5615  reinterpret_cast<Object**>(object->address() + object->Size());
5616  CheckStoreBuffer(this,
5617  current,
5618  limit,
5619  &store_buffer_position,
5620  store_buffer_top,
5621  &EverythingsAPointer,
5622  NULL,
5623  NULL);
5624  }
5625  }
5626 }
5627 #endif
5628 
5629 
5630 void Heap::IterateRoots(ObjectVisitor* v, VisitMode mode) {
5631  IterateStrongRoots(v, mode);
5632  IterateWeakRoots(v, mode);
5633 }
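// Illustrative sketch (not part of heap.cc): root iteration is always driven
// by an ObjectVisitor, exactly like the debug visitors later in this file. A
// minimal visitor that only counts strong root slots ("heap" below is assumed
// to be a Heap*):
//
//   class CountingVisitor : public ObjectVisitor {
//    public:
//     CountingVisitor() : count_(0) {}
//     void VisitPointers(Object** start, Object** end) {
//       count_ += static_cast<int>(end - start);
//     }
//     int count() const { return count_; }
//    private:
//     int count_;
//   };
//
//   CountingVisitor visitor;
//   heap->IterateRoots(&visitor, VISIT_ONLY_STRONG);
//   // visitor.count() is the number of strong root slots that were visited.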
5634 
5635 
5636 void Heap::IterateWeakRoots(ObjectVisitor* v, VisitMode mode) {
5637  v->VisitPointer(reinterpret_cast<Object**>(&roots_[kSymbolTableRootIndex]));
5638  v->Synchronize(VisitorSynchronization::kSymbolTable);
5639  if (mode != VISIT_ALL_IN_SCAVENGE &&
5640  mode != VISIT_ALL_IN_SWEEP_NEWSPACE) {
5641  // Scavenge collections have special processing for this.
5642  external_string_table_.Iterate(v);
5643  }
5644  v->Synchronize(VisitorSynchronization::kExternalStringsTable);
5645 }
5646 
5647 
5648 void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) {
5649  v->VisitPointers(&roots_[0], &roots_[kStrongRootListLength]);
5650  v->Synchronize(VisitorSynchronization::kStrongRootList);
5651 
5652  v->VisitPointer(BitCast<Object**>(&hidden_symbol_));
5653  v->Synchronize(VisitorSynchronization::kSymbol);
5654 
5655  isolate_->bootstrapper()->Iterate(v);
5656  v->Synchronize(VisitorSynchronization::kBootstrapper);
5657  isolate_->Iterate(v);
5658  v->Synchronize(VisitorSynchronization::kTop);
5659  Relocatable::Iterate(v);
5660  v->Synchronize(VisitorSynchronization::kRelocatable);
5661 
5662 #ifdef ENABLE_DEBUGGER_SUPPORT
5663  isolate_->debug()->Iterate(v);
5664  if (isolate_->deoptimizer_data() != NULL) {
5665  isolate_->deoptimizer_data()->Iterate(v);
5666  }
5667 #endif
5668  v->Synchronize(VisitorSynchronization::kDebug);
5669  isolate_->compilation_cache()->Iterate(v);
5670  v->Synchronize(VisitorSynchronization::kCompilationCache);
5671 
5672  // Iterate over local handles in handle scopes.
5673  isolate_->handle_scope_implementer()->Iterate(v);
5674  v->Synchronize(VisitorSynchronization::kHandleScope);
5675 
5676  // Iterate over the builtin code objects and code stubs in the
5677  // heap. Note that it is not necessary to iterate over code objects
5678  // on scavenge collections.
5679  if (mode != VISIT_ALL_IN_SCAVENGE) {
5680  isolate_->builtins()->IterateBuiltins(v);
5681  }
5682  v->Synchronize(VisitorSynchronization::kBuiltins);
5683 
5684  // Iterate over global handles.
5685  switch (mode) {
5686  case VISIT_ONLY_STRONG:
5687  isolate_->global_handles()->IterateStrongRoots(v);
5688  break;
5689  case VISIT_ALL_IN_SCAVENGE:
5690  isolate_->global_handles()->IterateNewSpaceStrongAndDependentRoots(v);
5691  break;
5692  case VISIT_ALL_IN_SWEEP_NEWSPACE:
5693  case VISIT_ALL:
5694  isolate_->global_handles()->IterateAllRoots(v);
5695  break;
5696  }
5697  v->Synchronize(VisitorSynchronization::kGlobalHandles);
5698 
5699  // Iterate over pointers being held by inactive threads.
5700  isolate_->thread_manager()->Iterate(v);
5701  v->Synchronize(VisitorSynchronization::kThreadManager);
5702 
5703  // Iterate over the pointers the Serialization/Deserialization code is
5704  // holding.
5705  // During garbage collection this keeps the partial snapshot cache alive.
5706  // During deserialization of the startup snapshot this creates the partial
5707  // snapshot cache and deserializes the objects it refers to. During
5708  // serialization this does nothing, since the partial snapshot cache is
5709  // empty. However the next thing we do is create the partial snapshot,
5710  // filling up the partial snapshot cache with objects it needs as we go.
5711  SerializerDeserializer::Iterate(v);
5712  // We don't do a v->Synchronize call here, because in debug mode that will
5713  // output a flag to the snapshot. However at this point the serializer and
5714  // deserializer are deliberately a little unsynchronized (see above) so the
5715  // checking of the sync flag in the snapshot would fail.
5716 }
5717 
5718 
5719 // TODO(1236194): Since the heap size is configurable on the command line
5720 // and through the API, we should gracefully handle the case that the heap
5721 // size is not big enough to fit all the initial objects.
5722 bool Heap::ConfigureHeap(int max_semispace_size,
5723  intptr_t max_old_gen_size,
5724  intptr_t max_executable_size) {
5725  if (HasBeenSetUp()) return false;
5726 
5727  if (FLAG_stress_compaction) {
5728  // This will cause more frequent GCs when stressing.
5729  max_semispace_size_ = Page::kPageSize;
5730  }
5731 
5732  if (max_semispace_size > 0) {
5733  if (max_semispace_size < Page::kPageSize) {
5734  max_semispace_size = Page::kPageSize;
5735  if (FLAG_trace_gc) {
5736  PrintF("Max semispace size cannot be less than %dkbytes\n",
5737  Page::kPageSize >> 10);
5738  }
5739  }
5740  max_semispace_size_ = max_semispace_size;
5741  }
5742 
5743  if (Snapshot::IsEnabled()) {
5744  // If we are using a snapshot we always reserve the default amount
5745  // of memory for each semispace because code in the snapshot has
5746  // write-barrier code that relies on the size and alignment of new
5747  // space. We therefore cannot use a larger max semispace size
5748  // than the default reserved semispace size.
5749  if (max_semispace_size_ > reserved_semispace_size_) {
5750  max_semispace_size_ = reserved_semispace_size_;
5751  if (FLAG_trace_gc) {
5752  PrintF("Max semispace size cannot be more than %dkbytes\n",
5753  reserved_semispace_size_ >> 10);
5754  }
5755  }
5756  } else {
5757  // If we are not using snapshots we reserve space for the actual
5758  // max semispace size.
5759  reserved_semispace_size_ = max_semispace_size_;
5760  }
5761 
5762  if (max_old_gen_size > 0) max_old_generation_size_ = max_old_gen_size;
5763  if (max_executable_size > 0) {
5764  max_executable_size_ = RoundUp(max_executable_size, Page::kPageSize);
5765  }
5766 
5767  // The max executable size must be less than or equal to the max old
5768  // generation size.
5769  if (max_executable_size_ > max_old_generation_size_) {
5770  max_executable_size_ = max_old_generation_size_;
5771  }
5772 
5773  // The new space size must be a power of two to support single-bit testing
5774  // for containment.
5775  max_semispace_size_ = RoundUpToPowerOf2(max_semispace_size_);
5776  reserved_semispace_size_ = RoundUpToPowerOf2(reserved_semispace_size_);
5777  initial_semispace_size_ = Min(initial_semispace_size_, max_semispace_size_);
5778  external_allocation_limit_ = 10 * max_semispace_size_;
5779 
5780  // The old generation is paged and needs at least one page for each space.
5781  int paged_space_count = LAST_PAGED_SPACE - FIRST_PAGED_SPACE + 1;
5782  max_old_generation_size_ = Max(static_cast<intptr_t>(paged_space_count *
5783  Page::kPageSize),
5784  RoundUp(max_old_generation_size_,
5785  Page::kPageSize));
5786 
5787  configured_ = true;
5788  return true;
5789 }
5790 
5791 
5792 bool Heap::ConfigureHeapDefault() {
5793  return ConfigureHeap(static_cast<intptr_t>(FLAG_max_new_space_size / 2) * KB,
5794  static_cast<intptr_t>(FLAG_max_old_space_size) * MB,
5795  static_cast<intptr_t>(FLAG_max_executable_size) * MB);
5796 }
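// Illustrative sketch (not part of heap.cc): besides the flags used by
// ConfigureHeapDefault(), embedders can reach ConfigureHeap() through the
// public resource-constraint API before the heap is set up. The concrete
// sizes below are arbitrary example values and the byte units are an
// assumption made for the sketch.
//
//   v8::ResourceConstraints constraints;
//   constraints.set_max_young_space_size(2 * 1024 * 1024);
//   constraints.set_max_old_space_size(192 * 1024 * 1024);
//   constraints.set_max_executable_size(96 * 1024 * 1024);
//   v8::SetResourceConstraints(&constraints);  // must happen before first use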
5797 
5798 
5799 void Heap::RecordStats(HeapStats* stats, bool take_snapshot) {
5800  *stats->start_marker = HeapStats::kStartMarker;
5801  *stats->end_marker = HeapStats::kEndMarker;
5802  *stats->new_space_size = new_space_.SizeAsInt();
5803  *stats->new_space_capacity = static_cast<int>(new_space_.Capacity());
5804  *stats->old_pointer_space_size = old_pointer_space_->SizeOfObjects();
5805  *stats->old_pointer_space_capacity = old_pointer_space_->Capacity();
5806  *stats->old_data_space_size = old_data_space_->SizeOfObjects();
5807  *stats->old_data_space_capacity = old_data_space_->Capacity();
5808  *stats->code_space_size = code_space_->SizeOfObjects();
5809  *stats->code_space_capacity = code_space_->Capacity();
5810  *stats->map_space_size = map_space_->SizeOfObjects();
5811  *stats->map_space_capacity = map_space_->Capacity();
5812  *stats->cell_space_size = cell_space_->SizeOfObjects();
5813  *stats->cell_space_capacity = cell_space_->Capacity();
5814  *stats->lo_space_size = lo_space_->Size();
5815  isolate_->global_handles()->RecordStats(stats);
5816  *stats->memory_allocator_size = isolate()->memory_allocator()->Size();
5817  *stats->memory_allocator_capacity =
5818  isolate()->memory_allocator()->Size() +
5819  isolate()->memory_allocator()->Available();
5820  *stats->os_error = OS::GetLastError();
5822  if (take_snapshot) {
5823  HeapIterator iterator;
5824  for (HeapObject* obj = iterator.next();
5825  obj != NULL;
5826  obj = iterator.next()) {
5827  InstanceType type = obj->map()->instance_type();
5828  ASSERT(0 <= type && type <= LAST_TYPE);
5829  stats->objects_per_type[type]++;
5830  stats->size_per_type[type] += obj->Size();
5831  }
5832  }
5833 }
5834 
5835 
5836 intptr_t Heap::PromotedSpaceSizeOfObjects() {
5837  return old_pointer_space_->SizeOfObjects()
5838  + old_data_space_->SizeOfObjects()
5839  + code_space_->SizeOfObjects()
5840  + map_space_->SizeOfObjects()
5841  + cell_space_->SizeOfObjects()
5842  + lo_space_->SizeOfObjects();
5843 }
5844 
5845 
5846 intptr_t Heap::PromotedExternalMemorySize() {
5847  if (amount_of_external_allocated_memory_
5848  <= amount_of_external_allocated_memory_at_last_global_gc_) return 0;
5849  return amount_of_external_allocated_memory_
5850  - amount_of_external_allocated_memory_at_last_global_gc_;
5851 }
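// Illustrative sketch (not part of heap.cc): the external-memory counters read
// above are updated from embedder code through the public API whenever memory
// that is kept alive only by JS objects is allocated or released. kBufferSize
// is a hypothetical constant used only for the example.
//
//   void* backing = malloc(kBufferSize);
//   v8::V8::AdjustAmountOfExternalAllocatedMemory(
//       static_cast<intptr_t>(kBufferSize));    // registered against the heap
//   // ... later, when the wrapper object dies:
//   free(backing);
//   v8::V8::AdjustAmountOfExternalAllocatedMemory(
//       -static_cast<intptr_t>(kBufferSize));   // released again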
5852 
5853 #ifdef DEBUG
5854 
5855 // Tags 0, 1, and 3 are used. Use 2 for marking visited HeapObject.
5856 static const int kMarkTag = 2;
5857 
5858 
5859 class HeapDebugUtils {
5860  public:
5861  explicit HeapDebugUtils(Heap* heap)
5862  : search_for_any_global_(false),
5863  search_target_(NULL),
5864  found_target_(false),
5865  object_stack_(20),
5866  heap_(heap) {
5867  }
5868 
5869  class MarkObjectVisitor : public ObjectVisitor {
5870  public:
5871  explicit MarkObjectVisitor(HeapDebugUtils* utils) : utils_(utils) { }
5872 
5873  void VisitPointers(Object** start, Object** end) {
5874  // Copy all HeapObject pointers in [start, end)
5875  for (Object** p = start; p < end; p++) {
5876  if ((*p)->IsHeapObject())
5877  utils_->MarkObjectRecursively(p);
5878  }
5879  }
5880 
5881  HeapDebugUtils* utils_;
5882  };
5883 
5884  void MarkObjectRecursively(Object** p) {
5885  if (!(*p)->IsHeapObject()) return;
5886 
5887  HeapObject* obj = HeapObject::cast(*p);
5888 
5889  Object* map = obj->map();
5890 
5891  if (!map->IsHeapObject()) return; // visited before
5892 
5893  if (found_target_) return; // stop if target found
5894  object_stack_.Add(obj);
5895  if ((search_for_any_global_ && obj->IsJSGlobalObject()) ||
5896  (!search_for_any_global_ && (obj == search_target_))) {
5897  found_target_ = true;
5898  return;
5899  }
5900 
5901  // not visited yet
5902  Map* map_p = reinterpret_cast<Map*>(HeapObject::cast(map));
5903 
5904  Address map_addr = map_p->address();
5905 
5906  obj->set_map_no_write_barrier(reinterpret_cast<Map*>(map_addr + kMarkTag));
5907 
5908  MarkObjectRecursively(&map);
5909 
5910  MarkObjectVisitor mark_visitor(this);
5911 
5912  obj->IterateBody(map_p->instance_type(), obj->SizeFromMap(map_p),
5913  &mark_visitor);
5914 
5915  if (!found_target_) // don't pop if found the target
5916  object_stack_.RemoveLast();
5917  }
5918 
5919 
5920  class UnmarkObjectVisitor : public ObjectVisitor {
5921  public:
5922  explicit UnmarkObjectVisitor(HeapDebugUtils* utils) : utils_(utils) { }
5923 
5924  void VisitPointers(Object** start, Object** end) {
5925  // Copy all HeapObject pointers in [start, end)
5926  for (Object** p = start; p < end; p++) {
5927  if ((*p)->IsHeapObject())
5928  utils_->UnmarkObjectRecursively(p);
5929  }
5930  }
5931 
5932  HeapDebugUtils* utils_;
5933  };
5934 
5935 
5936  void UnmarkObjectRecursively(Object** p) {
5937  if (!(*p)->IsHeapObject()) return;
5938 
5939  HeapObject* obj = HeapObject::cast(*p);
5940 
5941  Object* map = obj->map();
5942 
5943  if (map->IsHeapObject()) return; // unmarked already
5944 
5945  Address map_addr = reinterpret_cast<Address>(map);
5946 
5947  map_addr -= kMarkTag;
5948 
5949  ASSERT_TAG_ALIGNED(map_addr);
5950 
5951  HeapObject* map_p = HeapObject::FromAddress(map_addr);
5952 
5953  obj->set_map_no_write_barrier(reinterpret_cast<Map*>(map_p));
5954 
5955  UnmarkObjectRecursively(reinterpret_cast<Object**>(&map_p));
5956 
5957  UnmarkObjectVisitor unmark_visitor(this);
5958 
5959  obj->IterateBody(Map::cast(map_p)->instance_type(),
5960  obj->SizeFromMap(Map::cast(map_p)),
5961  &unmark_visitor);
5962  }
5963 
5964 
5965  void MarkRootObjectRecursively(Object** root) {
5966  if (search_for_any_global_) {
5967  ASSERT(search_target_ == NULL);
5968  } else {
5969  ASSERT(search_target_->IsHeapObject());
5970  }
5971  found_target_ = false;
5972  object_stack_.Clear();
5973 
5974  MarkObjectRecursively(root);
5975  UnmarkObjectRecursively(root);
5976 
5977  if (found_target_) {
5978  PrintF("=====================================\n");
5979  PrintF("==== Path to object ====\n");
5980  PrintF("=====================================\n\n");
5981 
5982  ASSERT(!object_stack_.is_empty());
5983  for (int i = 0; i < object_stack_.length(); i++) {
5984  if (i > 0) PrintF("\n |\n |\n V\n\n");
5985  Object* obj = object_stack_[i];
5986  obj->Print();
5987  }
5988  PrintF("=====================================\n");
5989  }
5990  }
5991 
5992  // Helper class for visiting HeapObjects recursively.
5993  class MarkRootVisitor: public ObjectVisitor {
5994  public:
5995  explicit MarkRootVisitor(HeapDebugUtils* utils) : utils_(utils) { }
5996 
5997  void VisitPointers(Object** start, Object** end) {
5998  // Visit all HeapObject pointers in [start, end)
5999  for (Object** p = start; p < end; p++) {
6000  if ((*p)->IsHeapObject())
6001  utils_->MarkRootObjectRecursively(p);
6002  }
6003  }
6004 
6005  HeapDebugUtils* utils_;
6006  };
6007 
6008  bool search_for_any_global_;
6009  Object* search_target_;
6010  bool found_target_;
6011  List<Object*> object_stack_;
6012  Heap* heap_;
6013 
6014  friend class Heap;
6015 };
6016 
6017 #endif
6018 
6019 
6020 V8_DECLARE_ONCE(initialize_gc_once);
6021 
6022 static void InitializeGCOnce() {
6023  InitializeScavengingVisitorsTables();
6024  NewSpaceScavenger::Initialize();
6025  MarkCompactCollector::Initialize();
6026 }
6027 
6028 bool Heap::SetUp(bool create_heap_objects) {
6029 #ifdef DEBUG
6030  allocation_timeout_ = FLAG_gc_interval;
6031  debug_utils_ = new HeapDebugUtils(this);
6032 #endif
6033 
6034  // Initialize heap spaces and initial maps and objects. Whenever something
6035  // goes wrong, just return false. The caller should check the results and
6036  // call Heap::TearDown() to release allocated memory.
6037  //
6038  // If the heap is not yet configured (e.g. through the API), configure it.
6039  // Configuration is based on the flags new-space-size (really the semispace
6040  // size) and old-space-size if set or the initial values of semispace_size_
6041  // and old_generation_size_ otherwise.
6042  if (!configured_) {
6043  if (!ConfigureHeapDefault()) return false;
6044  }
6045 
6046  CallOnce(&initialize_gc_once, &InitializeGCOnce);
6047 
6048  MarkMapPointersAsEncoded(false);
6049 
6050  // Set up memory allocator.
6051  if (!isolate_->memory_allocator()->SetUp(MaxReserved(), MaxExecutableSize()))
6052  return false;
6053 
6054  // Set up new space.
6055  if (!new_space_.SetUp(reserved_semispace_size_, max_semispace_size_)) {
6056  return false;
6057  }
6058 
6059  // Initialize old pointer space.
6060  old_pointer_space_ =
6061  new OldSpace(this,
6062  max_old_generation_size_,
6063  OLD_POINTER_SPACE,
6064  NOT_EXECUTABLE);
6065  if (old_pointer_space_ == NULL) return false;
6066  if (!old_pointer_space_->SetUp()) return false;
6067 
6068  // Initialize old data space.
6069  old_data_space_ =
6070  new OldSpace(this,
6071  max_old_generation_size_,
6072  OLD_DATA_SPACE,
6073  NOT_EXECUTABLE);
6074  if (old_data_space_ == NULL) return false;
6075  if (!old_data_space_->SetUp()) return false;
6076 
6077  // Initialize the code space, set its maximum capacity to the old
6078  // generation size. It needs executable memory.
6079  // On 64-bit platform(s), we put all code objects in a 2 GB range of
6080  // virtual address space, so that they can call each other with near calls.
6081  if (code_range_size_ > 0) {
6082  if (!isolate_->code_range()->SetUp(code_range_size_)) {
6083  return false;
6084  }
6085  }
6086 
6087  code_space_ =
6088  new OldSpace(this, max_old_generation_size_, CODE_SPACE, EXECUTABLE);
6089  if (code_space_ == NULL) return false;
6090  if (!code_space_->SetUp()) return false;
6091 
6092  // Initialize map space.
6093  map_space_ = new MapSpace(this, max_old_generation_size_, MAP_SPACE);
6094  if (map_space_ == NULL) return false;
6095  if (!map_space_->SetUp()) return false;
6096 
6097  // Initialize global property cell space.
6098  cell_space_ = new CellSpace(this, max_old_generation_size_, CELL_SPACE);
6099  if (cell_space_ == NULL) return false;
6100  if (!cell_space_->SetUp()) return false;
6101 
6102  // The large object code space may contain code or data. We set the memory
6103  // to be non-executable here for safety, but this means we need to enable it
6104  // explicitly when allocating large code objects.
6105  lo_space_ = new LargeObjectSpace(this, max_old_generation_size_, LO_SPACE);
6106  if (lo_space_ == NULL) return false;
6107  if (!lo_space_->SetUp()) return false;
6108 
6109  // Set up the seed that is used to randomize the string hash function.
6110  ASSERT(hash_seed() == 0);
6111  if (FLAG_randomize_hashes) {
6112  if (FLAG_hash_seed == 0) {
6113  set_hash_seed(
6114  Smi::FromInt(V8::RandomPrivate(isolate()) & 0x3fffffff));
6115  } else {
6116  set_hash_seed(Smi::FromInt(FLAG_hash_seed));
6117  }
6118  }
6119 
6120  if (create_heap_objects) {
6121  // Create initial maps.
6122  if (!CreateInitialMaps()) return false;
6123  if (!CreateApiObjects()) return false;
6124 
6125  // Create initial objects
6126  if (!CreateInitialObjects()) return false;
6127 
6128  global_contexts_list_ = undefined_value();
6129  }
6130 
6131  LOG(isolate_, IntPtrTEvent("heap-capacity", Capacity()));
6132  LOG(isolate_, IntPtrTEvent("heap-available", Available()));
6133 
6134  store_buffer()->SetUp();
6135 
6136  return true;
6137 }
6138 
6139 
6140 void Heap::SetStackLimits() {
6141  ASSERT(isolate_ != NULL);
6142  ASSERT(isolate_ == isolate());
6143  // On 64 bit machines, pointers are generally out of range of Smis. We write
6144  // something that looks like an out of range Smi to the GC.
6145 
6146  // Set up the special root array entries containing the stack limits.
6147  // These are actually addresses, but the tag makes the GC ignore it.
6148  roots_[kStackLimitRootIndex] =
6149  reinterpret_cast<Object*>(
6150  (isolate_->stack_guard()->jslimit() & ~kSmiTagMask) | kSmiTag);
6151  roots_[kRealStackLimitRootIndex] =
6152  reinterpret_cast<Object*>(
6153  (isolate_->stack_guard()->real_jslimit() & ~kSmiTagMask) | kSmiTag);
6154 }
6155 
6156 
6157 void Heap::TearDown() {
6158 #ifdef DEBUG
6159  if (FLAG_verify_heap) {
6160  Verify();
6161  }
6162 #endif
6163  if (FLAG_print_cumulative_gc_stat) {
6164  PrintF("\n\n");
6165  PrintF("gc_count=%d ", gc_count_);
6166  PrintF("mark_sweep_count=%d ", ms_count_);
6167  PrintF("max_gc_pause=%d ", get_max_gc_pause());
6168  PrintF("min_in_mutator=%d ", get_min_in_mutator());
6169  PrintF("max_alive_after_gc=%" V8_PTR_PREFIX "d ",
6170  get_max_alive_after_gc());
6171  PrintF("\n\n");
6172  }
6173 
6174  isolate_->global_handles()->TearDown();
6175 
6176  external_string_table_.TearDown();
6177 
6178  new_space_.TearDown();
6179 
6180  if (old_pointer_space_ != NULL) {
6181  old_pointer_space_->TearDown();
6182  delete old_pointer_space_;
6183  old_pointer_space_ = NULL;
6184  }
6185 
6186  if (old_data_space_ != NULL) {
6187  old_data_space_->TearDown();
6188  delete old_data_space_;
6189  old_data_space_ = NULL;
6190  }
6191 
6192  if (code_space_ != NULL) {
6193  code_space_->TearDown();
6194  delete code_space_;
6195  code_space_ = NULL;
6196  }
6197 
6198  if (map_space_ != NULL) {
6199  map_space_->TearDown();
6200  delete map_space_;
6201  map_space_ = NULL;
6202  }
6203 
6204  if (cell_space_ != NULL) {
6205  cell_space_->TearDown();
6206  delete cell_space_;
6207  cell_space_ = NULL;
6208  }
6209 
6210  if (lo_space_ != NULL) {
6211  lo_space_->TearDown();
6212  delete lo_space_;
6213  lo_space_ = NULL;
6214  }
6215 
6216  store_buffer()->TearDown();
6217  incremental_marking()->TearDown();
6218 
6219  isolate_->memory_allocator()->TearDown();
6220 
6221 #ifdef DEBUG
6222  delete debug_utils_;
6223  debug_utils_ = NULL;
6224 #endif
6225 }
6226 
6227 
6228 void Heap::Shrink() {
6229  // Try to shrink all paged spaces.
6230  PagedSpaces spaces;
6231  for (PagedSpace* space = spaces.next();
6232  space != NULL;
6233  space = spaces.next()) {
6234  space->ReleaseAllUnusedPages();
6235  }
6236 }
6237 
6238 
6239 void Heap::AddGCPrologueCallback(GCPrologueCallback callback, GCType gc_type) {
6240  ASSERT(callback != NULL);
6241  GCPrologueCallbackPair pair(callback, gc_type);
6242  ASSERT(!gc_prologue_callbacks_.Contains(pair));
6243  return gc_prologue_callbacks_.Add(pair);
6244 }
6245 
6246 
6247 void Heap::RemoveGCPrologueCallback(GCPrologueCallback callback) {
6248  ASSERT(callback != NULL);
6249  for (int i = 0; i < gc_prologue_callbacks_.length(); ++i) {
6250  if (gc_prologue_callbacks_[i].callback == callback) {
6251  gc_prologue_callbacks_.Remove(i);
6252  return;
6253  }
6254  }
6255  UNREACHABLE();
6256 }
6257 
6258 
6259 void Heap::AddGCEpilogueCallback(GCEpilogueCallback callback, GCType gc_type) {
6260  ASSERT(callback != NULL);
6261  GCEpilogueCallbackPair pair(callback, gc_type);
6262  ASSERT(!gc_epilogue_callbacks_.Contains(pair));
6263  return gc_epilogue_callbacks_.Add(pair);
6264 }
6265 
6266 
6267 void Heap::RemoveGCEpilogueCallback(GCEpilogueCallback callback) {
6268  ASSERT(callback != NULL);
6269  for (int i = 0; i < gc_epilogue_callbacks_.length(); ++i) {
6270  if (gc_epilogue_callbacks_[i].callback == callback) {
6271  gc_epilogue_callbacks_.Remove(i);
6272  return;
6273  }
6274  }
6275  UNREACHABLE();
6276 }
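// Illustrative sketch (not part of heap.cc): the callback lists managed above
// are filled through the public API. A matching prologue/epilogue pair could
// be registered like this:
//
//   static void OnGCStart(v8::GCType type, v8::GCCallbackFlags flags) {
//     // e.g. note the start time of the pause
//   }
//   static void OnGCEnd(v8::GCType type, v8::GCCallbackFlags flags) {
//     // e.g. record the pause duration
//   }
//
//   v8::V8::AddGCPrologueCallback(OnGCStart, v8::kGCTypeAll);
//   v8::V8::AddGCEpilogueCallback(OnGCEnd, v8::kGCTypeAll);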
6277 
6278 
6279 #ifdef DEBUG
6280 
6281 class PrintHandleVisitor: public ObjectVisitor {
6282  public:
6283  void VisitPointers(Object** start, Object** end) {
6284  for (Object** p = start; p < end; p++)
6285  PrintF(" handle %p to %p\n",
6286  reinterpret_cast<void*>(p),
6287  reinterpret_cast<void*>(*p));
6288  }
6289 };
6290 
6291 void Heap::PrintHandles() {
6292  PrintF("Handles:\n");
6293  PrintHandleVisitor v;
6294  isolate_->handle_scope_implementer()->Iterate(&v);
6295 }
6296 
6297 #endif
6298 
6299 
6300 Space* AllSpaces::next() {
6301  switch (counter_++) {
6302  case NEW_SPACE:
6303  return HEAP->new_space();
6304  case OLD_POINTER_SPACE:
6305  return HEAP->old_pointer_space();
6306  case OLD_DATA_SPACE:
6307  return HEAP->old_data_space();
6308  case CODE_SPACE:
6309  return HEAP->code_space();
6310  case MAP_SPACE:
6311  return HEAP->map_space();
6312  case CELL_SPACE:
6313  return HEAP->cell_space();
6314  case LO_SPACE:
6315  return HEAP->lo_space();
6316  default:
6317  return NULL;
6318  }
6319 }
6320 
6321 
6322 PagedSpace* PagedSpaces::next() {
6323  switch (counter_++) {
6324  case OLD_POINTER_SPACE:
6325  return HEAP->old_pointer_space();
6326  case OLD_DATA_SPACE:
6327  return HEAP->old_data_space();
6328  case CODE_SPACE:
6329  return HEAP->code_space();
6330  case MAP_SPACE:
6331  return HEAP->map_space();
6332  case CELL_SPACE:
6333  return HEAP->cell_space();
6334  default:
6335  return NULL;
6336  }
6337 }
6338 
6339 
6340 
6341 OldSpace* OldSpaces::next() {
6342  switch (counter_++) {
6343  case OLD_POINTER_SPACE:
6344  return HEAP->old_pointer_space();
6345  case OLD_DATA_SPACE:
6346  return HEAP->old_data_space();
6347  case CODE_SPACE:
6348  return HEAP->code_space();
6349  default:
6350  return NULL;
6351  }
6352 }
6353 
6354 
6355 SpaceIterator::SpaceIterator()
6356  : current_space_(FIRST_SPACE),
6357  iterator_(NULL),
6358  size_func_(NULL) {
6359 }
6360 
6361 
6362 SpaceIterator::SpaceIterator(HeapObjectCallback size_func)
6363  : current_space_(FIRST_SPACE),
6364  iterator_(NULL),
6365  size_func_(size_func) {
6366 }
6367 
6368 
6369 SpaceIterator::~SpaceIterator() {
6370  // Delete active iterator if any.
6371  delete iterator_;
6372 }
6373 
6374 
6375 bool SpaceIterator::has_next() {
6376  // Iterate until no more spaces.
6377  return current_space_ != LAST_SPACE;
6378 }
6379 
6380 
6381 ObjectIterator* SpaceIterator::next() {
6382  if (iterator_ != NULL) {
6383  delete iterator_;
6384  iterator_ = NULL;
6385  // Move to the next space
6386  current_space_++;
6387  if (current_space_ > LAST_SPACE) {
6388  return NULL;
6389  }
6390  }
6391 
6392  // Return iterator for the new current space.
6393  return CreateIterator();
6394 }
6395 
6396 
6397 // Create an iterator for the space to iterate.
6398 ObjectIterator* SpaceIterator::CreateIterator() {
6399  ASSERT(iterator_ == NULL);
6400 
6401  switch (current_space_) {
6402  case NEW_SPACE:
6403  iterator_ = new SemiSpaceIterator(HEAP->new_space(), size_func_);
6404  break;
6405  case OLD_POINTER_SPACE:
6406  iterator_ = new HeapObjectIterator(HEAP->old_pointer_space(), size_func_);
6407  break;
6408  case OLD_DATA_SPACE:
6409  iterator_ = new HeapObjectIterator(HEAP->old_data_space(), size_func_);
6410  break;
6411  case CODE_SPACE:
6412  iterator_ = new HeapObjectIterator(HEAP->code_space(), size_func_);
6413  break;
6414  case MAP_SPACE:
6415  iterator_ = new HeapObjectIterator(HEAP->map_space(), size_func_);
6416  break;
6417  case CELL_SPACE:
6418  iterator_ = new HeapObjectIterator(HEAP->cell_space(), size_func_);
6419  break;
6420  case LO_SPACE:
6421  iterator_ = new LargeObjectIterator(HEAP->lo_space(), size_func_);
6422  break;
6423  }
6424 
6425  // Return the newly allocated iterator.
6426  ASSERT(iterator_ != NULL);
6427  return iterator_;
6428 }
6429 
6430 
6431 class HeapObjectsFilter {
6432  public:
6433  virtual ~HeapObjectsFilter() {}
6434  virtual bool SkipObject(HeapObject* object) = 0;
6435 };
6436 
6437 
6438 class UnreachableObjectsFilter : public HeapObjectsFilter {
6439  public:
6440  UnreachableObjectsFilter() {
6441  MarkReachableObjects();
6442  }
6443 
6445  Isolate::Current()->heap()->mark_compact_collector()->ClearMarkbits();
6446  }
6447 
6448  bool SkipObject(HeapObject* object) {
6449  MarkBit mark_bit = Marking::MarkBitFrom(object);
6450  return !mark_bit.Get();
6451  }
6452 
6453  private:
6454  class MarkingVisitor : public ObjectVisitor {
6455  public:
6456  MarkingVisitor() : marking_stack_(10) {}
6457 
6458  void VisitPointers(Object** start, Object** end) {
6459  for (Object** p = start; p < end; p++) {
6460  if (!(*p)->IsHeapObject()) continue;
6461  HeapObject* obj = HeapObject::cast(*p);
6462  MarkBit mark_bit = Marking::MarkBitFrom(obj);
6463  if (!mark_bit.Get()) {
6464  mark_bit.Set();
6465  marking_stack_.Add(obj);
6466  }
6467  }
6468  }
6469 
6470  void TransitiveClosure() {
6471  while (!marking_stack_.is_empty()) {
6472  HeapObject* obj = marking_stack_.RemoveLast();
6473  obj->Iterate(this);
6474  }
6475  }
6476 
6477  private:
6478  List<HeapObject*> marking_stack_;
6479  };
6480 
6481  void MarkReachableObjects() {
6482  Heap* heap = Isolate::Current()->heap();
6483  MarkingVisitor visitor;
6484  heap->IterateRoots(&visitor, VISIT_ALL);
6485  visitor.TransitiveClosure();
6486  }
6487 
6488  AssertNoAllocation no_alloc;
6489 };
6490 
6491 
6492 HeapIterator::HeapIterator()
6493  : filtering_(HeapIterator::kNoFiltering),
6494  filter_(NULL) {
6495  Init();
6496 }
6497 
6498 
6499 HeapIterator::HeapIterator(HeapIterator::HeapObjectsFiltering filtering)
6500  : filtering_(filtering),
6501  filter_(NULL) {
6502  Init();
6503 }
6504 
6505 
6506 HeapIterator::~HeapIterator() {
6507  Shutdown();
6508 }
6509 
6510 
6511 void HeapIterator::Init() {
6512  // Start the iteration.
6513  space_iterator_ = new SpaceIterator;
6514  switch (filtering_) {
6515  case kFilterUnreachable:
6516  filter_ = new UnreachableObjectsFilter;
6517  break;
6518  default:
6519  break;
6520  }
6521  object_iterator_ = space_iterator_->next();
6522 }
6523 
6524 
6525 void HeapIterator::Shutdown() {
6526 #ifdef DEBUG
6527  // Assert that in filtering mode we have iterated through all
6528  // objects. Otherwise, heap will be left in an inconsistent state.
6529  if (filtering_ != kNoFiltering) {
6530  ASSERT(object_iterator_ == NULL);
6531  }
6532 #endif
6533  // Make sure the last iterator is deallocated.
6534  delete space_iterator_;
6535  space_iterator_ = NULL;
6536  object_iterator_ = NULL;
6537  delete filter_;
6538  filter_ = NULL;
6539 }
6540 
6541 
6542 HeapObject* HeapIterator::next() {
6543  if (filter_ == NULL) return NextObject();
6544 
6545  HeapObject* obj = NextObject();
6546  while (obj != NULL && filter_->SkipObject(obj)) obj = NextObject();
6547  return obj;
6548 }
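// Illustrative sketch (not part of heap.cc): HeapIterator is used the same way
// as in Heap::RecordStats() above; with kFilterUnreachable only objects still
// reachable from the roots are returned. Summing live bytes, for example:
//
//   HeapIterator iterator(HeapIterator::kFilterUnreachable);
//   intptr_t live_bytes = 0;
//   for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
//     live_bytes += obj->Size();
//   }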
6549 
6550 
6551 HeapObject* HeapIterator::NextObject() {
6552  // No iterator means we are done.
6553  if (object_iterator_ == NULL) return NULL;
6554 
6555  if (HeapObject* obj = object_iterator_->next_object()) {
6556  // If the current iterator has more objects we are fine.
6557  return obj;
6558  } else {
6559  // Go through the spaces looking for one that has objects.
6560  while (space_iterator_->has_next()) {
6561  object_iterator_ = space_iterator_->next();
6562  if (HeapObject* obj = object_iterator_->next_object()) {
6563  return obj;
6564  }
6565  }
6566  }
6567  // Done with the last space.
6568  object_iterator_ = NULL;
6569  return NULL;
6570 }
6571 
6572 
6573 void HeapIterator::reset() {
6574  // Restart the iterator.
6575  Shutdown();
6576  Init();
6577 }
6578 
6579 
6580 #if defined(DEBUG) || defined(LIVE_OBJECT_LIST)
6581 
6582 Object* const PathTracer::kAnyGlobalObject = reinterpret_cast<Object*>(NULL);
6583 
6584 class PathTracer::MarkVisitor: public ObjectVisitor {
6585  public:
6586  explicit MarkVisitor(PathTracer* tracer) : tracer_(tracer) {}
6587  void VisitPointers(Object** start, Object** end) {
6588  // Scan all HeapObject pointers in [start, end)
6589  for (Object** p = start; !tracer_->found() && (p < end); p++) {
6590  if ((*p)->IsHeapObject())
6591  tracer_->MarkRecursively(p, this);
6592  }
6593  }
6594 
6595  private:
6596  PathTracer* tracer_;
6597 };
6598 
6599 
6600 class PathTracer::UnmarkVisitor: public ObjectVisitor {
6601  public:
6602  explicit UnmarkVisitor(PathTracer* tracer) : tracer_(tracer) {}
6603  void VisitPointers(Object** start, Object** end) {
6604  // Scan all HeapObject pointers in [start, end)
6605  for (Object** p = start; p < end; p++) {
6606  if ((*p)->IsHeapObject())
6607  tracer_->UnmarkRecursively(p, this);
6608  }
6609  }
6610 
6611  private:
6612  PathTracer* tracer_;
6613 };
6614 
6615 
6616 void PathTracer::VisitPointers(Object** start, Object** end) {
6617  bool done = ((what_to_find_ == FIND_FIRST) && found_target_);
6618  // Visit all HeapObject pointers in [start, end)
6619  for (Object** p = start; !done && (p < end); p++) {
6620  if ((*p)->IsHeapObject()) {
6621  TracePathFrom(p);
6622  done = ((what_to_find_ == FIND_FIRST) && found_target_);
6623  }
6624  }
6625 }
6626 
6627 
6628 void PathTracer::Reset() {
6629  found_target_ = false;
6630  object_stack_.Clear();
6631 }
6632 
6633 
6634 void PathTracer::TracePathFrom(Object** root) {
6635  ASSERT((search_target_ == kAnyGlobalObject) ||
6636  search_target_->IsHeapObject());
6637  found_target_in_trace_ = false;
6638  object_stack_.Clear();
6639 
6640  MarkVisitor mark_visitor(this);
6641  MarkRecursively(root, &mark_visitor);
6642 
6643  UnmarkVisitor unmark_visitor(this);
6644  UnmarkRecursively(root, &unmark_visitor);
6645 
6646  ProcessResults();
6647 }
6648 
6649 
6650 static bool SafeIsGlobalContext(HeapObject* obj) {
6651  return obj->map() == obj->GetHeap()->raw_unchecked_global_context_map();
6652 }
6653 
6654 
6655 void PathTracer::MarkRecursively(Object** p, MarkVisitor* mark_visitor) {
6656  if (!(*p)->IsHeapObject()) return;
6657 
6658  HeapObject* obj = HeapObject::cast(*p);
6659 
6660  Object* map = obj->map();
6661 
6662  if (!map->IsHeapObject()) return; // visited before
6663 
6664  if (found_target_in_trace_) return; // stop if target found
6665  object_stack_.Add(obj);
6666  if (((search_target_ == kAnyGlobalObject) && obj->IsJSGlobalObject()) ||
6667  (obj == search_target_)) {
6668  found_target_in_trace_ = true;
6669  found_target_ = true;
6670  return;
6671  }
6672 
6673  bool is_global_context = SafeIsGlobalContext(obj);
6674 
6675  // not visited yet
6676  Map* map_p = reinterpret_cast<Map*>(HeapObject::cast(map));
6677 
6678  Address map_addr = map_p->address();
6679 
6680  obj->set_map_no_write_barrier(reinterpret_cast<Map*>(map_addr + kMarkTag));
6681 
6682  // Scan the object body.
6683  if (is_global_context && (visit_mode_ == VISIT_ONLY_STRONG)) {
6684  // This is specialized to scan Contexts properly.
6685  Object** start = reinterpret_cast<Object**>(obj->address() +
6686  Context::kHeaderSize);
6687  Object** end = reinterpret_cast<Object**>(obj->address() +
6688  Context::kHeaderSize + Context::FIRST_WEAK_SLOT * kPointerSize);
6689  mark_visitor->VisitPointers(start, end);
6690  } else {
6691  obj->IterateBody(map_p->instance_type(),
6692  obj->SizeFromMap(map_p),
6693  mark_visitor);
6694  }
6695 
6696  // Scan the map after the body because the body is a lot more interesting
6697  // when doing leak detection.
6698  MarkRecursively(&map, mark_visitor);
6699 
6700  if (!found_target_in_trace_) // don't pop if found the target
6701  object_stack_.RemoveLast();
6702 }
6703 
6704 
6705 void PathTracer::UnmarkRecursively(Object** p, UnmarkVisitor* unmark_visitor) {
6706  if (!(*p)->IsHeapObject()) return;
6707 
6708  HeapObject* obj = HeapObject::cast(*p);
6709 
6710  Object* map = obj->map();
6711 
6712  if (map->IsHeapObject()) return; // unmarked already
6713 
6714  Address map_addr = reinterpret_cast<Address>(map);
6715 
6716  map_addr -= kMarkTag;
6717 
6718  ASSERT_TAG_ALIGNED(map_addr);
6719 
6720  HeapObject* map_p = HeapObject::FromAddress(map_addr);
6721 
6722  obj->set_map_no_write_barrier(reinterpret_cast<Map*>(map_p));
6723 
6724  UnmarkRecursively(reinterpret_cast<Object**>(&map_p), unmark_visitor);
6725 
6726  obj->IterateBody(Map::cast(map_p)->instance_type(),
6727  obj->SizeFromMap(Map::cast(map_p)),
6728  unmark_visitor);
6729 }
6730 
6731 
6732 void PathTracer::ProcessResults() {
6733  if (found_target_) {
6734  PrintF("=====================================\n");
6735  PrintF("==== Path to object ====\n");
6736  PrintF("=====================================\n\n");
6737 
6738  ASSERT(!object_stack_.is_empty());
6739  for (int i = 0; i < object_stack_.length(); i++) {
6740  if (i > 0) PrintF("\n |\n |\n V\n\n");
6741  Object* obj = object_stack_[i];
6742 #ifdef OBJECT_PRINT
6743  obj->Print();
6744 #else
6745  obj->ShortPrint();
6746 #endif
6747  }
6748  PrintF("=====================================\n");
6749  }
6750 }
6751 #endif // DEBUG || LIVE_OBJECT_LIST
6752 
6753 
6754 #ifdef DEBUG
6755 // Triggers a depth-first traversal of reachable objects from roots
6756 // and finds a path to a specific heap object and prints it.
6757 void Heap::TracePathToObject(Object* target) {
6758  PathTracer tracer(target, PathTracer::FIND_ALL, VISIT_ALL);
6759  IterateRoots(&tracer, VISIT_ONLY_STRONG);
6760 }
6761 
6762 
6763 // Triggers a depth-first traversal of reachable objects from roots
6764 // and finds a path to any global object and prints it. Useful for
6765 // determining the source for leaks of global objects.
6766 void Heap::TracePathToGlobal() {
6767  PathTracer tracer(PathTracer::kAnyGlobalObject,
6768  PathTracer::FIND_ALL,
6769  VISIT_ALL);
6770  IterateRoots(&tracer, VISIT_ONLY_STRONG);
6771 }
6772 #endif
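// Illustrative sketch (not part of heap.cc): in a debug build the two helpers
// above can be invoked from temporary instrumentation or from a debugger to
// see what keeps an object alive; "heap" and "suspect" below are assumed to
// come from the surrounding debugging context.
//
//   #ifdef DEBUG
//   heap->TracePathToObject(suspect);  // prints a root-to-object path
//   heap->TracePathToGlobal();         // prints a path to some global object
//   #endif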
6773 
6774 
6775 static intptr_t CountTotalHolesSize() {
6776  intptr_t holes_size = 0;
6777  OldSpaces spaces;
6778  for (OldSpace* space = spaces.next();
6779  space != NULL;
6780  space = spaces.next()) {
6781  holes_size += space->Waste() + space->Available();
6782  }
6783  return holes_size;
6784 }
6785 
6786 
6787 GCTracer::GCTracer(Heap* heap,
6788  const char* gc_reason,
6789  const char* collector_reason)
6790  : start_time_(0.0),
6791  start_object_size_(0),
6792  start_memory_size_(0),
6793  gc_count_(0),
6794  full_gc_count_(0),
6795  allocated_since_last_gc_(0),
6796  spent_in_mutator_(0),
6797  promoted_objects_size_(0),
6798  heap_(heap),
6799  gc_reason_(gc_reason),
6800  collector_reason_(collector_reason) {
6801  if (!FLAG_trace_gc && !FLAG_print_cumulative_gc_stat) return;
6802  start_time_ = OS::TimeCurrentMillis();
6803  start_object_size_ = heap_->SizeOfObjects();
6804  start_memory_size_ = heap_->isolate()->memory_allocator()->Size();
6805 
6806  for (int i = 0; i < Scope::kNumberOfScopes; i++) {
6807  scopes_[i] = 0;
6808  }
6809 
6810  in_free_list_or_wasted_before_gc_ = CountTotalHolesSize();
6811 
6812  allocated_since_last_gc_ =
6813  heap_->SizeOfObjects() - heap_->alive_after_last_gc_;
6814 
6815  if (heap_->last_gc_end_timestamp_ > 0) {
6816  spent_in_mutator_ = Max(start_time_ - heap_->last_gc_end_timestamp_, 0.0);
6817  }
6818 
6819  steps_count_ = heap_->incremental_marking()->steps_count();
6820  steps_took_ = heap_->incremental_marking()->steps_took();
6821  longest_step_ = heap_->incremental_marking()->longest_step();
6822  steps_count_since_last_gc_ =
6823  heap_->incremental_marking()->steps_count_since_last_gc();
6824  steps_took_since_last_gc_ =
6825  heap_->incremental_marking()->steps_took_since_last_gc();
6826 }
6827 
6828 
6829 GCTracer::~GCTracer() {
6830  // Print ONE line iff the flag is set.
6831  if (!FLAG_trace_gc && !FLAG_print_cumulative_gc_stat) return;
6832 
6833  bool first_gc = (heap_->last_gc_end_timestamp_ == 0);
6834 
6835  heap_->alive_after_last_gc_ = heap_->SizeOfObjects();
6836  heap_->last_gc_end_timestamp_ = OS::TimeCurrentMillis();
6837 
6838  int time = static_cast<int>(heap_->last_gc_end_timestamp_ - start_time_);
6839 
6840  // Update cumulative GC statistics if required.
6841  if (FLAG_print_cumulative_gc_stat) {
6842  heap_->max_gc_pause_ = Max(heap_->max_gc_pause_, time);
6843  heap_->max_alive_after_gc_ = Max(heap_->max_alive_after_gc_,
6844  heap_->alive_after_last_gc_);
6845  if (!first_gc) {
6846  heap_->min_in_mutator_ = Min(heap_->min_in_mutator_,
6847  static_cast<int>(spent_in_mutator_));
6848  }
6849  }
6850 
6851  PrintF("%8.0f ms: ", heap_->isolate()->time_millis_since_init());
6852 
6853  if (!FLAG_trace_gc_nvp) {
6854  int external_time = static_cast<int>(scopes_[Scope::EXTERNAL]);
6855 
6856  double end_memory_size_mb =
6857  static_cast<double>(heap_->isolate()->memory_allocator()->Size()) / MB;
6858 
6859  PrintF("%s %.1f (%.1f) -> %.1f (%.1f) MB, ",
6860  CollectorString(),
6861  static_cast<double>(start_object_size_) / MB,
6862  static_cast<double>(start_memory_size_) / MB,
6863  SizeOfHeapObjects(),
6864  end_memory_size_mb);
6865 
6866  if (external_time > 0) PrintF("%d / ", external_time);
6867  PrintF("%d ms", time);
6868  if (steps_count_ > 0) {
6869  if (collector_ == SCAVENGER) {
6870  PrintF(" (+ %d ms in %d steps since last GC)",
6871  static_cast<int>(steps_took_since_last_gc_),
6872  steps_count_since_last_gc_);
6873  } else {
6874  PrintF(" (+ %d ms in %d steps since start of marking, "
6875  "biggest step %f ms)",
6876  static_cast<int>(steps_took_),
6877  steps_count_,
6878  longest_step_);
6879  }
6880  }
6881 
6882  if (gc_reason_ != NULL) {
6883  PrintF(" [%s]", gc_reason_);
6884  }
6885 
6886  if (collector_reason_ != NULL) {
6887  PrintF(" [%s]", collector_reason_);
6888  }
6889 
6890  PrintF(".\n");
6891  } else {
6892  PrintF("pause=%d ", time);
6893  PrintF("mutator=%d ",
6894  static_cast<int>(spent_in_mutator_));
6895 
6896  PrintF("gc=");
6897  switch (collector_) {
6898  case SCAVENGER:
6899  PrintF("s");
6900  break;
6901  case MARK_COMPACTOR:
6902  PrintF("ms");
6903  break;
6904  default:
6905  UNREACHABLE();
6906  }
6907  PrintF(" ");
6908 
6909  PrintF("external=%d ", static_cast<int>(scopes_[Scope::EXTERNAL]));
6910  PrintF("mark=%d ", static_cast<int>(scopes_[Scope::MC_MARK]));
6911  PrintF("sweep=%d ", static_cast<int>(scopes_[Scope::MC_SWEEP]));
6912  PrintF("sweepns=%d ", static_cast<int>(scopes_[Scope::MC_SWEEP_NEWSPACE]));
6913  PrintF("evacuate=%d ", static_cast<int>(scopes_[Scope::MC_EVACUATE_PAGES]));
6914  PrintF("new_new=%d ",
6915  static_cast<int>(scopes_[Scope::MC_UPDATE_NEW_TO_NEW_POINTERS]));
6916  PrintF("root_new=%d ",
6917  static_cast<int>(scopes_[Scope::MC_UPDATE_ROOT_TO_NEW_POINTERS]));
6918  PrintF("old_new=%d ",
6919  static_cast<int>(scopes_[Scope::MC_UPDATE_OLD_TO_NEW_POINTERS]));
6920  PrintF("compaction_ptrs=%d ",
6921  static_cast<int>(scopes_[Scope::MC_UPDATE_POINTERS_TO_EVACUATED]));
6922  PrintF("intracompaction_ptrs=%d ", static_cast<int>(scopes_[
6923  Scope::MC_UPDATE_POINTERS_BETWEEN_EVACUATED]));
6924  PrintF("misc_compaction=%d ",
6925  static_cast<int>(scopes_[Scope::MC_UPDATE_MISC_POINTERS]));
6926 
6927  PrintF("total_size_before=%" V8_PTR_PREFIX "d ", start_object_size_);
6928  PrintF("total_size_after=%" V8_PTR_PREFIX "d ", heap_->SizeOfObjects());
6929  PrintF("holes_size_before=%" V8_PTR_PREFIX "d ",
6930  in_free_list_or_wasted_before_gc_);
6931  PrintF("holes_size_after=%" V8_PTR_PREFIX "d ", CountTotalHolesSize());
6932 
6933  PrintF("allocated=%" V8_PTR_PREFIX "d ", allocated_since_last_gc_);
6934  PrintF("promoted=%" V8_PTR_PREFIX "d ", promoted_objects_size_);
6935 
6936  if (collector_ == SCAVENGER) {
6937  PrintF("stepscount=%d ", steps_count_since_last_gc_);
6938  PrintF("stepstook=%d ", static_cast<int>(steps_took_since_last_gc_));
6939  } else {
6940  PrintF("stepscount=%d ", steps_count_);
6941  PrintF("stepstook=%d ", static_cast<int>(steps_took_));
6942  }
6943 
6944  PrintF("\n");
6945  }
6946 
6947  heap_->PrintShortHeapStatistics();
6948 }
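// Editor's note: illustrative output only (all numbers below are made up), to
// show the two line shapes the destructor above can emit:
//
//   with --trace_gc:
//     1234 ms: Scavenge 8.1 (42.0) -> 7.3 (42.0) MB, 2 ms [allocation failure].
//
//   with --trace_gc_nvp (printed as a single line; wrapped here for readability):
//     1234 ms: pause=2 mutator=35 gc=s external=0 mark=0 sweep=0 sweepns=0
//     evacuate=0 new_new=0 root_new=0 old_new=0 compaction_ptrs=0
//     intracompaction_ptrs=0 misc_compaction=0 total_size_before=8523776
//     total_size_after=7340032 holes_size_before=262144 holes_size_after=131072
//     allocated=1048576 promoted=65536 stepscount=0 stepstook=0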
6949 
6950 
6951 const char* GCTracer::CollectorString() {
6952  switch (collector_) {
6953  case SCAVENGER:
6954  return "Scavenge";
6955  case MARK_COMPACTOR:
6956  return "Mark-sweep";
6957  }
6958  return "Unknown GC";
6959 }
6960 
6961 
6962 int KeyedLookupCache::Hash(Map* map, String* name) {
6963  // Uses only lower 32 bits if pointers are larger.
6964  uintptr_t addr_hash =
6965  static_cast<uint32_t>(reinterpret_cast<uintptr_t>(map)) >> kMapHashShift;
6966  return static_cast<uint32_t>((addr_hash ^ name->Hash()) & kCapacityMask);
6967 }
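// Editor's note: maps are heap-allocated at aligned addresses, so the low bits
// of a Map* carry little information; shifting right by kMapHashShift discards
// them before XOR-ing with the string's hash, and kCapacityMask keeps the
// result inside the cache's index range.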
6968 
6969 
6970 int KeyedLookupCache::Lookup(Map* map, String* name) {
6971  int index = (Hash(map, name) & kHashMask);
6972  for (int i = 0; i < kEntriesPerBucket; i++) {
6973  Key& key = keys_[index + i];
6974  if ((key.map == map) && key.name->Equals(name)) {
6975  return field_offsets_[index + i];
6976  }
6977  }
6978  return kNotFound;
6979 }
6980 
6981 
6982 void KeyedLookupCache::Update(Map* map, String* name, int field_offset) {
6983  String* symbol;
6984  if (HEAP->LookupSymbolIfExists(name, &symbol)) {
6985  int index = (Hash(map, symbol) & kHashMask);
6986  // After a GC there will be free slots, so we use them in order (this may
6987  // help to get the most frequently used one in position 0).
6988  for (int i = 0; i < kEntriesPerBucket; i++) {
6989  Key& key = keys_[index + i];
6990  Object* free_entry_indicator = NULL;
6991  if (key.map == free_entry_indicator) {
6992  key.map = map;
6993  key.name = symbol;
6994  field_offsets_[index + i] = field_offset;
6995  return;
6996  }
6997  }
6998  // No free entry found in this bucket, so we move them all down one and
6999  // put the new entry at position zero.
7000  for (int i = kEntriesPerBucket - 1; i > 0; i--) {
7001  Key& key = keys_[index + i];
7002  Key& key2 = keys_[index + i - 1];
7003  key = key2;
7004  field_offsets_[index + i] = field_offsets_[index + i - 1];
7005  }
7006 
7007  // Write the new first entry.
7008  Key& key = keys_[index];
7009  key.map = map;
7010  key.name = symbol;
7011  field_offsets_[index] = field_offset;
7012  }
7013 }
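// Editor's note: a minimal sketch of how the cache pair above is meant to be
// used by a keyed-load fast path (identifiers other than Lookup/Update/
// kNotFound are hypothetical):
//
//   KeyedLookupCache* cache = isolate->keyed_lookup_cache();
//   int offset = cache->Lookup(receiver_map, key_string);
//   if (offset == KeyedLookupCache::kNotFound) {
//     offset = SlowLookupFieldOffset(receiver_map, key_string);  // hypothetical slow path
//     cache->Update(receiver_map, key_string, offset);
//   }
//   // 'offset' is now the cached field offset for the (map, symbol) pair.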
7014 
7015 
7016 void KeyedLookupCache::Clear() {
7017  for (int index = 0; index < kLength; index++) keys_[index].map = NULL;
7018 }
7019 
7020 
7021 void DescriptorLookupCache::Clear() {
7022  for (int index = 0; index < kLength; index++) keys_[index].array = NULL;
7023 }
7024 
7025 
7026 #ifdef DEBUG
7027 void Heap::GarbageCollectionGreedyCheck() {
7028  ASSERT(FLAG_gc_greedy);
7029  if (isolate_->bootstrapper()->IsActive()) return;
7030  if (disallow_allocation_failure()) return;
7031  CollectGarbage(NEW_SPACE);
7032 }
7033 #endif
7034 
7035 
7036 TranscendentalCache::SubCache::SubCache(Type t)
7037  : type_(t),
7038  isolate_(Isolate::Current()) {
7039  uint32_t in0 = 0xffffffffu; // Bit-pattern for a NaN that isn't
7040  uint32_t in1 = 0xffffffffu; // generated by the FPU.
7041  for (int i = 0; i < kCacheSize; i++) {
7042  elements_[i].in[0] = in0;
7043  elements_[i].in[1] = in1;
7044  elements_[i].output = NULL;
7045  }
7046 }
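// Editor's note: 0xffffffff / 0xffffffff is the bit pattern of a NaN that the
// FPU never produces, so initializing in[0]/in[1] with it marks a slot as
// empty without a separate flag: a cache probe (elsewhere in the heap code)
// splits the incoming double into two 32-bit halves and compares them against
// in[0] and in[1], and a real argument can never match the sentinel.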
7047 
7048 
7049 void TranscendentalCache::Clear() {
7050  for (int i = 0; i < kNumberOfCaches; i++) {
7051  if (caches_[i] != NULL) {
7052  delete caches_[i];
7053  caches_[i] = NULL;
7054  }
7055  }
7056 }
7057 
7058 
7059 void ExternalStringTable::CleanUp() {
7060  int last = 0;
7061  for (int i = 0; i < new_space_strings_.length(); ++i) {
7062  if (new_space_strings_[i] == heap_->raw_unchecked_the_hole_value()) {
7063  continue;
7064  }
7065  if (heap_->InNewSpace(new_space_strings_[i])) {
7066  new_space_strings_[last++] = new_space_strings_[i];
7067  } else {
7068  old_space_strings_.Add(new_space_strings_[i]);
7069  }
7070  }
7071  new_space_strings_.Rewind(last);
7072  last = 0;
7073  for (int i = 0; i < old_space_strings_.length(); ++i) {
7074  if (old_space_strings_[i] == heap_->raw_unchecked_the_hole_value()) {
7075  continue;
7076  }
7077  ASSERT(!heap_->InNewSpace(old_space_strings_[i]));
7078  old_space_strings_[last++] = old_space_strings_[i];
7079  }
7080  old_space_strings_.Rewind(last);
7081  if (FLAG_verify_heap) {
7082  Verify();
7083  }
7084 }
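// Editor's note: CleanUp above compacts the external string table: entries
// that were cleared to the_hole are dropped, strings whose backing objects
// were promoted out of new space migrate from new_space_strings_ to
// old_space_strings_, the rest stay put, and with --verify_heap the result is
// re-checked.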
7085 
7086 
7087 void ExternalStringTable::TearDown() {
7088  new_space_strings_.Free();
7089  old_space_strings_.Free();
7090 }
7091 
7092 
7093 void Heap::QueueMemoryChunkForFree(MemoryChunk* chunk) {
7094  chunk->set_next_chunk(chunks_queued_for_free_);
7095  chunks_queued_for_free_ = chunk;
7096 }
7097 
7098 
7100  if (chunks_queued_for_free_ == NULL) return;
7101  MemoryChunk* next;
7102  MemoryChunk* chunk;
7103  for (chunk = chunks_queued_for_free_; chunk != NULL; chunk = next) {
7104  next = chunk->next_chunk();
7105  chunk->SetFlag(MemoryChunk::ABOUT_TO_BE_FREED);
7106 
7107  if (chunk->owner()->identity() == LO_SPACE) {
7108  // StoreBuffer::Filter relies on MemoryChunk::FromAnyPointerAddress.
7109  // If FromAnyPointerAddress encounters a slot that belongs to a large
7110  // chunk queued for deletion it will fail to find the chunk because
7111  // it tries to search the list of pages owned by the large object
7112  // space, but queued chunks were detached from that list.
7113  // To work around this we split the large chunk into normal kPageSize-aligned
7114  // pieces and initialize the size, owner and flags fields of every piece.
7115  // If FromAnyPointerAddress encounters a slot that belongs to one of
7116  // these smaller pieces it will treat it as a slot on a normal Page.
7117  Address chunk_end = chunk->address() + chunk->size();
7118  MemoryChunk* inner = MemoryChunk::FromAddress(
7119  chunk->address() + Page::kPageSize);
7120  MemoryChunk* inner_last = MemoryChunk::FromAddress(chunk_end - 1);
7121  while (inner <= inner_last) {
7122  // Size of a large chunk is always a multiple of
7123  // OS::AllocateAlignment() so there is always
7124  // enough space for a fake MemoryChunk header.
7125  Address area_end = Min(inner->address() + Page::kPageSize, chunk_end);
7126  // Guard against overflow.
7127  if (area_end < inner->address()) area_end = chunk_end;
7128  inner->SetArea(inner->address(), area_end);
7129  inner->set_size(Page::kPageSize);
7130  inner->set_owner(lo_space());
7131  inner->SetFlag(MemoryChunk::ABOUT_TO_BE_FREED);
7132  inner = MemoryChunk::FromAddress(
7133  inner->address() + Page::kPageSize);
7134  }
7135  }
7136  }
7137  isolate_->heap()->store_buffer()->Compact();
7138  isolate_->heap()->store_buffer()->Filter(MemoryChunk::ABOUT_TO_BE_FREED);
7139  for (chunk = chunks_queued_for_free_; chunk != NULL; chunk = next) {
7140  next = chunk->next_chunk();
7141  isolate_->memory_allocator()->Free(chunk);
7142  }
7143  chunks_queued_for_free_ = NULL;
7144 }
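// Editor's note: a worked example of the splitting above, assuming for the
// sake of illustration that Page::kPageSize is 1 MB and a queued large chunk
// of 2.5 MB starts at address A (chunk_end = A + 2.5 MB): fake MemoryChunk
// headers are stamped at A + 1 MB and A + 2 MB, each with size = kPageSize and
// owner = lo_space(), and the last one's area is clamped to chunk_end. Any
// interior slot then resolves via FromAnyPointerAddress to one of these
// page-sized pieces instead of a search of the detached large-object page list.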
7145 
7146 
7147 void Heap::RememberUnmappedPage(Address page, bool compacted) {
7148  uintptr_t p = reinterpret_cast<uintptr_t>(page);
7149  // Tag the page pointer to make it findable in the dump file.
7150  if (compacted) {
7151  p ^= 0xc1ead & (Page::kPageSize - 1); // Cleared.
7152  } else {
7153  p ^= 0x1d1ed & (Page::kPageSize - 1); // I died.
7154  }
7155  remembered_unmapped_pages_[remembered_unmapped_pages_index_] =
7156  reinterpret_cast<Address>(p);
7157  remembered_unmapped_pages_index_++;
7158  remembered_unmapped_pages_index_ %= kRememberedUnmappedPages;
7159 }
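// Editor's note: only the low (sub-page) bits are XOR-ed, so an entry of
// remembered_unmapped_pages_ still points into the right page in a crash dump;
// the low bits read 0xc1ead ("cleared") for pages freed by compaction and
// 0x1d1ed ("I died") otherwise. The array is a fixed-size ring of
// kRememberedUnmappedPages entries.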
7160 
7161 } } // namespace v8::internal
static int SizeOfMarkedObject(HeapObject *object)
Definition: heap.h:2639
static bool IsBlack(MarkBit mark_bit)
Definition: mark-compact.h:70
byte * Address
Definition: globals.h:172
Address FromSpaceEnd()
Definition: spaces.h:2209
virtual bool ReserveSpace(int bytes)
Definition: spaces.cc:2185
intptr_t OldGenPromotionLimit(intptr_t old_gen_size)
Definition: heap.h:1366
ContextSlotCache * context_slot_cache()
Definition: isolate.h:838
const uint32_t kShortcutTypeTag
Definition: objects.h:496
void GarbageCollectionEpilogue()
Definition: heap.cc:419
static const int kPointerFieldsEndOffset
Definition: objects.h:4977
MUST_USE_RESULT MaybeObject * CopyCode(Code *code)
Definition: heap.cc:3546
void set_elements_kind(ElementsKind elements_kind)
Definition: objects.h:4677
static uchar TrailSurrogate(int char_code)
Definition: unicode.h:146
virtual intptr_t Size()
Definition: spaces.h:2491
static const int kMaxLength
Definition: objects.h:2301
Code * builtin(Name name)
Definition: builtins.h:312
TranscendentalCache * transcendental_cache() const
Definition: isolate.h:826
static int NumberOfHandles()
Definition: handles.cc:48
#define SLOW_ASSERT(condition)
Definition: checks.h:276
const intptr_t kSmiTagMask
Definition: v8.h:3855
void Reset()
Definition: flags.cc:1446
static uchar LeadSurrogate(int char_code)
Definition: unicode.h:143
const intptr_t kDoubleAlignmentMask
Definition: v8globals.h:53
static const int kCodeEntryOffset
Definition: objects.h:5981
MUST_USE_RESULT MaybeObject * AllocateFixedArray(int length, PretenureFlag pretenure)
Definition: heap.cc:4712
void TearDown()
Definition: heap.cc:6157
static const int kMaxAsciiCharCode
Definition: objects.h:7107
bool Contains(const T &elm) const
Definition: list-inl.h:178
void SetStackLimits()
Definition: heap.cc:6140
bool NextGCIsLikelyToBeFull()
Definition: heap.h:1443
static const int kZeroHash
Definition: objects.h:7163
MUST_USE_RESULT MaybeObject * AllocateExternalStringFromAscii(const ExternalAsciiString::Resource *resource)
Definition: heap.cc:3327
MUST_USE_RESULT MaybeObject * AllocateSymbol(Vector< const char > str, int chars, uint32_t hash_field)
Definition: heap-inl.h:98
CodeRange * code_range()
Definition: isolate.h:810
void Callback(MemoryChunk *page, StoreBufferEvent event)
Definition: heap.cc:1054
intptr_t Available()
Definition: spaces.h:1486
static const int kSize
Definition: objects.h:7422
#define STRUCT_TABLE_ELEMENT(NAME, Name, name)
MUST_USE_RESULT MaybeObject * CopyFixedDoubleArray(FixedDoubleArray *src)
Definition: heap-inl.h:178
intptr_t * old_pointer_space_size
Definition: heap.h:2071
MUST_USE_RESULT MaybeObject * AllocateFunctionPrototype(JSFunction *function)
Definition: heap.cc:3661
void set(int index, Object *value)
Definition: objects-inl.h:1695
CompilationCache * compilation_cache()
Definition: isolate.h:812
intptr_t * cell_space_size
Definition: heap.h:2079
static const int kMapHashShift
Definition: heap.h:2235
void PrintF(const char *format,...)
Definition: v8utils.cc:40
void PrintStack(StringStream *accumulator)
Definition: isolate.cc:713
#define ASSERT_TAG_ALIGNED(address)
Definition: v8checks.h:59
bool OldGenerationPromotionLimitReached()
Definition: heap.h:1350
void set_function_with_prototype(bool value)
Definition: objects-inl.h:2918
bool InNewSpace(Object *object)
Definition: heap-inl.h:292
static const int kPadStart
Definition: objects.h:4970
static String * cast(Object *obj)
MUST_USE_RESULT MaybeObject * Add(Key key, Object *value, PropertyDetails details)
Definition: objects.cc:12305
static const int kArgumentsObjectSize
Definition: heap.h:863
void SortUnchecked(const WhitenessWitness &)
Definition: objects.cc:5967
GlobalObject * global()
Definition: contexts.h:319
bool IsHeapIterable()
Definition: heap.cc:4979
void IdentifyNewSpaceWeakIndependentHandles(WeakSlotCallbackWithHeap f)
MUST_USE_RESULT MaybeObject * AllocateFunctionContext(int length, JSFunction *function)
Definition: heap.cc:4875
MUST_USE_RESULT MaybeObject * Allocate(Map *map, AllocationSpace space)
Definition: heap.cc:3628
MUST_USE_RESULT MaybeObject * AllocateSubString(String *buffer, int start, int end, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:3233
int * new_space_capacity
Definition: heap.h:2070
void(* ObjectSlotCallback)(HeapObject **from, HeapObject *to)
Definition: store-buffer.h:42
HandleScopeImplementer * handle_scope_implementer()
Definition: isolate.h:849
void set_opt_count(int opt_count)
unsigned Utf16Length()
Definition: unicode.cc:342
static DescriptorArray * cast(Object *obj)
static Failure * InternalError()
Definition: objects-inl.h:1011
void IterateWeakRoots(ObjectVisitor *v, VisitMode mode)
Definition: heap.cc:5636
bool SkipObject(HeapObject *object)
Definition: heap.cc:6448
MUST_USE_RESULT MaybeObject * AllocateRaw(int size_in_bytes)
Definition: spaces-inl.h:263
static int SizeOf(Map *map, HeapObject *object)
Definition: objects.h:2328
void clear_instance_descriptors()
Definition: objects-inl.h:3424
Isolate * isolate()
Definition: heap-inl.h:494
MUST_USE_RESULT MaybeObject * ReinitializeJSGlobalProxy(JSFunction *constructor, JSGlobalProxy *global)
Definition: heap.cc:4276
int unused_property_fields()
Definition: objects-inl.h:2874
void set_length(Smi *length)
Definition: objects-inl.h:4991
bool SetUp(const size_t requested_size)
Definition: spaces.cc:135
MUST_USE_RESULT MaybeObject * CopyFixedDoubleArrayWithMap(FixedDoubleArray *src, Map *map)
Definition: heap.cc:4630
MUST_USE_RESULT MaybeObject * AllocateGlobalObject(JSFunction *constructor)
Definition: heap.cc:4071
void Prepare(GCTracer *tracer)
void set_scan_on_scavenge(bool scan)
Definition: spaces-inl.h:185
static Smi * FromInt(int value)
Definition: objects-inl.h:973
virtual intptr_t Waste()
Definition: spaces.h:1503
#define LOG(isolate, Call)
Definition: log.h:81
MUST_USE_RESULT MaybeObject * AllocateJSFunctionProxy(Object *handler, Object *call_trap, Object *construct_trap, Object *prototype)
Definition: heap.cc:4045
const int KB
Definition: globals.h:221
void set_second(String *second, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
Definition: objects-inl.h:2448
static Object * GetObjectFromEntryAddress(Address location_of_address)
Definition: objects-inl.h:3391
MUST_USE_RESULT MaybeObject * AllocateJSObject(JSFunction *constructor, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:3931
intptr_t MaxReserved()
Definition: heap.h:465
void CompletelyClearInstanceofCache()
Definition: heap-inl.h:646
V8_DECLARE_ONCE(initialize_gc_once)
static MemoryChunk * FromAddress(Address a)
Definition: spaces.h:304
static MUST_USE_RESULT MaybeObject * Allocate(int at_least_space_for, PretenureFlag pretenure=NOT_TENURED)
void set_ic_age(int count)
void CollectAllGarbage(int flags, const char *gc_reason=NULL)
Definition: heap.cc:452
value format" "after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false, "print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false, "print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false, "report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true, "garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true, "flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true, "use incremental marking") DEFINE_bool(incremental_marking_steps, true, "do incremental marking steps") DEFINE_bool(trace_incremental_marking, false, "trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true, "Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false, "Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true, "use inline caching") DEFINE_bool(native_code_counters, false, "generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false, "Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true, "Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false, "Never perform compaction on full GC-testing only") DEFINE_bool(compact_code_space, true, "Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true, "Flush inline caches prior to mark compact collection and" "flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0, "Default seed for initializing random generator" "(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true, "allows verbose printing") DEFINE_bool(allow_natives_syntax, false, "allow natives syntax") DEFINE_bool(trace_sim, false, "Trace simulator execution") DEFINE_bool(check_icache, false, "Check icache flushes in ARM and MIPS simulator") DEFINE_int(stop_sim_at, 0, "Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8, "Stack alingment in bytes in simulator(4 or 8, 8 is default)") DEFINE_bool(trace_exception, false, "print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false, "preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true, "randomize hashes to avoid predictable hash collisions" "(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0, "Fixed seed to use to hash property keys(0 means random)" "(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false, "activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true, "generate optimized regexp code") DEFINE_bool(testing_bool_flag, true, "testing_bool_flag") DEFINE_int(testing_int_flag, 13, "testing_int_flag") DEFINE_float(testing_float_flag, 2.5, "float-flag") DEFINE_string(testing_string_flag, "Hello, world!", "string-flag") DEFINE_int(testing_prng_seed, 42, "Seed used for threading test randomness") DEFINE_string(testing_serialization_file, "/tmp/serdes", "file in which to serialize heap") DEFINE_bool(help, false, "Print usage message, including flags, on console") DEFINE_bool(dump_counters, false, "Dump counters on exit") DEFINE_string(map_counters, "", "Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT, "Pass all remaining arguments to the script.Alias for\"--\".") DEFINE_bool(debug_compile_events, 
true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information 
(implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#43"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2#define FLAG_MODE_DEFINE_DEFAULTS#1"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flag-definitions.h"1#define FLAG_FULL(ftype, ctype, nam, def, cmt)#define FLAG_READONLY(ftype, ctype, nam, def, cmt)#define DEFINE_implication(whenflag, thenflag)#define DEFINE_bool(nam, def, cmt)#define DEFINE_int(nam, def, cmt)#define DEFINE_float(nam, def, cmt)#define DEFINE_string(nam, def, cmt)#define DEFINE_args(nam, def, cmt)#define FLAG DEFINE_bool(use_strict, false,"enforce strict mode") DEFINE_bool(es5_readonly, false,"activate correct semantics for inheriting readonliness") DEFINE_bool(es52_globals, false,"activate new semantics for global var declarations") DEFINE_bool(harmony_typeof, false,"enable harmony semantics for typeof") DEFINE_bool(harmony_scoping, false,"enable harmony block scoping") DEFINE_bool(harmony_modules, false,"enable harmony modules (implies block scoping)") DEFINE_bool(harmony_proxies, false,"enable harmony proxies") DEFINE_bool(harmony_collections, false,"enable harmony collections (sets, maps, and weak maps)") DEFINE_bool(harmony, false,"enable all harmony features (except typeof)") DEFINE_implication(harmony, harmony_scoping) DEFINE_implication(harmony, harmony_modules) DEFINE_implication(harmony, harmony_proxies) DEFINE_implication(harmony, harmony_collections) DEFINE_implication(harmony_modules, harmony_scoping) DEFINE_bool(packed_arrays, false,"optimizes arrays that have no holes") DEFINE_bool(smi_only_arrays, true,"tracks arrays with only smi values") DEFINE_bool(clever_optimizations, true,"Optimize object size, Array shift, DOM strings and string +") DEFINE_bool(unbox_double_arrays, true,"automatically unbox arrays of doubles") DEFINE_bool(string_slices, true,"use string slices") DEFINE_bool(crankshaft, true,"use crankshaft") DEFINE_string(hydrogen_filter,"","optimization filter") DEFINE_bool(use_range, true,"use hydrogen range analysis") DEFINE_bool(eliminate_dead_phis, true,"eliminate dead phis") DEFINE_bool(use_gvn, true,"use hydrogen global value numbering") DEFINE_bool(use_canonicalizing, true,"use hydrogen instruction canonicalizing") DEFINE_bool(use_inlining, true,"use function inlining") 
DEFINE_int(max_inlined_source_size, 600,"maximum source size in bytes considered for a single inlining") DEFINE_int(max_inlined_nodes, 196,"maximum number of AST nodes considered for a single inlining") DEFINE_int(max_inlined_nodes_cumulative, 196,"maximum cumulative number of AST nodes considered for inlining") DEFINE_bool(loop_invariant_code_motion, true,"loop invariant code motion") DEFINE_bool(collect_megamorphic_maps_from_stub_cache, true,"crankshaft harvests type feedback from stub cache") DEFINE_bool(hydrogen_stats, false,"print statistics for hydrogen") DEFINE_bool(trace_hydrogen, false,"trace generated hydrogen to file") DEFINE_string(trace_phase,"Z","trace generated IR for specified phases") DEFINE_bool(trace_inlining, false,"trace inlining decisions") DEFINE_bool(trace_alloc, false,"trace register allocator") DEFINE_bool(trace_all_uses, false,"trace all use positions") DEFINE_bool(trace_range, false,"trace range analysis") DEFINE_bool(trace_gvn, false,"trace global value numbering") DEFINE_bool(trace_representation, false,"trace representation types") DEFINE_bool(stress_pointer_maps, false,"pointer map for every instruction") DEFINE_bool(stress_environments, false,"environment for every instruction") DEFINE_int(deopt_every_n_times, 0,"deoptimize every n times a deopt point is passed") DEFINE_bool(trap_on_deopt, false,"put a break point before deoptimizing") DEFINE_bool(deoptimize_uncommon_cases, true,"deoptimize uncommon cases") DEFINE_bool(polymorphic_inlining, true,"polymorphic inlining") DEFINE_bool(use_osr, true,"use on-stack replacement") DEFINE_bool(array_bounds_checks_elimination, false,"perform array bounds checks elimination") DEFINE_bool(array_index_dehoisting, false,"perform array index dehoisting") DEFINE_bool(trace_osr, false,"trace on-stack replacement") DEFINE_int(stress_runs, 0,"number of stress runs") DEFINE_bool(optimize_closures, true,"optimize closures") DEFINE_bool(inline_construct, true,"inline constructor calls") DEFINE_bool(inline_arguments, true,"inline functions with arguments object") DEFINE_int(loop_weight, 1,"loop weight for representation inference") DEFINE_bool(optimize_for_in, true,"optimize functions containing for-in loops") DEFINE_bool(experimental_profiler, true,"enable all profiler experiments") DEFINE_bool(watch_ic_patching, false,"profiler considers IC stability") DEFINE_int(frame_count, 1,"number of stack frames inspected by the profiler") DEFINE_bool(self_optimization, false,"primitive functions trigger their own optimization") DEFINE_bool(direct_self_opt, false,"call recompile stub directly when self-optimizing") DEFINE_bool(retry_self_opt, false,"re-try self-optimization if it failed") DEFINE_bool(count_based_interrupts, false,"trigger profiler ticks based on counting instead of timing") DEFINE_bool(interrupt_at_exit, false,"insert an interrupt check at function exit") DEFINE_bool(weighted_back_edges, false,"weight back edges by jump distance for interrupt triggering") DEFINE_int(interrupt_budget, 5900,"execution budget before interrupt is triggered") DEFINE_int(type_info_threshold, 15,"percentage of ICs that must have type info to allow optimization") DEFINE_int(self_opt_count, 130,"call count before self-optimization") DEFINE_implication(experimental_profiler, watch_ic_patching) DEFINE_implication(experimental_profiler, self_optimization) DEFINE_implication(experimental_profiler, retry_self_opt) DEFINE_implication(experimental_profiler, count_based_interrupts) DEFINE_implication(experimental_profiler, interrupt_at_exit) 
DEFINE_implication(experimental_profiler, weighted_back_edges) DEFINE_bool(trace_opt_verbose, false,"extra verbose compilation tracing") DEFINE_implication(trace_opt_verbose, trace_opt) DEFINE_bool(debug_code, false,"generate extra code (assertions) for debugging") DEFINE_bool(code_comments, false,"emit comments in code disassembly") DEFINE_bool(enable_sse2, true,"enable use of SSE2 instructions if available") DEFINE_bool(enable_sse3, true,"enable use of SSE3 instructions if available") DEFINE_bool(enable_sse4_1, true,"enable use of SSE4.1 instructions if available") DEFINE_bool(enable_cmov, true,"enable use of CMOV instruction if available") DEFINE_bool(enable_rdtsc, true,"enable use of RDTSC instruction if available") DEFINE_bool(enable_sahf, true,"enable use of SAHF instruction if available (X64 only)") DEFINE_bool(enable_vfp3, true,"enable use of VFP3 instructions if available - this implies ""enabling ARMv7 instructions (ARM only)") DEFINE_bool(enable_armv7, true,"enable use of ARMv7 instructions if available (ARM only)") DEFINE_bool(enable_fpu, true,"enable use of MIPS FPU instructions if available (MIPS only)") DEFINE_string(expose_natives_as, NULL,"expose natives in global object") DEFINE_string(expose_debug_as, NULL,"expose debug in global object") DEFINE_bool(expose_gc, false,"expose gc extension") DEFINE_bool(expose_externalize_string, false,"expose externalize string extension") DEFINE_int(stack_trace_limit, 10,"number of stack frames to capture") DEFINE_bool(builtins_in_stack_traces, false,"show built-in functions in stack traces") DEFINE_bool(disable_native_files, false,"disable builtin natives files") DEFINE_bool(inline_new, true,"use fast inline allocation") DEFINE_bool(stack_trace_on_abort, true,"print a stack trace if an assertion failure occurs") DEFINE_bool(trace, false,"trace function calls") DEFINE_bool(mask_constants_with_cookie, true,"use random jit cookie to mask large constants") DEFINE_bool(lazy, true,"use lazy compilation") DEFINE_bool(trace_opt, false,"trace lazy optimization") DEFINE_bool(trace_opt_stats, false,"trace lazy optimization statistics") DEFINE_bool(opt, true,"use adaptive optimizations") DEFINE_bool(always_opt, false,"always try to optimize functions") DEFINE_bool(prepare_always_opt, false,"prepare for turning on always opt") DEFINE_bool(trace_deopt, false,"trace deoptimization") DEFINE_int(min_preparse_length, 1024,"minimum length for automatic enable preparsing") DEFINE_bool(always_full_compiler, false,"try to use the dedicated run-once backend for all code") DEFINE_bool(trace_bailout, false,"print reasons for falling back to using the classic V8 backend") DEFINE_bool(compilation_cache, true,"enable compilation cache") DEFINE_bool(cache_prototype_transitions, true,"cache prototype transitions") DEFINE_bool(trace_debug_json, false,"trace debugging JSON request/response") DEFINE_bool(debugger_auto_break, true,"automatically set the debug break flag when debugger commands are ""in the queue") DEFINE_bool(enable_liveedit, true,"enable liveedit experimental feature") DEFINE_bool(break_on_abort, true,"always cause a debug break before aborting") DEFINE_int(stack_size, kPointerSize *123,"default size of stack region v8 is allowed to use (in kBytes)") DEFINE_int(max_stack_trace_source_length, 300,"maximum length of function source code printed in a stack trace.") DEFINE_bool(always_inline_smi_code, false,"always inline smi code in non-opt code") DEFINE_int(max_new_space_size, 0,"max size of the new generation (in kBytes)") DEFINE_int(max_old_space_size, 
0,"max size of the old generation (in Mbytes)") DEFINE_int(max_executable_size, 0,"max size of executable memory (in Mbytes)") DEFINE_bool(gc_global, false,"always perform global GCs") DEFINE_int(gc_interval,-1,"garbage collect after <n> allocations") DEFINE_bool(trace_gc, false,"print one trace line following each garbage collection") DEFINE_bool(trace_gc_nvp, false,"print one detailed trace line in name=value format ""after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false,"print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false,"print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false,"report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true,"garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true,"flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true,"use incremental marking") DEFINE_bool(incremental_marking_steps, true,"do incremental marking steps") DEFINE_bool(trace_incremental_marking, false,"trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true,"Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false,"Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true,"use inline caching") DEFINE_bool(native_code_counters, false,"generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false,"Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true,"Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false,"Never perform compaction on full GC - testing only") DEFINE_bool(compact_code_space, true,"Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true,"Flush inline caches prior to mark compact collection and ""flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0,"Default seed for initializing random generator ""(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true,"allows verbose printing") DEFINE_bool(allow_natives_syntax, false,"allow natives syntax") DEFINE_bool(trace_sim, false,"Trace simulator execution") DEFINE_bool(check_icache, false,"Check icache flushes in ARM and MIPS simulator") DEFINE_int(stop_sim_at, 0,"Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8,"Stack alingment in bytes in simulator (4 or 8, 8 is default)") DEFINE_bool(trace_exception, false,"print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false,"preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true,"randomize hashes to avoid predictable hash collisions ""(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0,"Fixed seed to use to hash property keys (0 means random)""(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false,"activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true,"generate optimized regexp code") DEFINE_bool(testing_bool_flag, true,"testing_bool_flag") DEFINE_int(testing_int_flag, 13,"testing_int_flag") DEFINE_float(testing_float_flag, 2.5,"float-flag") DEFINE_string(testing_string_flag,"Hello, world!","string-flag") DEFINE_int(testing_prng_seed, 42,"Seed used for threading test randomness") DEFINE_string(testing_serialization_file,"/tmp/serdes","file 
in which to serialize heap") DEFINE_bool(help, false,"Print usage message, including flags, on console") DEFINE_bool(dump_counters, false,"Dump counters on exit") DEFINE_string(map_counters,"","Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT,"Pass all remaining arguments to the script. Alias for \"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") 
DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#47"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2 namespace{struct Flag{enum FlagType{TYPE_BOOL, TYPE_INT, TYPE_FLOAT, TYPE_STRING, TYPE_ARGS} name
Definition: flags.cc:1349
Address FromSpaceStart()
Definition: spaces.h:2208
void init_instance_descriptors()
Definition: objects-inl.h:3419
bool is_logging()
Definition: log.h:273
static HeapObject * cast(Object *obj)
Map * MapForExternalArrayType(ExternalArrayType array_type)
Definition: heap.cc:2938
void SetNumberStringCache(Object *number, String *str)
Definition: heap.cc:2879
static const byte kArgumentMarker
Definition: objects.h:7740
void AgeInlineCaches()
Definition: heap.h:1599
MUST_USE_RESULT MaybeObject * AllocateCodeCache()
Definition: heap.cc:2047
void set_pre_allocated_property_fields(int value)
Definition: objects-inl.h:2856
void CallOnce(OnceType *once, NoArgFunction init_func)
Definition: once.h:105
static const byte kUndefined
Definition: objects.h:7741
T Max(T a, T b)
Definition: utils.h:222
static AccessorPair * cast(Object *obj)
const int kVariableSizeSentinel
Definition: objects.h:182
static const int kAlignedSize
Definition: objects.h:5704
RootListIndex RootIndexForExternalArrayType(ExternalArrayType array_type)
Definition: heap.cc:2943
MUST_USE_RESULT MaybeObject * LookupAsciiSymbol(Vector< const char > str)
Definition: heap.cc:5336
static Failure * OutOfMemoryException()
Definition: objects-inl.h:1021
static bool IsOutsideAllocatedSpace(void *pointer)
bool SetUp(intptr_t max_capacity, intptr_t capacity_executable)
Definition: spaces.cc:273
bool IsAsciiRepresentation()
Definition: objects-inl.h:289
static ExternalTwoByteString * cast(Object *obj)
void VisitExternalResources(v8::ExternalResourceVisitor *visitor)
Definition: heap.cc:1476
intptr_t OldGenAllocationLimit(intptr_t old_gen_size)
Definition: heap.h:1376
static Map * cast(Object *obj)
void set_start_position(int value)
static const int kEmptyHashField
Definition: objects.h:7159
void ResetAllocationInfo()
Definition: spaces.cc:1189
MUST_USE_RESULT MaybeObject * AllocateByteArray(int length, PretenureFlag pretenure)
Definition: heap.cc:3406
static const byte kTheHole
Definition: objects.h:7738
static ByteArray * cast(Object *obj)
bool has_fast_object_elements()
Definition: objects.h:4696
Flag flags[]
Definition: flags.cc:1467
Builtins * builtins()
Definition: isolate.h:909
static Object * Lookup(FixedArray *cache, String *string, String *pattern)
Definition: heap.cc:2732
void set_end_position(int end_position)
void set_context(Object *context)
Definition: objects-inl.h:4123
static FreeSpace * cast(Object *obj)
void mark_out_of_memory()
Definition: isolate.h:1421
void RemoveGCPrologueCallback(GCEpilogueCallback callback)
Definition: heap.cc:6247
Bootstrapper * bootstrapper()
Definition: isolate.h:803
void Set(int descriptor_number, Descriptor *desc, const WhitenessWitness &)
Definition: objects-inl.h:2079
bool InFromSpace(Object *object)
Definition: heap-inl.h:306
MUST_USE_RESULT MaybeObject * Uint32ToString(uint32_t value, bool check_number_string_cache=true)
Definition: heap.cc:2929
void Relocate(intptr_t delta)
Definition: objects.cc:8112
PromotionQueue * promotion_queue()
Definition: heap.h:1095
const int kMaxInt
Definition: globals.h:224
void SetTop(Object ***top)
Definition: store-buffer.h:99
static Foreign * cast(Object *obj)
static bool enabled()
Definition: serialize.h:480
void set_map(Map *value)
Definition: objects-inl.h:1135
Map * SymbolMapForString(String *str)
Definition: heap.cc:4383
void set_ic_with_type_info_count(int count)
intptr_t inline_allocation_limit_step()
Definition: spaces.h:2272
intptr_t * code_space_size
Definition: heap.h:2075
static const int kSize
Definition: objects.h:7880
void AddGCEpilogueCallback(GCEpilogueCallback callback, GCType gc_type_filter)
Definition: heap.cc:6259
MUST_USE_RESULT MaybeObject * AllocateRawAsciiString(int length, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:4472
byte * instruction_end()
Definition: objects-inl.h:4381
Context * global_context()
Definition: contexts.cc:58
FlagType type_
Definition: flags.cc:1351
void IterateStrongRoots(ObjectVisitor *v, VisitMode mode)
Definition: heap.cc:5648
#define ASSERT(condition)
Definition: checks.h:270
bool InSpace(Address addr, AllocationSpace space)
Definition: heap.cc:5242
void(* GCPrologueCallback)(GCType type, GCCallbackFlags flags)
Definition: v8.h:2729
v8::Handle< v8::Value > Print(const v8::Arguments &args)
static void IncrementLiveBytesFromGC(Address address, int by)
Definition: spaces.h:471
void Step(intptr_t allocated, CompletionAction action)
#define PROFILE(isolate, Call)
Definition: cpu-profiler.h:190
KeyedLookupCache * keyed_lookup_cache()
Definition: isolate.h:834
static const int kReduceMemoryFootprintMask
Definition: heap.h:1051
void AddGCPrologueCallback(GCEpilogueCallback callback, GCType gc_type_filter)
Definition: heap.cc:6239
MUST_USE_RESULT MaybeObject * LookupTwoByteSymbol(Vector< const uc16 > str)
Definition: heap.cc:5371
ExternalArrayType
Definition: v8.h:1407
unsigned short uint16_t
Definition: unicode.cc:46
void IterateStrongRoots(ObjectVisitor *v)
virtual Object * RetainAs(Object *object)
Definition: heap.cc:1144
static Context * cast(Object *context)
Definition: contexts.h:207
static const int kMaxLength
Definition: objects.h:7375
const intptr_t kCodeAlignment
Definition: v8globals.h:67
MUST_USE_RESULT MaybeObject * LookupSymbol(Vector< const char > str)
Definition: heap.cc:5321
bool SetUp(bool create_heap_objects)
Definition: heap.cc:6028
ThreadManager * thread_manager()
Definition: isolate.h:867
static bool IsEnabled()
Definition: snapshot.h:47
MUST_USE_RESULT MaybeObject * AllocateBlockContext(JSFunction *function, Context *previous, ScopeInfo *info)
Definition: heap.cc:4929
int SizeFromMap(Map *map)
Definition: objects-inl.h:2809
static const int kMaxPreAllocatedPropertyFields
Definition: objects.h:4948
MUST_USE_RESULT MaybeObject * AllocateStringFromUtf8Slow(Vector< const char > str, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:4322
intptr_t CommittedMemoryExecutable()
Definition: heap.cc:209
#define CHECK(condition)
Definition: checks.h:56
ObjectIterator * next()
Definition: heap.cc:6381
void set_is_undetectable()
Definition: objects.h:4657
static void Iterate(ObjectVisitor *visitor)
Definition: serialize.cc:1203
static const int kSize
Definition: objects.h:7921
void VisitPointers(Object **start, Object **end)
Definition: heap.cc:974
#define STRING_TYPE_LIST(V)
Definition: objects.h:306
MUST_USE_RESULT MaybeObject * CopyJSObject(JSObject *source)
Definition: heap.cc:4144
void set_first(String *first, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
Definition: objects-inl.h:2432
static ExternalAsciiString * cast(Object *obj)
static const int kMaxSize
Definition: objects.h:7372
static const int kPageSize
Definition: spaces.h:695
void set_foreign_address(Address value)
Definition: objects-inl.h:4309
void ReserveSpace(int new_space_size, int pointer_space_size, int data_space_size, int code_space_size, int map_space_size, int cell_space_size, int large_object_size)
Definition: heap.cc:610
friend class GCTracer
Definition: heap.h:2049
static Code * cast(Object *obj)
virtual const uint16_t * data() const =0
MUST_USE_RESULT MaybeObject * AllocateInternalSymbol(unibrow::CharacterStream *buffer, int chars, uint32_t hash_field)
Definition: heap.cc:4407
static bool IsAtEnd(Address addr)
Definition: spaces.h:1765
void IterateAndMarkPointersToFromSpace(Address start, Address end, ObjectSlotCallback callback)
Definition: heap.cc:5427
static PolymorphicCodeCache * cast(Object *obj)
MUST_USE_RESULT MaybeObject * AllocateJSArrayWithElements(FixedArrayBase *array_base, ElementsKind elements_kind, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:4010
ArrayStorageAllocationMode
Definition: heap.h:430
static const int kSize
Definition: objects.h:5821
virtual Object * RetainAs(Object *object)=0
static Object ** RawField(HeapObject *obj, int offset)
Definition: objects-inl.h:963
StoreBuffer * store_buffer()
Definition: heap.h:1516
static const byte kOther
Definition: objects.h:7742
MUST_USE_RESULT MaybeObject * AllocateHeapNumber(double value, PretenureFlag pretenure)
Definition: heap.cc:2407
static Smi * cast(Object *object)
void set_function_token_position(int function_token_position)
void set_global(GlobalObject *global)
Definition: contexts.h:324
#define STRING_TYPE_ELEMENT(type, size, name, camel_name)
#define V8_INFINITY
Definition: globals.h:32
static bool IsAscii(const char *chars, int length)
Definition: objects.h:7198
static MUST_USE_RESULT MaybeObject * InitializeIntrinsicFunctionNames(Heap *heap, Object *dictionary)
Definition: runtime.cc:13538
bool CollectGarbage(AllocationSpace space, GarbageCollector collector, const char *gc_reason, const char *collector_reason)
Definition: heap.cc:491
void set_closure(JSFunction *closure)
Definition: contexts.h:302
static MarkBit MarkBitFrom(Address addr)
StackGuard * stack_guard()
Definition: isolate.h:819
void set_context_exit_happened(bool context_exit_happened)
Definition: isolate.h:1030
MUST_USE_RESULT MaybeObject * AllocateWithContext(JSFunction *function, Context *previous, JSObject *extension)
Definition: heap.cc:4912
void Free(MemoryChunk *chunk)
Definition: spaces.cc:595
void set_size(int value)
static const int kStringSplitCacheSize
Definition: heap.h:2511
intptr_t * lo_space_size
Definition: heap.h:2081
MUST_USE_RESULT MaybeObject * CopyFixedArrayWithMap(FixedArray *src, Map *map)
Definition: heap.cc:4604
uint8_t byte
Definition: globals.h:171
Object * InObjectPropertyAtPut(int index, Object *value, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
Definition: objects-inl.h:1566
MUST_USE_RESULT MaybeObject * AllocateConsString(String *first, String *second)
Definition: heap.cc:3114
static Struct * cast(Object *that)
void InitializeBody(int object_size, Object *value)
Definition: objects-inl.h:4290
MUST_USE_RESULT MaybeObject * NumberToString(Object *number, bool check_number_string_cache=true)
Definition: heap.cc:2899
static const int kMinLength
Definition: objects.h:7433
UnicodeCache * unicode_cache()
Definition: isolate.h:855
T ** location() const
Definition: handles.h:75
String *(* ExternalStringTableUpdaterCallback)(Heap *heap, Object **pointer)
Definition: heap.h:256
String * GetKey(int descriptor_number)
Definition: objects-inl.h:1970
uintptr_t real_jslimit()
Definition: execution.h:220
static const int kEndMarker
Definition: heap.h:2066
bool IdleNotification(int hint)
Definition: heap.cc:5015
MUST_USE_RESULT MaybeObject * AllocateStringFromAscii(Vector< const char > str, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:4302
virtual size_t length() const =0
#define UNREACHABLE()
Definition: checks.h:50
void EnsureHeapIsIterable()
Definition: heap.cc:4985
static const int kArgumentsObjectSizeStrict
Definition: heap.h:866
T * start() const
Definition: utils.h:389
MUST_USE_RESULT MaybeObject * AllocateUninitializedFixedDoubleArray(int length, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:4759
bool PostGarbageCollectionProcessing(GarbageCollector collector)
STATIC_ASSERT((FixedDoubleArray::kHeaderSize &kDoubleAlignmentMask)==0)
int(* HeapObjectCallback)(HeapObject *obj)
Definition: v8globals.h:245
bool always_allocate()
Definition: heap.h:507
static const int kMaxLength
Definition: objects.h:3652
const char * IntToCString(int n, Vector< char > buffer)
Definition: conversions.cc:123
void set_ic_total_count(int count)
void set_unchecked(int index, Smi *value)
Definition: objects-inl.h:1848
void Register(StaticVisitorBase::VisitorId id, Callback callback)
intptr_t CommittedMemory()
Definition: spaces.h:1471
static bool IsMarked(HeapObject *object)
Definition: heap.h:2618
#define MUST_USE_RESULT
Definition: globals.h:360
bool Contains(Address a)
Definition: spaces-inl.h:178
void IteratePointersToNewSpace(ObjectSlotCallback callback)
#define HEAP_PROFILE(heap, call)
Definition: heap-profiler.h:39
void RemoveGCEpilogueCallback(GCEpilogueCallback callback)
Definition: heap.cc:6267
static SlicedString * cast(Object *obj)
Address ToSpaceEnd()
Definition: spaces.h:2213
void SetFlag(int flag)
Definition: spaces.h:417
RuntimeProfiler * runtime_profiler()
Definition: isolate.h:811
intptr_t CommittedMemory()
Definition: spaces.h:2131
int pre_allocated_property_fields()
Definition: objects-inl.h:2804
void set_expected_nof_properties(int value)
Address ToSpaceStart()
Definition: spaces.h:2212
void set_instruction_size(int value)
void InitializeBody(int object_size)
Definition: objects-inl.h:1629
virtual intptr_t SizeOfObjects()
Definition: spaces.h:1495
void LowerInlineAllocationLimit(intptr_t step)
Definition: spaces.h:2191
static const int kStoreBufferSize
Definition: store-buffer.h:85
static const uchar kMaxNonSurrogateCharCode
Definition: unicode.h:133
static bool IsValid(intptr_t value)
Definition: objects-inl.h:1051
void set_resource(const Resource *buffer)
Definition: objects-inl.h:2473
#define MAKE_CASE(NAME, Name, name)
void CollectAllAvailableGarbage(const char *gc_reason=NULL)
Definition: heap.cc:462
bool ConfigureHeapDefault()
Definition: heap.cc:5792
void set_aliased_context_slot(int count)
ElementsKind GetElementsKind()
Definition: objects-inl.h:4503
static const int kNoGCFlags
Definition: heap.h:1049
MUST_USE_RESULT MaybeObject * AllocateFixedArrayWithHoles(int length, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:4720
const int kPointerSize
Definition: globals.h:234
virtual intptr_t Size()
Definition: spaces.h:2108
MemoryAllocator * memory_allocator()
Definition: isolate.h:830
MUST_USE_RESULT MaybeObject * AllocateInitialMap(JSFunction *fun)
Definition: heap.cc:3783
static Oddball * cast(Object *obj)
static Address & Address_at(Address addr)
Definition: v8memory.h:71
MUST_USE_RESULT MaybeObject * AllocateForeign(Address address, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:2992
const char * DoubleToCString(double v, Vector< char > buffer)
Definition: conversions.cc:68
MUST_USE_RESULT MaybeObject * AllocateModuleContext(Context *previous, ScopeInfo *scope_info)
Definition: heap.cc:4859
intptr_t OffsetFrom(T x)
Definition: utils.h:126
static UnseededNumberDictionary * cast(Object *obj)
Definition: objects.h:3255
void QueueMemoryChunkForFree(MemoryChunk *chunk)
Definition: heap.cc:7093
MUST_USE_RESULT MaybeObject * AllocateGlobalContext()
Definition: heap.cc:4844
MUST_USE_RESULT MaybeObject * AllocateExternalArray(int length, ExternalArrayType array_type, void *external_pointer, PretenureFlag pretenure)
Definition: heap.cc:3461
#define CONSTANT_SYMBOL_ELEMENT(name, contents)
const int kHeapObjectTag
Definition: v8.h:3848
intptr_t * cell_space_capacity
Definition: heap.h:2080
bool IsAligned(T value, U alignment)
Definition: utils.h:206
intptr_t * memory_allocator_size
Definition: heap.h:2087
T Remove(int i)
Definition: list-inl.h:116
static SeqAsciiString * cast(Object *obj)
void set_inobject_properties(int value)
Definition: objects-inl.h:2850
void set_hash_field(uint32_t value)
Definition: objects-inl.h:2267
friend class Page
Definition: heap.h:2053
void Iterate(ObjectVisitor *v)
Definition: isolate.cc:475
GlobalHandles * global_handles()
Definition: isolate.h:865
intptr_t Available()
Definition: spaces.h:2137
virtual bool ReserveSpace(int bytes)
Definition: spaces.cc:2242
void IncrementYoungSurvivorsCounter(int survived)
Definition: heap.h:1437
MUST_USE_RESULT MaybeObject * AllocatePolymorphicCodeCache()
Definition: heap.cc:2058
intptr_t * code_space_capacity
Definition: heap.h:2076
void VisitPointer(Object **p)
Definition: heap.cc:972
void Update(Map *map, String *name, int field_offset)
Definition: heap.cc:6982
const uint32_t kShortcutTypeMask
Definition: objects.h:492
static Handle< Object > SetLocalPropertyIgnoreAttributes(Handle< JSObject > object, Handle< String > key, Handle< Object > value, PropertyAttributes attributes)
Definition: objects.cc:2924
void set_end_position(int value)
int length() const
Definition: utils.h:383
OldSpace * old_pointer_space()
Definition: heap.h:500
bool ConfigureHeap(int max_semispace_size, intptr_t max_old_gen_size, intptr_t max_executable_size)
Definition: heap.cc:5722
T RoundUp(T x, intptr_t m)
Definition: utils.h:150
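RoundUp, together with IsAligned and IsPowerOf2 elsewhere in this index, are the small power-of-two alignment helpers from utils.h. The snippet below is a self-contained illustration of their conventional semantics (standalone C++ stand-ins, not the verbatim in-tree implementations):

    #include <cassert>
    #include <cstdint>

    // Illustrative stand-ins for the utils.h helpers (power-of-two alignment only).
    static bool IsPowerOf2(intptr_t x) { return x != 0 && (x & (x - 1)) == 0; }
    static bool IsAligned(intptr_t value, intptr_t alignment) {
      return (value & (alignment - 1)) == 0;
    }
    static intptr_t RoundUp(intptr_t x, intptr_t m) {
      // Round x up to the next multiple of m, where m is a power of two.
      return (x + m - 1) & -m;
    }

    int main() {
      assert(IsPowerOf2(8));
      assert(RoundUp(13, 8) == 16);                // 13 -> next 8-byte boundary
      assert(IsAligned(RoundUp(13, 8), 8));
      return 0;
    }

This is the kind of arithmetic the heap relies on when padding object sizes up to kObjectAlignment or kDoubleAlignment boundaries.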
intptr_t * map_space_size
Definition: heap.h:2077
#define LUMP_OF_MEMORY
static double TimeCurrentMillis()
static FixedDoubleArray * cast(Object *obj)
MUST_USE_RESULT MaybeObject * AllocateTypeFeedbackInfo()
Definition: heap.cc:2074
bool CreateApiObjects()
Definition: heap.cc:2466
size_t size() const
Definition: spaces.h:504
GCType
Definition: v8.h:2718
HeapState gc_state()
Definition: heap.h:1280
bool IsTwoByteRepresentation()
Definition: objects-inl.h:295
static const int kSize
Definition: objects.h:4972
#define SYMBOL_LIST(V)
Definition: heap.h:159
void set_age_mark(Address mark)
Definition: spaces.h:2164
void IterateAllRoots(ObjectVisitor *v)
static const int kMaxNonCodeHeapObjectSize
Definition: spaces.h:701
bool contains(Address address)
Definition: spaces.h:830
static const int kMinLength
Definition: objects.h:7485
OldSpace * code_space()
Definition: heap.h:502
static const int kMakeHeapIterableMask
Definition: heap.h:1056
MUST_USE_RESULT MaybeObject * AllocateJSArrayAndStorage(ElementsKind elements_kind, int length, int capacity, ArrayStorageAllocationMode mode=DONT_INITIALIZE_ARRAY_ELEMENTS, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:3964
void EnsureSpace(intptr_t space_needed)
void Iterate(ObjectVisitor *v)
Definition: heap-inl.h:581
void set_kind(byte kind)
Definition: objects-inl.h:1418
#define V8_PTR_PREFIX
Definition: globals.h:196
static const int kNextFunctionLinkOffset
Definition: objects.h:5989
bool InToSpace(Object *object)
Definition: heap-inl.h:311
bool UncommitFromSpace()
Definition: heap.h:523
void CopyFrom(const CodeDesc &desc)
Definition: objects.cc:8120
static int SizeFor(int length)
Definition: objects.h:2369
static int IterateBody(Map *map, HeapObject *obj)
void set_start_position_and_type(int value)
WriteBarrierMode GetWriteBarrierMode(const AssertNoAllocation &)
Definition: objects-inl.h:1769
bool IsPowerOf2(T x)
Definition: utils.h:50
void set_resource(const Resource *buffer)
Definition: objects-inl.h:2505
PropertyDetails GetDetails(int descriptor_number)
Definition: objects-inl.h:1996
void GarbageCollectionPrologue()
Definition: heap.cc:386
static void Clear(FixedArray *cache)
Definition: heap.cc:2794
void Iterate(ObjectVisitor *v)
Definition: v8threads.cc:370
void Iterate(ObjectVisitor *v)
bool is_call_stub()
Definition: objects.h:4264
bool HasBeenSetUp()
Definition: heap.cc:228
byte * relocation_start()
Definition: objects-inl.h:4402
LargeObjectSpace * lo_space()
Definition: heap.h:505
static ScopeInfo * Empty()
Definition: scopeinfo.cc:152
const Address kFromSpaceZapValue
Definition: v8globals.h:91
bool ToSpaceContains(Address address)
Definition: spaces.h:2215
MUST_USE_RESULT MaybeObject * AllocateJSMessageObject(String *type, JSArray *arguments, int start_position, int end_position, Object *script, Object *stack_trace, Object *stack_frames)
Definition: heap.cc:3045
DeoptimizerData * deoptimizer_data()
Definition: isolate.h:823
static MUST_USE_RESULT MaybeObject * Allocate(int at_least_space_for)
Callback GetVisitorById(StaticVisitorBase::VisitorId id)
MUST_USE_RESULT MaybeObject * AllocateExternalStringFromTwoByte(const ExternalTwoByteString::Resource *resource)
Definition: heap.cc:3352
MUST_USE_RESULT MaybeObject * AllocatePartialMap(InstanceType instance_type, int instance_size)
Definition: heap.cc:1989
MUST_USE_RESULT MaybeObject * CreateCode(const CodeDesc &desc, Code::Flags flags, Handle< Object > self_reference, bool immovable=false)
Definition: heap.cc:3483
void PerformScavenge()
Definition: heap.cc:564
virtual bool SkipObject(HeapObject *object)=0
DescriptorLookupCache * descriptor_lookup_cache()
Definition: isolate.h:842
void set_map_no_write_barrier(Map *value)
Definition: objects-inl.h:1146
void set_check_type(CheckType value)
Definition: objects-inl.h:3216
static JSMessageObject * cast(Object *obj)
Definition: objects-inl.h:4361
static const int kAbortIncrementalMarkingMask
Definition: heap.h:1052
static const int kNonWeakFieldsEndOffset
Definition: objects.h:5988
Vector< const char > CStrVector(const char *data)
Definition: utils.h:525
void FreeQueuedChunks()
Definition: heap.cc:7099
CellSpace * cell_space()
Definition: heap.h:504
static Local< Context > ToLocal(v8::internal::Handle< v8::internal::Context > obj)
intptr_t CommittedMemory()
Definition: heap.cc:197
Object * GetNumberStringCache(Object *number)
Definition: heap.cc:2859
intptr_t SizeOfObjects()
Definition: heap.cc:410
MUST_USE_RESULT MaybeObject * AllocateRawFixedDoubleArray(int length, PretenureFlag pretenure)
Definition: heap.cc:4797
static int SizeFor(int length)
Definition: objects.h:2288
void SetArea(Address area_start, Address area_end)
Definition: spaces.h:510
static const int kMaxSize
Definition: objects.h:7318
void IterateNewSpaceStrongAndDependentRoots(ObjectVisitor *v)
Context * context()
Definition: isolate.h:518
bool IsFastSmiOrObjectElementsKind(ElementsKind kind)
void RecordWrites(Address address, int start, int len)
Definition: heap-inl.h:340
static SeqTwoByteString * cast(Object *obj)
static const int kSize
Definition: objects.h:7733
static JSFunctionResultCache * cast(Object *obj)
void Iterate(ObjectVisitor *v)
void(* GCEpilogueCallback)(GCType type, GCCallbackFlags flags)
Definition: v8.h:2730
intptr_t get_max_alive_after_gc()
Definition: heap.h:1507
void UpdateReferencesInExternalStringTable(ExternalStringTableUpdaterCallback updater_func)
Definition: heap.cc:1332
MUST_USE_RESULT MaybeObject * AllocateJSProxy(Object *handler, Object *prototype)
Definition: heap.cc:4025
void ProcessWeakReferences(WeakObjectRetainer *retainer)
Definition: heap.cc:1395
void ClearNormalizedMapCaches()
Definition: heap.cc:720
static const int kHeaderSize
Definition: objects.h:2233
static void VisitPointer(Heap *heap, Object **p)
Definition: heap.cc:1500
MUST_USE_RESULT MaybeObject * NumberFromDouble(double value, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:2971
bool SlowContains(Address addr)
Definition: spaces.h:2536
static const int kLength
Definition: heap.h:2233
intptr_t * old_data_space_capacity
Definition: heap.h:2074
static int SizeFor(int length)
Definition: objects.h:3610
int signbit(double x)
intptr_t Available()
Definition: heap.cc:216
Space * owner() const
Definition: spaces.h:321
MUST_USE_RESULT MaybeObject * AllocateArgumentsObject(Object *callee, int length)
Definition: heap.cc:3709
int Lookup(Map *map, String *name)
Definition: heap.cc:6970
InnerPointerToCodeCache * inner_pointer_to_code_cache()
Definition: isolate.h:859
void set_instance_type(InstanceType value)
Definition: objects-inl.h:2869
static HeapNumber * cast(Object *obj)
static void WriteToFlat(String *source, sinkchar *sink, int from, int to)
Definition: objects.cc:6819
int get_min_in_mutator()
Definition: heap.h:1510
intptr_t Capacity()
Definition: spaces.h:2125
int get_max_gc_pause()
Definition: heap.h:1504
static StringDictionary * cast(Object *obj)
Definition: objects.h:3153
void set_value(double value)
Definition: objects-inl.h:1195
MUST_USE_RESULT MaybeObject * CopyFixedArray(FixedArray *src)
Definition: heap-inl.h:173
virtual size_t length() const =0
void IterateRoots(ObjectVisitor *v, VisitMode mode)
Definition: heap.cc:5630
static const int kLengthOffset
Definition: objects.h:2232
static double nan_value()
static const int kSize
Definition: objects.h:1315
MUST_USE_RESULT MaybeObject * ReinitializeJSReceiver(JSReceiver *object, InstanceType type, int size)
Definition: heap.cc:4216
MUST_USE_RESULT MaybeObject * AllocateAccessorPair()
Definition: heap.cc:2063
bool is_null() const
Definition: handles.h:87
MUST_USE_RESULT MaybeObject * AllocateCatchContext(JSFunction *function, Context *previous, String *name, Object *thrown_object)
Definition: heap.cc:4891
const uint32_t kFreeListZapValue
Definition: v8globals.h:94
#define STRUCT_LIST(V)
Definition: objects.h:429
static uint32_t RandomPrivate(Isolate *isolate)
Definition: v8.cc:178
static const int kArgumentsLengthIndex
Definition: heap.h:869
static int SizeFor(int length)
Definition: objects.h:7313
void CheckNewSpaceExpansionCriteria()
Definition: heap.cc:1027
const intptr_t kObjectAlignment
Definition: v8globals.h:44
INLINE(static HeapObject *EnsureDoubleAligned(Heap *heap, HeapObject *object, int size))
static NewSpacePage * FromLimit(Address address_limit)
Definition: spaces.h:1784
static void Enter(Heap *heap, FixedArray *cache, String *string, String *pattern, FixedArray *array)
Definition: heap.cc:2751
void RecordStats(HeapStats *stats)
MUST_USE_RESULT MaybeObject * AllocateScopeInfo(int length)
Definition: heap.cc:4947
bool LookupSymbolIfExists(String *str, String **symbol)
Definition: heap.cc:5402
static JSGlobalPropertyCell * cast(Object *obj)
MUST_USE_RESULT MaybeObject * NumberFromUint32(uint32_t value, PretenureFlag pretenure=NOT_TENURED)
Definition: heap-inl.h:237
virtual intptr_t Size()
Definition: spaces.h:1491
IncrementalMarking * incremental_marking()
Definition: heap.h:1524
bool Contains(Address addr)
Definition: heap.cc:5224
uint16_t uc16
Definition: globals.h:273
MUST_USE_RESULT MaybeObject * AllocateUninitializedFixedArray(int length)
Definition: heap.cc:4729
void set_extension(Object *object)
Definition: contexts.h:313
static const int kStartMarker
Definition: heap.h:2065
void set_bit_field(byte value)
Definition: objects-inl.h:2889
static TypeFeedbackCells * cast(Object *obj)
static int SizeFor(int length)
Definition: objects.h:7367
static const int kSize
Definition: objects.h:6349
virtual const char * data() const =0
MUST_USE_RESULT MaybeObject * Initialize(const char *to_string, Object *to_number, byte kind)
Definition: objects.cc:7647
virtual bool ReserveSpace(int bytes)
Definition: spaces.cc:2267
void Iterate(v8::internal::ObjectVisitor *v)
NewSpacePage * next_page() const
Definition: spaces.h:1738
void MemsetPointer(T **dest, U *value, int counter)
Definition: v8utils.h:146
void set_owner(Space *space)
Definition: spaces.h:330
LoggingAndProfiling
Definition: heap.cc:1577
MUST_USE_RESULT MaybeObject * AllocateJSObjectFromMap(Map *map, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:3892
void RememberUnmappedPage(Address page, bool compacted)
Definition: heap.cc:7147
static void UpdateReferencesForScavengeGC()
void Set(int index, uint16_t value)
Definition: objects-inl.h:2326
static const int kNotFound
Definition: heap.h:2238
#define HEAP
Definition: isolate.h:1408
#define ASSERT_EQ(v1, v2)
Definition: checks.h:271
static const byte kNull
Definition: objects.h:7739
(concatenated runtime flag help text — harmony options, Crankshaft/Hydrogen tuning, and platform instruction-set flags — garbled by the documentation extractor; see flags.cc)
Definition: flags.cc:274
static const int kBodyOffset
Definition: spaces.h:494
MUST_USE_RESULT MaybeObject * LookupSingleCharacterStringFromCode(uint16_t code)
Definition: heap.cc:3381
InstanceType instance_type()
Definition: objects-inl.h:2864
static void CopyBlock(Address dst, Address src, int byte_size)
Definition: heap-inl.h:383
MUST_USE_RESULT MaybeObject * AllocateJSGlobalPropertyCell(Object *value)
Definition: heap.cc:2443
static HeapObject * FromAddress(Address address)
Definition: objects-inl.h:1163
void USE(T)
Definition: globals.h:303
void set_size(size_t size)
Definition: spaces.h:506
MUST_USE_RESULT MaybeObject * AllocateFixedDoubleArrayWithHoles(int length, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:4776
MUST_USE_RESULT MaybeObject * AllocateRawFixedArray(int length)
Definition: heap.cc:4589
Counters * counters()
Definition: isolate.h:804
ScavengeVisitor(Heap *heap)
Definition: heap.cc:970
MUST_USE_RESULT MaybeObject * CopyDropTransitions(DescriptorArray::SharedMode shared_mode)
Definition: objects.cc:4920
static const unsigned kMaxAsciiCharCodeU
Definition: objects.h:7108
(concatenated harmony flag help text, garbled by the documentation extractor; see flags.cc)
Definition: flags.cc:157
static const int kArgumentsCalleeIndex
Definition: heap.h:871
const int kSmiTag
Definition: v8.h:3853
MUST_USE_RESULT MaybeObject * AllocateHashTable(int length, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:4832
virtual void Rewind()=0
static FixedArray * cast(Object *obj)
static const unsigned kMaxOneByteChar
Definition: unicode.h:164
static const int kHeaderSize
Definition: objects.h:2115
void SeqAsciiStringSet(int index, uint16_t value)
Definition: objects-inl.h:2359
void set_parent(String *parent)
Definition: objects-inl.h:2413
Object * FindCodeObject(Address a)
Definition: heap.cc:961
MapSpace * map_space()
Definition: heap.h:503
void set_previous(Context *context)
Definition: contexts.h:309
intptr_t PromotedSpaceSizeOfObjects()
Definition: heap.cc:5836
void IterateNewSpaceWeakIndependentRoots(ObjectVisitor *v)
intptr_t * old_pointer_space_capacity
Definition: heap.h:2072
void Add(const T &element, AllocationPolicy allocator=AllocationPolicy())
Definition: list-inl.h:38
Logger * logger()
Definition: isolate.h:813
StaticResource< Utf8Decoder > * utf8_decoder()
Definition: scanner.h:150
Object * GetCallbacksObject(int descriptor_number)
Definition: objects-inl.h:2024
void set_instance_size(int value)
Definition: objects-inl.h:2842
Object * get(int index)
Definition: objects-inl.h:1675
static VisitorId GetVisitorId(int instance_type, int instance_size)
void ClearJSFunctionResultCaches()
Definition: heap.cc:696
GCCallbackFlags
Definition: v8.h:2724
void RecordStats(HeapStats *stats, bool take_snapshot=false)
Definition: heap.cc:5799
void set_formal_parameter_count(int value)
static const int kMaxLength
Definition: objects.h:2388
String * TryFlattenGetString(PretenureFlag pretenure=NOT_TENURED)
Definition: objects-inl.h:2292
void set_bit_field2(byte value)
Definition: objects-inl.h:2899
void CopyFrom(VisitorDispatchTable *other)
void CreateFillerObjectAt(Address addr, int size)
Definition: heap.cc:3447
static int GetLastError()
MUST_USE_RESULT MaybeObject * AllocateSharedFunctionInfo(Object *name)
Definition: heap.cc:3004
bool AdvanceSweepers(int step_size)
Definition: heap.h:1533
static NormalizedMapCache * cast(Object *obj)
static const int kMaxLength
Definition: objects.h:7166
intptr_t * map_space_capacity
Definition: heap.h:2078
static int SizeFor(int body_size)
Definition: objects.h:4450
void set_stress_deopt_counter(int counter)
static intptr_t MaxVirtualMemory()
static const intptr_t kAllocatedThreshold
static const int kCapacityMask
Definition: heap.h:2234
static const byte kFalse
Definition: objects.h:7735
static void ScavengeObject(HeapObject **p, HeapObject *object)
Definition: heap-inl.h:414
void remove(HeapObject **target, int *size)
Definition: heap.h:336
bool is_keyed_call_stub()
Definition: objects.h:4265
void set_visitor_id(int visitor_id)
Definition: objects-inl.h:2788
bool IsSweepingComplete()
Definition: heap.h:1528
void set_length(int value)
void set_this_property_assignments_count(int value)
bool SetUp(int reserved_semispace_size_, int max_semispace_size)
Definition: spaces.cc:1043
void IterateBuiltins(ObjectVisitor *v)
Definition: builtins.cc:1674
static VisitorDispatchTable< ScavengingCallback > * GetTable()
Definition: heap.cc:1643
T Min(T a, T b)
Definition: utils.h:229
intptr_t * memory_allocator_capacity
Definition: heap.h:2088
static ConsString * cast(Object *obj)
virtual intptr_t SizeOfObjects()
Definition: spaces.h:2495
void set_offset(int offset)
Definition: code-stubs.h:661
static FixedArrayBase * cast(Object *object)
Definition: objects-inl.h:1669
void set_flags(Flags flags)
Definition: objects-inl.h:3009
intptr_t Capacity()
Definition: heap.cc:185
MUST_USE_RESULT MaybeObject * AllocateStruct(InstanceType type)
Definition: heap.cc:4956
void EnterDirectlyIntoStoreBuffer(Address addr)
intptr_t * old_data_space_size
Definition: heap.h:2073
MUST_USE_RESULT MaybeObject * AllocateRaw(int object_size, Executability executable)
Definition: spaces.cc:2613
MUST_USE_RESULT MaybeObject * AllocateJSModule()
Definition: heap.cc:3954
GCTracer * tracer()
Definition: heap.h:1493
static void FatalProcessOutOfMemory(const char *location, bool take_snapshot=false)
NewSpace * new_space()
Definition: heap.h:499
MUST_USE_RESULT MaybeObject * AllocateMap(InstanceType instance_type, int instance_size, ElementsKind elements_kind=TERMINAL_FAST_ELEMENTS_KIND)
Definition: heap.cc:2011
#define ARRAY_SIZE(a)
Definition: globals.h:295
const intptr_t kDoubleAlignment
Definition: v8globals.h:52
const int kCharSize
Definition: globals.h:229
ElementsKind GetHoleyElementsKind(ElementsKind packed_kind)
intptr_t MaxExecutableSize()
Definition: heap.h:472
static const byte kTrue
Definition: objects.h:7736
static const int kMaxLength
Definition: objects.h:7321
MUST_USE_RESULT MaybeObject * AllocateRaw(int size_in_bytes, AllocationSpace space, AllocationSpace retry_space)
Definition: heap-inl.h:183
static const int kSize
Definition: objects.h:8051
void SetNextEnumerationIndex(int value)
Definition: objects.h:2454
void set_next_chunk(MemoryChunk *next)
Definition: spaces.h:318
void PrintShortHeapStatistics()
Definition: heap.cc:321
static JSObject * cast(Object *obj)
static const int kHashMask
Definition: heap.h:2236
FlagType type() const
Definition: flags.cc:1358
AllocationSpace TargetSpaceId(InstanceType type)
Definition: heap-inl.h:358
uint32_t RoundUpToPowerOf2(uint32_t x)
Definition: utils.h:186
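RoundUpToPowerOf2 rounds a 32-bit value up to the nearest power of two (used, for example, when sizing hash-table-like structures). The classic bit-smearing form below reproduces that behaviour; it is an illustrative standalone version rather than a copy of the utils.h code:

    #include <cassert>
    #include <cstdint>

    // Smear the highest set bit of (x - 1) into all lower bits, then add one.
    // Returns x unchanged if it is already a power of two; returns 0 for x == 0.
    static uint32_t RoundUpToPowerOf2(uint32_t x) {
      x -= 1;
      x |= x >> 1;
      x |= x >> 2;
      x |= x >> 4;
      x |= x >> 8;
      x |= x >> 16;
      return x + 1;
    }

    int main() {
      assert(RoundUpToPowerOf2(1) == 1);
      assert(RoundUpToPowerOf2(3) == 4);
      assert(RoundUpToPowerOf2(17) == 32);
      assert(RoundUpToPowerOf2(64) == 64);
      return 0;
    }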
OldSpace * old_data_space()
Definition: heap.h:501
MUST_USE_RESULT MaybeObject * AllocateRawTwoByteString(int length, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:4519
static void AssertValidRange(Address from, Address to)
Definition: spaces.h:1923
MarkCompactCollector * mark_compact_collector()
Definition: heap.h:1512
static MUST_USE_RESULT MaybeObject * Allocate(int number_of_descriptors, SharedMode shared_mode)
Definition: objects.cc:5754
MUST_USE_RESULT MaybeObject * AllocateFunction(Map *function_map, SharedFunctionInfo *shared, Object *prototype, PretenureFlag pretenure=TENURED)
Definition: heap.cc:3694
int FastD2I(double x)
Definition: conversions.h:64
void UpdateNewSpaceReferencesInExternalStringTable(ExternalStringTableUpdaterCallback updater_func)
Definition: heap.cc:1297
void set_initial_map(Map *value)
Definition: objects-inl.h:4138
static const int kAlignedSize
Definition: objects.h:3723
bool CommitFromSpaceIfNeeded()
Definition: spaces.h:2262
AllocationSpace identity()
Definition: spaces.h:772
void set_unused_property_fields(int value)
Definition: objects-inl.h:2879
void init_prototype_transitions(Object *undefined)
Definition: objects-inl.h:3554
static const int kIsExtensible
Definition: objects.h:5010
MUST_USE_RESULT MaybeObject * AllocateStringFromTwoByte(Vector< const uc16 > str, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:4361
static const int kNonCodeObjectAreaSize
Definition: spaces.h:698
static const int kEntriesPerBucket
Definition: heap.h:2237
static const int kPointerFieldsBeginOffset
Definition: objects.h:4976
void EnsureFromSpaceIsCommitted()
Definition: heap.cc:682
void InitializeBody(Map *map, Object *pre_allocated_value, Object *filler_value)
Definition: objects-inl.h:1580
MUST_USE_RESULT MaybeObject * AllocateAliasedArgumentsEntry(int slot)
Definition: heap.cc:2087
MemoryChunk * next_chunk() const
Definition: spaces.h:315
const int MB
Definition: globals.h:222
static JSFunction * cast(Object *obj)