v8 3.25.30 (node 0.11.13)
V8 is Google's open source JavaScript engine.
heap-inl.h
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_HEAP_INL_H_
#define V8_HEAP_INL_H_

#include <cmath>

#include "heap.h"
#include "heap-profiler.h"
#include "isolate.h"
#include "list-inl.h"
#include "objects.h"
#include "platform.h"
#include "v8-counters.h"
#include "store-buffer.h"
#include "store-buffer-inl.h"

namespace v8 {
namespace internal {

void PromotionQueue::insert(HeapObject* target, int size) {
  if (emergency_stack_ != NULL) {
    emergency_stack_->Add(Entry(target, size));
    return;
  }

  if (NewSpacePage::IsAtStart(reinterpret_cast<Address>(rear_))) {
    NewSpacePage* rear_page =
        NewSpacePage::FromAddress(reinterpret_cast<Address>(rear_));
    ASSERT(!rear_page->prev_page()->is_anchor());
    rear_ = reinterpret_cast<intptr_t*>(rear_page->prev_page()->area_end());
    ActivateGuardIfOnTheSamePage();
  }

  if (guard_) {
    ASSERT(GetHeadPage() ==
           Page::FromAllocationTop(reinterpret_cast<Address>(limit_)));

    if ((rear_ - 2) < limit_) {
      RelocateQueueHead();
      emergency_stack_->Add(Entry(target, size));
      return;
    }
  }

  *(--rear_) = reinterpret_cast<intptr_t>(target);
  *(--rear_) = size;
  // Assert no overflow into live objects.
#ifdef DEBUG
  SemiSpace::AssertValidRange(target->GetIsolate()->heap()->new_space()->top(),
                              reinterpret_cast<Address>(rear_));
#endif
}
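
// Editor's note (illustration, not part of the original file): insert()
// pushes two words per entry and the queue grows downward, so after
// inserting {target, size} the memory at the new rear_ looks like
//
//   rear_[0] == size                                  (pushed second)
//   rear_[1] == reinterpret_cast<intptr_t>(target)    (pushed first)
//
// A matching remove() is expected to pop the two words in reverse order.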


void PromotionQueue::ActivateGuardIfOnTheSamePage() {
  guard_ = guard_ ||
      heap_->new_space()->active_space()->current_page()->address() ==
      GetHeadPage()->address();
}


MaybeObject* Heap::AllocateStringFromUtf8(Vector<const char> str,
                                          PretenureFlag pretenure) {
  // Check for ASCII first since this is the common case.
  const char* start = str.start();
  int length = str.length();
  int non_ascii_start = String::NonAsciiStart(start, length);
  if (non_ascii_start >= length) {
    // If the string is ASCII, we do not need to convert the characters
    // since UTF8 is backwards compatible with ASCII.
    return AllocateStringFromOneByte(str, pretenure);
  }
  // Non-ASCII and we need to decode.
  return AllocateStringFromUtf8Slow(str, non_ascii_start, pretenure);
}
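
// Editor's illustration (hypothetical inputs, not from the original file):
// for str = "price", NonAsciiStart() returns 5 == length, so the one-byte
// fast path is taken. For str = "pr\xC3\xA9cis" ("précis" encoded as
// UTF-8) it returns 2 < 7, so AllocateStringFromUtf8Slow() decodes from
// the first non-ASCII byte onward.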


template<>
bool inline Heap::IsOneByte(Vector<const char> str, int chars) {
  // TODO(dcarney): incorporate Latin-1 check when Latin-1 is supported?
  // ASCII only check.
  return chars == str.length();
}


template<>
bool inline Heap::IsOneByte(String* str, int chars) {
  return str->IsOneByteRepresentation();
}


MaybeObject* Heap::AllocateInternalizedStringFromUtf8(
    Vector<const char> str, int chars, uint32_t hash_field) {
  if (IsOneByte(str, chars)) {
    return AllocateOneByteInternalizedString(
        Vector<const uint8_t>::cast(str), hash_field);
  }
  return AllocateInternalizedStringImpl<false>(str, chars, hash_field);
}


template<typename T>
MaybeObject* Heap::AllocateInternalizedStringImpl(
    T t, int chars, uint32_t hash_field) {
  if (IsOneByte(t, chars)) {
    return AllocateInternalizedStringImpl<true>(t, chars, hash_field);
  }
  return AllocateInternalizedStringImpl<false>(t, chars, hash_field);
}


MaybeObject* Heap::AllocateOneByteInternalizedString(Vector<const uint8_t> str,
                                                     uint32_t hash_field) {
  if (str.length() > String::kMaxLength) {
    return isolate()->ThrowInvalidStringLength();
  }
  // Compute map and object size.
  Map* map = ascii_internalized_string_map();
  int size = SeqOneByteString::SizeFor(str.length());
  AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED);

  // Allocate string.
  Object* result;
  { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE);
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }

  // String maps are all immortal immovable objects.
  reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier(map);
  // Set length and hash fields of the allocated string.
  String* answer = String::cast(result);
  answer->set_length(str.length());
  answer->set_hash_field(hash_field);

  ASSERT_EQ(size, answer->Size());

  // Fill in the characters.
  OS::MemCopy(answer->address() + SeqOneByteString::kHeaderSize,
              str.start(), str.length());

  return answer;
}


MaybeObject* Heap::AllocateTwoByteInternalizedString(Vector<const uc16> str,
                                                     uint32_t hash_field) {
  if (str.length() > String::kMaxLength) {
    return isolate()->ThrowInvalidStringLength();
  }
  // Compute map and object size.
  Map* map = internalized_string_map();
  int size = SeqTwoByteString::SizeFor(str.length());
  AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED);

  // Allocate string.
  Object* result;
  { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE);
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }

  reinterpret_cast<HeapObject*>(result)->set_map(map);
  // Set length and hash fields of the allocated string.
  String* answer = String::cast(result);
  answer->set_length(str.length());
  answer->set_hash_field(hash_field);

  ASSERT_EQ(size, answer->Size());

  // Fill in the characters.
  OS::MemCopy(answer->address() + SeqTwoByteString::kHeaderSize,
              str.start(), str.length() * kUC16Size);

  return answer;
}

MaybeObject* Heap::CopyFixedArray(FixedArray* src) {
  return CopyFixedArrayWithMap(src, src->map());
}


MaybeObject* Heap::CopyFixedDoubleArray(FixedDoubleArray* src) {
  return CopyFixedDoubleArrayWithMap(src, src->map());
}


MaybeObject* Heap::CopyConstantPoolArray(ConstantPoolArray* src) {
  return CopyConstantPoolArrayWithMap(src, src->map());
}


MaybeObject* Heap::AllocateRaw(int size_in_bytes,
                               AllocationSpace space,
                               AllocationSpace retry_space) {
  ASSERT(AllowHandleAllocation::IsAllowed());
  ASSERT(AllowHeapAllocation::IsAllowed());
  ASSERT(gc_state_ == NOT_IN_GC);
  HeapProfiler* profiler = isolate_->heap_profiler();
#ifdef DEBUG
  if (FLAG_gc_interval >= 0 &&
      AllowAllocationFailure::IsAllowed(isolate_) &&
      Heap::allocation_timeout_-- <= 0) {
    return Failure::RetryAfterGC(space);
  }
  isolate_->counters()->objs_since_last_full()->Increment();
  isolate_->counters()->objs_since_last_young()->Increment();
#endif

  HeapObject* object;
  MaybeObject* result;
  if (NEW_SPACE == space) {
    result = new_space_.AllocateRaw(size_in_bytes);
    if (always_allocate() && result->IsFailure() && retry_space != NEW_SPACE) {
      space = retry_space;
    } else {
      if (profiler->is_tracking_allocations() && result->To(&object)) {
        profiler->AllocationEvent(object->address(), size_in_bytes);
      }
      return result;
    }
  }

  if (OLD_POINTER_SPACE == space) {
    result = old_pointer_space_->AllocateRaw(size_in_bytes);
  } else if (OLD_DATA_SPACE == space) {
    result = old_data_space_->AllocateRaw(size_in_bytes);
  } else if (CODE_SPACE == space) {
    result = code_space_->AllocateRaw(size_in_bytes);
  } else if (LO_SPACE == space) {
    result = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE);
  } else if (CELL_SPACE == space) {
    result = cell_space_->AllocateRaw(size_in_bytes);
  } else if (PROPERTY_CELL_SPACE == space) {
    result = property_cell_space_->AllocateRaw(size_in_bytes);
  } else {
    ASSERT(MAP_SPACE == space);
    result = map_space_->AllocateRaw(size_in_bytes);
  }
  if (result->IsFailure()) old_gen_exhausted_ = true;
  if (profiler->is_tracking_allocations() && result->To(&object)) {
    profiler->AllocationEvent(object->address(), size_in_bytes);
  }
  return result;
}
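
// Editor's sketch (illustrative only -- AllocateRaw is an internal entry
// point; real callers reach it through higher-level Allocate* helpers).
// It shows the MaybeObject protocol: the result must be unwrapped before
// use, and failures (e.g. Failure::RetryAfterGC) are propagated. The
// helper name is hypothetical.
#if 0
static MaybeObject* AllocateRawExample(Heap* heap) {
  // 64 bytes in new space; with an AlwaysAllocateScope active, a full new
  // space falls back to OLD_DATA_SPACE instead of failing.
  MaybeObject* maybe = heap->AllocateRaw(64, NEW_SPACE, OLD_DATA_SPACE);
  Object* obj;
  if (!maybe->ToObject(&obj)) return maybe;  // propagate the failure
  return obj;
}
#endif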


MaybeObject* Heap::NumberFromInt32(
    int32_t value, PretenureFlag pretenure) {
  if (Smi::IsValid(value)) return Smi::FromInt(value);
  // Bypass NumberFromDouble to avoid various redundant checks.
  return AllocateHeapNumber(FastI2D(value), pretenure);
}


MaybeObject* Heap::NumberFromUint32(
    uint32_t value, PretenureFlag pretenure) {
  if (static_cast<int32_t>(value) >= 0 &&
      Smi::IsValid(static_cast<int32_t>(value))) {
    return Smi::FromInt(static_cast<int32_t>(value));
  }
  // Bypass NumberFromDouble to avoid various redundant checks.
  return AllocateHeapNumber(FastUI2D(value), pretenure);
}


void Heap::FinalizeExternalString(String* string) {
  ASSERT(string->IsExternalString());
  v8::String::ExternalStringResourceBase** resource_addr =
      reinterpret_cast<v8::String::ExternalStringResourceBase**>(
          reinterpret_cast<byte*>(string) +
          ExternalString::kResourceOffset -
          kHeapObjectTag);

  // Dispose of the C++ object if it has not already been disposed.
  if (*resource_addr != NULL) {
    (*resource_addr)->Dispose();
    *resource_addr = NULL;
  }
}


bool Heap::InNewSpace(Object* object) {
  bool result = new_space_.Contains(object);
  ASSERT(!result ||                 // Either not in new space
         gc_state_ != NOT_IN_GC ||  // ... or in the middle of GC
         InToSpace(object));        // ... or in to-space (where we allocate).
  return result;
}


bool Heap::InNewSpace(Address address) {
  return new_space_.Contains(address);
}


bool Heap::InFromSpace(Object* object) {
  return new_space_.FromSpaceContains(object);
}


bool Heap::InToSpace(Object* object) {
  return new_space_.ToSpaceContains(object);
}


bool Heap::InOldPointerSpace(Address address) {
  return old_pointer_space_->Contains(address);
}


bool Heap::InOldPointerSpace(Object* object) {
  return InOldPointerSpace(reinterpret_cast<Address>(object));
}


bool Heap::InOldDataSpace(Address address) {
  return old_data_space_->Contains(address);
}


bool Heap::InOldDataSpace(Object* object) {
  return InOldDataSpace(reinterpret_cast<Address>(object));
}


bool Heap::OldGenerationAllocationLimitReached() {
  if (!incremental_marking()->IsStopped()) return false;
  return OldGenerationSpaceAvailable() < 0;
}


bool Heap::ShouldBePromoted(Address old_address, int object_size) {
  // An object should be promoted if:
  // - the object has survived a scavenge operation or
  // - to space is already 25% full.
  NewSpacePage* page = NewSpacePage::FromAddress(old_address);
  Address age_mark = new_space_.age_mark();
  bool below_mark = page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK) &&
      (!page->ContainsLimit(age_mark) || old_address < age_mark);
  return below_mark || (new_space_.Size() + object_size) >=
                       (new_space_.EffectiveCapacity() >> 2);
}
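
// Editor's worked example (hypothetical numbers, not from the original
// file): with a 16 MB new space, EffectiveCapacity() >> 2 is 4 MB; once
// to-space usage plus the object's own size reaches 4 MB, every surviving
// object is promoted even if it has not yet survived a scavenge (i.e. it
// lies above the age mark).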


void Heap::RecordWrite(Address address, int offset) {
  if (!InNewSpace(address)) store_buffer_.Mark(address + offset);
}


void Heap::RecordWrites(Address address, int start, int len) {
  if (!InNewSpace(address)) {
    for (int i = 0; i < len; i++) {
      store_buffer_.Mark(address + start + i * kPointerSize);
    }
  }
}


OldSpace* Heap::TargetSpace(HeapObject* object) {
  InstanceType type = object->map()->instance_type();
  AllocationSpace space = TargetSpaceId(type);
  return (space == OLD_POINTER_SPACE)
      ? old_pointer_space_
      : old_data_space_;
}


AllocationSpace Heap::TargetSpaceId(InstanceType type) {
  // Heap numbers and sequential strings are promoted to old data space, all
  // other object types are promoted to old pointer space.  We do not use
  // object->IsHeapNumber() and object->IsSeqString() because we already
  // know that object has the heap object tag.

  // These objects are never allocated in new space.
  ASSERT(type != MAP_TYPE);
  ASSERT(type != CODE_TYPE);
  ASSERT(type != ODDBALL_TYPE);
  ASSERT(type != CELL_TYPE);
  ASSERT(type != PROPERTY_CELL_TYPE);

  if (type <= LAST_NAME_TYPE) {
    if (type == SYMBOL_TYPE) return OLD_POINTER_SPACE;
    ASSERT(type < FIRST_NONSTRING_TYPE);
    // There are four string representations: sequential strings, external
    // strings, cons strings, and sliced strings.
    // Only the latter two contain non-map-word pointers to heap objects.
    return ((type & kIsIndirectStringMask) == kIsIndirectStringTag)
        ? OLD_POINTER_SPACE
        : OLD_DATA_SPACE;
  } else {
    return (type <= LAST_DATA_TYPE) ? OLD_DATA_SPACE : OLD_POINTER_SPACE;
  }
}
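
// Editor's illustration (not part of the original file): a sequential or
// external string carries no outgoing pointers besides its map word, so it
// targets OLD_DATA_SPACE, while the two "indirect" representations (cons
// and sliced strings) point at other strings and therefore target
// OLD_POINTER_SPACE, as do symbols.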


bool Heap::AllowedToBeMigrated(HeapObject* obj, AllocationSpace dst) {
  // Object migration is governed by the following rules:
  //
  // 1) Objects in new-space can be migrated to one of the old spaces
  //    that matches their target space or they stay in new-space.
  // 2) Objects in old-space stay in the same space when migrating.
  // 3) Fillers (two or more words) can migrate due to left-trimming of
  //    fixed arrays in new-space, old-data-space and old-pointer-space.
  // 4) Fillers (one word) can never migrate, they are skipped by
  //    incremental marking explicitly to prevent invalid pattern.
  // 5) Short external strings can end up in old pointer space when a cons
  //    string in old pointer space is made external (String::MakeExternal).
  //
  // Since this function is used for debugging only, we do not place
  // asserts here, but check everything explicitly.
  if (obj->map() == one_pointer_filler_map()) return false;
  InstanceType type = obj->map()->instance_type();
  MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
  AllocationSpace src = chunk->owner()->identity();
  switch (src) {
    case NEW_SPACE:
      return dst == src || dst == TargetSpaceId(type);
    case OLD_POINTER_SPACE:
      return dst == src &&
          (dst == TargetSpaceId(type) || obj->IsFiller() ||
          (obj->IsExternalString() && ExternalString::cast(obj)->is_short()));
    case OLD_DATA_SPACE:
      return dst == src && dst == TargetSpaceId(type);
    case CODE_SPACE:
      return dst == src && type == CODE_TYPE;
    case MAP_SPACE:
    case CELL_SPACE:
    case PROPERTY_CELL_SPACE:
    case LO_SPACE:
      return false;
  }
  UNREACHABLE();
  return false;
}


void Heap::CopyBlock(Address dst, Address src, int byte_size) {
  CopyWords(reinterpret_cast<Object**>(dst),
            reinterpret_cast<Object**>(src),
            static_cast<size_t>(byte_size / kPointerSize));
}


void Heap::MoveBlock(Address dst, Address src, int byte_size) {
  ASSERT(IsAligned(byte_size, kPointerSize));

  int size_in_words = byte_size / kPointerSize;

  if ((dst < src) || (dst >= (src + byte_size))) {
    Object** src_slot = reinterpret_cast<Object**>(src);
    Object** dst_slot = reinterpret_cast<Object**>(dst);
    Object** end_slot = src_slot + size_in_words;

    while (src_slot != end_slot) {
      *dst_slot++ = *src_slot++;
    }
  } else {
    OS::MemMove(dst, src, static_cast<size_t>(byte_size));
  }
}
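
// Editor's note (illustration): the hand-written forward copy above is
// safe for dst < src even when the ranges overlap, because each source
// word is read before the destination cursor can reach it; a forward copy
// with src < dst < src + byte_size would clobber words not yet read, which
// is why that case falls through to OS::MemMove.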


void Heap::ScavengePointer(HeapObject** p) {
  ScavengeObject(p, *p);
}


void Heap::UpdateAllocationSiteFeedback(HeapObject* object,
                                        ScratchpadSlotMode mode) {
  Heap* heap = object->GetHeap();
  ASSERT(heap->InFromSpace(object));

  if (!FLAG_allocation_site_pretenuring ||
      !AllocationSite::CanTrack(object->map()->instance_type())) return;

  // Check if there is potentially a memento behind the object. If
  // the last word of the memento is on another page we return
  // immediately. Note that we do not have to compare with the current
  // top pointer of the from space page, since we always install filler
  // objects above the top pointer of a from space page when performing
  // a garbage collection.
  Address object_address = object->address();
  Address memento_address = object_address + object->Size();
  Address last_memento_word_address = memento_address + kPointerSize;
  if (!NewSpacePage::OnSamePage(object_address,
                                last_memento_word_address)) {
    return;
  }

  HeapObject* candidate = HeapObject::FromAddress(memento_address);
  if (candidate->map() != heap->allocation_memento_map()) return;

  AllocationMemento* memento = AllocationMemento::cast(candidate);
  if (!memento->IsValid()) return;

  if (memento->GetAllocationSite()->IncrementMementoFoundCount()) {
    heap->AddAllocationSiteToScratchpad(memento->GetAllocationSite(), mode);
  }
}
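
// Editor's sketch (not part of the original file): the check above relies
// on an AllocationMemento being placed directly behind its object:
//
//   object_address             memento_address
//   |                          |
//   [ object, object->Size() ][ memento map word | allocation site ]
//
// If the memento's last word would cross a new-space page boundary, the
// candidate cannot be a memento allocated with the object, so the
// function bails out early.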


void Heap::ScavengeObject(HeapObject** p, HeapObject* object) {
  ASSERT(object->GetIsolate()->heap()->InFromSpace(object));

  // We use the first word (where the map pointer usually is) of a heap
  // object to record the forwarding pointer. A forwarding pointer can
  // point to an old space, the code space, or the to space of the new
  // generation.
  MapWord first_word = object->map_word();

  // If the first word is a forwarding address, the object has already been
  // copied.
  if (first_word.IsForwardingAddress()) {
    HeapObject* dest = first_word.ToForwardingAddress();
    ASSERT(object->GetIsolate()->heap()->InFromSpace(*p));
    *p = dest;
    return;
  }

  UpdateAllocationSiteFeedback(object, IGNORE_SCRATCHPAD_SLOT);

  // AllocationMementos are unrooted and shouldn't survive a scavenge.
  ASSERT(object->map() != object->GetHeap()->allocation_memento_map());
  // Call the slow part of scavenge object.
  return ScavengeObjectSlow(p, object);
}


bool Heap::CollectGarbage(AllocationSpace space,
                          const char* gc_reason,
                          const v8::GCCallbackFlags callbackFlags) {
  const char* collector_reason = NULL;
  GarbageCollector collector = SelectGarbageCollector(space, &collector_reason);
  return CollectGarbage(collector, gc_reason, collector_reason, callbackFlags);
}
561 
562 
563 MaybeObject* Heap::PrepareForCompare(String* str) {
564  // Always flatten small strings and force flattening of long strings
565  // after we have accumulated a certain amount we failed to flatten.
566  static const int kMaxAlwaysFlattenLength = 32;
567  static const int kFlattenLongThreshold = 16*KB;
568 
569  const int length = str->length();
570  MaybeObject* obj = str->TryFlatten();
571  if (length <= kMaxAlwaysFlattenLength ||
572  unflattened_strings_length_ >= kFlattenLongThreshold) {
573  return obj;
574  }
575  if (obj->IsFailure()) {
576  unflattened_strings_length_ += length;
577  }
578  return str;
579 }
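
// Editor's illustration (hypothetical numbers): a 20-character string
// always returns the TryFlatten() result (20 <= kMaxAlwaysFlattenLength);
// a 1000-character string is returned as-is, with its length added to
// unflattened_strings_length_ when flattening failed, until that counter
// crosses kFlattenLongThreshold (16 KB) and long strings start returning
// the flattened result too.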


int64_t Heap::AdjustAmountOfExternalAllocatedMemory(
    int64_t change_in_bytes) {
  ASSERT(HasBeenSetUp());
  int64_t amount = amount_of_external_allocated_memory_ + change_in_bytes;
  if (change_in_bytes > 0) {
    // Avoid overflow.
    if (amount > amount_of_external_allocated_memory_) {
      amount_of_external_allocated_memory_ = amount;
    } else {
      // Give up and reset the counters in case of an overflow.
      amount_of_external_allocated_memory_ = 0;
      amount_of_external_allocated_memory_at_last_global_gc_ = 0;
    }
    int64_t amount_since_last_global_gc = PromotedExternalMemorySize();
    if (amount_since_last_global_gc > external_allocation_limit_) {
      CollectAllGarbage(kNoGCFlags, "external memory allocation limit reached");
    }
  } else {
    // Avoid underflow.
    if (amount >= 0) {
      amount_of_external_allocated_memory_ = amount;
    } else {
      // Give up and reset the counters in case of an underflow.
      amount_of_external_allocated_memory_ = 0;
      amount_of_external_allocated_memory_at_last_global_gc_ = 0;
    }
  }
  if (FLAG_trace_external_memory) {
    PrintPID("%8.0f ms: ", isolate()->time_millis_since_init());
    PrintF("Adjust amount of external memory: delta=%6" V8_PTR_PREFIX "d KB, "
           "amount=%6" V8_PTR_PREFIX "d KB, since_gc=%6" V8_PTR_PREFIX "d KB, "
           "isolate=0x%08" V8PRIxPTR ".\n",
           static_cast<intptr_t>(change_in_bytes / KB),
           static_cast<intptr_t>(amount_of_external_allocated_memory_ / KB),
           static_cast<intptr_t>(PromotedExternalMemorySize() / KB),
           reinterpret_cast<intptr_t>(isolate()));
  }
  ASSERT(amount_of_external_allocated_memory_ >= 0);
  return amount_of_external_allocated_memory_;
}
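
// Editor's sketch (illustrative): embedders reach this bookkeeping through
// the public API, v8::Isolate::AdjustAmountOfExternalAllocatedMemory(),
// when JavaScript objects retain large external buffers. The wrapper
// function below is hypothetical.
#if 0
void RegisterExternalBuffer(v8::Isolate* isolate, size_t byte_length) {
  // Tell the GC about memory it cannot see so it can schedule collections.
  isolate->AdjustAmountOfExternalAllocatedMemory(
      static_cast<int64_t>(byte_length));
  // ... and report -byte_length again when the buffer is released.
}
#endif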


Isolate* Heap::isolate() {
  // The heap is embedded in the isolate, so subtracting the offset of the
  // heap field (computed here from a dummy Isolate* at address 4) from the
  // Heap pointer recovers the enclosing Isolate.
  return reinterpret_cast<Isolate*>(reinterpret_cast<intptr_t>(this) -
      reinterpret_cast<size_t>(reinterpret_cast<Isolate*>(4)->heap()) + 4);
}


#ifdef DEBUG
#define GC_GREEDY_CHECK(ISOLATE) \
  if (FLAG_gc_greedy) (ISOLATE)->heap()->GarbageCollectionGreedyCheck()
#else
#define GC_GREEDY_CHECK(ISOLATE) { }
#endif

// Calls the FUNCTION_CALL function and retries it up to three times
// to guarantee that any allocations performed during the call will
// succeed if there's enough memory.

// Warning: Do not use the identifiers __object__, __maybe_object__ or
// __scope__ in a call to this macro.

#define CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY)  \
  do {                                                                      \
    GC_GREEDY_CHECK(ISOLATE);                                               \
    MaybeObject* __maybe_object__ = FUNCTION_CALL;                          \
    Object* __object__ = NULL;                                              \
    if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE;              \
    if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY;                  \
    (ISOLATE)->heap()->CollectGarbage(Failure::cast(__maybe_object__)->     \
                                          allocation_space(),               \
                                      "allocation failure");                \
    __maybe_object__ = FUNCTION_CALL;                                       \
    if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE;              \
    if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY;                  \
    (ISOLATE)->counters()->gc_last_resort_from_handles()->Increment();      \
    (ISOLATE)->heap()->CollectAllAvailableGarbage("last resort gc");        \
    {                                                                       \
      AlwaysAllocateScope __scope__(ISOLATE);                               \
      __maybe_object__ = FUNCTION_CALL;                                     \
    }                                                                       \
    if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE;              \
    if (__maybe_object__->IsRetryAfterGC()) {                               \
      /* TODO(1181417): Fix this. */                                        \
      v8::internal::Heap::FatalProcessOutOfMemory("CALL_AND_RETRY_LAST", true);\
    }                                                                       \
    RETURN_EMPTY;                                                           \
  } while (false)

#define CALL_AND_RETRY_OR_DIE(                                             \
    ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY)                    \
  CALL_AND_RETRY(                                                          \
      ISOLATE,                                                             \
      FUNCTION_CALL,                                                       \
      RETURN_VALUE,                                                        \
      RETURN_EMPTY)

#define CALL_HEAP_FUNCTION(ISOLATE, FUNCTION_CALL, TYPE)                   \
  CALL_AND_RETRY_OR_DIE(ISOLATE,                                           \
                        FUNCTION_CALL,                                     \
                        return Handle<TYPE>(TYPE::cast(__object__), ISOLATE), \
                        return Handle<TYPE>())


#define CALL_HEAP_FUNCTION_VOID(ISOLATE, FUNCTION_CALL)                    \
  CALL_AND_RETRY_OR_DIE(ISOLATE, FUNCTION_CALL, return, return)


#define CALL_HEAP_FUNCTION_PASS_EXCEPTION(ISOLATE, FUNCTION_CALL)          \
  CALL_AND_RETRY(ISOLATE,                                                  \
                 FUNCTION_CALL,                                            \
                 return __object__,                                        \
                 return __maybe_object__)
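
// Editor's sketch (illustrative, not part of the original file): this is
// the pattern the handle-returning factory methods use; the function name
// is hypothetical.
#if 0
Handle<FixedArray> NewFixedArrayExample(Isolate* isolate, int size) {
  CALL_HEAP_FUNCTION(isolate,
                     isolate->heap()->AllocateFixedArray(size),
                     FixedArray);
}
#endif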


void ExternalStringTable::AddString(String* string) {
  ASSERT(string->IsExternalString());
  if (heap_->InNewSpace(string)) {
    new_space_strings_.Add(string);
  } else {
    old_space_strings_.Add(string);
  }
}


void ExternalStringTable::Iterate(ObjectVisitor* v) {
  if (!new_space_strings_.is_empty()) {
    Object** start = &new_space_strings_[0];
    v->VisitPointers(start, start + new_space_strings_.length());
  }
  if (!old_space_strings_.is_empty()) {
    Object** start = &old_space_strings_[0];
    v->VisitPointers(start, start + old_space_strings_.length());
  }
}


// Verify() is inline to avoid ifdef-s around its calls in release
// mode.
void ExternalStringTable::Verify() {
#ifdef DEBUG
  for (int i = 0; i < new_space_strings_.length(); ++i) {
    Object* obj = Object::cast(new_space_strings_[i]);
    ASSERT(heap_->InNewSpace(obj));
    ASSERT(obj != heap_->the_hole_value());
  }
  for (int i = 0; i < old_space_strings_.length(); ++i) {
    Object* obj = Object::cast(old_space_strings_[i]);
    ASSERT(!heap_->InNewSpace(obj));
    ASSERT(obj != heap_->the_hole_value());
  }
#endif
}


void ExternalStringTable::AddOldString(String* string) {
  ASSERT(string->IsExternalString());
  ASSERT(!heap_->InNewSpace(string));
  old_space_strings_.Add(string);
}


void ExternalStringTable::ShrinkNewStrings(int position) {
  new_space_strings_.Rewind(position);
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    Verify();
  }
#endif
}


void Heap::ClearInstanceofCache() {
  set_instanceof_cache_function(the_hole_value());
}


Object* Heap::ToBoolean(bool condition) {
  return condition ? true_value() : false_value();
}


void Heap::CompletelyClearInstanceofCache() {
  set_instanceof_cache_map(the_hole_value());
  set_instanceof_cache_function(the_hole_value());
}


AlwaysAllocateScope::AlwaysAllocateScope(Isolate* isolate)
    : heap_(isolate->heap()), daf_(isolate) {
  // We shouldn't hit any nested scopes, because that requires
  // non-handle code to call handle code. The code still works but
  // performance will degrade, so we want to catch this situation
  // in debug mode.
  ASSERT(heap_->always_allocate_scope_depth_ == 0);
  heap_->always_allocate_scope_depth_++;
}


AlwaysAllocateScope::~AlwaysAllocateScope() {
  heap_->always_allocate_scope_depth_--;
  ASSERT(heap_->always_allocate_scope_depth_ == 0);
}
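
// Editor's sketch (illustrative): the scope brackets allocations that must
// not fail, e.g. the last-resort attempt in CALL_AND_RETRY above.
#if 0
void ForcedAllocationExample(Isolate* isolate, Heap* heap, int size) {
  AlwaysAllocateScope scope(isolate);
  // While the scope is active, always_allocate() is true, so a failed
  // new-space allocation in Heap::AllocateRaw retries in retry_space.
  MaybeObject* result = heap->AllocateRaw(size, NEW_SPACE, OLD_DATA_SPACE);
}
#endif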


#ifdef VERIFY_HEAP
NoWeakObjectVerificationScope::NoWeakObjectVerificationScope() {
  Isolate* isolate = Isolate::Current();
  isolate->heap()->no_weak_object_verification_scope_depth_++;
}


NoWeakObjectVerificationScope::~NoWeakObjectVerificationScope() {
  Isolate* isolate = Isolate::Current();
  isolate->heap()->no_weak_object_verification_scope_depth_--;
}
#endif


GCCallbacksScope::GCCallbacksScope(Heap* heap) : heap_(heap) {
  heap_->gc_callbacks_depth_++;
}


GCCallbacksScope::~GCCallbacksScope() {
  heap_->gc_callbacks_depth_--;
}


bool GCCallbacksScope::CheckReenter() {
  return heap_->gc_callbacks_depth_ == 1;
}


void VerifyPointersVisitor::VisitPointers(Object** start, Object** end) {
  for (Object** current = start; current < end; current++) {
    if ((*current)->IsHeapObject()) {
      HeapObject* object = HeapObject::cast(*current);
      CHECK(object->GetIsolate()->heap()->Contains(object));
      CHECK(object->map()->IsMap());
    }
  }
}


void VerifySmisVisitor::VisitPointers(Object** start, Object** end) {
  for (Object** current = start; current < end; current++) {
    CHECK((*current)->IsSmi());
  }
}


double GCTracer::SizeOfHeapObjects() {
  return (static_cast<double>(heap_->SizeOfObjects())) / MB;
}


} }  // namespace v8::internal

#endif  // V8_HEAP_INL_H_