28 #ifndef V8_HEAP_INL_H_
29 #define V8_HEAP_INL_H_
47 if (emergency_stack_ != NULL) {
48 emergency_stack_->Add(Entry(target, size));
62 Page::FromAllocationTop(reinterpret_cast<Address>(limit_)));
64 if ((rear_ - 2) < limit_) {
66 emergency_stack_->Add(Entry(target, size));
71 *(--rear_) = reinterpret_cast<intptr_t>(target);
76 reinterpret_cast<Address>(rear_));
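// Editor's note (illustrative sketch, not part of the original listing): the
// fragments above belong to PromotionQueue::insert().  The queue grows
// downward through to-space, two words per entry, and spills into the
// emergency_stack_ list once rear_ would cross limit_, roughly:
//
//   void PromotionQueue::insert(HeapObject* target, int size) {
//     if (emergency_stack_ != NULL) {              // already spilled over
//       emergency_stack_->Add(Entry(target, size));
//       return;
//     }
//     if ((rear_ - 2) < limit_) {                  // no room for one entry
//       RelocateQueueHead();                       // move entries off-page
//       emergency_stack_->Add(Entry(target, size));
//       return;
//     }
//     *(--rear_) = reinterpret_cast<intptr_t>(target);
//     *(--rear_) = size;
//   }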
91 const char* start = str.start();
94 if (non_ascii_start >= length) {
108 return chars == str.length();
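// Editor's note (illustrative): NonAsciiStart() returns the index of the
// first byte >= 0x80, so non_ascii_start >= length means the whole UTF-8
// vector is plain ASCII and AllocateStringFromUtf8() can take the one-byte
// fast path instead of AllocateStringFromUtf8Slow().  Likewise, a UTF-8
// vector is one-byte representable exactly when its decoded character count
// equals its byte length, which is what "chars == str.length()" tests:
//
//   IsOneByte(CStrVector("hello"), 5);      // 5 bytes, 5 chars  -> true
//   IsOneByte(CStrVector("\xCE\xB1"), 1);   // 2 bytes, 1 char   -> false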
124 return AllocateInternalizedStringImpl<false>(str, chars, hash_field);
130 T t, int chars, uint32_t hash_field) {
132 return AllocateInternalizedStringImpl<true>(t, chars, hash_field);
134 return AllocateInternalizedStringImpl<false>(t, chars, hash_field);
139 uint32_t hash_field) {
144 Map* map = ascii_internalized_string_map();
151 if (!maybe_result->ToObject(&result)) return maybe_result;
155 reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier(map);
172 uint32_t hash_field) {
177 Map* map = internalized_string_map();
184 if (!maybe_result->ToObject(&result)) return maybe_result;
187 reinterpret_cast<HeapObject*>(result)->set_map(map);
220 ASSERT(AllowHandleAllocation::IsAllowed());
221 ASSERT(AllowHeapAllocation::IsAllowed());
225 if (FLAG_gc_interval >= 0 &&
226 AllowAllocationFailure::IsAllowed(isolate_) &&
227 Heap::allocation_timeout_-- <= 0) {
230 isolate_->counters()->objs_since_last_full()->Increment();
231 isolate_->counters()->objs_since_last_young()->Increment();
237 result = new_space_.AllocateRaw(size_in_bytes);
249 result = old_pointer_space_->AllocateRaw(size_in_bytes);
251 result = old_data_space_->AllocateRaw(size_in_bytes);
259 result = property_cell_space_->AllocateRaw(size_in_bytes);
264 if (result->IsFailure()) old_gen_exhausted_ = true;
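// Editor's note (illustrative sketch): callers of Heap::AllocateRaw() in
// this code base unwrap the MaybeObject and propagate failures so the
// CALL_AND_RETRY machinery further below can trigger a GC and retry, e.g.:
//
//   Object* result;
//   { MaybeObject* maybe_result =
//         AllocateRaw(size, NEW_SPACE, OLD_POINTER_SPACE);
//     if (!maybe_result->ToObject(&result)) return maybe_result;
//   }
//   HeapObject::cast(result)->set_map_no_write_barrier(map);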
282 if (static_cast<int32_t>(value) >= 0 &&
292 ASSERT(string->IsExternalString());
295 reinterpret_cast<byte*>(string) +
300 if (*resource_addr != NULL) {
302 *resource_addr = NULL;
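// Editor's note (illustrative): FinalizeExternalString() computes the slot
// holding the embedder's external resource pointer (the string's address
// plus ExternalString::kResourceOffset, minus the heap-object tag), disposes
// the resource if it is still set, and then clears the slot so disposal
// happens at most once:
//
//   if (*resource_addr != NULL) {
//     (*resource_addr)->Dispose();
//     *resource_addr = NULL;       // guard against double disposal
//   }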
308 bool result = new_space_.Contains(object);
317 return new_space_.Contains(address);
332 return old_pointer_space_->Contains(address);
342 return old_data_space_->Contains(address);
365 return below_mark || (new_space_.Size() + object_size) >=
370 void Heap::RecordWrite(Address address, int offset) {
375 void Heap::RecordWrites(Address address, int start, int len) {
377 for (int i = 0; i < len; i++) {
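// Editor's note (illustrative): both RecordWrite() variants feed the store
// buffer that implements the old-to-new remembered set; writes whose target
// object lives in new space are skipped because the scavenger reaches those
// slots directly, roughly:
//
//   if (!InNewSpace(address)) store_buffer_.Mark(address + offset);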
436 if (obj->map() == one_pointer_filler_map()) return false;
463 CopyWords(reinterpret_cast<Object**>(dst),
464 reinterpret_cast<Object**>(src),
474 if ((dst < src) || (dst >= (src + byte_size))) {
477 Object** end_slot = src_slot + size_in_words;
479 while (src_slot != end_slot) {
480 *dst_slot++ = *src_slot++;
483 OS::MemMove(dst, src, static_cast<size_t>(byte_size));
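// Editor's note (illustrative): MoveBlock() uses the simple forward
// word-by-word copy above only when it cannot clobber unread source words,
// i.e. when dst < src (writes trail the reads) or when the ranges are
// disjoint (dst >= src + byte_size).  The remaining case, dst landing inside
// the source range, is delegated to OS::MemMove(), which handles overlap
// like memmove(3):
//
//   Heap::MoveBlock(base + kPointerSize, base, 8 * kPointerSize);
//   // dst overlaps ahead of src -> routed to OS::MemMove()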
495 Heap* heap = object->GetHeap();
498 if (!FLAG_allocation_site_pretenuring ||
507 Address object_address = object->address();
508 Address memento_address = object_address + object->Size();
511 last_memento_word_address)) {
516 if (candidate->map() != heap->allocation_memento_map()) return;
519 if (!memento->IsValid()) return;
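// Editor's note (illustrative): UpdateAllocationSiteFeedback() relies on the
// new-space layout in which an AllocationMemento, when present, is placed
// immediately after the allocated object:
//
//   [ object (object->Size() bytes) ][ AllocationMemento ]
//   ^ object_address                 ^ memento_address
//
// The candidate is only trusted after its map matches
// allocation_memento_map() and memento->IsValid() confirms it points at a
// real AllocationSite; only then is the site's memento-found count bumped.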
534 MapWord first_word = object->map_word();
538 if (first_word.IsForwardingAddress()) {
539 HeapObject* dest = first_word.ToForwardingAddress();
550 return ScavengeObjectSlow(p, object);
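// Editor's note (illustrative): during a scavenge the map word of an
// already-copied object is overwritten with a forwarding pointer, so
// ScavengeObject() has a trivial fast path and only falls back to
// ScavengeObjectSlow() for objects that still need to be copied:
//
//   if (first_word.IsForwardingAddress()) {
//     *p = first_word.ToForwardingAddress();   // update the slot in place
//     return;
//   }
//   return ScavengeObjectSlow(p, object);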
555 const char* gc_reason,
557 const char* collector_reason = NULL;
558 GarbageCollector collector = SelectGarbageCollector(space, &collector_reason);
559 return CollectGarbage(collector, gc_reason, collector_reason, callbackFlags);
566 static const int kMaxAlwaysFlattenLength = 32;
567 static const int kFlattenLongThreshold = 16*KB;
569 const int length = str->length();
571 if (length <= kMaxAlwaysFlattenLength ||
572 unflattened_strings_length_ >= kFlattenLongThreshold) {
575 if (obj->IsFailure()) {
576 unflattened_strings_length_ += length;
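// Editor's note (illustrative): PrepareForCompare() always accepts the
// TryFlatten() result for short strings (<= kMaxAlwaysFlattenLength chars)
// or once unflattened_strings_length_ has accumulated kFlattenLongThreshold
// bytes of strings it failed to flatten; otherwise a failed flatten is
// tolerated, the length is added to the running total, and the original
// (possibly cons) string is returned for the comparison.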
583 int64_t change_in_bytes) {
585 int64_t amount = amount_of_external_allocated_memory_ + change_in_bytes;
586 if (change_in_bytes > 0) {
588 if (amount > amount_of_external_allocated_memory_) {
589 amount_of_external_allocated_memory_ = amount;
592 amount_of_external_allocated_memory_ = 0;
593 amount_of_external_allocated_memory_at_last_global_gc_ = 0;
595 int64_t amount_since_last_global_gc = PromotedExternalMemorySize();
596 if (amount_since_last_global_gc > external_allocation_limit_) {
602 amount_of_external_allocated_memory_ = amount;
605 amount_of_external_allocated_memory_ = 0;
606 amount_of_external_allocated_memory_at_last_global_gc_ = 0;
609 if (FLAG_trace_external_memory) {
614 static_cast<intptr_t>(change_in_bytes / KB),
615 static_cast<intptr_t>(amount_of_external_allocated_memory_ / KB),
616 static_cast<intptr_t>(PromotedExternalMemorySize() / KB),
617 reinterpret_cast<intptr_t>(isolate()));
619 ASSERT(amount_of_external_allocated_memory_ >= 0);
620 return amount_of_external_allocated_memory_;
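// Editor's note (illustrative sketch, embedder side; the exact public
// signature varies by V8 version): the public counterpart of this function
// is v8::Isolate::AdjustAmountOfExternalAllocatedMemory(), which embedders
// call when JS-reachable objects retain off-heap memory so the heap can
// schedule a global GC once external_allocation_limit_ is exceeded:
//
//   void* backing = malloc(kBufferSize);
//   isolate->AdjustAmountOfExternalAllocatedMemory(kBufferSize);
//   ...
//   free(backing);
//   isolate->AdjustAmountOfExternalAllocatedMemory(
//       -static_cast<int64_t>(kBufferSize));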
625 return reinterpret_cast<Isolate*>(reinterpret_cast<intptr_t>(this) -
626 reinterpret_cast<size_t>(reinterpret_cast<Isolate*>(4)->heap()) + 4);
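// Editor's note (illustrative): Heap::isolate() recovers the owning Isolate
// without a stored back pointer.  reinterpret_cast<Isolate*>(4)->heap() is
// never dereferenced; it only yields 4 + offsetof(Isolate, heap_), so
// subtracting it (and adding the 4 back) converts the Heap's own address
// into the address of the enclosing Isolate, equivalent in spirit to:
//
//   reinterpret_cast<Isolate*>(
//       reinterpret_cast<Address>(this) - OFFSET_OF(Isolate, heap_));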
631 #define GC_GREEDY_CHECK(ISOLATE) \
632 if (FLAG_gc_greedy) (ISOLATE)->heap()->GarbageCollectionGreedyCheck()
634 #define GC_GREEDY_CHECK(ISOLATE) { }
644 #define CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY) \
646 GC_GREEDY_CHECK(ISOLATE); \
647 MaybeObject* __maybe_object__ = FUNCTION_CALL; \
648 Object* __object__ = NULL; \
649 if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE; \
650 if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY; \
651 (ISOLATE)->heap()->CollectGarbage(Failure::cast(__maybe_object__)-> \
652 allocation_space(), \
653 "allocation failure"); \
654 __maybe_object__ = FUNCTION_CALL; \
655 if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE; \
656 if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY; \
657 (ISOLATE)->counters()->gc_last_resort_from_handles()->Increment(); \
658 (ISOLATE)->heap()->CollectAllAvailableGarbage("last resort gc"); \
660 AlwaysAllocateScope __scope__(ISOLATE); \
661 __maybe_object__ = FUNCTION_CALL; \
663 if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE; \
664 if (__maybe_object__->IsRetryAfterGC()) { \
666 v8::internal::Heap::FatalProcessOutOfMemory("CALL_AND_RETRY_LAST", true);\
671 #define CALL_AND_RETRY_OR_DIE( \
672 ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY) \
679 #define CALL_HEAP_FUNCTION(ISOLATE, FUNCTION_CALL, TYPE) \
680 CALL_AND_RETRY_OR_DIE(ISOLATE, \
682 return Handle<TYPE>(TYPE::cast(__object__), ISOLATE), \
683 return Handle<TYPE>()) \
686 #define CALL_HEAP_FUNCTION_VOID(ISOLATE, FUNCTION_CALL) \
687 CALL_AND_RETRY_OR_DIE(ISOLATE, FUNCTION_CALL, return, return)
690 #define CALL_HEAP_FUNCTION_PASS_EXCEPTION(ISOLATE, FUNCTION_CALL) \
691 CALL_AND_RETRY(ISOLATE, \
694 return __maybe_object__)
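// Editor's note (illustrative sketch, not from this file): factory-style
// callers wrap a raw allocation in CALL_HEAP_FUNCTION so it is retried
// after a GC and the result is returned as a Handle.  A hypothetical helper
// would look roughly like:
//
//   Handle<FixedArray> NewFixedArrayExample(Isolate* isolate, int length) {
//     CALL_HEAP_FUNCTION(isolate,
//                        isolate->heap()->AllocateFixedArray(length),
//                        FixedArray);
//   }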
698 ASSERT(string->IsExternalString());
700 new_space_strings_.Add(string);
702 old_space_strings_.Add(string);
708 if (!new_space_strings_.is_empty()) {
709 Object** start = &new_space_strings_[0];
710 v->VisitPointers(start, start + new_space_strings_.length());
712 if (!old_space_strings_.is_empty()) {
713 Object** start = &old_space_strings_[0];
714 v->VisitPointers(start, start + old_space_strings_.length());
721 void ExternalStringTable::Verify() {
723 for (int i = 0; i < new_space_strings_.length(); ++i) {
726 ASSERT(obj != heap_->the_hole_value());
728 for (int i = 0; i < old_space_strings_.length(); ++i) {
731 ASSERT(obj != heap_->the_hole_value());
737 void ExternalStringTable::AddOldString(String* string) {
738 ASSERT(string->IsExternalString());
740 old_space_strings_.Add(string);
744 void ExternalStringTable::ShrinkNewStrings(int position) {
745 new_space_strings_.Rewind(position);
747 if (FLAG_verify_heap) {
755 set_instanceof_cache_function(the_hole_value());
760 return condition ? true_value() : false_value();
765 set_instanceof_cache_map(the_hole_value());
766 set_instanceof_cache_function(the_hole_value());
771 : heap_(isolate->heap()), daf_(isolate) {
776 ASSERT(heap_->always_allocate_scope_depth_ == 0);
777 heap_->always_allocate_scope_depth_++;
782 heap_->always_allocate_scope_depth_--;
783 ASSERT(heap_->always_allocate_scope_depth_ == 0);
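// Editor's note (illustrative): AlwaysAllocateScope is a non-nesting RAII
// guard (note the depth asserts above); while one is live, AllocateRaw()
// retries in the retry space instead of reporting a retry-after-GC failure.
// Typical use:
//
//   { AlwaysAllocateScope always_allocate(isolate);
//     // allocations here must not fail, e.g. while deserializing a snapshot
//   }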
788 NoWeakObjectVerificationScope::NoWeakObjectVerificationScope() {
789 Isolate* isolate = Isolate::Current();
790 isolate->heap()->no_weak_object_verification_scope_depth_++;
794 NoWeakObjectVerificationScope::~NoWeakObjectVerificationScope() {
795 Isolate* isolate = Isolate::Current();
796 isolate->heap()->no_weak_object_verification_scope_depth_--;
802 heap_->gc_callbacks_depth_++;
807 heap_->gc_callbacks_depth_--;
812 return heap_->gc_callbacks_depth_ == 1;
817 for (Object** current = start; current < end; current++) {
818 if ((*current)->IsHeapObject()) {
828 for (Object** current = start; current < end; current++) {
829 CHECK((*current)->IsSmi());
834 double GCTracer::SizeOfHeapObjects() {
835 return (static_cast<double>(heap_->SizeOfObjects())) / MB;
841 #endif // V8_HEAP_INL_H_