#ifndef V8_HEAP_INL_H_
#define V8_HEAP_INL_H_
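// PromotionQueue::insert (fragment): if an emergency stack is in use, the
// entry goes there; otherwise the target pointer and size are pushed onto
// the queue, which grows downwards through new space (rear_).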
  if (emergency_stack_ != NULL) {
    emergency_stack_->Add(Entry(target, size));

           Page::FromAllocationTop(reinterpret_cast<Address>(limit_)));

    if ((rear_ - 2) < limit_) {
      emergency_stack_->Add(Entry(target, size));

  *(--rear_) = reinterpret_cast<intptr_t>(target);

                              reinterpret_cast<Address>(rear_));
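// Heap::AllocateStringFromUtf8 (fragment): scan for the first non-ASCII
// character; a string that turns out to be pure ASCII is allocated without
// UTF-8 decoding.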
  const char* start = str.start();

  if (non_ascii_start >= length) {
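// Symbol allocation (fragments of Heap::AllocateSymbol, AllocateAsciiSymbol
// and AllocateTwoByteSymbol): choose the appropriate symbol map, allocate
// raw space, then install the map on the result.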
                                  uint32_t hash_field) {

                                    static_cast<unsigned>(str.length()));

                                       uint32_t hash_field) {

  Map* map = ascii_symbol_map();

    if (!maybe_result->ToObject(&result)) return maybe_result;

  reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier(map);

                                          uint32_t hash_field) {

  Map* map = symbol_map();

    if (!maybe_result->ToObject(&result)) return maybe_result;

  reinterpret_cast<HeapObject*>(result)->set_map(map);
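// Heap::AllocateRaw (fragment): with --gc-interval set, the allocation
// timeout periodically forces a retry-after-GC failure for stress testing;
// otherwise the request is dispatched to the target space, and a failing
// old-space allocation marks the old generation as exhausted.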
  if (FLAG_gc_interval >= 0 &&
      !disallow_allocation_failure_ &&
      Heap::allocation_timeout_-- <= 0) {

  isolate_->counters()->objs_since_last_full()->Increment();
  isolate_->counters()->objs_since_last_young()->Increment();

    result = new_space_.AllocateRaw(size_in_bytes);

    result = old_pointer_space_->AllocateRaw(size_in_bytes);

    result = old_data_space_->AllocateRaw(size_in_bytes);

  if (result->IsFailure()) old_gen_exhausted_ = true;
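// Heap::FinalizeExternalString (fragment): compute the address of the
// string's external-resource field and, if it is still set, clear it so the
// resource is not finalized twice.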
  ASSERT(string->IsExternalString());

          reinterpret_cast<byte*>(string) +

  if (*resource_addr != NULL) {

    *resource_addr = NULL;
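// Heap::AllocateRawMap and Heap::AllocateRawCell (fragments): both bump the
// objs_since_last_full/young counters, and a failed allocation marks the old
// generation as exhausted.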
MaybeObject* Heap::AllocateRawMap() {

  isolate_->counters()->objs_since_last_full()->Increment();
  isolate_->counters()->objs_since_last_young()->Increment();

  if (result->IsFailure()) old_gen_exhausted_ = true;


MaybeObject* Heap::AllocateRawCell() {

  isolate_->counters()->objs_since_last_full()->Increment();
  isolate_->counters()->objs_since_last_young()->Increment();

  if (result->IsFailure()) old_gen_exhausted_ = true;
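// Heap::InNewSpace and Heap::ShouldBePromoted (fragments): an object is
// promoted if it lies below the new-space age mark (it has already survived
// a scavenge) or if to-space is getting full.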
  bool result = new_space_.Contains(object);

  return below_mark || (new_space_.Size() + object_size) >=

  for (int i = 0; i < len; i++) {
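// Heap::CopyBlock and Heap::MoveBlock (fragments): CopyBlock does a plain
// word copy; MoveBlock uses a forward word-by-word copy when the destination
// does not overlap the source from above, and memmove otherwise.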
  CopyWords(reinterpret_cast<Object**>(dst),
            reinterpret_cast<Object**>(src),

  if ((dst < src) || (dst >= (src + byte_size))) {

    Object** end_slot = src_slot + size_in_words;

    while (src_slot != end_slot) {
      *dst_slot++ = *src_slot++;

    memmove(dst, src, byte_size);
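// Heap::ScavengeObject (fragment): if the map word already holds a
// forwarding address, the object has been copied and the slot is updated in
// place; otherwise ScavengeObjectSlow copies it.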
  MapWord first_word = object->map_word();

  if (first_word.IsForwardingAddress()) {
    HeapObject* dest = first_word.ToForwardingAddress();

  return ScavengeObjectSlow(p, object);


  const char* collector_reason = NULL;
  GarbageCollector collector = SelectGarbageCollector(space, &collector_reason);
  return CollectGarbage(space, collector, gc_reason, collector_reason);
  static const int kMaxAlwaysFlattenLength = 32;
  static const int kFlattenLongThreshold = 16*KB;

  const int length = str->length();

  if (length <= kMaxAlwaysFlattenLength ||
      unflattened_strings_length_ >= kFlattenLongThreshold) {

  if (obj->IsFailure()) {
    unflattened_strings_length_ += length;
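// Heap::AdjustAmountOfExternalAllocatedMemory (fragment): track memory that
// the embedder reports as externally allocated; when growth since the last
// global GC exceeds external_allocation_limit_, the (elided) branch requests
// a full collection.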
    intptr_t change_in_bytes) {

  intptr_t amount = amount_of_external_allocated_memory_ + change_in_bytes;
  if (change_in_bytes >= 0) {

    if (amount > amount_of_external_allocated_memory_) {
      amount_of_external_allocated_memory_ = amount;

      amount_of_external_allocated_memory_ = 0;
      amount_of_external_allocated_memory_at_last_global_gc_ = 0;

    intptr_t amount_since_last_global_gc = PromotedExternalMemorySize();
    if (amount_since_last_global_gc > external_allocation_limit_) {

    amount_of_external_allocated_memory_ = amount;

    amount_of_external_allocated_memory_ = 0;
    amount_of_external_allocated_memory_at_last_global_gc_ = 0;

  if (FLAG_trace_external_memory) {

             change_in_bytes / 1024, amount_of_external_allocated_memory_ / 1024,
             reinterpret_cast<intptr_t>(isolate()));

  ASSERT(amount_of_external_allocated_memory_ >= 0);
  return amount_of_external_allocated_memory_;
  roots_[kLastScriptIdRootIndex] = last_script_id;
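// Heap::isolate() (below) recovers the owning Isolate from the Heap pointer
// by subtracting the offset of the heap field within Isolate, computed here
// from a dummy Isolate pointer at address 4.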
  return reinterpret_cast<Isolate*>(reinterpret_cast<intptr_t>(this) -
      reinterpret_cast<size_t>(reinterpret_cast<Isolate*>(4)->heap()) + 4);
#define GC_GREEDY_CHECK() \
  if (FLAG_gc_greedy) HEAP->GarbageCollectionGreedyCheck()

#define GC_GREEDY_CHECK() { }
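// CALL_AND_RETRY wraps an allocation expression: if the first attempt fails
// with retry-after-GC, it collects garbage in the failing space and retries;
// as a last resort it collects all available garbage and retries once more
// inside an AlwaysAllocateScope before reporting out-of-memory.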
#define CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY) \
    MaybeObject* __maybe_object__ = FUNCTION_CALL; \
    Object* __object__ = NULL; \
    if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE; \
    if (__maybe_object__->IsOutOfMemory()) { \
      v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_0", true); \
    if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY; \
    ISOLATE->heap()->CollectGarbage(Failure::cast(__maybe_object__)-> \
                                        allocation_space(), \
                                        "allocation failure"); \
    __maybe_object__ = FUNCTION_CALL; \
    if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE; \
    if (__maybe_object__->IsOutOfMemory()) { \
      v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_1", true); \
    if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY; \
    ISOLATE->counters()->gc_last_resort_from_handles()->Increment(); \
    ISOLATE->heap()->CollectAllAvailableGarbage("last resort gc"); \
      AlwaysAllocateScope __scope__; \
      __maybe_object__ = FUNCTION_CALL; \
    if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE; \
    if (__maybe_object__->IsOutOfMemory() || \
        __maybe_object__->IsRetryAfterGC()) { \
      v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_2", true); \
#define CALL_HEAP_FUNCTION(ISOLATE, FUNCTION_CALL, TYPE) \
  CALL_AND_RETRY(ISOLATE, \
                 return Handle<TYPE>(TYPE::cast(__object__), ISOLATE), \
                 return Handle<TYPE>())


#define CALL_HEAP_FUNCTION_VOID(ISOLATE, FUNCTION_CALL) \
  CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, return, return)
inline bool Heap::allow_allocation(bool new_state) {
  bool old = allocation_allowed_;
  allocation_allowed_ = new_state;
  ASSERT(string->IsExternalString());

    new_space_strings_.Add(string);

    old_space_strings_.Add(string);


  if (!new_space_strings_.is_empty()) {
    Object** start = &new_space_strings_[0];
    v->VisitPointers(start, start + new_space_strings_.length());

  if (!old_space_strings_.is_empty()) {
    Object** start = &old_space_strings_[0];
    v->VisitPointers(start, start + old_space_strings_.length());
void ExternalStringTable::Verify() {

  for (int i = 0; i < new_space_strings_.length(); ++i) {

    ASSERT(obj != HEAP->raw_unchecked_the_hole_value());
    if (obj->IsExternalAsciiString()) {

  for (int i = 0; i < old_space_strings_.length(); ++i) {

    ASSERT(obj != HEAP->raw_unchecked_the_hole_value());
    if (obj->IsExternalAsciiString()) {
void ExternalStringTable::AddOldString(String* string) {
  ASSERT(string->IsExternalString());

  old_space_strings_.Add(string);


void ExternalStringTable::ShrinkNewStrings(int position) {
  new_space_strings_.Rewind(position);

  if (FLAG_verify_heap) {
  set_instanceof_cache_function(the_hole_value());

  return condition ? true_value() : false_value();

  set_instanceof_cache_map(the_hole_value());
  set_instanceof_cache_function(the_hole_value());
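// TranscendentalCache (fragments): Get() hashes the bit pattern of the input
// double, returns the cached HeapNumber on a hit, and on a miss computes the
// value, allocates a HeapNumber for it and stores it in the cache.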
  SubCache* cache = caches_[type];

    caches_[type] = cache = new SubCache(type);

  return cache->Get(input);


Address TranscendentalCache::cache_array_address() {
  return reinterpret_cast<Address>(caches_);


double TranscendentalCache::SubCache::Calculate(double input) {


MaybeObject* TranscendentalCache::SubCache::Get(double input) {

  Element e = elements_[hash];
  if (e.in[0] == c.integers[0] &&
      e.in[1] == c.integers[1]) {

    isolate_->counters()->transcendental_cache_hit()->Increment();

  double answer = Calculate(input);
  isolate_->counters()->transcendental_cache_miss()->Increment();

  { MaybeObject* maybe_heap_number =
        isolate_->heap()->AllocateHeapNumber(answer);
    if (!maybe_heap_number->ToObject(&heap_number)) return maybe_heap_number;

  elements_[hash].in[0] = c.integers[0];
  elements_[hash].in[1] = c.integers[1];
  elements_[hash].output = heap_number;
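// Scope helpers (fragments): AlwaysAllocateScope and DisallowAllocationFailure
// flip heap flags in their constructors and restore them in their destructors;
// the assert-no-allocation scopes do the same via allow_allocation().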
  ASSERT(HEAP->always_allocate_scope_depth_ == 0);
  HEAP->always_allocate_scope_depth_++;

  HEAP->always_allocate_scope_depth_--;
  ASSERT(HEAP->always_allocate_scope_depth_ == 0);
  for (Object** current = start; current < end; current++) {
    if ((*current)->IsHeapObject()) {
double GCTracer::SizeOfHeapObjects() {
  return (static_cast<double>(HEAP->SizeOfObjects())) / MB;
  old_state_ = HEAP->disallow_allocation_failure_;
  HEAP->disallow_allocation_failure_ = true;

  HEAP->disallow_allocation_failure_ = old_state_;
  old_state_ = isolate->heap()->allow_allocation(false);

  if (active_) HEAP->allow_allocation(old_state_);
  active_ = !isolate->optimizing_compiler_thread()->IsOptimizerThread();

    old_state_ = isolate->heap()->allow_allocation(true);

  if (active_) HEAP->allow_allocation(old_state_);
#endif  // V8_HEAP_INL_H_