#ifndef V8_HEAP_INL_H_
#define V8_HEAP_INL_H_
void PromotionQueue::insert(HeapObject* target, int size) {
  if (emergency_stack_ != NULL) {
    emergency_stack_->Add(Entry(target, size));
    return;
  }

  if (guard_) {
    ASSERT(GetHeadPage() ==
           Page::FromAllocationTop(reinterpret_cast<Address>(limit_)));

    if ((rear_ - 2) < limit_) {
      RelocateQueueHead();
      emergency_stack_->Add(Entry(target, size));
      return;
    }
  }

  *(--rear_) = reinterpret_cast<intptr_t>(target);
  *(--rear_) = size;
  // Assert no overflow into live objects.
#ifdef DEBUG
  SemiSpace::AssertValidRange(HEAP->new_space()->top(),
                              reinterpret_cast<Address>(rear_));
#endif
}
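
// The promotion queue lives at the top of to-space and grows downward: each
// insert pushes two machine words (target, then size) below the current rear.
// Once the queue head would collide with the allocation limit, the guard
// triggers and further entries go to the heap-allocated emergency stack.
// A minimal sketch of the consuming side, assuming the remove() counterpart
// declared in heap.h:
//
//   while (!promotion_queue()->is_empty()) {
//     HeapObject* target;
//     int size;
//     promotion_queue()->remove(&target, &size);
//     // ... visit the promoted object's slots ...
//   }
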
MaybeObject* Heap::AllocateSymbol(Vector<const char> str,
                                  int chars,
                                  uint32_t hash_field) {
  unibrow::Utf8InputBuffer<> buffer(str.start(),
                                    static_cast<unsigned>(str.length()));
  return AllocateInternalSymbol(&buffer, chars, hash_field);
}
MaybeObject* Heap::AllocateAsciiSymbol(Vector<const char> str,
                                       uint32_t hash_field) {
  if (str.length() > SeqAsciiString::kMaxLength) {
    return Failure::OutOfMemoryException();
  }
  // Compute map and object size.
  Map* map = ascii_symbol_map();
  int size = SeqAsciiString::SizeFor(str.length());

  // Allocate string.
  Object* result;
  { MaybeObject* maybe_result = (size > Page::kMaxNonCodeHeapObjectSize)
        ? lo_space_->AllocateRaw(size, NOT_EXECUTABLE)
        : old_data_space_->AllocateRaw(size);
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }

  // String maps are all immortal immovable objects.
  reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier(map);
  // Set the length and hash field, then copy in the characters.
  String* answer = String::cast(result);
  answer->set_length(str.length());
  answer->set_hash_field(hash_field);
  memcpy(answer->address() + SeqAsciiString::kHeaderSize,
         str.start(), str.length());
  return answer;
}
MaybeObject* Heap::AllocateTwoByteSymbol(Vector<const uc16> str,
                                         uint32_t hash_field) {
  if (str.length() > SeqTwoByteString::kMaxLength) {
    return Failure::OutOfMemoryException();
  }
  // Compute map and object size.
  Map* map = symbol_map();
  int size = SeqTwoByteString::SizeFor(str.length());

  // Allocate string.
  Object* result;
  { MaybeObject* maybe_result = (size > Page::kMaxNonCodeHeapObjectSize)
        ? lo_space_->AllocateRaw(size, NOT_EXECUTABLE)
        : old_data_space_->AllocateRaw(size);
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }

  reinterpret_cast<HeapObject*>(result)->set_map(map);
  // Set the length and hash field, then copy in the characters.
  String* answer = String::cast(result);
  answer->set_length(str.length());
  answer->set_hash_field(hash_field);
  memcpy(answer->address() + SeqTwoByteString::kHeaderSize,
         str.start(), str.length() * kUC16Size);
  return answer;
}
MaybeObject* Heap::AllocateRaw(int size_in_bytes,
                               AllocationSpace space,
                               AllocationSpace retry_space) {
  ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
#ifdef DEBUG
  if (FLAG_gc_interval >= 0 &&
      !disallow_allocation_failure_ &&
      Heap::allocation_timeout_-- <= 0) {
    return Failure::RetryAfterGC(space);
  }
  isolate_->counters()->objs_since_last_full()->Increment();
  isolate_->counters()->objs_since_last_young()->Increment();
#endif
  MaybeObject* result;
  if (NEW_SPACE == space) {
    result = new_space_.AllocateRaw(size_in_bytes);
    if (always_allocate() && result->IsFailure()) {
      space = retry_space;
    } else {
      return result;
    }
  }

  if (OLD_POINTER_SPACE == space) {
    result = old_pointer_space_->AllocateRaw(size_in_bytes);
  } else if (OLD_DATA_SPACE == space) {
    result = old_data_space_->AllocateRaw(size_in_bytes);
  } else if (CODE_SPACE == space) {
    result = code_space_->AllocateRaw(size_in_bytes);
  } else if (LO_SPACE == space) {
    result = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE);
  } else if (CELL_SPACE == space) {
    result = cell_space_->AllocateRaw(size_in_bytes);
  } else {
    ASSERT(MAP_SPACE == space);
    result = map_space_->AllocateRaw(size_in_bytes);
  }
  if (result->IsFailure()) old_gen_exhausted_ = true;
  return result;
}
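
// New-space requests normally return their failure to the caller so that a
// scavenge can be scheduled; only under an AlwaysAllocateScope does the
// allocation silently retry in retry_space. A sketch of a typical call site
// (the sizes and spaces shown are illustrative only):
//
//   MaybeObject* maybe =
//       heap->AllocateRaw(FixedArray::SizeFor(len), NEW_SPACE,
//                         OLD_POINTER_SPACE);
//   Object* obj;
//   if (!maybe->ToObject(&obj)) return maybe;  // propagate the failure
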
void Heap::FinalizeExternalString(String* string) {
  ASSERT(string->IsExternalString());
  v8::String::ExternalStringResourceBase** resource_addr =
      reinterpret_cast<v8::String::ExternalStringResourceBase**>(
          reinterpret_cast<byte*>(string) +
          ExternalString::kResourceOffset -
          kHeapObjectTag);

  // Dispose of the C++ resource object if it has not already been disposed.
  if (*resource_addr != NULL) {
    (*resource_addr)->Dispose();
    *resource_addr = NULL;
  }
}
MaybeObject* Heap::AllocateRawMap() {
#ifdef DEBUG
  isolate_->counters()->objs_since_last_full()->Increment();
  isolate_->counters()->objs_since_last_young()->Increment();
#endif
  MaybeObject* result = map_space_->AllocateRaw(Map::kSize);
  if (result->IsFailure()) old_gen_exhausted_ = true;
#ifdef DEBUG
  if (!result->IsFailure()) {
    // Maps have their own alignment.
    CHECK((reinterpret_cast<intptr_t>(result) & kMapAlignmentMask) ==
          static_cast<intptr_t>(kHeapObjectTag));
  }
#endif
  return result;
}
MaybeObject* Heap::AllocateRawCell() {
#ifdef DEBUG
  isolate_->counters()->objs_since_last_full()->Increment();
  isolate_->counters()->objs_since_last_young()->Increment();
#endif
  MaybeObject* result = cell_space_->AllocateRaw(JSGlobalPropertyCell::kSize);
  if (result->IsFailure()) old_gen_exhausted_ = true;
  return result;
}
bool Heap::InNewSpace(Object* object) {
  bool result = new_space_.Contains(object);
  ASSERT(!result ||                  // Either not in new space,
         gc_state_ != NOT_IN_GC ||   // ... or in the middle of GC,
         InToSpace(object));         // ... or in to-space (where we allocate).
  return result;
}
bool Heap::ShouldBePromoted(Address old_address, int object_size) {
  // An object should be promoted if:
  // - the object has survived a scavenge operation (it is below the age
  //   mark), or
  // - to-space is already 25% full.
  NewSpacePage* page = NewSpacePage::FromAddress(old_address);
  Address age_mark = new_space_.age_mark();
  bool below_mark = page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK) &&
      (!page->ContainsLimit(age_mark) || old_address < age_mark);
  return below_mark || (new_space_.Size() + object_size) >=
                       (new_space_.EffectiveCapacity() >> 2);
}
void Heap::RecordWrites(Address address, int start, int len) {
  if (!InNewSpace(address)) {
    for (int i = 0; i < len; i++) {
      store_buffer_.Mark(address + start + i * kPointerSize);
    }
  }
}
void Heap::CopyBlock(Address dst, Address src, int byte_size) {
  CopyWords(reinterpret_cast<Object**>(dst),
            reinterpret_cast<Object**>(src),
            byte_size / kPointerSize);
}
void Heap::MoveBlock(Address dst, Address src, int byte_size) {
  ASSERT(IsAligned(byte_size, kPointerSize));

  int size_in_words = byte_size / kPointerSize;

  if ((dst < src) || (dst >= (src + byte_size))) {
    Object** src_slot = reinterpret_cast<Object**>(src);
    Object** dst_slot = reinterpret_cast<Object**>(dst);
    Object** end_slot = src_slot + size_in_words;

    while (src_slot != end_slot) {
      *dst_slot++ = *src_slot++;
    }
  } else {
    memmove(dst, src, byte_size);
  }
}
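
// Rationale: a forward word-by-word copy is only unsafe when the destination
// overlaps the source from within, i.e. src < dst < src + byte_size; exactly
// that case is delegated to memmove. For example, with src = 0x1000 and
// byte_size = 0x20:
//
//   dst = 0x0ff8  (dst < src)          -> forward copy is safe
//   dst = 0x1008  (inside the source)  -> memmove handles the overlap
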
void Heap::ScavengeObject(HeapObject** p, HeapObject* object) {
  ASSERT(HEAP->InFromSpace(object));

  // We use the first word (where the map pointer usually is) of a heap
  // object to record the forwarding pointer. A forwarding pointer can
  // point to old space, code space, or the to-space of the new generation.
  MapWord first_word = object->map_word();

  // If the first word is a forwarding address, the object has already been
  // copied.
  if (first_word.IsForwardingAddress()) {
    HeapObject* dest = first_word.ToForwardingAddress();
    ASSERT(HEAP->InFromSpace(*p));
    *p = dest;
    return;
  }

  // Call the slow part of scavenge object.
  return ScavengeObjectSlow(p, object);
}
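
// ScavengePointer is the one-line wrapper the scavenger's slot visitors go
// through. A minimal sketch, assuming a slot that refers to from-space:
//
//   HeapObject* obj = ...;         // object currently in from-space
//   Heap::ScavengePointer(&obj);   // first visit copies/promotes the object;
//                                  // later visits just rewrite the slot from
//                                  // the forwarding address
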
bool Heap::CollectGarbage(AllocationSpace space, const char* gc_reason) {
  const char* collector_reason = NULL;
  GarbageCollector collector = SelectGarbageCollector(space, &collector_reason);
  return CollectGarbage(space, collector, gc_reason, collector_reason);
}
MaybeObject* Heap::PrepareForCompare(String* str) {
  // Always flatten small strings and force flattening of long strings
  // after we have accumulated a certain amount we failed to flatten.
  static const int kMaxAlwaysFlattenLength = 32;
  static const int kFlattenLongThreshold = 16*KB;

  const int length = str->length();
  MaybeObject* obj = str->TryFlatten();
  if (length <= kMaxAlwaysFlattenLength ||
      unflattened_strings_length_ >= kFlattenLongThreshold) {
    return obj;
  }
  if (obj->IsFailure()) {
    unflattened_strings_length_ += length;
  }
  return str;
}
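
// The heuristic trades one-time flattening cost against repeatedly walking
// cons strings during comparison: short strings are always flattened, and
// once 16 KB worth of comparisons have hit long unflattened strings, long
// strings are force-flattened as well. A failed TryFlatten() is deliberately
// ignored here; comparison still works on an unflattened string, just more
// slowly.
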
intptr_t Heap::AdjustAmountOfExternalAllocatedMemory(
    intptr_t change_in_bytes) {
  intptr_t amount = amount_of_external_allocated_memory_ + change_in_bytes;
  if (change_in_bytes >= 0) {
    // Avoid overflow.
    if (amount > amount_of_external_allocated_memory_) {
      amount_of_external_allocated_memory_ = amount;
    }
    intptr_t amount_since_last_global_gc =
        amount_of_external_allocated_memory_ -
        amount_of_external_allocated_memory_at_last_global_gc_;
    if (amount_since_last_global_gc > external_allocation_limit_) {
      CollectAllGarbage(kNoGCFlags, "external memory allocation limit reached");
    }
  } else {
    // Avoid underflow.
    if (amount >= 0) {
      amount_of_external_allocated_memory_ = amount;
    }
  }
  ASSERT(amount_of_external_allocated_memory_ >= 0);
  return amount_of_external_allocated_memory_;
}
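
// Embedders reach this through the public API so that externally retained
// buffers put pressure on the GC. A minimal sketch, assuming the public
// v8::V8::AdjustAmountOfExternalAllocatedMemory wrapper:
//
//   void* data = malloc(length);
//   v8::V8::AdjustAmountOfExternalAllocatedMemory(length);    // on attach
//   ...
//   v8::V8::AdjustAmountOfExternalAllocatedMemory(-length);   // on dispose
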
void Heap::SetLastScriptId(Object* last_script_id) {
  roots_[kLastScriptIdRootIndex] = last_script_id;
}
Isolate* Heap::isolate() {
  return reinterpret_cast<Isolate*>(reinterpret_cast<intptr_t>(this) -
      reinterpret_cast<size_t>(reinterpret_cast<Isolate*>(4)->heap()) + 4);
}
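
// The Heap is a field embedded in its Isolate, so the owning Isolate can be
// recovered by subtracting the offset of that field. Pretending an Isolate
// lives at address 4 computes offsetof(Isolate, heap_) without dereferencing
// a null pointer:
//
//   offset  = (Isolate*)4->heap() - 4
//   isolate = this - offset
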
#ifdef DEBUG
#define GC_GREEDY_CHECK() \
  if (FLAG_gc_greedy) HEAP->GarbageCollectionGreedyCheck()
#else
#define GC_GREEDY_CHECK() { }
#endif
// Calls FUNCTION_CALL, retrying once after a targeted GC and once more after
// a last-resort GC under an AlwaysAllocateScope; aborts on hard OOM.
#define CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY)    \
  do {                                                                        \
    GC_GREEDY_CHECK();                                                        \
    MaybeObject* __maybe_object__ = FUNCTION_CALL;                            \
    Object* __object__ = NULL;                                                \
    if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE;                \
    if (__maybe_object__->IsOutOfMemory()) {                                  \
      v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_0", true);    \
    }                                                                         \
    if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY;                    \
    ISOLATE->heap()->CollectGarbage(                                          \
        Failure::cast(__maybe_object__)->allocation_space(),                  \
        "allocation failure");                                                \
    __maybe_object__ = FUNCTION_CALL;                                         \
    if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE;                \
    if (__maybe_object__->IsOutOfMemory()) {                                  \
      v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_1", true);    \
    }                                                                         \
    if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY;                    \
    ISOLATE->counters()->gc_last_resort_from_handles()->Increment();          \
    ISOLATE->heap()->CollectAllAvailableGarbage("last resort gc");            \
    {                                                                         \
      AlwaysAllocateScope __scope__;                                          \
      __maybe_object__ = FUNCTION_CALL;                                       \
    }                                                                         \
    if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE;                \
    if (__maybe_object__->IsOutOfMemory() ||                                  \
        __maybe_object__->IsRetryAfterGC()) {                                 \
      v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_2", true);    \
    }                                                                         \
    RETURN_EMPTY;                                                             \
  } while (false)
#define CALL_HEAP_FUNCTION(ISOLATE, FUNCTION_CALL, TYPE)                  \
  CALL_AND_RETRY(ISOLATE,                                                 \
                 FUNCTION_CALL,                                           \
                 return Handle<TYPE>(TYPE::cast(__object__), ISOLATE),    \
                 return Handle<TYPE>())
#define CALL_HEAP_FUNCTION_VOID(ISOLATE, FUNCTION_CALL) \
  CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, return, return)
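
// Typical use in the handle layer wraps a raw allocator in a handle-returning
// function (a sketch; this particular factory is illustrative):
//
//   Handle<String> NewAsciiString(Isolate* isolate, Vector<const char> str) {
//     CALL_HEAP_FUNCTION(isolate,
//                        isolate->heap()->AllocateStringFromAscii(str),
//                        String);
//   }
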
inline bool Heap::allow_allocation(bool new_state) {
  bool old = allocation_allowed_;
  allocation_allowed_ = new_state;
  return old;
}
void ExternalStringTable::AddString(String* string) {
  ASSERT(string->IsExternalString());
  if (heap_->InNewSpace(string)) {
    new_space_strings_.Add(string);
  } else {
    old_space_strings_.Add(string);
  }
}
void ExternalStringTable::Iterate(ObjectVisitor* v) {
  if (!new_space_strings_.is_empty()) {
    Object** start = &new_space_strings_[0];
    v->VisitPointers(start, start + new_space_strings_.length());
  }
  if (!old_space_strings_.is_empty()) {
    Object** start = &old_space_strings_[0];
    v->VisitPointers(start, start + old_space_strings_.length());
  }
}
void ExternalStringTable::Verify() {
#ifdef DEBUG
  for (int i = 0; i < new_space_strings_.length(); ++i) {
    Object* obj = new_space_strings_[i];
    ASSERT(heap_->InNewSpace(obj));
    ASSERT(obj != HEAP->raw_unchecked_the_hole_value());
    if (obj->IsExternalAsciiString()) {
      ExternalAsciiString* string = ExternalAsciiString::cast(obj);
      ASSERT(String::IsAscii(string->GetChars(), string->length()));
    }
  }
  for (int i = 0; i < old_space_strings_.length(); ++i) {
    Object* obj = old_space_strings_[i];
    ASSERT(!heap_->InNewSpace(obj));
    ASSERT(obj != HEAP->raw_unchecked_the_hole_value());
    if (obj->IsExternalAsciiString()) {
      ExternalAsciiString* string = ExternalAsciiString::cast(obj);
      ASSERT(String::IsAscii(string->GetChars(), string->length()));
    }
  }
#endif
}
void ExternalStringTable::AddOldString(String* string) {
  ASSERT(string->IsExternalString());
  ASSERT(!heap_->InNewSpace(string));
  old_space_strings_.Add(string);
}
void ExternalStringTable::ShrinkNewStrings(int position) {
  new_space_strings_.Rewind(position);
  if (FLAG_verify_heap) {
    Verify();
  }
}
void Heap::ClearInstanceofCache() {
  set_instanceof_cache_function(the_hole_value());
}
Object* Heap::ToBoolean(bool condition) {
  return condition ? true_value() : false_value();
}
void Heap::CompletelyClearInstanceofCache() {
  set_instanceof_cache_map(the_hole_value());
  set_instanceof_cache_function(the_hole_value());
}
MaybeObject* TranscendentalCache::Get(Type type, double input) {
  SubCache* cache = caches_[type];
  if (cache == NULL) {
    caches_[type] = cache = new SubCache(type);
  }
  return cache->Get(input);
}
Address TranscendentalCache::cache_array_address() {
  return reinterpret_cast<Address>(caches_);
}
double TranscendentalCache::SubCache::Calculate(double input) {
  switch (type_) {
    case COS: return fast_cos(input);
    case SIN: return fast_sin(input);
    case TAN: return fast_tan(input);
    case LOG: return fast_log(input);
    // ... the remaining Type values dispatch to the corresponding
    // library routines ...
    default:  return 0.0;  // Unreachable for valid types.
  }
}
MaybeObject* TranscendentalCache::SubCache::Get(double input) {
  Converter c;
  c.dbl = input;
  int hash = Hash(c);
  Element e = elements_[hash];
  if (e.in[0] == c.integers[0] &&
      e.in[1] == c.integers[1]) {
    ASSERT(e.output != NULL);
    isolate_->counters()->transcendental_cache_hit()->Increment();
    return e.output;
  }
  double answer = Calculate(input);
  isolate_->counters()->transcendental_cache_miss()->Increment();
  Object* heap_number;
  { MaybeObject* maybe_heap_number =
        isolate_->heap()->AllocateHeapNumber(answer);
    if (!maybe_heap_number->ToObject(&heap_number)) return maybe_heap_number;
  }
  elements_[hash].in[0] = c.integers[0];
  elements_[hash].in[1] = c.integers[1];
  elements_[hash].output = heap_number;
  return heap_number;
}
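
// The cache is keyed on the raw 64-bit pattern of the input: Converter
// (assumed here to be the union of a double with two uint32_t halves, as
// declared in heap.h) yields c.integers[0..1], so lookups never compare
// doubles and each NaN bit pattern simply occupies its own entry. The table
// is direct-mapped; a colliding insert overwrites the previous element.
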
AlwaysAllocateScope::AlwaysAllocateScope() {
  // We shouldn't hit any nested scopes, because that requires
  // non-handle code to call handle code. The code still works, but
  // performance will degrade, so we want to catch this situation
  // in debug mode.
  ASSERT(HEAP->always_allocate_scope_depth_ == 0);
  HEAP->always_allocate_scope_depth_++;
}

AlwaysAllocateScope::~AlwaysAllocateScope() {
  HEAP->always_allocate_scope_depth_--;
  ASSERT(HEAP->always_allocate_scope_depth_ == 0);
}
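
// While such a scope is active, always_allocate() is true and AllocateRaw
// retries new-space failures in the caller-supplied retry space instead of
// reporting them. A minimal sketch:
//
//   {
//     AlwaysAllocateScope scope;  // allocation must not fail in here
//     MaybeObject* maybe =
//         heap->AllocateRaw(size, NEW_SPACE, OLD_DATA_SPACE);
//     ...
//   }
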
LinearAllocationScope::LinearAllocationScope() {
  HEAP->linear_allocation_scope_depth_++;
}

LinearAllocationScope::~LinearAllocationScope() {
  HEAP->linear_allocation_scope_depth_--;
  ASSERT(HEAP->linear_allocation_scope_depth_ >= 0);
}
void VerifyPointersVisitor::VisitPointers(Object** start, Object** end) {
  for (Object** current = start; current < end; current++) {
    if ((*current)->IsHeapObject()) {
      HeapObject* object = HeapObject::cast(*current);
      ASSERT(HEAP->Contains(object));
      ASSERT(object->map()->IsMap());
    }
  }
}
double GCTracer::SizeOfHeapObjects() {
  return (static_cast<double>(HEAP->SizeOfObjects())) / MB;
}
DisallowAllocationFailure::DisallowAllocationFailure() {
  old_state_ = HEAP->disallow_allocation_failure_;
  HEAP->disallow_allocation_failure_ = true;
}

DisallowAllocationFailure::~DisallowAllocationFailure() {
  HEAP->disallow_allocation_failure_ = old_state_;
}
AssertNoAllocation::AssertNoAllocation() {
  old_state_ = HEAP->allow_allocation(false);
}

AssertNoAllocation::~AssertNoAllocation() {
  HEAP->allow_allocation(old_state_);
}
DisableAssertNoAllocation::DisableAssertNoAllocation() {
  old_state_ = HEAP->allow_allocation(true);
}

DisableAssertNoAllocation::~DisableAssertNoAllocation() {
  HEAP->allow_allocation(old_state_);
}
#endif  // V8_HEAP_INL_H_