v8  3.14.5(node0.10.28)
V8 is Google's open source JavaScript engine
 All Data Structures Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
heap-inl.h
Go to the documentation of this file.
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #ifndef V8_HEAP_INL_H_
29 #define V8_HEAP_INL_H_
30 
31 #include "heap.h"
32 #include "isolate.h"
33 #include "list-inl.h"
34 #include "objects.h"
35 #include "platform.h"
36 #include "v8-counters.h"
37 #include "store-buffer.h"
38 #include "store-buffer-inl.h"
39 
40 namespace v8 {
41 namespace internal {
42 
43 void PromotionQueue::insert(HeapObject* target, int size) {
44  if (emergency_stack_ != NULL) {
45  emergency_stack_->Add(Entry(target, size));
46  return;
47  }
48 
49  if (NewSpacePage::IsAtStart(reinterpret_cast<Address>(rear_))) {
50  NewSpacePage* rear_page =
51  NewSpacePage::FromAddress(reinterpret_cast<Address>(rear_));
52  ASSERT(!rear_page->prev_page()->is_anchor());
53  rear_ = reinterpret_cast<intptr_t*>(rear_page->prev_page()->area_end());
55  }
56 
57  if (guard_) {
58  ASSERT(GetHeadPage() ==
59  Page::FromAllocationTop(reinterpret_cast<Address>(limit_)));
60 
61  if ((rear_ - 2) < limit_) {
62  RelocateQueueHead();
63  emergency_stack_->Add(Entry(target, size));
64  return;
65  }
66  }
67 
68  *(--rear_) = reinterpret_cast<intptr_t>(target);
69  *(--rear_) = size;
70  // Assert no overflow into live objects.
71 #ifdef DEBUG
72  SemiSpace::AssertValidRange(HEAP->new_space()->top(),
73  reinterpret_cast<Address>(rear_));
74 #endif
75 }
76 
77 
79  guard_ = guard_ ||
80  heap_->new_space()->active_space()->current_page()->address() ==
81  GetHeadPage()->address();
82 }
83 
84 
86  PretenureFlag pretenure) {
87  // Check for ASCII first since this is the common case.
88  const char* start = str.start();
89  int length = str.length();
90  int non_ascii_start = String::NonAsciiStart(start, length);
91  if (non_ascii_start >= length) {
92  // If the string is ASCII, we do not need to convert the characters
93  // since UTF8 is backwards compatible with ASCII.
94  return AllocateStringFromAscii(str, pretenure);
95  }
96  // Non-ASCII and we need to decode.
97  return AllocateStringFromUtf8Slow(str, non_ascii_start, pretenure);
98 }
99 
100 
102  int chars,
103  uint32_t hash_field) {
104  unibrow::Utf8InputBuffer<> buffer(str.start(),
105  static_cast<unsigned>(str.length()));
106  return AllocateInternalSymbol(&buffer, chars, hash_field);
107 }
108 
109 
111  uint32_t hash_field) {
112  if (str.length() > SeqAsciiString::kMaxLength) {
114  }
115  // Compute map and object size.
116  Map* map = ascii_symbol_map();
117  int size = SeqAsciiString::SizeFor(str.length());
118 
119  // Allocate string.
120  Object* result;
121  { MaybeObject* maybe_result = (size > Page::kMaxNonCodeHeapObjectSize)
122  ? lo_space_->AllocateRaw(size, NOT_EXECUTABLE)
123  : old_data_space_->AllocateRaw(size);
124  if (!maybe_result->ToObject(&result)) return maybe_result;
125  }
126 
127  // String maps are all immortal immovable objects.
128  reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier(map);
129  // Set length and hash fields of the allocated string.
130  String* answer = String::cast(result);
131  answer->set_length(str.length());
132  answer->set_hash_field(hash_field);
133 
134  ASSERT_EQ(size, answer->Size());
135 
136  // Fill in the characters.
137  memcpy(answer->address() + SeqAsciiString::kHeaderSize,
138  str.start(), str.length());
139 
140  return answer;
141 }
142 
143 
145  uint32_t hash_field) {
146  if (str.length() > SeqTwoByteString::kMaxLength) {
148  }
149  // Compute map and object size.
150  Map* map = symbol_map();
151  int size = SeqTwoByteString::SizeFor(str.length());
152 
153  // Allocate string.
154  Object* result;
155  { MaybeObject* maybe_result = (size > Page::kMaxNonCodeHeapObjectSize)
156  ? lo_space_->AllocateRaw(size, NOT_EXECUTABLE)
157  : old_data_space_->AllocateRaw(size);
158  if (!maybe_result->ToObject(&result)) return maybe_result;
159  }
160 
161  reinterpret_cast<HeapObject*>(result)->set_map(map);
162  // Set length and hash fields of the allocated string.
163  String* answer = String::cast(result);
164  answer->set_length(str.length());
165  answer->set_hash_field(hash_field);
166 
167  ASSERT_EQ(size, answer->Size());
168 
169  // Fill in the characters.
170  memcpy(answer->address() + SeqTwoByteString::kHeaderSize,
171  str.start(), str.length() * kUC16Size);
172 
173  return answer;
174 }
175 
176 MaybeObject* Heap::CopyFixedArray(FixedArray* src) {
177  return CopyFixedArrayWithMap(src, src->map());
178 }
179 
180 
182  return CopyFixedDoubleArrayWithMap(src, src->map());
183 }
184 
185 
186 MaybeObject* Heap::AllocateRaw(int size_in_bytes,
187  AllocationSpace space,
188  AllocationSpace retry_space) {
189  ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
190  ASSERT(space != NEW_SPACE ||
191  retry_space == OLD_POINTER_SPACE ||
192  retry_space == OLD_DATA_SPACE ||
193  retry_space == LO_SPACE);
194 #ifdef DEBUG
195  if (FLAG_gc_interval >= 0 &&
196  !disallow_allocation_failure_ &&
197  Heap::allocation_timeout_-- <= 0) {
198  return Failure::RetryAfterGC(space);
199  }
200  isolate_->counters()->objs_since_last_full()->Increment();
201  isolate_->counters()->objs_since_last_young()->Increment();
202 #endif
203  MaybeObject* result;
204  if (NEW_SPACE == space) {
205  result = new_space_.AllocateRaw(size_in_bytes);
206  if (always_allocate() && result->IsFailure()) {
207  space = retry_space;
208  } else {
209  return result;
210  }
211  }
212 
213  if (OLD_POINTER_SPACE == space) {
214  result = old_pointer_space_->AllocateRaw(size_in_bytes);
215  } else if (OLD_DATA_SPACE == space) {
216  result = old_data_space_->AllocateRaw(size_in_bytes);
217  } else if (CODE_SPACE == space) {
218  result = code_space_->AllocateRaw(size_in_bytes);
219  } else if (LO_SPACE == space) {
220  result = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE);
221  } else if (CELL_SPACE == space) {
222  result = cell_space_->AllocateRaw(size_in_bytes);
223  } else {
224  ASSERT(MAP_SPACE == space);
225  result = map_space_->AllocateRaw(size_in_bytes);
226  }
227  if (result->IsFailure()) old_gen_exhausted_ = true;
228  return result;
229 }
230 
231 
233  int32_t value, PretenureFlag pretenure) {
234  if (Smi::IsValid(value)) return Smi::FromInt(value);
235  // Bypass NumberFromDouble to avoid various redundant checks.
236  return AllocateHeapNumber(FastI2D(value), pretenure);
237 }
238 
239 
241  uint32_t value, PretenureFlag pretenure) {
242  if ((int32_t)value >= 0 && Smi::IsValid((int32_t)value)) {
243  return Smi::FromInt((int32_t)value);
244  }
245  // Bypass NumberFromDouble to avoid various redundant checks.
246  return AllocateHeapNumber(FastUI2D(value), pretenure);
247 }
248 
249 
251  ASSERT(string->IsExternalString());
253  reinterpret_cast<v8::String::ExternalStringResourceBase**>(
254  reinterpret_cast<byte*>(string) +
257 
258  // Dispose of the C++ object if it has not already been disposed.
259  if (*resource_addr != NULL) {
260  (*resource_addr)->Dispose();
261  *resource_addr = NULL;
262  }
263 }
264 
265 
266 MaybeObject* Heap::AllocateRawMap() {
267 #ifdef DEBUG
268  isolate_->counters()->objs_since_last_full()->Increment();
269  isolate_->counters()->objs_since_last_young()->Increment();
270 #endif
271  MaybeObject* result = map_space_->AllocateRaw(Map::kSize);
272  if (result->IsFailure()) old_gen_exhausted_ = true;
273  return result;
274 }
275 
276 
277 MaybeObject* Heap::AllocateRawCell() {
278 #ifdef DEBUG
279  isolate_->counters()->objs_since_last_full()->Increment();
280  isolate_->counters()->objs_since_last_young()->Increment();
281 #endif
282  MaybeObject* result = cell_space_->AllocateRaw(JSGlobalPropertyCell::kSize);
283  if (result->IsFailure()) old_gen_exhausted_ = true;
284  return result;
285 }
286 
287 
288 bool Heap::InNewSpace(Object* object) {
289  bool result = new_space_.Contains(object);
290  ASSERT(!result || // Either not in new space
291  gc_state_ != NOT_IN_GC || // ... or in the middle of GC
292  InToSpace(object)); // ... or in to-space (where we allocate).
293  return result;
294 }
295 
296 
298  return new_space_.Contains(addr);
299 }
300 
301 
302 bool Heap::InFromSpace(Object* object) {
303  return new_space_.FromSpaceContains(object);
304 }
305 
306 
307 bool Heap::InToSpace(Object* object) {
308  return new_space_.ToSpaceContains(object);
309 }
310 
311 
313  if (!incremental_marking()->IsStopped()) return false;
314  return OldGenerationSpaceAvailable() < 0;
315 }
316 
317 
318 bool Heap::ShouldBePromoted(Address old_address, int object_size) {
319  // An object should be promoted if:
320  // - the object has survived a scavenge operation or
321  // - to space is already 25% full.
322  NewSpacePage* page = NewSpacePage::FromAddress(old_address);
323  Address age_mark = new_space_.age_mark();
324  bool below_mark = page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK) &&
325  (!page->ContainsLimit(age_mark) || old_address < age_mark);
326  return below_mark || (new_space_.Size() + object_size) >=
327  (new_space_.EffectiveCapacity() >> 2);
328 }
329 
330 
331 void Heap::RecordWrite(Address address, int offset) {
332  if (!InNewSpace(address)) store_buffer_.Mark(address + offset);
333 }
334 
335 
336 void Heap::RecordWrites(Address address, int start, int len) {
337  if (!InNewSpace(address)) {
338  for (int i = 0; i < len; i++) {
339  store_buffer_.Mark(address + start + i * kPointerSize);
340  }
341  }
342 }
343 
344 
346  InstanceType type = object->map()->instance_type();
347  AllocationSpace space = TargetSpaceId(type);
348  return (space == OLD_POINTER_SPACE)
349  ? old_pointer_space_
350  : old_data_space_;
351 }
352 
353 
355  // Heap numbers and sequential strings are promoted to old data space, all
356  // other object types are promoted to old pointer space. We do not use
357  // object->IsHeapNumber() and object->IsSeqString() because we already
358  // know that object has the heap object tag.
359 
360  // These objects are never allocated in new space.
361  ASSERT(type != MAP_TYPE);
362  ASSERT(type != CODE_TYPE);
363  ASSERT(type != ODDBALL_TYPE);
365 
366  if (type < FIRST_NONSTRING_TYPE) {
367  // There are four string representations: sequential strings, external
368  // strings, cons strings, and sliced strings.
369  // Only the latter two contain non-map-word pointers to heap objects.
370  return ((type & kIsIndirectStringMask) == kIsIndirectStringTag)
372  : OLD_DATA_SPACE;
373  } else {
374  return (type <= LAST_DATA_TYPE) ? OLD_DATA_SPACE : OLD_POINTER_SPACE;
375  }
376 }
377 
378 
379 void Heap::CopyBlock(Address dst, Address src, int byte_size) {
380  CopyWords(reinterpret_cast<Object**>(dst),
381  reinterpret_cast<Object**>(src),
382  byte_size / kPointerSize);
383 }
384 
385 
386 void Heap::MoveBlock(Address dst, Address src, int byte_size) {
387  ASSERT(IsAligned(byte_size, kPointerSize));
388 
389  int size_in_words = byte_size / kPointerSize;
390 
391  if ((dst < src) || (dst >= (src + byte_size))) {
392  Object** src_slot = reinterpret_cast<Object**>(src);
393  Object** dst_slot = reinterpret_cast<Object**>(dst);
394  Object** end_slot = src_slot + size_in_words;
395 
396  while (src_slot != end_slot) {
397  *dst_slot++ = *src_slot++;
398  }
399  } else {
400  memmove(dst, src, byte_size);
401  }
402 }
403 
404 
406  ScavengeObject(p, *p);
407 }
408 
409 
411  ASSERT(HEAP->InFromSpace(object));
412 
413  // We use the first word (where the map pointer usually is) of a heap
414  // object to record the forwarding pointer. A forwarding pointer can
415  // point to an old space, the code space, or the to space of the new
416  // generation.
417  MapWord first_word = object->map_word();
418 
419  // If the first word is a forwarding address, the object has already been
420  // copied.
421  if (first_word.IsForwardingAddress()) {
422  HeapObject* dest = first_word.ToForwardingAddress();
423  ASSERT(HEAP->InFromSpace(*p));
424  *p = dest;
425  return;
426  }
427 
428  // Call the slow part of scavenge object.
429  return ScavengeObjectSlow(p, object);
430 }
431 
432 
433 bool Heap::CollectGarbage(AllocationSpace space, const char* gc_reason) {
434  const char* collector_reason = NULL;
435  GarbageCollector collector = SelectGarbageCollector(space, &collector_reason);
436  return CollectGarbage(space, collector, gc_reason, collector_reason);
437 }
438 
439 
440 MaybeObject* Heap::PrepareForCompare(String* str) {
441  // Always flatten small strings and force flattening of long strings
442  // after we have accumulated a certain amount we failed to flatten.
443  static const int kMaxAlwaysFlattenLength = 32;
444  static const int kFlattenLongThreshold = 16*KB;
445 
446  const int length = str->length();
447  MaybeObject* obj = str->TryFlatten();
448  if (length <= kMaxAlwaysFlattenLength ||
449  unflattened_strings_length_ >= kFlattenLongThreshold) {
450  return obj;
451  }
452  if (obj->IsFailure()) {
453  unflattened_strings_length_ += length;
454  }
455  return str;
456 }
457 
458 
460  intptr_t change_in_bytes) {
461  ASSERT(HasBeenSetUp());
462  intptr_t amount = amount_of_external_allocated_memory_ + change_in_bytes;
463  if (change_in_bytes >= 0) {
464  // Avoid overflow.
465  if (amount > amount_of_external_allocated_memory_) {
466  amount_of_external_allocated_memory_ = amount;
467  } else {
468  // Give up and reset the counters in case of an overflow.
469  amount_of_external_allocated_memory_ = 0;
470  amount_of_external_allocated_memory_at_last_global_gc_ = 0;
471  }
472  intptr_t amount_since_last_global_gc = PromotedExternalMemorySize();
473  if (amount_since_last_global_gc > external_allocation_limit_) {
474  CollectAllGarbage(kNoGCFlags, "external memory allocation limit reached");
475  }
476  } else {
477  // Avoid underflow.
478  if (amount >= 0) {
479  amount_of_external_allocated_memory_ = amount;
480  } else {
481  // Give up and reset the counters in case of an overflow.
482  amount_of_external_allocated_memory_ = 0;
483  amount_of_external_allocated_memory_at_last_global_gc_ = 0;
484  }
485  }
486  if (FLAG_trace_external_memory) {
487  PrintPID("%8.0f ms: ", isolate()->time_millis_since_init());
488  PrintF("Adjust amount of external memory: delta=%6" V8_PTR_PREFIX "d KB, "
489  " amount=%6" V8_PTR_PREFIX "d KB, isolate=0x%08" V8PRIxPTR ".\n",
490  change_in_bytes / 1024, amount_of_external_allocated_memory_ / 1024,
491  reinterpret_cast<intptr_t>(isolate()));
492  }
493  ASSERT(amount_of_external_allocated_memory_ >= 0);
494  return amount_of_external_allocated_memory_;
495 }
496 
497 
498 void Heap::SetLastScriptId(Object* last_script_id) {
499  roots_[kLastScriptIdRootIndex] = last_script_id;
500 }
501 
502 
504  return reinterpret_cast<Isolate*>(reinterpret_cast<intptr_t>(this) -
505  reinterpret_cast<size_t>(reinterpret_cast<Isolate*>(4)->heap()) + 4);
506 }
507 
508 
#ifdef DEBUG
#define GC_GREEDY_CHECK() \
  if (FLAG_gc_greedy) HEAP->GarbageCollectionGreedyCheck()
#else
#define GC_GREEDY_CHECK() { }
#endif

// Calls the FUNCTION_CALL function and retries it up to three times
// to guarantee that any allocations performed during the call will
// succeed if there's enough memory.
//
// Retry sequence: (1) raw call; (2) after a targeted GC of the failing
// space; (3) after collecting all available garbage, under an
// AlwaysAllocateScope. Out-of-memory at any stage is fatal.
//
// Warning: Do not use the identifiers __object__, __maybe_object__ or
// __scope__ in a call to this macro.

#define CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY)\
  do {                                                                    \
    GC_GREEDY_CHECK();                                                    \
    MaybeObject* __maybe_object__ = FUNCTION_CALL;                        \
    Object* __object__ = NULL;                                            \
    if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE;            \
    if (__maybe_object__->IsOutOfMemory()) {                              \
      v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_0", true);\
    }                                                                     \
    if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY;                \
    ISOLATE->heap()->CollectGarbage(Failure::cast(__maybe_object__)->     \
                                    allocation_space(),                   \
                                    "allocation failure");                \
    __maybe_object__ = FUNCTION_CALL;                                     \
    if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE;            \
    if (__maybe_object__->IsOutOfMemory()) {                              \
      v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_1", true);\
    }                                                                     \
    if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY;                \
    ISOLATE->counters()->gc_last_resort_from_handles()->Increment();      \
    ISOLATE->heap()->CollectAllAvailableGarbage("last resort gc");        \
    {                                                                     \
      AlwaysAllocateScope __scope__;                                      \
      __maybe_object__ = FUNCTION_CALL;                                   \
    }                                                                     \
    if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE;            \
    if (__maybe_object__->IsOutOfMemory() ||                              \
        __maybe_object__->IsRetryAfterGC()) {                             \
      /* TODO(1181417): Fix this. */                                      \
      v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_2", true);\
    }                                                                     \
    RETURN_EMPTY;                                                         \
  } while (false)


// Wrap an allocating call, returning a Handle<TYPE> (empty on failure).
#define CALL_HEAP_FUNCTION(ISOLATE, FUNCTION_CALL, TYPE)               \
  CALL_AND_RETRY(ISOLATE,                                              \
                 FUNCTION_CALL,                                        \
                 return Handle<TYPE>(TYPE::cast(__object__), ISOLATE), \
                 return Handle<TYPE>())


// Wrap an allocating call whose result is discarded.
#define CALL_HEAP_FUNCTION_VOID(ISOLATE, FUNCTION_CALL) \
  CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, return, return)
567 
568 
#ifdef DEBUG

// Toggle the debug-only allocation-allowed flag and return the previous
// state so callers (the Assert*Allocation scopes below) can restore it.
inline bool Heap::allow_allocation(bool new_state) {
  bool old = allocation_allowed_;
  allocation_allowed_ = new_state;
  return old;
}

#endif
578 
579 
581  ASSERT(string->IsExternalString());
582  if (heap_->InNewSpace(string)) {
583  new_space_strings_.Add(string);
584  } else {
585  old_space_strings_.Add(string);
586  }
587 }
588 
589 
590 void ExternalStringTable::Iterate(ObjectVisitor* v) {
591  if (!new_space_strings_.is_empty()) {
592  Object** start = &new_space_strings_[0];
593  v->VisitPointers(start, start + new_space_strings_.length());
594  }
595  if (!old_space_strings_.is_empty()) {
596  Object** start = &old_space_strings_[0];
597  v->VisitPointers(start, start + old_space_strings_.length());
598  }
599 }
600 
601 
602 // Verify() is inline to avoid ifdef-s around its calls in release
603 // mode.
604 void ExternalStringTable::Verify() {
605 #ifdef DEBUG
606  for (int i = 0; i < new_space_strings_.length(); ++i) {
607  Object* obj = Object::cast(new_space_strings_[i]);
608  // TODO(yangguo): check that the object is indeed an external string.
609  ASSERT(heap_->InNewSpace(obj));
610  ASSERT(obj != HEAP->raw_unchecked_the_hole_value());
611  if (obj->IsExternalAsciiString()) {
613  ASSERT(String::IsAscii(string->GetChars(), string->length()));
614  }
615  }
616  for (int i = 0; i < old_space_strings_.length(); ++i) {
617  Object* obj = Object::cast(old_space_strings_[i]);
618  // TODO(yangguo): check that the object is indeed an external string.
619  ASSERT(!heap_->InNewSpace(obj));
620  ASSERT(obj != HEAP->raw_unchecked_the_hole_value());
621  if (obj->IsExternalAsciiString()) {
622  ExternalAsciiString* string = ExternalAsciiString::cast(obj);
623  ASSERT(String::IsAscii(string->GetChars(), string->length()));
624  }
625  }
626 #endif
627 }
628 
629 
630 void ExternalStringTable::AddOldString(String* string) {
631  ASSERT(string->IsExternalString());
632  ASSERT(!heap_->InNewSpace(string));
633  old_space_strings_.Add(string);
634 }
635 
636 
637 void ExternalStringTable::ShrinkNewStrings(int position) {
638  new_space_strings_.Rewind(position);
639 #ifdef VERIFY_HEAP
640  if (FLAG_verify_heap) {
641  Verify();
642  }
643 #endif
644 }
645 
646 
648  set_instanceof_cache_function(the_hole_value());
649 }
650 
651 
652 Object* Heap::ToBoolean(bool condition) {
653  return condition ? true_value() : false_value();
654 }
655 
656 
658  set_instanceof_cache_map(the_hole_value());
659  set_instanceof_cache_function(the_hole_value());
660 }
661 
662 
663 MaybeObject* TranscendentalCache::Get(Type type, double input) {
664  SubCache* cache = caches_[type];
665  if (cache == NULL) {
666  caches_[type] = cache = new SubCache(type);
667  }
668  return cache->Get(input);
669 }
670 
671 
672 Address TranscendentalCache::cache_array_address() {
673  return reinterpret_cast<Address>(caches_);
674 }
675 
676 
677 double TranscendentalCache::SubCache::Calculate(double input) {
678  switch (type_) {
679  case ACOS:
680  return acos(input);
681  case ASIN:
682  return asin(input);
683  case ATAN:
684  return atan(input);
685  case COS:
686  return fast_cos(input);
687  case EXP:
688  return exp(input);
689  case LOG:
690  return fast_log(input);
691  case SIN:
692  return fast_sin(input);
693  case TAN:
694  return fast_tan(input);
695  default:
696  return 0.0; // Never happens.
697  }
698 }
699 
700 
701 MaybeObject* TranscendentalCache::SubCache::Get(double input) {
702  Converter c;
703  c.dbl = input;
704  int hash = Hash(c);
705  Element e = elements_[hash];
706  if (e.in[0] == c.integers[0] &&
707  e.in[1] == c.integers[1]) {
708  ASSERT(e.output != NULL);
709  isolate_->counters()->transcendental_cache_hit()->Increment();
710  return e.output;
711  }
712  double answer = Calculate(input);
713  isolate_->counters()->transcendental_cache_miss()->Increment();
714  Object* heap_number;
715  { MaybeObject* maybe_heap_number =
716  isolate_->heap()->AllocateHeapNumber(answer);
717  if (!maybe_heap_number->ToObject(&heap_number)) return maybe_heap_number;
718  }
719  elements_[hash].in[0] = c.integers[0];
720  elements_[hash].in[1] = c.integers[1];
721  elements_[hash].output = heap_number;
722  return heap_number;
723 }
724 
725 
727  // We shouldn't hit any nested scopes, because that requires
728  // non-handle code to call handle code. The code still works but
729  // performance will degrade, so we want to catch this situation
730  // in debug mode.
731  ASSERT(HEAP->always_allocate_scope_depth_ == 0);
732  HEAP->always_allocate_scope_depth_++;
733 }
734 
735 
737  HEAP->always_allocate_scope_depth_--;
738  ASSERT(HEAP->always_allocate_scope_depth_ == 0);
739 }
740 
741 
743  for (Object** current = start; current < end; current++) {
744  if ((*current)->IsHeapObject()) {
745  HeapObject* object = HeapObject::cast(*current);
746  CHECK(HEAP->Contains(object));
747  CHECK(object->map()->IsMap());
748  }
749  }
750 }
751 
752 
753 double GCTracer::SizeOfHeapObjects() {
754  return (static_cast<double>(HEAP->SizeOfObjects())) / MB;
755 }
756 
757 
759 #ifdef DEBUG
760  old_state_ = HEAP->disallow_allocation_failure_;
761  HEAP->disallow_allocation_failure_ = true;
762 #endif
763 }
764 
765 
767 #ifdef DEBUG
768  HEAP->disallow_allocation_failure_ = old_state_;
769 #endif
770 }
771 
772 
773 #ifdef DEBUG
775  Isolate* isolate = ISOLATE;
776  active_ = !isolate->optimizing_compiler_thread()->IsOptimizerThread();
777  if (active_) {
778  old_state_ = isolate->heap()->allow_allocation(false);
779  }
780 }
781 
782 
784  if (active_) HEAP->allow_allocation(old_state_);
785 }
786 
787 
789  Isolate* isolate = ISOLATE;
790  active_ = !isolate->optimizing_compiler_thread()->IsOptimizerThread();
791  if (active_) {
792  old_state_ = isolate->heap()->allow_allocation(true);
793  }
794 }
795 
796 
798  if (active_) HEAP->allow_allocation(old_state_);
799 }
800 
801 #else
802 
807 
808 #endif
809 
810 
811 } } // namespace v8::internal
812 
813 #endif // V8_HEAP_INL_H_
byte * Address
Definition: globals.h:157
double fast_tan(double x)
MUST_USE_RESULT MaybeObject * AllocateStringFromUtf8Slow(Vector< const char > str, int non_ascii_start, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:4430
MUST_USE_RESULT MaybeObject * AllocateSymbol(Vector< const char > str, int chars, uint32_t hash_field)
Definition: heap-inl.h:101
#define V8PRIxPTR
Definition: globals.h:189
MUST_USE_RESULT MaybeObject * CopyFixedDoubleArray(FixedDoubleArray *src)
Definition: heap-inl.h:181
void RecordWrite(Address address, int offset)
Definition: heap-inl.h:331
void PrintF(const char *format,...)
Definition: v8utils.cc:40
bool InNewSpace(Object *object)
Definition: heap-inl.h:288
static String * cast(Object *obj)
MaybeObject * TryFlatten(PretenureFlag pretenure=NOT_TENURED)
Definition: objects-inl.h:2428
MUST_USE_RESULT MaybeObject * AllocateRaw(int size_in_bytes)
Definition: spaces-inl.h:263
Object * ToBoolean(bool condition)
Definition: heap-inl.h:652
Isolate * isolate()
Definition: heap-inl.h:503
MUST_USE_RESULT MaybeObject * CopyFixedDoubleArrayWithMap(FixedDoubleArray *src, Map *map)
Definition: heap.cc:4737
static Smi * FromInt(int value)
Definition: objects-inl.h:981
#define LOG(isolate, Call)
Definition: log.h:81
const int KB
Definition: globals.h:207
void FinalizeExternalString(String *string)
Definition: heap-inl.h:250
void CompletelyClearInstanceofCache()
Definition: heap-inl.h:657
void CollectAllGarbage(int flags, const char *gc_reason=NULL)
Definition: heap.cc:538
static HeapObject * cast(Object *obj)
MUST_USE_RESULT MaybeObject * AllocateTwoByteSymbol(Vector< const uc16 > str, uint32_t hash_field)
Definition: heap-inl.h:144
void AddString(String *string)
Definition: heap-inl.h:580
static Failure * OutOfMemoryException()
Definition: objects-inl.h:1029
Address age_mark()
Definition: spaces.h:2187
int int32_t
Definition: unicode.cc:47
void ClearInstanceofCache()
Definition: heap-inl.h:647
bool InFromSpace(Object *object)
Definition: heap-inl.h:302
static void MoveBlock(Address dst, Address src, int byte_size)
Definition: heap-inl.h:386
#define ASSERT(condition)
Definition: checks.h:270
OldSpace * TargetSpace(HeapObject *object)
Definition: heap-inl.h:345
static const int kMaxLength
Definition: objects.h:7608
NewSpacePage * current_page()
Definition: spaces.h:1934
double fast_sin(double x)
#define CHECK(condition)
Definition: checks.h:56
intptr_t EffectiveCapacity()
Definition: spaces.h:2144
static ExternalAsciiString * cast(Object *obj)
MUST_USE_RESULT MaybeObject * AllocateInternalSymbol(unibrow::CharacterStream *buffer, int chars, uint32_t hash_field)
Definition: heap.cc:4514
MUST_USE_RESULT MaybeObject * AllocateHeapNumber(double value, PretenureFlag pretenure)
Definition: heap.cc:2483
static bool IsAscii(const char *chars, int length)
Definition: objects.h:7443
bool CollectGarbage(AllocationSpace space, GarbageCollector collector, const char *gc_reason, const char *collector_reason)
Definition: heap.cc:577
MUST_USE_RESULT MaybeObject * CopyFixedArrayWithMap(FixedArray *src, Map *map)
Definition: heap.cc:4711
uint8_t byte
Definition: globals.h:156
void CopyWords(T *dst, T *src, int num_words)
Definition: v8utils.h:127
MUST_USE_RESULT MaybeObject * AllocateStringFromAscii(Vector< const char > str, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:4412
void Mark(Address addr)
T * start() const
Definition: utils.h:390
bool always_allocate()
Definition: heap.h:531
void SetLastScriptId(Object *last_script_id)
Definition: heap-inl.h:498
MUST_USE_RESULT MaybeObject * AllocateStringFromUtf8(Vector< const char > str, PretenureFlag pretenure=NOT_TENURED)
Definition: heap-inl.h:85
bool Contains(Address a)
Definition: spaces.h:2122
static NewSpacePage * FromAddress(Address address_in_page)
Definition: spaces.h:1796
static Failure * RetryAfterGC()
Definition: objects-inl.h:1040
static bool IsValid(intptr_t value)
Definition: objects-inl.h:1059
const uint32_t kIsIndirectStringMask
Definition: objects.h:481
static const int kNoGCFlags
Definition: heap.h:1081
bool ContainsLimit(Address addr)
Definition: spaces.h:371
const int kPointerSize
Definition: globals.h:220
virtual intptr_t Size()
Definition: spaces.h:2133
bool IsFlagSet(int flag)
Definition: spaces.h:437
const int kHeapObjectTag
Definition: v8.h:4009
static void ScavengePointer(HeapObject **p)
Definition: heap-inl.h:405
bool IsAligned(T value, U alignment)
Definition: utils.h:206
double fast_log(double x)
void set_hash_field(uint32_t value)
Definition: objects-inl.h:2411
void VisitPointers(Object **start, Object **end)
Definition: heap-inl.h:742
int length() const
Definition: utils.h:384
static const int kSize
Definition: objects.h:5139
static const int kMaxNonCodeHeapObjectSize
Definition: spaces.h:717
intptr_t AdjustAmountOfExternalAllocatedMemory(intptr_t change_in_bytes)
Definition: heap-inl.h:459
void Iterate(ObjectVisitor *v)
Definition: heap-inl.h:590
#define V8_PTR_PREFIX
Definition: globals.h:181
static const int kHeaderSize
Definition: objects.h:7517
double FastUI2D(unsigned x)
Definition: conversions.h:84
bool InToSpace(Object *object)
Definition: heap-inl.h:307
bool HasBeenSetUp()
Definition: heap.cc:234
bool FromSpaceContains(Address address)
Definition: spaces.h:2243
bool ToSpaceContains(Address address)
Definition: spaces.h:2240
MUST_USE_RESULT MaybeObject * AllocateAsciiSymbol(Vector< const char > str, uint32_t hash_field)
Definition: heap-inl.h:110
void RecordWrites(Address address, int start, int len)
Definition: heap-inl.h:336
#define ISOLATE
Definition: isolate.h:1435
MUST_USE_RESULT MaybeObject * CopyFixedArray(FixedArray *src)
Definition: heap-inl.h:176
MUST_USE_RESULT MaybeObject * Get(Type type, double input)
Definition: heap-inl.h:663
static int SizeFor(int length)
Definition: objects.h:7548
static Object * cast(Object *value)
Definition: objects.h:1007
MUST_USE_RESULT MaybeObject * NumberFromUint32(uint32_t value, PretenureFlag pretenure=NOT_TENURED)
Definition: heap-inl.h:240
IncrementalMarking * incremental_marking()
Definition: heap.h:1553
void insert(HeapObject *target, int size)
Definition: heap-inl.h:43
bool ShouldBePromoted(Address old_address, int object_size)
Definition: heap-inl.h:318
static int SizeFor(int length)
Definition: objects.h:7600
#define HEAP
Definition: isolate.h:1433
void PrintPID(const char *format,...)
Definition: v8utils.cc:56
#define ASSERT_EQ(v1, v2)
Definition: checks.h:271
static void CopyBlock(Address dst, Address src, int byte_size)
Definition: heap-inl.h:379
Counters * counters()
Definition: isolate.h:819
OptimizingCompilerThread * optimizing_compiler_thread()
Definition: isolate.h:1065
SemiSpace * active_space()
Definition: spaces.h:2304
bool OldGenerationAllocationLimitReached()
Definition: heap-inl.h:312
void Add(const T &element, AllocationPolicy allocator=AllocationPolicy())
Definition: list-inl.h:38
double FastI2D(int x)
Definition: conversions.h:76
MUST_USE_RESULT MaybeObject * NumberFromInt32(int32_t value, PretenureFlag pretenure=NOT_TENURED)
Definition: heap-inl.h:232
const uint32_t kIsIndirectStringTag
Definition: objects.h:482
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 enable use of VFP2 instructions if available enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of MIPS FPU instructions if NULL
Definition: flags.cc:301
NewSpacePage * prev_page() const
Definition: spaces.h:1768
static void ScavengeObject(HeapObject **p, HeapObject *object)
Definition: heap-inl.h:410
double fast_cos(double x)
void set_length(int value)
MUST_USE_RESULT MaybeObject * PrepareForCompare(String *str)
Definition: heap-inl.h:440
static bool IsAtStart(Address addr)
Definition: spaces.h:1782
static int NonAsciiStart(const char *chars, int length)
Definition: objects.h:7421
intptr_t OldGenerationSpaceAvailable()
Definition: heap.h:1390
MUST_USE_RESULT MaybeObject * AllocateRaw(int object_size, Executability executable)
Definition: spaces.cc:2650
NewSpace * new_space()
Definition: heap.h:505
static const int kMaxLength
Definition: objects.h:7556
MUST_USE_RESULT MaybeObject * AllocateRaw(int size_in_bytes, AllocationSpace space, AllocationSpace retry_space)
Definition: heap-inl.h:186
AllocationSpace TargetSpaceId(InstanceType type)
Definition: heap-inl.h:354
static void AssertValidRange(Address from, Address to)
Definition: spaces.h:1948
const int kUC16Size
Definition: globals.h:262
static const int kResourceOffset
Definition: objects.h:7745
const int MB
Definition: globals.h:208