v8  3.11.10(node0.8.26)
V8 is Google's open source JavaScript engine
 All Data Structures Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
heap-inl.h
Go to the documentation of this file.
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #ifndef V8_HEAP_INL_H_
29 #define V8_HEAP_INL_H_
30 
31 #include "heap.h"
32 #include "isolate.h"
33 #include "list-inl.h"
34 #include "objects.h"
35 #include "platform.h"
36 #include "v8-counters.h"
37 #include "store-buffer.h"
38 #include "store-buffer-inl.h"
39 
40 namespace v8 {
41 namespace internal {
42 
43 void PromotionQueue::insert(HeapObject* target, int size) {
44  if (emergency_stack_ != NULL) {
45  emergency_stack_->Add(Entry(target, size));
46  return;
47  }
48 
49  if (NewSpacePage::IsAtStart(reinterpret_cast<Address>(rear_))) {
50  NewSpacePage* rear_page =
51  NewSpacePage::FromAddress(reinterpret_cast<Address>(rear_));
52  ASSERT(!rear_page->prev_page()->is_anchor());
53  rear_ = reinterpret_cast<intptr_t*>(rear_page->prev_page()->area_end());
55  }
56 
57  if (guard_) {
58  ASSERT(GetHeadPage() ==
59  Page::FromAllocationTop(reinterpret_cast<Address>(limit_)));
60 
61  if ((rear_ - 2) < limit_) {
62  RelocateQueueHead();
63  emergency_stack_->Add(Entry(target, size));
64  return;
65  }
66  }
67 
68  *(--rear_) = reinterpret_cast<intptr_t>(target);
69  *(--rear_) = size;
70  // Assert no overflow into live objects.
71 #ifdef DEBUG
72  SemiSpace::AssertValidRange(HEAP->new_space()->top(),
73  reinterpret_cast<Address>(rear_));
74 #endif
75 }
76 
77 
79  guard_ = guard_ ||
80  heap_->new_space()->active_space()->current_page()->address() ==
81  GetHeadPage()->address();
82 }
83 
84 
86  PretenureFlag pretenure) {
87  // Check for ASCII first since this is the common case.
88  if (String::IsAscii(str.start(), str.length())) {
89  // If the string is ASCII, we do not need to convert the characters
90  // since UTF8 is backwards compatible with ASCII.
91  return AllocateStringFromAscii(str, pretenure);
92  }
93  // Non-ASCII and we need to decode.
94  return AllocateStringFromUtf8Slow(str, pretenure);
95 }
96 
97 
99  int chars,
100  uint32_t hash_field) {
101  unibrow::Utf8InputBuffer<> buffer(str.start(),
102  static_cast<unsigned>(str.length()));
103  return AllocateInternalSymbol(&buffer, chars, hash_field);
104 }
105 
106 
108  uint32_t hash_field) {
109  if (str.length() > SeqAsciiString::kMaxLength) {
111  }
112  // Compute map and object size.
113  Map* map = ascii_symbol_map();
114  int size = SeqAsciiString::SizeFor(str.length());
115 
116  // Allocate string.
117  Object* result;
118  { MaybeObject* maybe_result = (size > Page::kMaxNonCodeHeapObjectSize)
119  ? lo_space_->AllocateRaw(size, NOT_EXECUTABLE)
120  : old_data_space_->AllocateRaw(size);
121  if (!maybe_result->ToObject(&result)) return maybe_result;
122  }
123 
124  // String maps are all immortal immovable objects.
125  reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier(map);
126  // Set length and hash fields of the allocated string.
127  String* answer = String::cast(result);
128  answer->set_length(str.length());
129  answer->set_hash_field(hash_field);
130 
131  ASSERT_EQ(size, answer->Size());
132 
133  // Fill in the characters.
134  memcpy(answer->address() + SeqAsciiString::kHeaderSize,
135  str.start(), str.length());
136 
137  return answer;
138 }
139 
140 
142  uint32_t hash_field) {
143  if (str.length() > SeqTwoByteString::kMaxLength) {
145  }
146  // Compute map and object size.
147  Map* map = symbol_map();
148  int size = SeqTwoByteString::SizeFor(str.length());
149 
150  // Allocate string.
151  Object* result;
152  { MaybeObject* maybe_result = (size > Page::kMaxNonCodeHeapObjectSize)
153  ? lo_space_->AllocateRaw(size, NOT_EXECUTABLE)
154  : old_data_space_->AllocateRaw(size);
155  if (!maybe_result->ToObject(&result)) return maybe_result;
156  }
157 
158  reinterpret_cast<HeapObject*>(result)->set_map(map);
159  // Set length and hash fields of the allocated string.
160  String* answer = String::cast(result);
161  answer->set_length(str.length());
162  answer->set_hash_field(hash_field);
163 
164  ASSERT_EQ(size, answer->Size());
165 
166  // Fill in the characters.
167  memcpy(answer->address() + SeqTwoByteString::kHeaderSize,
168  str.start(), str.length() * kUC16Size);
169 
170  return answer;
171 }
172 
// Copy a FixedArray, reusing the source array's own map.
MaybeObject* Heap::CopyFixedArray(FixedArray* src) {
  return CopyFixedArrayWithMap(src, src->map());
}
176 
177 
179  return CopyFixedDoubleArrayWithMap(src, src->map());
180 }
181 
182 
// Raw allocation entry point: try `space` first; for NEW_SPACE, fall back
// to `retry_space` when allocation fails and always_allocate() is set.
// Returns a Failure on exhaustion rather than throwing.
MaybeObject* Heap::AllocateRaw(int size_in_bytes,
                               AllocationSpace space,
                               AllocationSpace retry_space) {
  ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
  // A new-space allocation may only retry into an old generation space.
  ASSERT(space != NEW_SPACE ||
         retry_space == OLD_POINTER_SPACE ||
         retry_space == OLD_DATA_SPACE ||
         retry_space == LO_SPACE);
#ifdef DEBUG
  // With --gc-interval, force a simulated failure every N allocations.
  if (FLAG_gc_interval >= 0 &&
      !disallow_allocation_failure_ &&
      Heap::allocation_timeout_-- <= 0) {
    return Failure::RetryAfterGC(space);
  }
  isolate_->counters()->objs_since_last_full()->Increment();
  isolate_->counters()->objs_since_last_young()->Increment();
#endif
  MaybeObject* result;
  if (NEW_SPACE == space) {
    result = new_space_.AllocateRaw(size_in_bytes);
    if (always_allocate() && result->IsFailure()) {
      // New space is full; retry in the designated old generation space.
      space = retry_space;
    } else {
      return result;
    }
  }

  // Dispatch to the requested (or retry) old generation space.
  if (OLD_POINTER_SPACE == space) {
    result = old_pointer_space_->AllocateRaw(size_in_bytes);
  } else if (OLD_DATA_SPACE == space) {
    result = old_data_space_->AllocateRaw(size_in_bytes);
  } else if (CODE_SPACE == space) {
    result = code_space_->AllocateRaw(size_in_bytes);
  } else if (LO_SPACE == space) {
    result = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE);
  } else if (CELL_SPACE == space) {
    result = cell_space_->AllocateRaw(size_in_bytes);
  } else {
    ASSERT(MAP_SPACE == space);
    result = map_space_->AllocateRaw(size_in_bytes);
  }
  // Record old generation exhaustion so the next GC can act on it.
  if (result->IsFailure()) old_gen_exhausted_ = true;
  return result;
}
227 
228 
230  int32_t value, PretenureFlag pretenure) {
231  if (Smi::IsValid(value)) return Smi::FromInt(value);
232  // Bypass NumberFromDouble to avoid various redundant checks.
233  return AllocateHeapNumber(FastI2D(value), pretenure);
234 }
235 
236 
238  uint32_t value, PretenureFlag pretenure) {
239  if ((int32_t)value >= 0 && Smi::IsValid((int32_t)value)) {
240  return Smi::FromInt((int32_t)value);
241  }
242  // Bypass NumberFromDouble to avoid various redundant checks.
243  return AllocateHeapNumber(FastUI2D(value), pretenure);
244 }
245 
246 
248  ASSERT(string->IsExternalString());
250  reinterpret_cast<v8::String::ExternalStringResourceBase**>(
251  reinterpret_cast<byte*>(string) +
254 
255  // Dispose of the C++ object if it has not already been disposed.
256  if (*resource_addr != NULL) {
257  (*resource_addr)->Dispose();
258  *resource_addr = NULL;
259  }
260 }
261 
262 
// Allocate raw storage for a Map object in map space.
MaybeObject* Heap::AllocateRawMap() {
#ifdef DEBUG
  isolate_->counters()->objs_since_last_full()->Increment();
  isolate_->counters()->objs_since_last_young()->Increment();
#endif
  MaybeObject* result = map_space_->AllocateRaw(Map::kSize);
  if (result->IsFailure()) old_gen_exhausted_ = true;
#ifdef DEBUG
  if (!result->IsFailure()) {
    // Maps have their own alignment.
    CHECK((reinterpret_cast<intptr_t>(result) & kMapAlignmentMask) ==
          static_cast<intptr_t>(kHeapObjectTag));
  }
#endif
  return result;
}
279 
280 
// Allocate raw storage for a JSGlobalPropertyCell in cell space.
MaybeObject* Heap::AllocateRawCell() {
#ifdef DEBUG
  isolate_->counters()->objs_since_last_full()->Increment();
  isolate_->counters()->objs_since_last_young()->Increment();
#endif
  MaybeObject* result = cell_space_->AllocateRaw(JSGlobalPropertyCell::kSize);
  // Record old generation exhaustion for the next GC decision.
  if (result->IsFailure()) old_gen_exhausted_ = true;
  return result;
}
290 
291 
// True when `object` lives in new space. Outside of GC, any object in new
// space must be in to-space (from-space only holds objects mid-scavenge).
bool Heap::InNewSpace(Object* object) {
  bool result = new_space_.Contains(object);
  ASSERT(!result ||                  // Either not in new space
         gc_state_ != NOT_IN_GC ||   // ... or in the middle of GC
         InToSpace(object));         // ... or in to-space (where we allocate).
  return result;
}
299 
300 
302  return new_space_.Contains(addr);
303 }
304 
305 
// True when `object` lies in the from-semispace of new space.
bool Heap::InFromSpace(Object* object) {
  return new_space_.FromSpaceContains(object);
}
309 
310 
// True when `object` lies in the to-semispace of new space.
bool Heap::InToSpace(Object* object) {
  return new_space_.ToSpaceContains(object);
}
314 
315 
317  if (!incremental_marking()->IsStopped()) return false;
318  return OldGenerationSpaceAvailable() < 0;
319 }
320 
321 
bool Heap::ShouldBePromoted(Address old_address, int object_size) {
  // An object should be promoted if:
  // - the object has survived a scavenge operation or
  // - to space is already 25% full.
  NewSpacePage* page = NewSpacePage::FromAddress(old_address);
  Address age_mark = new_space_.age_mark();
  // Survived a scavenge: the object sits below the age mark.
  bool below_mark = page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK) &&
      (!page->ContainsLimit(age_mark) || old_address < age_mark);
  // ">> 2" is the 25%-of-effective-capacity threshold.
  return below_mark || (new_space_.Size() + object_size) >=
      (new_space_.EffectiveCapacity() >> 2);
}
333 
334 
// Record a single old-to-new pointer store in the store buffer; stores
// into new space need no remembering.
void Heap::RecordWrite(Address address, int offset) {
  if (!InNewSpace(address)) store_buffer_.Mark(address + offset);
}
338 
339 
340 void Heap::RecordWrites(Address address, int start, int len) {
341  if (!InNewSpace(address)) {
342  for (int i = 0; i < len; i++) {
343  store_buffer_.Mark(address + start + i * kPointerSize);
344  }
345  }
346 }
347 
348 
350  InstanceType type = object->map()->instance_type();
351  AllocationSpace space = TargetSpaceId(type);
352  return (space == OLD_POINTER_SPACE)
353  ? old_pointer_space_
354  : old_data_space_;
355 }
356 
357 
359  // Heap numbers and sequential strings are promoted to old data space, all
360  // other object types are promoted to old pointer space. We do not use
361  // object->IsHeapNumber() and object->IsSeqString() because we already
362  // know that object has the heap object tag.
363 
364  // These objects are never allocated in new space.
365  ASSERT(type != MAP_TYPE);
366  ASSERT(type != CODE_TYPE);
367  ASSERT(type != ODDBALL_TYPE);
369 
370  if (type < FIRST_NONSTRING_TYPE) {
371  // There are four string representations: sequential strings, external
372  // strings, cons strings, and sliced strings.
373  // Only the latter two contain non-map-word pointers to heap objects.
374  return ((type & kIsIndirectStringMask) == kIsIndirectStringTag)
376  : OLD_DATA_SPACE;
377  } else {
378  return (type <= LAST_DATA_TYPE) ? OLD_DATA_SPACE : OLD_POINTER_SPACE;
379  }
380 }
381 
382 
// Word-wise copy of a non-overlapping block; byte_size must be
// pointer-aligned (CopyWords divides it by kPointerSize).
void Heap::CopyBlock(Address dst, Address src, int byte_size) {
  CopyWords(reinterpret_cast<Object**>(dst),
            reinterpret_cast<Object**>(src),
            byte_size / kPointerSize);
}
388 
389 
// Move a block of words, handling overlap. A word-wise forward copy is
// used when dst does not overlap the tail of src; otherwise memmove.
void Heap::MoveBlock(Address dst, Address src, int byte_size) {
  ASSERT(IsAligned(byte_size, kPointerSize));

  int size_in_words = byte_size / kPointerSize;

  // Forward copy is safe when dst is before src or past its end.
  if ((dst < src) || (dst >= (src + byte_size))) {
    Object** src_slot = reinterpret_cast<Object**>(src);
    Object** dst_slot = reinterpret_cast<Object**>(dst);
    Object** end_slot = src_slot + size_in_words;

    while (src_slot != end_slot) {
      *dst_slot++ = *src_slot++;
    }
  } else {
    // Overlapping ranges: delegate to memmove, which handles overlap.
    memmove(dst, src, byte_size);
  }
}
407 
408 
410  ScavengeObject(p, *p);
411 }
412 
413 
415  ASSERT(HEAP->InFromSpace(object));
416 
417  // We use the first word (where the map pointer usually is) of a heap
418  // object to record the forwarding pointer. A forwarding pointer can
419  // point to an old space, the code space, or the to space of the new
420  // generation.
421  MapWord first_word = object->map_word();
422 
423  // If the first word is a forwarding address, the object has already been
424  // copied.
425  if (first_word.IsForwardingAddress()) {
426  HeapObject* dest = first_word.ToForwardingAddress();
427  ASSERT(HEAP->InFromSpace(*p));
428  *p = dest;
429  return;
430  }
431 
432  // Call the slow part of scavenge object.
433  return ScavengeObjectSlow(p, object);
434 }
435 
436 
// Convenience overload: pick the collector appropriate for `space`, then
// run the four-argument CollectGarbage.
bool Heap::CollectGarbage(AllocationSpace space, const char* gc_reason) {
  const char* collector_reason = NULL;
  GarbageCollector collector = SelectGarbageCollector(space, &collector_reason);
  return CollectGarbage(space, collector, gc_reason, collector_reason);
}
442 
443 
// Try to flatten a string before comparison. Short strings (and long ones
// once enough flattening has failed) return the flatten result directly;
// otherwise a failed flatten is tallied and the original string returned.
MaybeObject* Heap::PrepareForCompare(String* str) {
  // Always flatten small strings and force flattening of long strings
  // after we have accumulated a certain amount we failed to flatten.
  static const int kMaxAlwaysFlattenLength = 32;
  static const int kFlattenLongThreshold = 16*KB;

  const int length = str->length();
  MaybeObject* obj = str->TryFlatten();
  if (length <= kMaxAlwaysFlattenLength ||
      unflattened_strings_length_ >= kFlattenLongThreshold) {
    return obj;
  }
  // Long string whose flattening failed: remember how much we skipped so
  // the threshold above eventually forces flattening.
  if (obj->IsFailure()) {
    unflattened_strings_length_ += length;
  }
  return str;
}
461 
462 
464  intptr_t change_in_bytes) {
465  ASSERT(HasBeenSetUp());
466  intptr_t amount = amount_of_external_allocated_memory_ + change_in_bytes;
467  if (change_in_bytes >= 0) {
468  // Avoid overflow.
469  if (amount > amount_of_external_allocated_memory_) {
470  amount_of_external_allocated_memory_ = amount;
471  }
472  intptr_t amount_since_last_global_gc =
473  amount_of_external_allocated_memory_ -
474  amount_of_external_allocated_memory_at_last_global_gc_;
475  if (amount_since_last_global_gc > external_allocation_limit_) {
476  CollectAllGarbage(kNoGCFlags, "external memory allocation limit reached");
477  }
478  } else {
479  // Avoid underflow.
480  if (amount >= 0) {
481  amount_of_external_allocated_memory_ = amount;
482  }
483  }
484  ASSERT(amount_of_external_allocated_memory_ >= 0);
485  return amount_of_external_allocated_memory_;
486 }
487 
488 
// Store the most recently assigned script id in the root list.
void Heap::SetLastScriptId(Object* last_script_id) {
  roots_[kLastScriptIdRootIndex] = last_script_id;
}
492 
493 
495  return reinterpret_cast<Isolate*>(reinterpret_cast<intptr_t>(this) -
496  reinterpret_cast<size_t>(reinterpret_cast<Isolate*>(4)->heap()) + 4);
497 }
498 
499 
500 #ifdef DEBUG
501 #define GC_GREEDY_CHECK() \
502  if (FLAG_gc_greedy) HEAP->GarbageCollectionGreedyCheck()
503 #else
504 #define GC_GREEDY_CHECK() { }
505 #endif
506 
507 // Calls the FUNCTION_CALL function and retries it up to three times
508 // to guarantee that any allocations performed during the call will
509 // succeed if there's enough memory.
510 
511 // Warning: Do not use the identifiers __object__, __maybe_object__ or
512 // __scope__ in a call to this macro.
513 
514 #define CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY)\
515  do { \
516  GC_GREEDY_CHECK(); \
517  MaybeObject* __maybe_object__ = FUNCTION_CALL; \
518  Object* __object__ = NULL; \
519  if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE; \
520  if (__maybe_object__->IsOutOfMemory()) { \
521  v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_0", true);\
522  } \
523  if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY; \
524  ISOLATE->heap()->CollectGarbage(Failure::cast(__maybe_object__)-> \
525  allocation_space(), \
526  "allocation failure"); \
527  __maybe_object__ = FUNCTION_CALL; \
528  if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE; \
529  if (__maybe_object__->IsOutOfMemory()) { \
530  v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_1", true);\
531  } \
532  if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY; \
533  ISOLATE->counters()->gc_last_resort_from_handles()->Increment(); \
534  ISOLATE->heap()->CollectAllAvailableGarbage("last resort gc"); \
535  { \
536  AlwaysAllocateScope __scope__; \
537  __maybe_object__ = FUNCTION_CALL; \
538  } \
539  if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE; \
540  if (__maybe_object__->IsOutOfMemory() || \
541  __maybe_object__->IsRetryAfterGC()) { \
542  /* TODO(1181417): Fix this. */ \
543  v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_2", true);\
544  } \
545  RETURN_EMPTY; \
546  } while (false)
547 
548 
549 #define CALL_HEAP_FUNCTION(ISOLATE, FUNCTION_CALL, TYPE) \
550  CALL_AND_RETRY(ISOLATE, \
551  FUNCTION_CALL, \
552  return Handle<TYPE>(TYPE::cast(__object__), ISOLATE), \
553  return Handle<TYPE>())
554 
555 
556 #define CALL_HEAP_FUNCTION_VOID(ISOLATE, FUNCTION_CALL) \
557  CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, return, return)
558 
559 
560 #ifdef DEBUG
561 
562 inline bool Heap::allow_allocation(bool new_state) {
563  bool old = allocation_allowed_;
564  allocation_allowed_ = new_state;
565  return old;
566 }
567 
568 #endif
569 
570 
572  ASSERT(string->IsExternalString());
573  if (heap_->InNewSpace(string)) {
574  new_space_strings_.Add(string);
575  } else {
576  old_space_strings_.Add(string);
577  }
578 }
579 
580 
// Visit every registered external string, new-space entries first.
void ExternalStringTable::Iterate(ObjectVisitor* v) {
  if (!new_space_strings_.is_empty()) {
    Object** start = &new_space_strings_[0];
    v->VisitPointers(start, start + new_space_strings_.length());
  }
  if (!old_space_strings_.is_empty()) {
    Object** start = &old_space_strings_[0];
    v->VisitPointers(start, start + old_space_strings_.length());
  }
}
591 
592 
593 // Verify() is inline to avoid ifdef-s around its calls in release
594 // mode.
595 void ExternalStringTable::Verify() {
596 #ifdef DEBUG
597  for (int i = 0; i < new_space_strings_.length(); ++i) {
598  Object* obj = Object::cast(new_space_strings_[i]);
599  // TODO(yangguo): check that the object is indeed an external string.
600  ASSERT(heap_->InNewSpace(obj));
601  ASSERT(obj != HEAP->raw_unchecked_the_hole_value());
602  if (obj->IsExternalAsciiString()) {
604  ASSERT(String::IsAscii(string->GetChars(), string->length()));
605  }
606  }
607  for (int i = 0; i < old_space_strings_.length(); ++i) {
608  Object* obj = Object::cast(old_space_strings_[i]);
609  // TODO(yangguo): check that the object is indeed an external string.
610  ASSERT(!heap_->InNewSpace(obj));
611  ASSERT(obj != HEAP->raw_unchecked_the_hole_value());
612  if (obj->IsExternalAsciiString()) {
613  ExternalAsciiString* string = ExternalAsciiString::cast(obj);
614  ASSERT(String::IsAscii(string->GetChars(), string->length()));
615  }
616  }
617 #endif
618 }
619 
620 
// Register an external string known to already live outside new space.
void ExternalStringTable::AddOldString(String* string) {
  ASSERT(string->IsExternalString());
  ASSERT(!heap_->InNewSpace(string));
  old_space_strings_.Add(string);
}
626 
627 
// Truncate the new-space string list to `position` entries, then verify
// the table when heap verification is enabled.
void ExternalStringTable::ShrinkNewStrings(int position) {
  new_space_strings_.Rewind(position);
  if (FLAG_verify_heap) {
    Verify();
  }
}
634 
635 
637  set_instanceof_cache_function(the_hole_value());
638 }
639 
640 
641 Object* Heap::ToBoolean(bool condition) {
642  return condition ? true_value() : false_value();
643 }
644 
645 
647  set_instanceof_cache_map(the_hole_value());
648  set_instanceof_cache_function(the_hole_value());
649 }
650 
651 
// Look up (or compute and cache) the value of the transcendental function
// `type` at `input`; sub-caches are created lazily per function type.
MaybeObject* TranscendentalCache::Get(Type type, double input) {
  SubCache* cache = caches_[type];
  if (cache == NULL) {
    caches_[type] = cache = new SubCache(type);
  }
  return cache->Get(input);
}
659 
660 
// Address of the sub-cache pointer array (used by generated code).
Address TranscendentalCache::cache_array_address() {
  return reinterpret_cast<Address>(caches_);
}
664 
665 
// Evaluate this sub-cache's transcendental function at `input`; the
// trig/log paths use the platform's fast_* implementations.
double TranscendentalCache::SubCache::Calculate(double input) {
  switch (type_) {
    case ACOS:
      return acos(input);
    case ASIN:
      return asin(input);
    case ATAN:
      return atan(input);
    case COS:
      return fast_cos(input);
    case EXP:
      return exp(input);
    case LOG:
      return fast_log(input);
    case SIN:
      return fast_sin(input);
    case TAN:
      return fast_tan(input);
    default:
      return 0.0;  // Never happens.
  }
}
688 
689 
// Hash-lookup the cached HeapNumber for `input`; on a miss, compute the
// value, allocate a HeapNumber for it, and install it in the slot.
MaybeObject* TranscendentalCache::SubCache::Get(double input) {
  // Compare the input by its raw bit pattern (two 32-bit halves).
  Converter c;
  c.dbl = input;
  int hash = Hash(c);
  Element e = elements_[hash];
  if (e.in[0] == c.integers[0] &&
      e.in[1] == c.integers[1]) {
    ASSERT(e.output != NULL);
    isolate_->counters()->transcendental_cache_hit()->Increment();
    return e.output;
  }
  double answer = Calculate(input);
  isolate_->counters()->transcendental_cache_miss()->Increment();
  Object* heap_number;
  { MaybeObject* maybe_heap_number =
        isolate_->heap()->AllocateHeapNumber(answer);
    if (!maybe_heap_number->ToObject(&heap_number)) return maybe_heap_number;
  }
  elements_[hash].in[0] = c.integers[0];
  elements_[hash].in[1] = c.integers[1];
  elements_[hash].output = heap_number;
  return heap_number;
}
713 
714 
716  // We shouldn't hit any nested scopes, because that requires
717  // non-handle code to call handle code. The code still works but
718  // performance will degrade, so we want to catch this situation
719  // in debug mode.
720  ASSERT(HEAP->always_allocate_scope_depth_ == 0);
721  HEAP->always_allocate_scope_depth_++;
722 }
723 
724 
726  HEAP->always_allocate_scope_depth_--;
727  ASSERT(HEAP->always_allocate_scope_depth_ == 0);
728 }
729 
730 
732  HEAP->linear_allocation_scope_depth_++;
733 }
734 
735 
737  HEAP->linear_allocation_scope_depth_--;
738  ASSERT(HEAP->linear_allocation_scope_depth_ >= 0);
739 }
740 
741 
742 #ifdef DEBUG
// Debug visitor: every heap-object slot in [start, end) must point into
// the heap and carry a valid map.
void VerifyPointersVisitor::VisitPointers(Object** start, Object** end) {
  for (Object** current = start; current < end; current++) {
    if ((*current)->IsHeapObject()) {
      HeapObject* object = HeapObject::cast(*current);
      ASSERT(HEAP->Contains(object));
      ASSERT(object->map()->IsMap());
    }
  }
}
752 #endif
753 
754 
// Current live-object size in megabytes, for GC tracing output.
double GCTracer::SizeOfHeapObjects() {
  return (static_cast<double>(HEAP->SizeOfObjects())) / MB;
}
758 
759 
760 #ifdef DEBUG
// Debug-only RAII scope: suppress simulated allocation failures, saving
// the previous flag state for the destructor to restore.
DisallowAllocationFailure::DisallowAllocationFailure() {
  old_state_ = HEAP->disallow_allocation_failure_;
  HEAP->disallow_allocation_failure_ = true;
}
765 
766 
// Restore the flag state saved by the constructor.
DisallowAllocationFailure::~DisallowAllocationFailure() {
  HEAP->disallow_allocation_failure_ = old_state_;
}
770 #endif
771 
772 
773 #ifdef DEBUG
775  old_state_ = HEAP->allow_allocation(false);
776 }
777 
778 
780  HEAP->allow_allocation(old_state_);
781 }
782 
783 
785  old_state_ = HEAP->allow_allocation(true);
786 }
787 
788 
790  HEAP->allow_allocation(old_state_);
791 }
792 
793 #else
794 
799 
800 #endif
801 
802 
803 } } // namespace v8::internal
804 
805 #endif // V8_HEAP_INL_H_
byte * Address
Definition: globals.h:172
double fast_tan(double x)
const intptr_t kMapAlignmentMask
Definition: v8globals.h:62
MUST_USE_RESULT MaybeObject * AllocateSymbol(Vector< const char > str, int chars, uint32_t hash_field)
Definition: heap-inl.h:98
MUST_USE_RESULT MaybeObject * CopyFixedDoubleArray(FixedDoubleArray *src)
Definition: heap-inl.h:178
void RecordWrite(Address address, int offset)
Definition: heap-inl.h:335
bool InNewSpace(Object *object)
Definition: heap-inl.h:292
static String * cast(Object *obj)
MaybeObject * TryFlatten(PretenureFlag pretenure=NOT_TENURED)
Definition: objects-inl.h:2284
MUST_USE_RESULT MaybeObject * AllocateRaw(int size_in_bytes)
Definition: spaces-inl.h:263
Object * ToBoolean(bool condition)
Definition: heap-inl.h:641
Isolate * isolate()
Definition: heap-inl.h:494
MUST_USE_RESULT MaybeObject * CopyFixedDoubleArrayWithMap(FixedDoubleArray *src, Map *map)
Definition: heap.cc:4630
static Smi * FromInt(int value)
Definition: objects-inl.h:973
#define LOG(isolate, Call)
Definition: log.h:81
const int KB
Definition: globals.h:221
void FinalizeExternalString(String *string)
Definition: heap-inl.h:247
void CompletelyClearInstanceofCache()
Definition: heap-inl.h:646
void CollectAllGarbage(int flags, const char *gc_reason=NULL)
Definition: heap.cc:452
static HeapObject * cast(Object *obj)
MUST_USE_RESULT MaybeObject * AllocateTwoByteSymbol(Vector< const uc16 > str, uint32_t hash_field)
Definition: heap-inl.h:141
void AddString(String *string)
Definition: heap-inl.h:571
static Failure * OutOfMemoryException()
Definition: objects-inl.h:1021
Address age_mark()
Definition: spaces.h:2162
int int32_t
Definition: unicode.cc:47
void ClearInstanceofCache()
Definition: heap-inl.h:636
bool InFromSpace(Object *object)
Definition: heap-inl.h:306
static void MoveBlock(Address dst, Address src, int byte_size)
Definition: heap-inl.h:390
FlagType type_
Definition: flags.cc:1351
#define ASSERT(condition)
Definition: checks.h:270
OldSpace * TargetSpace(HeapObject *object)
Definition: heap-inl.h:349
static const int kMaxLength
Definition: objects.h:7375
MUST_USE_RESULT MaybeObject * AllocateStringFromUtf8Slow(Vector< const char > str, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:4322
NewSpacePage * current_page()
Definition: spaces.h:1912
double fast_sin(double x)
#define CHECK(condition)
Definition: checks.h:56
intptr_t EffectiveCapacity()
Definition: spaces.h:2119
static ExternalAsciiString * cast(Object *obj)
MUST_USE_RESULT MaybeObject * AllocateInternalSymbol(unibrow::CharacterStream *buffer, int chars, uint32_t hash_field)
Definition: heap.cc:4407
MUST_USE_RESULT MaybeObject * AllocateHeapNumber(double value, PretenureFlag pretenure)
Definition: heap.cc:2407
static bool IsAscii(const char *chars, int length)
Definition: objects.h:7198
bool CollectGarbage(AllocationSpace space, GarbageCollector collector, const char *gc_reason, const char *collector_reason)
Definition: heap.cc:491
MUST_USE_RESULT MaybeObject * CopyFixedArrayWithMap(FixedArray *src, Map *map)
Definition: heap.cc:4604
uint8_t byte
Definition: globals.h:171
void CopyWords(T *dst, T *src, int num_words)
Definition: v8utils.h:124
MUST_USE_RESULT MaybeObject * AllocateStringFromAscii(Vector< const char > str, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:4302
void Mark(Address addr)
T * start() const
Definition: utils.h:389
bool always_allocate()
Definition: heap.h:507
void SetLastScriptId(Object *last_script_id)
Definition: heap-inl.h:489
MUST_USE_RESULT MaybeObject * AllocateStringFromUtf8(Vector< const char > str, PretenureFlag pretenure=NOT_TENURED)
Definition: heap-inl.h:85
bool Contains(Address a)
Definition: spaces.h:2097
static NewSpacePage * FromAddress(Address address_in_page)
Definition: spaces.h:1774
static Failure * RetryAfterGC()
Definition: objects-inl.h:1032
static bool IsValid(intptr_t value)
Definition: objects-inl.h:1051
const uint32_t kIsIndirectStringMask
Definition: objects.h:462
static const int kNoGCFlags
Definition: heap.h:1049
bool ContainsLimit(Address addr)
Definition: spaces.h:372
const int kPointerSize
Definition: globals.h:234
virtual intptr_t Size()
Definition: spaces.h:2108
bool IsFlagSet(int flag)
Definition: spaces.h:433
const int kHeapObjectTag
Definition: v8.h:3848
static void ScavengePointer(HeapObject **p)
Definition: heap-inl.h:409
bool IsAligned(T value, U alignment)
Definition: utils.h:206
double fast_log(double x)
void set_hash_field(uint32_t value)
Definition: objects-inl.h:2267
int length() const
Definition: utils.h:383
static const int kSize
Definition: objects.h:4972
static const int kMaxNonCodeHeapObjectSize
Definition: spaces.h:701
intptr_t AdjustAmountOfExternalAllocatedMemory(intptr_t change_in_bytes)
Definition: heap-inl.h:463
void Iterate(ObjectVisitor *v)
Definition: heap-inl.h:581
static const int kHeaderSize
Definition: objects.h:7282
double FastUI2D(unsigned x)
Definition: conversions.h:81
bool InToSpace(Object *object)
Definition: heap-inl.h:311
bool HasBeenSetUp()
Definition: heap.cc:228
bool FromSpaceContains(Address address)
Definition: spaces.h:2218
bool ToSpaceContains(Address address)
Definition: spaces.h:2215
MUST_USE_RESULT MaybeObject * AllocateAsciiSymbol(Vector< const char > str, uint32_t hash_field)
Definition: heap-inl.h:107
void RecordWrites(Address address, int start, int len)
Definition: heap-inl.h:340
MUST_USE_RESULT MaybeObject * CopyFixedArray(FixedArray *src)
Definition: heap-inl.h:173
MUST_USE_RESULT MaybeObject * Get(Type type, double input)
Definition: heap-inl.h:652
static int SizeFor(int length)
Definition: objects.h:7313
static Object * cast(Object *value)
Definition: objects.h:962
MUST_USE_RESULT MaybeObject * NumberFromUint32(uint32_t value, PretenureFlag pretenure=NOT_TENURED)
Definition: heap-inl.h:237
IncrementalMarking * incremental_marking()
Definition: heap.h:1524
void insert(HeapObject *target, int size)
Definition: heap-inl.h:43
bool ShouldBePromoted(Address old_address, int object_size)
Definition: heap-inl.h:322
static int SizeFor(int length)
Definition: objects.h:7367
#define HEAP
Definition: isolate.h:1408
#define ASSERT_EQ(v1, v2)
Definition: checks.h:271
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination trace on stack replacement optimize closures functions with arguments object optimize functions containing for in loops profiler considers IC stability primitive functions trigger their own optimization re try self optimization if it failed insert an interrupt check at function exit execution budget before interrupt is triggered call count before self optimization self_optimization count_based_interrupts weighted_back_edges trace_opt emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 enable use of ARMv7 instructions if enable use of MIPS FPU instructions if NULL
Definition: flags.cc:274
static void CopyBlock(Address dst, Address src, int byte_size)
Definition: heap-inl.h:383
Counters * counters()
Definition: isolate.h:804
SemiSpace * active_space()
Definition: spaces.h:2276
bool OldGenerationAllocationLimitReached()
Definition: heap-inl.h:316
void Add(const T &element, AllocationPolicy allocator=AllocationPolicy())
Definition: list-inl.h:38
double FastI2D(int x)
Definition: conversions.h:73
MUST_USE_RESULT MaybeObject * NumberFromInt32(int32_t value, PretenureFlag pretenure=NOT_TENURED)
Definition: heap-inl.h:229
const uint32_t kIsIndirectStringTag
Definition: objects.h:463
NewSpacePage * prev_page() const
Definition: spaces.h:1746
static void ScavengeObject(HeapObject **p, HeapObject *object)
Definition: heap-inl.h:414
double fast_cos(double x)
void set_length(int value)
MUST_USE_RESULT MaybeObject * PrepareForCompare(String *str)
Definition: heap-inl.h:444
static bool IsAtStart(Address addr)
Definition: spaces.h:1760
intptr_t OldGenerationSpaceAvailable()
Definition: heap.h:1354
MUST_USE_RESULT MaybeObject * AllocateRaw(int object_size, Executability executable)
Definition: spaces.cc:2613
NewSpace * new_space()
Definition: heap.h:499
static const int kMaxLength
Definition: objects.h:7321
MUST_USE_RESULT MaybeObject * AllocateRaw(int size_in_bytes, AllocationSpace space, AllocationSpace retry_space)
Definition: heap-inl.h:183
FlagType type() const
Definition: flags.cc:1358
AllocationSpace TargetSpaceId(InstanceType type)
Definition: heap-inl.h:358
static void AssertValidRange(Address from, Address to)
Definition: spaces.h:1923
const int kUC16Size
Definition: globals.h:276
static const int kResourceOffset
Definition: objects.h:7515
const int MB
Definition: globals.h:222