v8  3.25.30(node0.11.13)
V8 is Google's open source JavaScript engine
 All Data Structures Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
objects-inl.h
Go to the documentation of this file.
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 //
28 // Review notes:
29 //
30 // - The use of macros in these inline functions may seem superfluous
31 // but it is absolutely needed to make sure gcc generates optimal
32 // code. gcc is not happy when attempting to inline too deep.
33 //
34 
35 #ifndef V8_OBJECTS_INL_H_
36 #define V8_OBJECTS_INL_H_
37 
38 #include "elements.h"
39 #include "objects.h"
40 #include "contexts.h"
41 #include "conversions-inl.h"
42 #include "heap.h"
43 #include "isolate.h"
44 #include "heap-inl.h"
45 #include "property.h"
46 #include "spaces.h"
47 #include "store-buffer.h"
48 #include "v8memory.h"
49 #include "factory.h"
50 #include "incremental-marking.h"
51 #include "transitions-inl.h"
52 #include "objects-visiting.h"
53 
54 namespace v8 {
55 namespace internal {
56 
57 PropertyDetails::PropertyDetails(Smi* smi) {
58  value_ = smi->value();
59 }
60 
61 
62 Smi* PropertyDetails::AsSmi() const {
63  // Ensure the upper 2 bits have the same value by sign extending it. This is
64  // necessary to be able to use the 31st bit of the property details.
65  int value = value_ << 1;
66  return Smi::FromInt(value >> 1);
67 }
68 
69 
70 PropertyDetails PropertyDetails::AsDeleted() const {
71  Smi* smi = Smi::FromInt(value_ | DeletedField::encode(1));
72  return PropertyDetails(smi);
73 }
74 
75 
76 #define TYPE_CHECKER(type, instancetype) \
77  bool Object::Is##type() { \
78  return Object::IsHeapObject() && \
79  HeapObject::cast(this)->map()->instance_type() == instancetype; \
80  }
81 
82 
83 #define CAST_ACCESSOR(type) \
84  type* type::cast(Object* object) { \
85  SLOW_ASSERT(object->Is##type()); \
86  return reinterpret_cast<type*>(object); \
87  }
88 
89 
90 #define FIXED_TYPED_ARRAY_CAST_ACCESSOR(type) \
91  template<> \
92  type* type::cast(Object* object) { \
93  SLOW_ASSERT(object->Is##type()); \
94  return reinterpret_cast<type*>(object); \
95  }
96 
97 #define INT_ACCESSORS(holder, name, offset) \
98  int holder::name() { return READ_INT_FIELD(this, offset); } \
99  void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }
100 
101 
102 #define ACCESSORS(holder, name, type, offset) \
103  type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
104  void holder::set_##name(type* value, WriteBarrierMode mode) { \
105  WRITE_FIELD(this, offset, value); \
106  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \
107  }
108 
109 
110 // Getter that returns a tagged Smi and setter that writes a tagged Smi.
111 #define ACCESSORS_TO_SMI(holder, name, offset) \
112  Smi* holder::name() { return Smi::cast(READ_FIELD(this, offset)); } \
113  void holder::set_##name(Smi* value, WriteBarrierMode mode) { \
114  WRITE_FIELD(this, offset, value); \
115  }
116 
117 
118 // Getter that returns a Smi as an int and writes an int as a Smi.
119 #define SMI_ACCESSORS(holder, name, offset) \
120  int holder::name() { \
121  Object* value = READ_FIELD(this, offset); \
122  return Smi::cast(value)->value(); \
123  } \
124  void holder::set_##name(int value) { \
125  WRITE_FIELD(this, offset, Smi::FromInt(value)); \
126  }
127 
128 
129 #define BOOL_GETTER(holder, field, name, offset) \
130  bool holder::name() { \
131  return BooleanBit::get(field(), offset); \
132  } \
133 
134 
135 #define BOOL_ACCESSORS(holder, field, name, offset) \
136  bool holder::name() { \
137  return BooleanBit::get(field(), offset); \
138  } \
139  void holder::set_##name(bool value) { \
140  set_##field(BooleanBit::set(field(), offset, value)); \
141  }
142 
143 
145  return IsFixedArray() || IsFixedDoubleArray() || IsConstantPoolArray() ||
146  IsFixedTypedArrayBase() || IsExternalArray();
147 }
148 
149 
150 // External objects are not extensible, so the map check is enough.
152  return Object::IsHeapObject() &&
153  HeapObject::cast(this)->map() ==
154  HeapObject::cast(this)->GetHeap()->external_map();
155 }
156 
157 
159  return IsExecutableAccessorInfo() || IsDeclaredAccessorInfo();
160 }
161 
162 
163 bool Object::IsSmi() {
164  return HAS_SMI_TAG(this);
165 }
166 
167 
168 bool Object::IsHeapObject() {
169  return Internals::HasHeapObjectTag(this);
170 }
171 
172 
174  ASSERT(!this->IsFailure());
175  return (reinterpret_cast<intptr_t>(this) & kSmiTagMask) != 0;
176 }
177 
178 
181 
182 
183 bool Object::IsString() {
184  return Object::IsHeapObject()
186 }
187 
188 
189 bool Object::IsName() {
190  return IsString() || IsSymbol();
191 }
192 
193 
194 bool Object::IsUniqueName() {
195  return IsInternalizedString() || IsSymbol();
196 }
197 
198 
199 bool Object::IsSpecObject() {
200  return Object::IsHeapObject()
202 }
203 
204 
205 bool Object::IsSpecFunction() {
206  if (!Object::IsHeapObject()) return false;
207  InstanceType type = HeapObject::cast(this)->map()->instance_type();
208  return type == JS_FUNCTION_TYPE || type == JS_FUNCTION_PROXY_TYPE;
209 }
210 
211 
212 bool Object::IsInternalizedString() {
213  if (!this->IsHeapObject()) return false;
214  uint32_t type = HeapObject::cast(this)->map()->instance_type();
216  return (type & (kIsNotStringMask | kIsNotInternalizedMask)) ==
218 }
219 
220 
221 bool Object::IsConsString() {
222  if (!IsString()) return false;
223  return StringShape(String::cast(this)).IsCons();
224 }
225 
226 
227 bool Object::IsSlicedString() {
228  if (!IsString()) return false;
229  return StringShape(String::cast(this)).IsSliced();
230 }
231 
232 
233 bool Object::IsSeqString() {
234  if (!IsString()) return false;
235  return StringShape(String::cast(this)).IsSequential();
236 }
237 
238 
239 bool Object::IsSeqOneByteString() {
240  if (!IsString()) return false;
241  return StringShape(String::cast(this)).IsSequential() &&
243 }
244 
245 
246 bool Object::IsSeqTwoByteString() {
247  if (!IsString()) return false;
248  return StringShape(String::cast(this)).IsSequential() &&
250 }
251 
252 
253 bool Object::IsExternalString() {
254  if (!IsString()) return false;
255  return StringShape(String::cast(this)).IsExternal();
256 }
257 
258 
259 bool Object::IsExternalAsciiString() {
260  if (!IsString()) return false;
261  return StringShape(String::cast(this)).IsExternal() &&
263 }
264 
265 
266 bool Object::IsExternalTwoByteString() {
267  if (!IsString()) return false;
268  return StringShape(String::cast(this)).IsExternal() &&
270 }
271 
273  // Dictionary is covered under FixedArray.
274  return IsFixedArray() || IsFixedDoubleArray() || IsExternalArray() ||
275  IsFixedTypedArrayBase();
276 }
277 
278 
280  Representation representation) {
281  if (representation.IsSmi() && IsUninitialized()) {
282  return Smi::FromInt(0);
283  }
284  if (!representation.IsDouble()) return this;
285  if (IsUninitialized()) {
286  return heap->AllocateHeapNumber(0);
287  }
288  return heap->AllocateHeapNumber(Number());
289 }
290 
291 
292 StringShape::StringShape(String* str)
293  : type_(str->map()->instance_type()) {
294  set_valid();
295  ASSERT((type_ & kIsNotStringMask) == kStringTag);
296 }
297 
298 
299 StringShape::StringShape(Map* map)
300  : type_(map->instance_type()) {
301  set_valid();
302  ASSERT((type_ & kIsNotStringMask) == kStringTag);
303 }
304 
305 
306 StringShape::StringShape(InstanceType t)
307  : type_(static_cast<uint32_t>(t)) {
308  set_valid();
309  ASSERT((type_ & kIsNotStringMask) == kStringTag);
310 }
311 
312 
313 bool StringShape::IsInternalized() {
314  ASSERT(valid());
316  return (type_ & (kIsNotStringMask | kIsNotInternalizedMask)) ==
318 }
319 
320 
322  uint32_t type = map()->instance_type();
323  return (type & kStringEncodingMask) == kOneByteStringTag;
324 }
325 
326 
328  uint32_t type = map()->instance_type();
329  return (type & kStringEncodingMask) == kTwoByteStringTag;
330 }
331 
332 
334  uint32_t type = map()->instance_type();
337  ASSERT(IsFlat());
338  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
339  case kOneByteStringTag:
340  return true;
341  case kTwoByteStringTag:
342  return false;
343  default: // Cons or sliced string. Need to go deeper.
345  }
346 }
347 
348 
350  uint32_t type = map()->instance_type();
353  ASSERT(IsFlat());
354  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
355  case kOneByteStringTag:
356  return false;
357  case kTwoByteStringTag:
358  return true;
359  default: // Cons or sliced string. Need to go deeper.
361  }
362 }
363 
364 
366  uint32_t type = map()->instance_type();
367  return (type & kOneByteDataHintMask) == kOneByteDataHintTag ||
369 }
370 
371 
372 bool StringShape::IsCons() {
373  return (type_ & kStringRepresentationMask) == kConsStringTag;
374 }
375 
376 
377 bool StringShape::IsSliced() {
378  return (type_ & kStringRepresentationMask) == kSlicedStringTag;
379 }
380 
381 
382 bool StringShape::IsIndirect() {
383  return (type_ & kIsIndirectStringMask) == kIsIndirectStringTag;
384 }
385 
386 
387 bool StringShape::IsExternal() {
388  return (type_ & kStringRepresentationMask) == kExternalStringTag;
389 }
390 
391 
392 bool StringShape::IsSequential() {
393  return (type_ & kStringRepresentationMask) == kSeqStringTag;
394 }
395 
396 
397 StringRepresentationTag StringShape::representation_tag() {
398  uint32_t tag = (type_ & kStringRepresentationMask);
399  return static_cast<StringRepresentationTag>(tag);
400 }
401 
402 
403 uint32_t StringShape::encoding_tag() {
404  return type_ & kStringEncodingMask;
405 }
406 
407 
408 uint32_t StringShape::full_representation_tag() {
409  return (type_ & (kStringRepresentationMask | kStringEncodingMask));
410 }
411 
412 
415 
416 STATIC_CHECK(static_cast<uint32_t>(kStringEncodingMask) ==
418 
419 
420 bool StringShape::IsSequentialAscii() {
421  return full_representation_tag() == (kSeqStringTag | kOneByteStringTag);
422 }
423 
424 
425 bool StringShape::IsSequentialTwoByte() {
426  return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
427 }
428 
429 
430 bool StringShape::IsExternalAscii() {
431  return full_representation_tag() == (kExternalStringTag | kOneByteStringTag);
432 }
433 
434 
437 
439 
440 
441 bool StringShape::IsExternalTwoByte() {
442  return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
443 }
444 
445 
448 
450 
452  ASSERT(0 <= index && index <= length_);
453  if (is_ascii_) {
454  return static_cast<const byte*>(start_)[index];
455  } else {
456  return static_cast<const uc16*>(start_)[index];
457  }
458 }
459 
460 
461 template <typename Char>
463  public:
464  explicit SequentialStringKey(Vector<const Char> string, uint32_t seed)
465  : string_(string), hash_field_(0), seed_(seed) { }
466 
467  virtual uint32_t Hash() {
468  hash_field_ = StringHasher::HashSequentialString<Char>(string_.start(),
469  string_.length(),
470  seed_);
471 
472  uint32_t result = hash_field_ >> String::kHashShift;
473  ASSERT(result != 0); // Ensure that the hash value of 0 is never computed.
474  return result;
475  }
476 
477 
478  virtual uint32_t HashForObject(Object* other) {
479  return String::cast(other)->Hash();
480  }
481 
483  uint32_t hash_field_;
484  uint32_t seed_;
485 };
486 
487 
488 class OneByteStringKey : public SequentialStringKey<uint8_t> {
489  public:
491  : SequentialStringKey<uint8_t>(str, seed) { }
492 
493  virtual bool IsMatch(Object* string) {
494  return String::cast(string)->IsOneByteEqualTo(string_);
495  }
496 
497  virtual MaybeObject* AsObject(Heap* heap);
498 };
499 
500 
501 template<class Char>
502 class SubStringKey : public HashTableKey {
503  public:
504  SubStringKey(Handle<String> string, int from, int length)
505  : string_(string), from_(from), length_(length) {
506  if (string_->IsSlicedString()) {
507  string_ = Handle<String>(Unslice(*string_, &from_));
508  }
509  ASSERT(string_->IsSeqString() || string->IsExternalString());
510  }
511 
512  virtual uint32_t Hash() {
513  ASSERT(length_ >= 0);
514  ASSERT(from_ + length_ <= string_->length());
515  const Char* chars = GetChars() + from_;
517  chars, length_, string_->GetHeap()->HashSeed());
518  uint32_t result = hash_field_ >> String::kHashShift;
519  ASSERT(result != 0); // Ensure that the hash value of 0 is never computed.
520  return result;
521  }
522 
523  virtual uint32_t HashForObject(Object* other) {
524  return String::cast(other)->Hash();
525  }
526 
527  virtual bool IsMatch(Object* string);
528  virtual MaybeObject* AsObject(Heap* heap);
529 
530  private:
531  const Char* GetChars();
532  String* Unslice(String* string, int* offset) {
533  while (string->IsSlicedString()) {
534  SlicedString* sliced = SlicedString::cast(string);
535  *offset += sliced->offset();
536  string = sliced->parent();
537  }
538  return string;
539  }
540 
541  Handle<String> string_;
542  int from_;
543  int length_;
544  uint32_t hash_field_;
545 };
546 
547 
548 class TwoByteStringKey : public SequentialStringKey<uc16> {
549  public:
550  explicit TwoByteStringKey(Vector<const uc16> str, uint32_t seed)
551  : SequentialStringKey<uc16>(str, seed) { }
552 
553  virtual bool IsMatch(Object* string) {
554  return String::cast(string)->IsTwoByteEqualTo(string_);
555  }
556 
557  virtual MaybeObject* AsObject(Heap* heap);
558 };
559 
560 
561 // Utf8StringKey carries a vector of chars as key.
562 class Utf8StringKey : public HashTableKey {
563  public:
564  explicit Utf8StringKey(Vector<const char> string, uint32_t seed)
565  : string_(string), hash_field_(0), seed_(seed) { }
566 
567  virtual bool IsMatch(Object* string) {
568  return String::cast(string)->IsUtf8EqualTo(string_);
569  }
570 
571  virtual uint32_t Hash() {
572  if (hash_field_ != 0) return hash_field_ >> String::kHashShift;
574  uint32_t result = hash_field_ >> String::kHashShift;
575  ASSERT(result != 0); // Ensure that the hash value of 0 is never computed.
576  return result;
577  }
578 
579  virtual uint32_t HashForObject(Object* other) {
580  return String::cast(other)->Hash();
581  }
582 
583  virtual MaybeObject* AsObject(Heap* heap) {
584  if (hash_field_ == 0) Hash();
586  chars_,
587  hash_field_);
588  }
589 
591  uint32_t hash_field_;
592  int chars_; // Caches the number of characters when computing the hash code.
593  uint32_t seed_;
594 };
595 
596 
597 bool Object::IsNumber() {
598  return IsSmi() || IsHeapNumber();
599 }
600 
601 
603 TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE)
604 
605 
606 bool Object::IsFiller() {
607  if (!Object::IsHeapObject()) return false;
608  InstanceType instance_type = HeapObject::cast(this)->map()->instance_type();
609  return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
610 }
611 
612 
613 bool Object::IsExternalArray() {
614  if (!Object::IsHeapObject())
615  return false;
616  InstanceType instance_type =
617  HeapObject::cast(this)->map()->instance_type();
618  return (instance_type >= FIRST_EXTERNAL_ARRAY_TYPE &&
619  instance_type <= LAST_EXTERNAL_ARRAY_TYPE);
620 }
621 
622 
623 #define TYPED_ARRAY_TYPE_CHECKER(Type, type, TYPE, ctype, size) \
624  TYPE_CHECKER(External##Type##Array, EXTERNAL_##TYPE##_ARRAY_TYPE) \
625  TYPE_CHECKER(Fixed##Type##Array, FIXED_##TYPE##_ARRAY_TYPE)
626 
628 #undef TYPED_ARRAY_TYPE_CHECKER
629 
630 
631 bool Object::IsFixedTypedArrayBase() {
632  if (!Object::IsHeapObject()) return false;
633 
634  InstanceType instance_type =
635  HeapObject::cast(this)->map()->instance_type();
636  return (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
637  instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE);
638 }
639 
640 
641 bool MaybeObject::IsFailure() {
642  return HAS_FAILURE_TAG(this);
643 }
644 
645 
646 bool MaybeObject::IsRetryAfterGC() {
647  return HAS_FAILURE_TAG(this)
649 }
650 
651 
652 bool MaybeObject::IsException() {
653  return this == Failure::Exception();
654 }
655 
656 
657 bool MaybeObject::IsTheHole() {
658  return !IsFailure() && ToObjectUnchecked()->IsTheHole();
659 }
660 
661 
662 bool MaybeObject::IsUninitialized() {
663  return !IsFailure() && ToObjectUnchecked()->IsUninitialized();
664 }
665 
666 
667 Failure* Failure::cast(MaybeObject* obj) {
668  ASSERT(HAS_FAILURE_TAG(obj));
669  return reinterpret_cast<Failure*>(obj);
670 }
671 
672 
673 bool Object::IsJSReceiver() {
675  return IsHeapObject() &&
677 }
678 
679 
680 bool Object::IsJSObject() {
682  return IsHeapObject() &&
684 }
685 
686 
687 bool Object::IsJSProxy() {
688  if (!Object::IsHeapObject()) return false;
689  InstanceType type = HeapObject::cast(this)->map()->instance_type();
690  return FIRST_JS_PROXY_TYPE <= type && type <= LAST_JS_PROXY_TYPE;
691 }
692 
693 
694 TYPE_CHECKER(JSFunctionProxy, JS_FUNCTION_PROXY_TYPE)
697 TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
698 TYPE_CHECKER(JSWeakSet, JS_WEAK_SET_TYPE)
699 TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE)
701 TYPE_CHECKER(FixedArray, FIXED_ARRAY_TYPE)
702 TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)
703 TYPE_CHECKER(ConstantPoolArray, CONSTANT_POOL_ARRAY_TYPE)
704 
705 
706 bool Object::IsJSWeakCollection() {
707  return IsJSWeakMap() || IsJSWeakSet();
708 }
709 
710 
711 bool Object::IsDescriptorArray() {
712  return IsFixedArray();
713 }
714 
715 
716 bool Object::IsTransitionArray() {
717  return IsFixedArray();
718 }
719 
720 
721 bool Object::IsDeoptimizationInputData() {
722  // Must be a fixed array.
723  if (!IsFixedArray()) return false;
724 
725  // There's no sure way to detect the difference between a fixed array and
726  // a deoptimization data array. Since this is used for asserts we can
727  // check that the length is zero or else the fixed size plus a multiple of
728  // the entry size.
729  int length = FixedArray::cast(this)->length();
730  if (length == 0) return true;
731 
733  return length >= 0 &&
735 }
736 
737 
738 bool Object::IsDeoptimizationOutputData() {
739  if (!IsFixedArray()) return false;
740  // There's actually no way to see the difference between a fixed array and
741  // a deoptimization data array. Since this is used for asserts we can check
742  // that the length is plausible though.
743  if (FixedArray::cast(this)->length() % 2 != 0) return false;
744  return true;
745 }
746 
747 
748 bool Object::IsDependentCode() {
749  if (!IsFixedArray()) return false;
750  // There's actually no way to see the difference between a fixed array and
751  // a dependent codes array.
752  return true;
753 }
754 
755 
756 bool Object::IsContext() {
757  if (!Object::IsHeapObject()) return false;
758  Map* map = HeapObject::cast(this)->map();
759  Heap* heap = map->GetHeap();
760  return (map == heap->function_context_map() ||
761  map == heap->catch_context_map() ||
762  map == heap->with_context_map() ||
763  map == heap->native_context_map() ||
764  map == heap->block_context_map() ||
765  map == heap->module_context_map() ||
766  map == heap->global_context_map());
767 }
768 
769 
770 bool Object::IsNativeContext() {
771  return Object::IsHeapObject() &&
772  HeapObject::cast(this)->map() ==
773  HeapObject::cast(this)->GetHeap()->native_context_map();
774 }
775 
776 
777 bool Object::IsScopeInfo() {
778  return Object::IsHeapObject() &&
779  HeapObject::cast(this)->map() ==
780  HeapObject::cast(this)->GetHeap()->scope_info_map();
781 }
782 
783 
784 TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE)
785 
786 
787 template <> inline bool Is<JSFunction>(Object* obj) {
788  return obj->IsJSFunction();
789 }
790 
791 
792 TYPE_CHECKER(Code, CODE_TYPE)
793 TYPE_CHECKER(Oddball, ODDBALL_TYPE)
794 TYPE_CHECKER(Cell, CELL_TYPE)
795 TYPE_CHECKER(PropertyCell, PROPERTY_CELL_TYPE)
796 TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
797 TYPE_CHECKER(JSGeneratorObject, JS_GENERATOR_OBJECT_TYPE)
798 TYPE_CHECKER(JSModule, JS_MODULE_TYPE)
799 TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
800 TYPE_CHECKER(JSDate, JS_DATE_TYPE)
801 TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)
802 
803 
804 bool Object::IsStringWrapper() {
805  return IsJSValue() && JSValue::cast(this)->value()->IsString();
806 }
807 
808 
809 TYPE_CHECKER(Foreign, FOREIGN_TYPE)
810 
811 
812 bool Object::IsBoolean() {
813  return IsOddball() &&
814  ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
815 }
816 
817 
818 TYPE_CHECKER(JSArray, JS_ARRAY_TYPE)
819 TYPE_CHECKER(JSArrayBuffer, JS_ARRAY_BUFFER_TYPE)
820 TYPE_CHECKER(JSTypedArray, JS_TYPED_ARRAY_TYPE)
821 TYPE_CHECKER(JSDataView, JS_DATA_VIEW_TYPE)
822 
823 
824 bool Object::IsJSArrayBufferView() {
825  return IsJSDataView() || IsJSTypedArray();
826 }
827 
828 
829 TYPE_CHECKER(JSRegExp, JS_REGEXP_TYPE)
830 
831 
832 template <> inline bool Is<JSArray>(Object* obj) {
833  return obj->IsJSArray();
834 }
835 
836 
837 bool Object::IsHashTable() {
838  return Object::IsHeapObject() &&
839  HeapObject::cast(this)->map() ==
840  HeapObject::cast(this)->GetHeap()->hash_table_map();
841 }
842 
843 
844 bool Object::IsDictionary() {
845  return IsHashTable() &&
846  this != HeapObject::cast(this)->GetHeap()->string_table();
847 }
848 
849 
850 bool Object::IsStringTable() {
851  return IsHashTable() &&
852  this == HeapObject::cast(this)->GetHeap()->raw_unchecked_string_table();
853 }
854 
855 
856 bool Object::IsJSFunctionResultCache() {
857  if (!IsFixedArray()) return false;
858  FixedArray* self = FixedArray::cast(this);
859  int length = self->length();
860  if (length < JSFunctionResultCache::kEntriesIndex) return false;
863  return false;
864  }
865 #ifdef VERIFY_HEAP
866  if (FLAG_verify_heap) {
867  reinterpret_cast<JSFunctionResultCache*>(this)->
868  JSFunctionResultCacheVerify();
869  }
870 #endif
871  return true;
872 }
873 
874 
875 bool Object::IsNormalizedMapCache() {
876  if (!IsFixedArray()) return false;
877  if (FixedArray::cast(this)->length() != NormalizedMapCache::kEntries) {
878  return false;
879  }
880 #ifdef VERIFY_HEAP
881  if (FLAG_verify_heap) {
882  reinterpret_cast<NormalizedMapCache*>(this)->NormalizedMapCacheVerify();
883  }
884 #endif
885  return true;
886 }
887 
888 
889 bool Object::IsCompilationCacheTable() {
890  return IsHashTable();
891 }
892 
893 
894 bool Object::IsCodeCacheHashTable() {
895  return IsHashTable();
896 }
897 
898 
899 bool Object::IsPolymorphicCodeCacheHashTable() {
900  return IsHashTable();
901 }
902 
903 
904 bool Object::IsMapCache() {
905  return IsHashTable();
906 }
907 
908 
909 bool Object::IsObjectHashTable() {
910  return IsHashTable();
911 }
912 
913 
914 bool Object::IsPrimitive() {
915  return IsOddball() || IsNumber() || IsString();
916 }
917 
918 
919 bool Object::IsJSGlobalProxy() {
920  bool result = IsHeapObject() &&
921  (HeapObject::cast(this)->map()->instance_type() ==
923  ASSERT(!result ||
924  HeapObject::cast(this)->map()->is_access_check_needed());
925  return result;
926 }
927 
928 
929 bool Object::IsGlobalObject() {
930  if (!IsHeapObject()) return false;
931 
932  InstanceType type = HeapObject::cast(this)->map()->instance_type();
933  return type == JS_GLOBAL_OBJECT_TYPE ||
934  type == JS_BUILTINS_OBJECT_TYPE;
935 }
936 
937 
938 TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE)
939 TYPE_CHECKER(JSBuiltinsObject, JS_BUILTINS_OBJECT_TYPE)
940 
941 
942 bool Object::IsUndetectableObject() {
943  return IsHeapObject()
944  && HeapObject::cast(this)->map()->is_undetectable();
945 }
946 
947 
948 bool Object::IsAccessCheckNeeded() {
949  if (!IsHeapObject()) return false;
950  if (IsJSGlobalProxy()) {
951  JSGlobalProxy* proxy = JSGlobalProxy::cast(this);
952  GlobalObject* global =
953  proxy->GetIsolate()->context()->global_object();
954  return proxy->IsDetachedFrom(global);
955  }
956  return HeapObject::cast(this)->map()->is_access_check_needed();
957 }
958 
959 
961  if (!IsHeapObject()) return false;
962  switch (HeapObject::cast(this)->map()->instance_type()) {
963 #define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
965 #undef MAKE_STRUCT_CASE
966  default: return false;
967  }
968 }
969 
970 
971 #define MAKE_STRUCT_PREDICATE(NAME, Name, name) \
972  bool Object::Is##Name() { \
973  return Object::IsHeapObject() \
974  && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
975  }
977 #undef MAKE_STRUCT_PREDICATE
978 
979 
980 bool Object::IsUndefined() {
981  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;
982 }
983 
984 
985 bool Object::IsNull() {
986  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;
987 }
988 
989 
990 bool Object::IsTheHole() {
991  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;
992 }
993 
994 
995 bool Object::IsUninitialized() {
996  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUninitialized;
997 }
998 
999 
1000 bool Object::IsTrue() {
1001  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;
1002 }
1003 
1004 
1005 bool Object::IsFalse() {
1006  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;
1007 }
1008 
1009 
1011  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
1012 }
1013 
1014 
1015 double Object::Number() {
1016  ASSERT(IsNumber());
1017  return IsSmi()
1018  ? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
1019  : reinterpret_cast<HeapNumber*>(this)->value();
1020 }
1021 
1022 
1024  return this->IsHeapNumber() && std::isnan(HeapNumber::cast(this)->value());
1025 }
1026 
1027 
1029  if (object->IsSmi()) return object;
1030  if (object->IsHeapNumber()) {
1031  double value = Handle<HeapNumber>::cast(object)->value();
1032  int int_value = FastD2I(value);
1033  if (value == FastI2D(int_value) && Smi::IsValid(int_value)) {
1034  return handle(Smi::FromInt(int_value), isolate);
1035  }
1036  }
1037  return Handle<Object>();
1038 }
1039 
1040 
1041 // TODO(ishell): Use handlified version instead.
1042 MaybeObject* Object::ToSmi() {
1043  if (IsSmi()) return this;
1044  if (IsHeapNumber()) {
1045  double value = HeapNumber::cast(this)->value();
1046  int int_value = FastD2I(value);
1047  if (value == FastI2D(int_value) && Smi::IsValid(int_value)) {
1048  return Smi::FromInt(int_value);
1049  }
1050  }
1051  return Failure::Exception();
1052 }
1053 
1054 
1056  return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
1057 }
1058 
1059 
1061  Handle<Object> object,
1062  uint32_t index) {
1063  // GetElement can trigger a getter which can cause allocation.
1064  // This was not always the case. This ASSERT is here to catch
1065  // leftover incorrect uses.
1066  ASSERT(AllowHeapAllocation::IsAllowed());
1067  return Object::GetElementWithReceiver(isolate, object, object, index);
1068 }
1069 
1070 
1072  Handle<Object> object,
1073  uint32_t index) {
1074  Handle<Object> result =
1075  Object::GetElementWithReceiver(isolate, object, object, index);
1076  CHECK_NOT_EMPTY_HANDLE(isolate, result);
1077  return result;
1078 }
1079 
1080 
1081 MaybeObject* Object::GetProperty(Name* key) {
1082  PropertyAttributes attributes;
1083  return GetPropertyWithReceiver(this, key, &attributes);
1084 }
1085 
1086 
1087 MaybeObject* Object::GetProperty(Name* key, PropertyAttributes* attributes) {
1088  return GetPropertyWithReceiver(this, key, attributes);
1089 }
1090 
1091 
1092 #define FIELD_ADDR(p, offset) \
1093  (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)
1094 
1095 #define READ_FIELD(p, offset) \
1096  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)))
1097 
1098 #define WRITE_FIELD(p, offset, value) \
1099  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)
1100 
1101 #define WRITE_BARRIER(heap, object, offset, value) \
1102  heap->incremental_marking()->RecordWrite( \
1103  object, HeapObject::RawField(object, offset), value); \
1104  if (heap->InNewSpace(value)) { \
1105  heap->RecordWrite(object->address(), offset); \
1106  }
1107 
1108 #define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode) \
1109  if (mode == UPDATE_WRITE_BARRIER) { \
1110  heap->incremental_marking()->RecordWrite( \
1111  object, HeapObject::RawField(object, offset), value); \
1112  if (heap->InNewSpace(value)) { \
1113  heap->RecordWrite(object->address(), offset); \
1114  } \
1115  }
1116 
1117 #ifndef V8_TARGET_ARCH_MIPS
1118  #define READ_DOUBLE_FIELD(p, offset) \
1119  (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)))
1120 #else // V8_TARGET_ARCH_MIPS
1121  // Prevent gcc from using load-double (mips ldc1) on (possibly)
1122  // non-64-bit aligned HeapNumber::value.
1123  static inline double read_double_field(void* p, int offset) {
1124  union conversion {
1125  double d;
1126  uint32_t u[2];
1127  } c;
1128  c.u[0] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)));
1129  c.u[1] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4)));
1130  return c.d;
1131  }
1132  #define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset)
1133 #endif // V8_TARGET_ARCH_MIPS
1134 
1135 #ifndef V8_TARGET_ARCH_MIPS
1136  #define WRITE_DOUBLE_FIELD(p, offset, value) \
1137  (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
1138 #else // V8_TARGET_ARCH_MIPS
1139  // Prevent gcc from using store-double (mips sdc1) on (possibly)
1140  // non-64-bit aligned HeapNumber::value.
1141  static inline void write_double_field(void* p, int offset,
1142  double value) {
1143  union conversion {
1144  double d;
1145  uint32_t u[2];
1146  } c;
1147  c.d = value;
1148  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset))) = c.u[0];
1149  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4))) = c.u[1];
1150  }
1151  #define WRITE_DOUBLE_FIELD(p, offset, value) \
1152  write_double_field(p, offset, value)
1153 #endif // V8_TARGET_ARCH_MIPS
1154 
1155 
// Raw accessors for fixed-width scalar fields. FIELD_ADDR computes the
// untagged address of the field; each macro reinterprets it at the given
// width. None of these emit the GC write barrier, so the WRITE_* variants
// are only safe for non-pointer payloads (or where the caller emits the
// barrier itself).
#define READ_INT_FIELD(p, offset) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)))

#define WRITE_INT_FIELD(p, offset, value) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)

#define READ_INTPTR_FIELD(p, offset) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)))

#define WRITE_INTPTR_FIELD(p, offset, value) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT32_FIELD(p, offset) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)))

#define WRITE_UINT32_FIELD(p, offset, value) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT32_FIELD(p, offset) \
  (*reinterpret_cast<int32_t*>(FIELD_ADDR(p, offset)))

#define WRITE_INT32_FIELD(p, offset, value) \
  (*reinterpret_cast<int32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT64_FIELD(p, offset) \
  (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)))

#define WRITE_INT64_FIELD(p, offset, value) \
  (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_SHORT_FIELD(p, offset) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)))

#define WRITE_SHORT_FIELD(p, offset, value) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_BYTE_FIELD(p, offset) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)))

#define WRITE_BYTE_FIELD(p, offset, value) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)
1197 
1198 
1200  return &READ_FIELD(obj, byte_offset);
1201 }
1202 
1203 
// Returns the untagged integer payload of this Smi.
int Smi::value() {
  return Internals::SmiValue(this);
}
1207 
1208 
// Converts |value| into its tagged Smi form; |value| must be within Smi
// range (checked in debug builds).
Smi* Smi::FromInt(int value) {
  ASSERT(Smi::IsValid(value));
  return reinterpret_cast<Smi*>(Internals::IntToSmi(value));
}
1213 
1214 
1215 Smi* Smi::FromIntptr(intptr_t value) {
1216  ASSERT(Smi::IsValid(value));
1217  int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
1218  return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag);
1219 }
1220 
1221 
1223  return static_cast<Type>(value() & kFailureTypeTagMask);
1224 }
1225 
1226 
1228  return type() == INTERNAL_ERROR;
1229 }
1230 
1231 
1234  return static_cast<AllocationSpace>((value() >> kFailureTypeTagSize)
1235  & kSpaceTagMask);
1236 }
1237 
1238 
1240  return Construct(INTERNAL_ERROR);
1241 }
1242 
1243 
1245  return Construct(EXCEPTION);
1246 }
1247 
1248 
1249 intptr_t Failure::value() const {
1250  return static_cast<intptr_t>(
1251  reinterpret_cast<uintptr_t>(this) >> kFailureTagSize);
1252 }
1253 
1254 
1256  return RetryAfterGC(NEW_SPACE);
1257 }
1258 
1259 
1261  ASSERT((space & ~kSpaceTagMask) == 0);
1262  return Construct(RETRY_AFTER_GC, space);
1263 }
1264 
1265 
// Builds a tagged Failure pointer encoding |type| and |value| in the
// bits above the failure tag.
Failure* Failure::Construct(Type type, intptr_t value) {
  uintptr_t info =
      (static_cast<uintptr_t>(value) << kFailureTypeTagSize) | type;
  // Shifting left then right must round-trip: the payload may not spill
  // into the bits consumed by the failure tag.
  ASSERT(((info << kFailureTagSize) >> kFailureTagSize) == info);
  // Fill the unused bits with a pattern that's easy to recognize in crash
  // dumps.
  static const int kFailureMagicPattern = 0x0BAD0000;
  return reinterpret_cast<Failure*>(
      (info << kFailureTagSize) | kFailureTag | kFailureMagicPattern);
}
1276 
1277 
1278 bool Smi::IsValid(intptr_t value) {
1279  bool result = Internals::IsValidSmi(value);
1280  ASSERT_EQ(result, value >= kMinValue && value <= kMaxValue);
1281  return result;
1282 }
1283 
1284 
// Wraps the raw bits of |map| in a MapWord.
MapWord MapWord::FromMap(Map* map) {
  return MapWord(reinterpret_cast<uintptr_t>(map));
}
1288 
1289 
// Reinterprets the stored bits as a Map pointer.
Map* MapWord::ToMap() {
  return reinterpret_cast<Map*>(value_);
}
1293 
1294 
// A forwarding address stored in the map slot is distinguishable from a
// real map pointer because the untagged address passes the Smi check.
bool MapWord::IsForwardingAddress() {
  return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
}
1298 
1299 
1300 MapWord MapWord::FromForwardingAddress(HeapObject* object) {
1301  Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
1302  return MapWord(reinterpret_cast<uintptr_t>(raw));
1303 }
1304 
1305 
// Decodes the forwarding pointer stored by FromForwardingAddress.
HeapObject* MapWord::ToForwardingAddress() {
  ASSERT(IsForwardingAddress());
  return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
}
1310 
1311 
1312 #ifdef VERIFY_HEAP
// Heap verification: checks that the field at |offset| holds a valid
// tagged pointer.
void HeapObject::VerifyObjectField(int offset) {
  VerifyPointer(READ_FIELD(this, offset));
}
1316 
// Heap verification: checks that the field at |offset| holds a Smi
// rather than a heap pointer.
void HeapObject::VerifySmiField(int offset) {
  CHECK(READ_FIELD(this, offset)->IsSmi());
}
1320 #endif
1321 
1322 
1324  Heap* heap =
1325  MemoryChunk::FromAddress(reinterpret_cast<Address>(this))->heap();
1326  SLOW_ASSERT(heap != NULL);
1327  return heap;
1328 }
1329 
1330 
1332  return GetHeap()->isolate();
1333 }
1334 
1335 
1337  return map_word().ToMap();
1338 }
1339 
1340 
// Installs |value| as this object's map and notifies the incremental
// marker so that marking stays consistent when the map slot changes
// mid-marking.
void HeapObject::set_map(Map* value) {
  set_map_word(MapWord::FromMap(value));
  if (value != NULL) {
    // TODO(1600) We are passing NULL as a slot because maps can never be on
    // evacuation candidate.
    value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
  }
}
1349 
1350 
1351 // Unsafe accessor omitting write barrier.
1353  set_map_word(MapWord::FromMap(value));
1354 }
1355 
1356 
1358  return MapWord(reinterpret_cast<uintptr_t>(READ_FIELD(this, kMapOffset)));
1359 }
1360 
1361 
// Stores the raw map word; the value may be a map pointer or an encoded
// forwarding address (see MapWord).
void HeapObject::set_map_word(MapWord map_word) {
  // WRITE_FIELD does not invoke write barrier, but there is no need
  // here.
  WRITE_FIELD(this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
}
1367 
1368 
1370  ASSERT_TAG_ALIGNED(address);
1371  return reinterpret_cast<HeapObject*>(address + kHeapObjectTag);
1372 }
1373 
1374 
1376  return reinterpret_cast<Address>(this) - kHeapObjectTag;
1377 }
1378 
1379 
1381  return SizeFromMap(map());
1382 }
1383 
1384 
1385 void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
1386  v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
1387  reinterpret_cast<Object**>(FIELD_ADDR(this, end)));
1388 }
1389 
1390 
// Visits the single tagged slot at |offset|.
void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
  v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
}
1394 
1395 
// Visits the next-code-link slot at |offset| via the visitor's dedicated
// hook (so visitors can treat this link specially).
void HeapObject::IterateNextCodeLink(ObjectVisitor* v, int offset) {
  v->VisitNextCodeLink(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
}
1399 
1400 
1402  return READ_DOUBLE_FIELD(this, kValueOffset);
1403 }
1404 
1405 
// Stores the double payload. WRITE_DOUBLE_FIELD handles the possibly
// 4-byte-aligned storage on MIPS (see the macros above).
void HeapNumber::set_value(double value) {
  WRITE_DOUBLE_FIELD(this, kValueOffset, value);
}
1409 
1410 
1412  return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
1414 }
1415 
1416 
1418  return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
1419 }
1420 
1421 
1422 ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)
1423 
1424 
1425 Object** FixedArray::GetFirstElementAddress() {
1426  return reinterpret_cast<Object**>(FIELD_ADDR(this, OffsetOfElementAt(0)));
1427 }
1428 
1429 
1431  Object* the_hole = GetHeap()->the_hole_value();
1432  Object** current = GetFirstElementAddress();
1433  for (int i = 0; i < length(); ++i) {
1434  Object* candidate = *current++;
1435  if (!candidate->IsSmi() && candidate != the_hole) return false;
1436  }
1437  return true;
1438 }
1439 
1440 
1441 FixedArrayBase* JSObject::elements() {
1442  Object* array = READ_FIELD(this, kElementsOffset);
1443  return static_cast<FixedArrayBase*>(array);
1444 }
1445 
1446 
1448 #ifdef ENABLE_SLOW_ASSERTS
1450  ElementsAccessor* accessor = GetElementsAccessor();
1451  accessor->Validate(this);
1452  }
1453 #endif
1454 }
1455 
1456 
1458  set_transition_info(Smi::FromInt(0));
1460  set_nested_site(Smi::FromInt(0));
1461  set_pretenure_data(Smi::FromInt(0));
1462  set_pretenure_create_count(Smi::FromInt(0));
1463  set_dependent_code(DependentCode::cast(GetHeap()->empty_fixed_array()),
1465 }
1466 
1467 
1469  ASSERT(!IsZombie());
1470  Initialize();
1472 }
1473 
1474 
1475 // Heuristic: We only need to create allocation site info if the boilerplate
1476 // elements kind is the initial elements kind.
1478  ElementsKind boilerplate_elements_kind) {
1479  if (FLAG_pretenuring_call_new ||
1480  IsFastSmiElementsKind(boilerplate_elements_kind)) {
1481  return TRACK_ALLOCATION_SITE;
1482  }
1483 
1485 }
1486 
1487 
1489  ElementsKind to) {
1490  if (FLAG_pretenuring_call_new ||
1491  (IsFastSmiElementsKind(from) &&
1493  return TRACK_ALLOCATION_SITE;
1494  }
1495 
1497 }
1498 
1499 
1501  if (FLAG_allocation_site_pretenuring) {
1502  return type == JS_ARRAY_TYPE ||
1503  type == JS_OBJECT_TYPE ||
1504  type < FIRST_NONSTRING_TYPE;
1505  }
1506  return type == JS_ARRAY_TYPE;
1507 }
1508 
1509 
1510 inline DependentCode::DependencyGroup AllocationSite::ToDependencyGroup(
1511  Reason reason) {
1512  switch (reason) {
1513  case TENURING:
1515  break;
1516  case TRANSITIONS:
1518  break;
1519  }
1520  UNREACHABLE();
1522 }
1523 
1524 
1526  int value = pretenure_data()->value();
1527  // Verify that we can count more mementos than we can possibly find in one
1528  // new space collection.
1529  ASSERT((GetHeap()->MaxSemiSpaceSize() /
1533  set_pretenure_data(
1536 }
1537 
1539  if (IsZombie()) return false;
1540 
1541  int value = memento_found_count();
1542  set_memento_found_count(value + 1);
1543  return value == 0;
1544 }
1545 
1546 
1548  ASSERT(FLAG_allocation_site_pretenuring);
1549  int value = memento_create_count();
1550  set_memento_create_count(value + 1);
1551 }
1552 
1553 
1555  bool decision_changed = false;
1556  int create_count = memento_create_count();
1557  int found_count = memento_found_count();
1558  bool minimum_mementos_created = create_count >= kPretenureMinimumCreated;
1559  double ratio =
1560  minimum_mementos_created || FLAG_trace_pretenuring_statistics ?
1561  static_cast<double>(found_count) / create_count : 0.0;
1562  PretenureFlag current_mode = GetPretenureMode();
1563 
1564  if (minimum_mementos_created) {
1565  PretenureDecision result = ratio >= kPretenureRatio
1566  ? kTenure
1567  : kDontTenure;
1568  set_pretenure_decision(result);
1569  if (current_mode != GetPretenureMode()) {
1570  decision_changed = true;
1572  }
1573  }
1574 
1575  if (FLAG_trace_pretenuring_statistics) {
1576  PrintF(
1577  "AllocationSite(%p): (created, found, ratio) (%d, %d, %f) %s => %s\n",
1578  static_cast<void*>(this), create_count, found_count, ratio,
1579  current_mode == TENURED ? "tenured" : "not tenured",
1580  GetPretenureMode() == TENURED ? "tenured" : "not tenured");
1581  }
1582 
1583  // Clear feedback calculation fields until the next gc.
1586  return decision_changed;
1587 }
1588 
1589 
1591  object->ValidateElements();
1592  ElementsKind elements_kind = object->map()->elements_kind();
1593  if (!IsFastObjectElementsKind(elements_kind)) {
1594  if (IsFastHoleyElementsKind(elements_kind)) {
1596  } else {
1598  }
1599  }
1600 }
1601 
1602 
1604  Object** objects,
1605  uint32_t count,
1607  ElementsKind current_kind = object->map()->elements_kind();
1608  ElementsKind target_kind = current_kind;
1609  {
1610  DisallowHeapAllocation no_allocation;
1612  bool is_holey = IsFastHoleyElementsKind(current_kind);
1613  if (current_kind == FAST_HOLEY_ELEMENTS) return;
1614  Heap* heap = object->GetHeap();
1615  Object* the_hole = heap->the_hole_value();
1616  for (uint32_t i = 0; i < count; ++i) {
1617  Object* current = *objects++;
1618  if (current == the_hole) {
1619  is_holey = true;
1620  target_kind = GetHoleyElementsKind(target_kind);
1621  } else if (!current->IsSmi()) {
1622  if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS && current->IsNumber()) {
1623  if (IsFastSmiElementsKind(target_kind)) {
1624  if (is_holey) {
1625  target_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
1626  } else {
1627  target_kind = FAST_DOUBLE_ELEMENTS;
1628  }
1629  }
1630  } else if (is_holey) {
1631  target_kind = FAST_HOLEY_ELEMENTS;
1632  break;
1633  } else {
1634  target_kind = FAST_ELEMENTS;
1635  }
1636  }
1637  }
1638  }
1639  if (target_kind != current_kind) {
1640  TransitionElementsKind(object, target_kind);
1641  }
1642 }
1643 
1644 
1646  Handle<FixedArrayBase> elements,
1647  uint32_t length,
1649  Heap* heap = object->GetHeap();
1650  if (elements->map() != heap->fixed_double_array_map()) {
1651  ASSERT(elements->map() == heap->fixed_array_map() ||
1652  elements->map() == heap->fixed_cow_array_map());
1653  if (mode == ALLOW_COPIED_DOUBLE_ELEMENTS) {
1655  }
1656  Object** objects =
1657  Handle<FixedArray>::cast(elements)->GetFirstElementAddress();
1658  EnsureCanContainElements(object, objects, length, mode);
1659  return;
1660  }
1661 
1663  if (object->GetElementsKind() == FAST_HOLEY_SMI_ELEMENTS) {
1665  } else if (object->GetElementsKind() == FAST_SMI_ELEMENTS) {
1666  Handle<FixedDoubleArray> double_array =
1668  for (uint32_t i = 0; i < length; ++i) {
1669  if (double_array->is_the_hole(i)) {
1671  return;
1672  }
1673  }
1675  }
1676 }
1677 
1678 
1680  ElementsKind to_kind) {
1681  Map* current_map = map();
1682  ElementsKind from_kind = current_map->elements_kind();
1683  if (from_kind == to_kind) return current_map;
1684 
1685  Context* native_context = isolate->context()->native_context();
1686  Object* maybe_array_maps = native_context->js_array_maps();
1687  if (maybe_array_maps->IsFixedArray()) {
1688  FixedArray* array_maps = FixedArray::cast(maybe_array_maps);
1689  if (array_maps->get(from_kind) == current_map) {
1690  Object* maybe_transitioned_map = array_maps->get(to_kind);
1691  if (maybe_transitioned_map->IsMap()) {
1692  return Map::cast(maybe_transitioned_map);
1693  }
1694  }
1695  }
1696 
1697  return GetElementsTransitionMapSlow(to_kind);
1698 }
1699 
1700 
1702  FixedArrayBase* value,
1704  ASSERT(value->HasValidElements());
1705  if (new_map != NULL) {
1706  if (mode == UPDATE_WRITE_BARRIER) {
1707  set_map(new_map);
1708  } else {
1709  ASSERT(mode == SKIP_WRITE_BARRIER);
1710  set_map_no_write_barrier(new_map);
1711  }
1712  }
1713  ASSERT((map()->has_fast_smi_or_object_elements() ||
1714  (value == GetHeap()->empty_fixed_array())) ==
1715  (value->map() == GetHeap()->fixed_array_map() ||
1716  value->map() == GetHeap()->fixed_cow_array_map()));
1717  ASSERT((value == GetHeap()->empty_fixed_array()) ||
1718  (map()->has_fast_double_elements() == value->IsFixedDoubleArray()));
1719  WRITE_FIELD(this, kElementsOffset, value);
1720  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);
1721 }
1722 
1723 
// Replaces the elements backing store without changing the map.
void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
  set_map_and_elements(NULL, value, mode);
}
1727 
1728 
// Points the properties slot at the canonical empty fixed array. The
// assert confirms it lives outside new space, so the barrier-free
// WRITE_FIELD is safe.
void JSObject::initialize_properties() {
  ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
  WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
}
1733 
1734 
1736  if (map()->has_fast_smi_or_object_elements() ||
1737  map()->has_fast_double_elements()) {
1738  ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
1739  WRITE_FIELD(this, kElementsOffset, GetHeap()->empty_fixed_array());
1740  } else if (map()->has_external_array_elements()) {
1741  ExternalArray* empty_array = GetHeap()->EmptyExternalArrayForMap(map());
1742  ASSERT(!GetHeap()->InNewSpace(empty_array));
1743  WRITE_FIELD(this, kElementsOffset, empty_array);
1744  } else if (map()->has_fixed_typed_array_elements()) {
1745  FixedTypedArrayBase* empty_array =
1747  ASSERT(!GetHeap()->InNewSpace(empty_array));
1748  WRITE_FIELD(this, kElementsOffset, empty_array);
1749  } else {
1750  UNREACHABLE();
1751  }
1752 }
1753 
1754 
1755 MaybeObject* JSObject::ResetElements() {
1756  if (map()->is_observed()) {
1757  // Maintain invariant that observed elements are always in dictionary mode.
1758  SeededNumberDictionary* dictionary;
1759  MaybeObject* maybe = SeededNumberDictionary::Allocate(GetHeap(), 0);
1760  if (!maybe->To(&dictionary)) return maybe;
1761  if (map() == GetHeap()->sloppy_arguments_elements_map()) {
1762  FixedArray::cast(elements())->set(1, dictionary);
1763  } else {
1764  set_elements(dictionary);
1765  }
1766  return this;
1767  }
1768 
1769  ElementsKind elements_kind = GetInitialFastElementsKind();
1770  if (!FLAG_smi_only_arrays) {
1771  elements_kind = FastSmiToObjectElementsKind(elements_kind);
1772  }
1773  MaybeObject* maybe = GetElementsTransitionMap(GetIsolate(), elements_kind);
1774  Map* map;
1775  if (!maybe->To(&map)) return maybe;
1776  set_map(map);
1778 
1779  return this;
1780 }
1781 
1782 
1784  DisallowHeapAllocation no_gc;
1785  if (!map->HasTransitionArray()) return Handle<String>::null();
1786  TransitionArray* transitions = map->transitions();
1787  if (!transitions->IsSimpleTransition()) return Handle<String>::null();
1788  int transition = TransitionArray::kSimpleTransitionIndex;
1789  PropertyDetails details = transitions->GetTargetDetails(transition);
1790  Name* name = transitions->GetKey(transition);
1791  if (details.type() != FIELD) return Handle<String>::null();
1792  if (details.attributes() != NONE) return Handle<String>::null();
1793  if (!name->IsString()) return Handle<String>::null();
1794  return Handle<String>(String::cast(name));
1795 }
1796 
1797 
1799  ASSERT(!ExpectedTransitionKey(map).is_null());
1800  return Handle<Map>(map->transitions()->GetTarget(
1802 }
1803 
1804 
1806  DisallowHeapAllocation no_allocation;
1807  if (!map->HasTransitionArray()) return Handle<Map>::null();
1808  TransitionArray* transitions = map->transitions();
1809  int transition = transitions->Search(*key);
1810  if (transition == TransitionArray::kNotFound) return Handle<Map>::null();
1811  PropertyDetails target_details = transitions->GetTargetDetails(transition);
1812  if (target_details.type() != FIELD) return Handle<Map>::null();
1813  if (target_details.attributes() != NONE) return Handle<Map>::null();
1814  return Handle<Map>(transitions->GetTarget(transition));
1815 }
1816 
1817 
1818 ACCESSORS(Oddball, to_string, String, kToStringOffset)
1819 ACCESSORS(Oddball, to_number, Object, kToNumberOffset)
1820 
1821 
// Returns the oddball's kind tag, stored as a Smi in the kind field.
byte Oddball::kind() {
  return Smi::cast(READ_FIELD(this, kKindOffset))->value();
}
1825 
1826 
1828  WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));
1829 }
1830 
1831 
// Loads the cell's current value.
Object* Cell::value() {
  return READ_FIELD(this, kValueOffset);
}
1835 
1836 
// Stores |val| without emitting a write barrier; the mode argument is
// deliberately ignored.
void Cell::set_value(Object* val, WriteBarrierMode ignored) {
  // The write barrier is not used for global property cells.
  ASSERT(!val->IsPropertyCell() && !val->IsCell());
  WRITE_FIELD(this, kValueOffset, val);
}
1842 
1843 ACCESSORS(PropertyCell, dependent_code, DependentCode, kDependentCodeOffset)
1844 
// Raw read of the type field.
Object* PropertyCell::type_raw() {
  return READ_FIELD(this, kTypeOffset);
}
1848 
1849 
// Raw store of the type field; no write barrier is emitted and the mode
// argument is intentionally ignored.
void PropertyCell::set_type_raw(Object* val, WriteBarrierMode ignored) {
  WRITE_FIELD(this, kTypeOffset, val);
}
1853 
1854 
1856  InstanceType type = map()->instance_type();
1857  // Check for the most common kind of JavaScript object before
1858  // falling into the generic switch. This speeds up the internal
1859  // field operations considerably on average.
1860  if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
1861  switch (type) {
1863  return JSGeneratorObject::kSize;
1864  case JS_MODULE_TYPE:
1865  return JSModule::kSize;
1866  case JS_GLOBAL_PROXY_TYPE:
1867  return JSGlobalProxy::kSize;
1868  case JS_GLOBAL_OBJECT_TYPE:
1869  return JSGlobalObject::kSize;
1871  return JSBuiltinsObject::kSize;
1872  case JS_FUNCTION_TYPE:
1873  return JSFunction::kSize;
1874  case JS_VALUE_TYPE:
1875  return JSValue::kSize;
1876  case JS_DATE_TYPE:
1877  return JSDate::kSize;
1878  case JS_ARRAY_TYPE:
1879  return JSArray::kSize;
1880  case JS_ARRAY_BUFFER_TYPE:
1881  return JSArrayBuffer::kSize;
1882  case JS_TYPED_ARRAY_TYPE:
1883  return JSTypedArray::kSize;
1884  case JS_DATA_VIEW_TYPE:
1885  return JSDataView::kSize;
1886  case JS_SET_TYPE:
1887  return JSSet::kSize;
1888  case JS_MAP_TYPE:
1889  return JSMap::kSize;
1890  case JS_WEAK_MAP_TYPE:
1891  return JSWeakMap::kSize;
1892  case JS_WEAK_SET_TYPE:
1893  return JSWeakSet::kSize;
1894  case JS_REGEXP_TYPE:
1895  return JSRegExp::kSize;
1897  return JSObject::kHeaderSize;
1899  return JSMessageObject::kSize;
1900  default:
1901  // TODO(jkummerow): Re-enable this. Blink currently hits this
1902  // from its CustomElementConstructorBuilder.
1903  // UNREACHABLE();
1904  return 0;
1905  }
1906 }
1907 
1908 
1911  // Make sure to adjust for the number of in-object properties. These
1912  // properties do contribute to the size, but are not internal fields.
1913  return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
1914  map()->inobject_properties();
1915 }
1916 
1917 
1919  ASSERT(index < GetInternalFieldCount() && index >= 0);
1920  return GetHeaderSize() + (kPointerSize * index);
1921 }
1922 
1923 
1925  ASSERT(index < GetInternalFieldCount() && index >= 0);
1926  // Internal objects do follow immediately after the header, whereas in-object
1927  // properties are at the end of the object. Therefore there is no need
1928  // to adjust the index here.
1929  return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
1930 }
1931 
1932 
// Stores |value| into internal field |index| with the full write barrier.
void JSObject::SetInternalField(int index, Object* value) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  int offset = GetHeaderSize() + (kPointerSize * index);
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(GetHeap(), this, offset, value);
}
1942 
1943 
// Smi overload of SetInternalField: identical layout handling to the
// Object* overload, but no write barrier since Smis are not heap
// pointers.
void JSObject::SetInternalField(int index, Smi* value) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  int offset = GetHeaderSize() + (kPointerSize * index);
  WRITE_FIELD(this, offset, value);
}
1952 
1953 
// Reads the fast-mode property at |index| and lets the value allocate
// whatever storage |representation| requires; returns a MaybeObject
// because that allocation can fail.
MaybeObject* JSObject::FastPropertyAt(Representation representation,
                                      int index) {
  Object* raw_value = RawFastPropertyAt(index);
  return raw_value->AllocateNewStorageFor(GetHeap(), representation);
}
1959 
1960 
1961 // Access fast-case object properties at index. The use of these routines
1962 // is needed to correctly distinguish between properties stored in-object and
1963 // properties stored in the properties array.
1965  // Adjust for the number of properties stored in the object.
1966  index -= map()->inobject_properties();
1967  if (index < 0) {
1968  int offset = map()->instance_size() + (index * kPointerSize);
1969  return READ_FIELD(this, offset);
1970  } else {
1971  ASSERT(index < properties()->length());
1972  return properties()->get(index);
1973  }
1974 }
1975 
1976 
// Stores a fast-mode property: negative adjusted indices address
// in-object fields (relative to the instance size); non-negative ones go
// to the out-of-line properties array.
void JSObject::FastPropertyAtPut(int index, Object* value) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  if (index < 0) {
    int offset = map()->instance_size() + (index * kPointerSize);
    WRITE_FIELD(this, offset, value);
    // In-object stores bypass FixedArray::set, so emit the barrier here.
    WRITE_BARRIER(GetHeap(), this, offset, value);
  } else {
    ASSERT(index < properties()->length());
    // FixedArray::set performs its own write barrier.
    properties()->set(index, value);
  }
}
1989 
1990 
1992  return map()->GetInObjectPropertyOffset(index);
1993 }
1994 
1995 
1997  int offset = GetInObjectPropertyOffset(index);
1998  return READ_FIELD(this, offset);
1999 }
2000 
2001 
2003  Object* value,
2005  // Adjust for the number of properties stored in the object.
2006  int offset = GetInObjectPropertyOffset(index);
2007  WRITE_FIELD(this, offset, value);
2008  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
2009  return value;
2010 }
2011 
2012 
2013 
2015  Object* pre_allocated_value,
2016  Object* filler_value) {
2017  ASSERT(!filler_value->IsHeapObject() ||
2018  !GetHeap()->InNewSpace(filler_value));
2019  ASSERT(!pre_allocated_value->IsHeapObject() ||
2020  !GetHeap()->InNewSpace(pre_allocated_value));
2021  int size = map->instance_size();
2022  int offset = kHeaderSize;
2023  if (filler_value != pre_allocated_value) {
2024  int pre_allocated = map->pre_allocated_property_fields();
2025  ASSERT(pre_allocated * kPointerSize + kHeaderSize <= size);
2026  for (int i = 0; i < pre_allocated; i++) {
2027  WRITE_FIELD(this, offset, pre_allocated_value);
2028  offset += kPointerSize;
2029  }
2030  }
2031  while (offset < size) {
2032  WRITE_FIELD(this, offset, filler_value);
2033  offset += kPointerSize;
2034  }
2035 }
2036 
2037 
2039  ASSERT(properties()->IsDictionary() == map()->is_dictionary_map());
2040  return !properties()->IsDictionary();
2041 }
2042 
2043 
2045  // Allow extra fast properties if the object has more than
2046  // kFastPropertiesSoftLimit in-object properties. When this is the case, it is
2047  // very unlikely that the object is being used as a dictionary and there is a
2048  // good chance that allowing more map transitions will be worth it.
2049  Map* map = this->map();
2050  if (map->unused_property_fields() != 0) return false;
2051 
2052  int inobject = map->inobject_properties();
2053 
2054  int limit;
2055  if (store_mode == CERTAINLY_NOT_STORE_FROM_KEYED) {
2056  limit = Max(inobject, kMaxFastProperties);
2057  } else {
2058  limit = Max(inobject, kFastPropertiesSoftLimit);
2059  }
2060  return properties()->length() > limit;
2061 }
2062 
2063 
2064 void Struct::InitializeBody(int object_size) {
2065  Object* value = GetHeap()->undefined_value();
2066  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
2067  WRITE_FIELD(this, offset, value);
2068  }
2069 }
2070 
2071 
2072 bool Object::ToArrayIndex(uint32_t* index) {
2073  if (IsSmi()) {
2074  int value = Smi::cast(this)->value();
2075  if (value < 0) return false;
2076  *index = value;
2077  return true;
2078  }
2079  if (IsHeapNumber()) {
2080  double value = HeapNumber::cast(this)->value();
2081  uint32_t uint_value = static_cast<uint32_t>(value);
2082  if (value == static_cast<double>(uint_value)) {
2083  *index = uint_value;
2084  return true;
2085  }
2086  }
2087  return false;
2088 }
2089 
2090 
2092  if (!this->IsJSValue()) return false;
2093 
2094  JSValue* js_value = JSValue::cast(this);
2095  if (!js_value->value()->IsString()) return false;
2096 
2097  String* str = String::cast(js_value->value());
2098  if (index >= static_cast<uint32_t>(str->length())) return false;
2099 
2100  return true;
2101 }
2102 
2103 
2105 #if ENABLE_EXTRA_CHECKS
2106  if (!(IsSmi() ||
2107  IsString() ||
2108  IsSymbol() ||
2109  IsSpecObject() ||
2110  IsHeapNumber() ||
2111  IsUndefined() ||
2112  IsTrue() ||
2113  IsFalse() ||
2114  IsNull())) {
2115  FATAL("API call returned invalid object");
2116  }
2117 #endif // ENABLE_EXTRA_CHECKS
2118 }
2119 
2120 
2122  ASSERT(object->IsFixedArrayBase());
2123  return reinterpret_cast<FixedArrayBase*>(object);
2124 }
2125 
2126 
2128  SLOW_ASSERT(index >= 0 && index < this->length());
2129  return READ_FIELD(this, kHeaderSize + index * kPointerSize);
2130 }
2131 
2132 
// True if the element at |index| is the distinguished hole sentinel.
bool FixedArray::is_the_hole(int index) {
  return get(index) == GetHeap()->the_hole_value();
}
2136 
2137 
2138 void FixedArray::set(int index, Smi* value) {
2139  ASSERT(map() != GetHeap()->fixed_cow_array_map());
2140  ASSERT(index >= 0 && index < this->length());
2141  ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
2142  int offset = kHeaderSize + index * kPointerSize;
2143  WRITE_FIELD(this, offset, value);
2144 }
2145 
2146 
// Stores |value| at |index| with the full write barrier; use for
// arbitrary heap values.
void FixedArray::set(int index, Object* value) {
  ASSERT(map() != GetHeap()->fixed_cow_array_map());  // COW arrays are immutable.
  ASSERT(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(GetHeap(), this, offset, value);
}
2154 
2155 
// The hole is encoded as one specific NaN bit pattern; compare bitwise,
// since NaN != NaN under floating-point comparison.
inline bool FixedDoubleArray::is_the_hole_nan(double value) {
  return BitCast<uint64_t, double>(value) == kHoleNanInt64;
}
2159 
2160 
2162  return BitCast<double, uint64_t>(kHoleNanInt64);
2163 }
2164 
2165 
2167  ASSERT(BitCast<uint64_t>(OS::nan_value()) != kHoleNanInt64);
2168  ASSERT((BitCast<uint64_t>(OS::nan_value()) >> 32) != kHoleNanUpper32);
2169  return OS::nan_value();
2170 }
2171 
2172 
// Reads the raw double at |index|; must not be called on hole entries
// (callers check is_the_hole first — debug-asserted here).
double FixedDoubleArray::get_scalar(int index) {
  ASSERT(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  ASSERT(index >= 0 && index < this->length());
  double result = READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
  ASSERT(!is_the_hole_nan(result));
  return result;
}
2181 
2183  ASSERT(map() != GetHeap()->fixed_cow_array_map() &&
2184  map() != GetHeap()->fixed_array_map());
2185  ASSERT(index >= 0 && index < this->length());
2186  return READ_INT64_FIELD(this, kHeaderSize + index * kDoubleSize);
2187 }
2188 
2189 MaybeObject* FixedDoubleArray::get(int index) {
2190  if (is_the_hole(index)) {
2191  return GetHeap()->the_hole_value();
2192  } else {
2193  return GetHeap()->NumberFromDouble(get_scalar(index));
2194  }
2195 }
2196 
2197 
2199  if (is_the_hole(index)) {
2200  return GetIsolate()->factory()->the_hole_value();
2201  } else {
2202  return GetIsolate()->factory()->NewNumber(get_scalar(index));
2203  }
2204 }
2205 
2206 
2207 void FixedDoubleArray::set(int index, double value) {
2208  ASSERT(map() != GetHeap()->fixed_cow_array_map() &&
2209  map() != GetHeap()->fixed_array_map());
2210  int offset = kHeaderSize + index * kDoubleSize;
2211  if (std::isnan(value)) value = canonical_not_the_hole_nan_as_double();
2212  WRITE_DOUBLE_FIELD(this, offset, value);
2213 }
2214 
2215 
2217  ASSERT(map() != GetHeap()->fixed_cow_array_map() &&
2218  map() != GetHeap()->fixed_array_map());
2219  int offset = kHeaderSize + index * kDoubleSize;
2220  WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
2221 }
2222 
2223 
2225  int offset = kHeaderSize + index * kDoubleSize;
2226  return is_the_hole_nan(READ_DOUBLE_FIELD(this, offset));
2227 }
2228 
2229 
2231  ConstantPoolArray, first_code_ptr_index, kFirstCodePointerIndexOffset)
2233  ConstantPoolArray, first_heap_ptr_index, kFirstHeapPointerIndexOffset)
2235  ConstantPoolArray, first_int32_index, kFirstInt32IndexOffset)
2236 
2237 
// The int64 section is always the first section of a ConstantPoolArray
// (see SetEntryCounts for the section layout).
int ConstantPoolArray::first_int64_index() {
  return 0;
}
2241 
2242 
2244  return first_code_ptr_index();
2245 }
2246 
2247 
2250 }
2251 
2252 
2255 }
2256 
2257 
2259  return length() - first_int32_index();
2260 }
2261 
2262 
2263 void ConstantPoolArray::SetEntryCounts(int number_of_int64_entries,
2264  int number_of_code_ptr_entries,
2265  int number_of_heap_ptr_entries,
2266  int number_of_int32_entries) {
2267  int current_index = number_of_int64_entries;
2268  set_first_code_ptr_index(current_index);
2269  current_index += number_of_code_ptr_entries;
2270  set_first_heap_ptr_index(current_index);
2271  current_index += number_of_heap_ptr_entries;
2272  set_first_int32_index(current_index);
2273  current_index += number_of_int32_entries;
2274  set_length(current_index);
2275 }
2276 
2277 
2279  ASSERT(map() == GetHeap()->constant_pool_array_map());
2280  ASSERT(index >= 0 && index < first_code_ptr_index());
2281  return READ_INT64_FIELD(this, OffsetOfElementAt(index));
2282 }
2283 
2286  ASSERT(map() == GetHeap()->constant_pool_array_map());
2287  ASSERT(index >= 0 && index < first_code_ptr_index());
2288  return READ_DOUBLE_FIELD(this, OffsetOfElementAt(index));
2289 }
2290 
2291 
2293  ASSERT(map() == GetHeap()->constant_pool_array_map());
2294  ASSERT(index >= first_code_ptr_index() && index < first_heap_ptr_index());
2295  return reinterpret_cast<Address>(READ_FIELD(this, OffsetOfElementAt(index)));
2296 }
2297 
2298 
2300  ASSERT(map() == GetHeap()->constant_pool_array_map());
2301  ASSERT(index >= first_heap_ptr_index() && index < first_int32_index());
2302  return READ_FIELD(this, OffsetOfElementAt(index));
2303 }
2304 
2305 
2307  ASSERT(map() == GetHeap()->constant_pool_array_map());
2308  ASSERT(index >= first_int32_index() && index < length());
2309  return READ_INT32_FIELD(this, OffsetOfElementAt(index));
2310 }
2311 
2312 
2313 void ConstantPoolArray::set(int index, Address value) {
2314  ASSERT(map() == GetHeap()->constant_pool_array_map());
2315  ASSERT(index >= first_code_ptr_index() && index < first_heap_ptr_index());
2316  WRITE_FIELD(this, OffsetOfElementAt(index), reinterpret_cast<Object*>(value));
2317 }
2318 
2319 
// Stores a heap pointer entry with the full write barrier. The range
// assert spans both the code-pointer and heap-pointer sections.
void ConstantPoolArray::set(int index, Object* value) {
  ASSERT(map() == GetHeap()->constant_pool_array_map());
  ASSERT(index >= first_code_ptr_index() && index < first_int32_index());
  WRITE_FIELD(this, OffsetOfElementAt(index), value);
  WRITE_BARRIER(GetHeap(), this, OffsetOfElementAt(index), value);
}
2326 
2327 
// Stores a 64-bit integer in the int64 section.
void ConstantPoolArray::set(int index, int64_t value) {
  ASSERT(map() == GetHeap()->constant_pool_array_map());
  ASSERT(index >= first_int64_index() && index < first_code_ptr_index());
  WRITE_INT64_FIELD(this, OffsetOfElementAt(index), value);
}
2333 
2334 
// Stores a double. Doubles share the int64 section (see the index range
// assert below), occupying one 8-byte slot each.
void ConstantPoolArray::set(int index, double value) {
  ASSERT(map() == GetHeap()->constant_pool_array_map());
  ASSERT(index >= first_int64_index() && index < first_code_ptr_index());
  WRITE_DOUBLE_FIELD(this, OffsetOfElementAt(index), value);
}
2341 
2342 
// Stores a 32-bit integer in the trailing int32 section.
void ConstantPoolArray::set(int index, int32_t value) {
  ASSERT(map() == GetHeap()->constant_pool_array_map());
  ASSERT(index >= this->first_int32_index() && index < length());
  WRITE_INT32_FIELD(this, OffsetOfElementAt(index), value);
}
2348 
2349 
2351  const DisallowHeapAllocation& promise) {
2352  Heap* heap = GetHeap();
2353  if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
2354  if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER;
2355  return UPDATE_WRITE_BARRIER;
2356 }
2357 
2358 
2359 void FixedArray::set(int index,
2360  Object* value,
2362  ASSERT(map() != GetHeap()->fixed_cow_array_map());
2363  ASSERT(index >= 0 && index < this->length());
2364  int offset = kHeaderSize + index * kPointerSize;
2365  WRITE_FIELD(this, offset, value);
2366  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
2367 }
2368 
2369 
2371  int index,
2372  Object* value) {
2373  ASSERT(array->map() != array->GetHeap()->fixed_cow_array_map());
2374  ASSERT(index >= 0 && index < array->length());
2375  int offset = kHeaderSize + index * kPointerSize;
2376  WRITE_FIELD(array, offset, value);
2377  Heap* heap = array->GetHeap();
2378  if (heap->InNewSpace(value)) {
2379  heap->RecordWrite(array->address(), offset);
2380  }
2381 }
2382 
2383 
2385  int index,
2386  Object* value) {
2387  ASSERT(array->map() != array->GetHeap()->fixed_cow_array_map());
2388  ASSERT(index >= 0 && index < array->length());
2389  ASSERT(!array->GetHeap()->InNewSpace(value));
2390  WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
2391 }
2392 
2393 
// Stores the undefined sentinel at index. No write barrier is needed:
// undefined is never allocated in new space (asserted below).
void FixedArray::set_undefined(int index) {
  ASSERT(map() != GetHeap()->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!GetHeap()->InNewSpace(GetHeap()->undefined_value()));
  WRITE_FIELD(this,
              kHeaderSize + index * kPointerSize,
              GetHeap()->undefined_value());
}
2402 
2403 
// Stores the null sentinel at index. No write barrier is needed: null is
// never allocated in new space (asserted below).
// NOTE(review): unlike set_undefined/set_the_hole this does not assert
// map() != fixed_cow_array_map() -- confirm whether that is intentional.
void FixedArray::set_null(int index) {
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!GetHeap()->InNewSpace(GetHeap()->null_value()));
  WRITE_FIELD(this,
              kHeaderSize + index * kPointerSize,
              GetHeap()->null_value());
}
2411 
2412 
// Stores the hole sentinel at index. No write barrier is needed: the hole
// is never allocated in new space (asserted below).
void FixedArray::set_the_hole(int index) {
  ASSERT(map() != GetHeap()->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!GetHeap()->InNewSpace(GetHeap()->the_hole_value()));
  WRITE_FIELD(this,
              kHeaderSize + index * kPointerSize,
              GetHeap()->the_hole_value());
}
2421 
2422 
2424  return reinterpret_cast<double*>(FIELD_ADDR(this, kHeaderSize));
2425 }
2426 
2427 
2429  return HeapObject::RawField(this, kHeaderSize);
2430 }
2431 
2432 
2434  ASSERT(length() >= kFirstIndex ||
2435  this == GetHeap()->empty_descriptor_array());
2436  return length() < kFirstIndex;
2437 }
2438 
2439 
// Writes the descriptor count as a Smi into the dedicated length slot.
void DescriptorArray::SetNumberOfDescriptors(int number_of_descriptors) {
  WRITE_FIELD(
      this, kDescriptorLengthOffset, Smi::FromInt(number_of_descriptors));
}
2444 
2445 
// Perform a binary search in a fixed array. Low and high are entry indices. If
// there are three entries in this array it should be called with low=0 and
// high=2.
template<SearchMode search_mode, typename T>
int BinarySearch(T* array, Name* name, int low, int high, int valid_entries) {
  uint32_t hash = name->Hash();
  int limit = high;

  ASSERT(low <= high);

  // Binary-search for the first sorted slot whose key hash is >= hash.
  while (low != high) {
    int mid = (low + high) / 2;
    Name* mid_name = array->GetSortedKey(mid);
    uint32_t mid_hash = mid_name->Hash();

    if (mid_hash >= hash) {
      high = mid;
    } else {
      low = mid + 1;
    }
  }

  // Several distinct names may share one hash value, so scan forward
  // through the run of equal hashes looking for an exact name match.
  for (; low <= limit; ++low) {
    int sort_index = array->GetSortedKeyIndex(low);
    Name* entry = array->GetKey(sort_index);
    if (entry->Hash() != hash) break;
    if (entry->Equals(name)) {
      // In VALID_ENTRIES mode a match outside the valid range counts as
      // not found.
      if (search_mode == ALL_ENTRIES || sort_index < valid_entries) {
        return sort_index;
      }
      return T::kNotFound;
    }
  }

  return T::kNotFound;
}
2482 
2483 
2484 // Perform a linear search in this fixed array. len is the number of entry
2485 // indices that are valid.
2486 template<SearchMode search_mode, typename T>
2487 int LinearSearch(T* array, Name* name, int len, int valid_entries) {
2488  uint32_t hash = name->Hash();
2489  if (search_mode == ALL_ENTRIES) {
2490  for (int number = 0; number < len; number++) {
2491  int sorted_index = array->GetSortedKeyIndex(number);
2492  Name* entry = array->GetKey(sorted_index);
2493  uint32_t current_hash = entry->Hash();
2494  if (current_hash > hash) break;
2495  if (current_hash == hash && entry->Equals(name)) return sorted_index;
2496  }
2497  } else {
2498  ASSERT(len >= valid_entries);
2499  for (int number = 0; number < valid_entries; number++) {
2500  Name* entry = array->GetKey(number);
2501  uint32_t current_hash = entry->Hash();
2502  if (current_hash == hash && entry->Equals(name)) return number;
2503  }
2504  }
2505  return T::kNotFound;
2506 }
2507 
2508 
2509 template<SearchMode search_mode, typename T>
2510 int Search(T* array, Name* name, int valid_entries) {
2511  if (search_mode == VALID_ENTRIES) {
2512  SLOW_ASSERT(array->IsSortedNoDuplicates(valid_entries));
2513  } else {
2514  SLOW_ASSERT(array->IsSortedNoDuplicates());
2515  }
2516 
2517  int nof = array->number_of_entries();
2518  if (nof == 0) return T::kNotFound;
2519 
2520  // Fast case: do linear search for small arrays.
2521  const int kMaxElementsForLinearSearch = 8;
2522  if ((search_mode == ALL_ENTRIES &&
2523  nof <= kMaxElementsForLinearSearch) ||
2524  (search_mode == VALID_ENTRIES &&
2525  valid_entries <= (kMaxElementsForLinearSearch * 3))) {
2526  return LinearSearch<search_mode>(array, name, nof, valid_entries);
2527  }
2528 
2529  // Slow case: perform binary search.
2530  return BinarySearch<search_mode>(array, name, 0, nof - 1, valid_entries);
2531 }
2532 
2533 
// Searches only the first valid_descriptors (own) descriptors of the map.
int DescriptorArray::Search(Name* name, int valid_descriptors) {
  return internal::Search<VALID_ENTRIES>(this, name, valid_descriptors);
}
2537 
2538 
2539 int DescriptorArray::SearchWithCache(Name* name, Map* map) {
2540  int number_of_own_descriptors = map->NumberOfOwnDescriptors();
2541  if (number_of_own_descriptors == 0) return kNotFound;
2542 
2543  DescriptorLookupCache* cache = GetIsolate()->descriptor_lookup_cache();
2544  int number = cache->Lookup(map, name);
2545 
2546  if (number == DescriptorLookupCache::kAbsent) {
2547  number = Search(name, number_of_own_descriptors);
2548  cache->Update(map, name, number);
2549  }
2550 
2551  return number;
2552 }
2553 
2554 
2556  return instance_descriptors()->GetDetails(LastAdded());
2557 }
2558 
2559 
2561  Name* name,
2562  LookupResult* result) {
2563  DescriptorArray* descriptors = this->instance_descriptors();
2564  int number = descriptors->SearchWithCache(name, this);
2565  if (number == DescriptorArray::kNotFound) return result->NotFound();
2566  result->DescriptorResult(holder, descriptors->GetDetails(number), number);
2567 }
2568 
2569 
2571  Name* name,
2572  LookupResult* result) {
2573  if (HasTransitionArray()) {
2574  TransitionArray* transition_array = transitions();
2575  int number = transition_array->Search(name);
2576  if (number != TransitionArray::kNotFound) {
2577  return result->TransitionResult(
2578  holder, transition_array->GetTarget(number));
2579  }
2580  }
2581  result->NotFound();
2582 }
2583 
2584 
// Returns the raw field slot holding the key of the given descriptor.
Object** DescriptorArray::GetKeySlot(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return RawFieldOfElementAt(ToKeyIndex(descriptor_number));
}
2589 
2590 
2592  return GetKeySlot(descriptor_number);
2593 }
2594 
2595 
2597  return GetValueSlot(descriptor_number - 1) + 1;
2598 }
2599 
2600 
// Returns the key (a Name) of the given descriptor.
Name* DescriptorArray::GetKey(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return Name::cast(get(ToKeyIndex(descriptor_number)));
}
2605 
2606 
// The sorted-order index is packed into the descriptor's details word.
int DescriptorArray::GetSortedKeyIndex(int descriptor_number) {
  return GetDetails(descriptor_number).pointer();
}
2610 
2611 
// Returns the key of the descriptor_number-th descriptor in hash-sorted
// order.
Name* DescriptorArray::GetSortedKey(int descriptor_number) {
  return GetKey(GetSortedKeyIndex(descriptor_number));
}
2615 
2616 
// Stores `pointer` as the sorted-order index inside the details word of
// the descriptor at descriptor_index; other detail bits are preserved.
void DescriptorArray::SetSortedKey(int descriptor_index, int pointer) {
  PropertyDetails details = GetDetails(descriptor_index);
  set(ToDetailsIndex(descriptor_index), details.set_pointer(pointer).AsSmi());
}
2621 
2622 
// Rewrites the details word of a descriptor with a new representation,
// leaving the key and value slots untouched.
void DescriptorArray::SetRepresentation(int descriptor_index,
                                        Representation representation) {
  ASSERT(!representation.IsNone());
  PropertyDetails details = GetDetails(descriptor_index);
  set(ToDetailsIndex(descriptor_index),
      details.CopyWithRepresentation(representation).AsSmi());
}
2630 
2631 
2633  int length = number_of_descriptors();
2634  for (int i = 0; i < length; i++) {
2635  SetRepresentation(i, representation);
2636  }
2637 }
2638 
2639 
// Returns the raw field slot holding the value of the given descriptor.
Object** DescriptorArray::GetValueSlot(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return RawFieldOfElementAt(ToValueIndex(descriptor_number));
}
2644 
2645 
// Returns the value slot contents of the given descriptor.
Object* DescriptorArray::GetValue(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return get(ToValueIndex(descriptor_number));
}
2650 
2651 
// Decodes the Smi-encoded details word of the given descriptor.
PropertyDetails DescriptorArray::GetDetails(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  Object* details = get(ToDetailsIndex(descriptor_number));
  return PropertyDetails(Smi::cast(details));
}
2657 
2658 
// Returns the property type encoded in the descriptor's details.
PropertyType DescriptorArray::GetType(int descriptor_number) {
  return GetDetails(descriptor_number).type();
}
2662 
2663 
2664 int DescriptorArray::GetFieldIndex(int descriptor_number) {
2665  ASSERT(GetDetails(descriptor_number).type() == FIELD);
2666  return GetDetails(descriptor_number).field_index();
2667 }
2668 
2669 
// For a constant descriptor the value slot holds the constant itself.
Object* DescriptorArray::GetConstant(int descriptor_number) {
  return GetValue(descriptor_number);
}
2673 
2674 
2676  ASSERT(GetType(descriptor_number) == CALLBACKS);
2677  return GetValue(descriptor_number);
2678 }
2679 
2680 
2682  ASSERT(GetType(descriptor_number) == CALLBACKS);
2683  Foreign* p = Foreign::cast(GetCallbacksObject(descriptor_number));
2684  return reinterpret_cast<AccessorDescriptor*>(p->foreign_address());
2685 }
2686 
2687 
// Copies the (key, value, details) triplet of a descriptor into desc.
void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
  desc->Init(GetKey(descriptor_number),
             GetValue(descriptor_number),
             GetDetails(descriptor_number));
}
2693 
2694 
2695 void DescriptorArray::Set(int descriptor_number,
2696  Descriptor* desc,
2697  const WhitenessWitness&) {
2698  // Range check.
2699  ASSERT(descriptor_number < number_of_descriptors());
2700 
2702  ToKeyIndex(descriptor_number),
2703  desc->GetKey());
2705  ToValueIndex(descriptor_number),
2706  desc->GetValue());
2708  ToDetailsIndex(descriptor_number),
2709  desc->GetDetails().AsSmi());
2710 }
2711 
2712 
2713 void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
2714  // Range check.
2715  ASSERT(descriptor_number < number_of_descriptors());
2716 
2717  set(ToKeyIndex(descriptor_number), desc->GetKey());
2718  set(ToValueIndex(descriptor_number), desc->GetValue());
2719  set(ToDetailsIndex(descriptor_number), desc->GetDetails().AsSmi());
2720 }
2721 
2722 
// Appends desc at the end of the array (writes go through the
// WhitenessWitness overload of Set) and then inserts the new slot into the
// hash-sorted key ordering.
void DescriptorArray::Append(Descriptor* desc,
                             const WhitenessWitness& witness) {
  int descriptor_number = number_of_descriptors();
  SetNumberOfDescriptors(descriptor_number + 1);
  Set(descriptor_number, desc, witness);

  uint32_t hash = desc->GetKey()->Hash();

  int insertion;

  // Insertion sort by hash: shift sorted slots with a larger hash one
  // position right until the new key's position is found.
  for (insertion = descriptor_number; insertion > 0; --insertion) {
    Name* key = GetSortedKey(insertion - 1);
    if (key->Hash() <= hash) break;
    SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
  }

  SetSortedKey(insertion, descriptor_number);
}
2741 
2742 
// Appends desc at the end of the array and inserts the new slot into the
// hash-sorted key ordering. Same algorithm as the WhitenessWitness
// overload, but uses the plain write-barriered Set().
void DescriptorArray::Append(Descriptor* desc) {
  int descriptor_number = number_of_descriptors();
  SetNumberOfDescriptors(descriptor_number + 1);
  Set(descriptor_number, desc);

  uint32_t hash = desc->GetKey()->Hash();

  int insertion;

  // Insertion sort by hash: shift sorted slots with a larger hash one
  // position right until the new key's position is found.
  for (insertion = descriptor_number; insertion > 0; --insertion) {
    Name* key = GetSortedKey(insertion - 1);
    if (key->Hash() <= hash) break;
    SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
  }

  SetSortedKey(insertion, descriptor_number);
}
2760 
2761 
2762 void DescriptorArray::SwapSortedKeys(int first, int second) {
2763  int first_key = GetSortedKeyIndex(first);
2764  SetSortedKey(first, GetSortedKeyIndex(second));
2765  SetSortedKey(second, first_key);
2766 }
2767 
2768 
2770  : marking_(array->GetHeap()->incremental_marking()) {
2771  marking_->EnterNoMarkingScope();
2772  ASSERT(!marking_->IsMarking() ||
2773  Marking::Color(array) == Marking::WHITE_OBJECT);
2774 }
2775 
2776 
2778  marking_->LeaveNoMarkingScope();
2779 }
2780 
2781 
2782 template<typename Shape, typename Key>
2783 int HashTable<Shape, Key>::ComputeCapacity(int at_least_space_for) {
2784  const int kMinCapacity = 32;
2785  int capacity = RoundUpToPowerOf2(at_least_space_for * 2);
2786  if (capacity < kMinCapacity) {
2787  capacity = kMinCapacity; // Guarantee min capacity.
2788  }
2789  return capacity;
2790 }
2791 
2792 
2793 template<typename Shape, typename Key>
2795  return FindEntry(GetIsolate(), key);
2796 }
2797 
2798 
2799 // Find entry for key otherwise return kNotFound.
2800 template<typename Shape, typename Key>
2802  uint32_t capacity = Capacity();
2803  uint32_t entry = FirstProbe(HashTable<Shape, Key>::Hash(key), capacity);
2804  uint32_t count = 1;
2805  // EnsureCapacity will guarantee the hash table is never full.
2806  while (true) {
2807  Object* element = KeyAt(entry);
2808  // Empty entry. Uses raw unchecked accessors because it is called by the
2809  // string table during bootstrapping.
2810  if (element == isolate->heap()->raw_unchecked_undefined_value()) break;
2811  if (element != isolate->heap()->raw_unchecked_the_hole_value() &&
2812  Shape::IsMatch(key, element)) return entry;
2813  entry = NextProbe(entry, count++, capacity);
2814  }
2815  return kNotFound;
2816 }
2817 
2818 
2820  Object* max_index_object = get(kMaxNumberKeyIndex);
2821  if (!max_index_object->IsSmi()) return false;
2822  return 0 !=
2823  (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
2824 }
2825 
2827  ASSERT(!requires_slow_elements());
2828  Object* max_index_object = get(kMaxNumberKeyIndex);
2829  if (!max_index_object->IsSmi()) return 0;
2830  uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
2831  return value >> kRequiresSlowElementsTagSize;
2832 }
2833 
2835  set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
2836 }
2837 
2838 
2839 // ------------------------------------
2840 // Cast operations
2841 
2842 
2868 CAST_ACCESSOR(Symbol)
2875 CAST_ACCESSOR(Oddball)
2876 CAST_ACCESSOR(Cell)
2877 CAST_ACCESSOR(PropertyCell)
2878 CAST_ACCESSOR(SharedFunctionInfo)
2884 CAST_ACCESSOR(JSBuiltinsObject)
2913 
// Checked downcast: verifies (in debug builds) that the object's instance
// type matches the Traits-specific typed array type, then reinterprets.
template <class Traits>
FixedTypedArray<Traits>* FixedTypedArray<Traits>::cast(Object* object) {
  SLOW_ASSERT(object->IsHeapObject() &&
              HeapObject::cast(object)->map()->instance_type() ==
              Traits::kInstanceType);
  return reinterpret_cast<FixedTypedArray<Traits>*>(object);
}
2921 
2922 
2923 #define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
2925 #undef MAKE_STRUCT_CAST
2926 
2927 
2928 template <typename Shape, typename Key>
2930  ASSERT(obj->IsHashTable());
2931  return reinterpret_cast<HashTable*>(obj);
2932 }
2933 
2934 
2936 SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
2937 
2939 
2940 
// Reads the raw 32-bit hash field.
uint32_t Name::hash_field() {
  return READ_UINT32_FIELD(this, kHashFieldOffset);
}
2944 
2945 
// Writes the 32-bit hash field; on 64-bit hosts the adjacent 32 bits are
// zeroed as well so the full pointer-sized slot is fully initialized.
void Name::set_hash_field(uint32_t value) {
  WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
#if V8_HOST_ARCH_64_BIT
  WRITE_UINT32_FIELD(this, kHashFieldOffset + kIntSize, 0);
#endif
}
2952 
2953 
2954 bool Name::Equals(Name* other) {
2955  if (other == this) return true;
2956  if ((this->IsInternalizedString() && other->IsInternalizedString()) ||
2957  this->IsSymbol() || other->IsSymbol()) {
2958  return false;
2959  }
2960  return String::cast(this)->SlowEquals(String::cast(other));
2961 }
2962 
2963 
2964 ACCESSORS(Symbol, name, Object, kNameOffset)
2965 ACCESSORS(Symbol, flags, Smi, kFlagsOffset)
2966 BOOL_ACCESSORS(Symbol, flags, is_private, kPrivateBit)
2967 
2968 
2969 bool String::Equals(String* other) {
2970  if (other == this) return true;
2971  if (this->IsInternalizedString() && other->IsInternalizedString()) {
2972  return false;
2973  }
2974  return SlowEquals(other);
2975 }
2976 
2977 
// Attempts to flatten this string: non-cons strings are returned as-is, a
// cons whose second part is empty returns its first part, otherwise the
// slow path is taken (which may fail, hence the MaybeObject return).
MaybeObject* String::TryFlatten(PretenureFlag pretenure) {
  if (!StringShape(this).IsCons()) return this;
  ConsString* cons = ConsString::cast(this);
  if (cons->IsFlat()) return cons->first();
  return SlowTryFlatten(pretenure);
}
2984 
2985 
2987  MaybeObject* flat = TryFlatten(pretenure);
2988  Object* successfully_flattened;
2989  if (!flat->ToObject(&successfully_flattened)) return this;
2990  return String::cast(successfully_flattened);
2991 }
2992 
2993 
2994 uint16_t String::Get(int index) {
2995  ASSERT(index >= 0 && index < length());
2996  switch (StringShape(this).full_representation_tag()) {
2998  return SeqOneByteString::cast(this)->SeqOneByteStringGet(index);
3000  return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
3003  return ConsString::cast(this)->ConsStringGet(index);
3010  return SlicedString::cast(this)->SlicedStringGet(index);
3011  default:
3012  break;
3013  }
3014 
3015  UNREACHABLE();
3016  return 0;
3017 }
3018 
3019 
// Writes one character into a sequential string, dispatching on the
// one-byte/two-byte representation.
void String::Set(int index, uint16_t value) {
  ASSERT(index >= 0 && index < length());
  ASSERT(StringShape(this).IsSequential());

  return this->IsOneByteRepresentation()
      ? SeqOneByteString::cast(this)->SeqOneByteStringSet(index, value)
      : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
}
3028 
3029 
3031  if (!StringShape(this).IsCons()) return true;
3032  return ConsString::cast(this)->second()->length() == 0;
3033 }
3034 
3035 
3037  // Giving direct access to underlying string only makes sense if the
3038  // wrapping string is already flattened.
3039  ASSERT(this->IsFlat());
3040  ASSERT(StringShape(this).IsIndirect());
3042  const int kUnderlyingOffset = SlicedString::kParentOffset;
3043  return String::cast(READ_FIELD(this, kUnderlyingOffset));
3044 }
3045 
3046 
3047 template<class Visitor, class ConsOp>
3049  String* string,
3050  unsigned offset,
3051  Visitor& visitor,
3052  ConsOp& cons_op,
3053  int32_t type,
3054  unsigned length) {
3055  ASSERT(length == static_cast<unsigned>(string->length()));
3056  ASSERT(offset <= length);
3057  unsigned slice_offset = offset;
3058  while (true) {
3059  ASSERT(type == string->map()->instance_type());
3060 
3061  switch (type & (kStringRepresentationMask | kStringEncodingMask)) {
3063  visitor.VisitOneByteString(
3064  SeqOneByteString::cast(string)->GetChars() + slice_offset,
3065  length - offset);
3066  return;
3067 
3069  visitor.VisitTwoByteString(
3070  SeqTwoByteString::cast(string)->GetChars() + slice_offset,
3071  length - offset);
3072  return;
3073 
3075  visitor.VisitOneByteString(
3076  ExternalAsciiString::cast(string)->GetChars() + slice_offset,
3077  length - offset);
3078  return;
3079 
3081  visitor.VisitTwoByteString(
3082  ExternalTwoByteString::cast(string)->GetChars() + slice_offset,
3083  length - offset);
3084  return;
3085 
3088  SlicedString* slicedString = SlicedString::cast(string);
3089  slice_offset += slicedString->offset();
3090  string = slicedString->parent();
3091  type = string->map()->instance_type();
3092  continue;
3093  }
3094 
3097  string = cons_op.Operate(string, &offset, &type, &length);
3098  if (string == NULL) return;
3099  slice_offset = offset;
3100  ASSERT(length == static_cast<unsigned>(string->length()));
3101  continue;
3102 
3103  default:
3104  UNREACHABLE();
3105  return;
3106  }
3107  }
3108 }
3109 
3110 
3111 // TODO(dcarney): Remove this class after conversion to VisitFlat.
3113  public:
3114  inline ConsStringCaptureOp() : cons_string_(NULL) {}
3115  inline String* Operate(String* string, unsigned*, int32_t*, unsigned*) {
3116  cons_string_ = ConsString::cast(string);
3117  return NULL;
3118  }
3120 };
3121 
3122 
3123 template<class Visitor>
3124 ConsString* String::VisitFlat(Visitor* visitor,
3125  String* string,
3126  int offset,
3127  int length,
3128  int32_t type) {
3129  ASSERT(length >= 0 && length == string->length());
3130  ASSERT(offset >= 0 && offset <= length);
3132  Visit(string, offset, *visitor, op, type, static_cast<unsigned>(length));
3133  return op.cons_string_;
3134 }
3135 
3136 
3138  ASSERT(index >= 0 && index < length());
3139  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
3140 }
3141 
3142 
3144  ASSERT(index >= 0 && index < length() && value <= kMaxOneByteCharCode);
3145  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
3146  static_cast<byte>(value));
3147 }
3148 
3149 
3151  return FIELD_ADDR(this, kHeaderSize);
3152 }
3153 
3154 
3156  return reinterpret_cast<uint8_t*>(GetCharsAddress());
3157 }
3158 
3159 
3161  return FIELD_ADDR(this, kHeaderSize);
3162 }
3163 
3164 
3166  return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
3167 }
3168 
3169 
3171  ASSERT(index >= 0 && index < length());
3172  return READ_SHORT_FIELD(this, kHeaderSize + index * kShortSize);
3173 }
3174 
3175 
3177  ASSERT(index >= 0 && index < length());
3178  WRITE_SHORT_FIELD(this, kHeaderSize + index * kShortSize, value);
3179 }
3180 
3181 
3183  return SizeFor(length());
3184 }
3185 
3186 
3188  return SizeFor(length());
3189 }
3190 
3191 
3193  return String::cast(READ_FIELD(this, kParentOffset));
3194 }
3195 
3196 
3198  ASSERT(parent->IsSeqString() || parent->IsExternalString());
3199  WRITE_FIELD(this, kParentOffset, parent);
3200  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kParentOffset, parent, mode);
3201 }
3202 
3203 
3204 SMI_ACCESSORS(SlicedString, offset, kOffsetOffset)
3205 
3206 
// Returns the first (left) component of this cons string.
String* ConsString::first() {
  return String::cast(READ_FIELD(this, kFirstOffset));
}
3210 
3211 
3213  return READ_FIELD(this, kFirstOffset);
3214 }
3215 
3216 
3218  WRITE_FIELD(this, kFirstOffset, value);
3219  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode);
3220 }
3221 
3222 
3224  return String::cast(READ_FIELD(this, kSecondOffset));
3225 }
3226 
3227 
3229  return READ_FIELD(this, kSecondOffset);
3230 }
3231 
3232 
3234  WRITE_FIELD(this, kSecondOffset, value);
3235  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode);
3236 }
3237 
3238 
3240  InstanceType type = map()->instance_type();
3242 }
3243 
3244 
3246  return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
3247 }
3248 
3249 
3251  if (is_short()) return;
3252  const char** data_field =
3253  reinterpret_cast<const char**>(FIELD_ADDR(this, kResourceDataOffset));
3254  *data_field = resource()->data();
3255 }
3256 
3257 
3259  const ExternalAsciiString::Resource* resource) {
3260  ASSERT(IsAligned(reinterpret_cast<intptr_t>(resource), kPointerSize));
3261  *reinterpret_cast<const Resource**>(
3262  FIELD_ADDR(this, kResourceOffset)) = resource;
3263  if (resource != NULL) update_data_cache();
3264 }
3265 
3266 
3268  return reinterpret_cast<const uint8_t*>(resource()->data());
3269 }
3270 
3271 
3273  ASSERT(index >= 0 && index < length());
3274  return GetChars()[index];
3275 }
3276 
3277 
3279  return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
3280 }
3281 
3282 
3284  if (is_short()) return;
3285  const uint16_t** data_field =
3286  reinterpret_cast<const uint16_t**>(FIELD_ADDR(this, kResourceDataOffset));
3287  *data_field = resource()->data();
3288 }
3289 
3290 
3292  const ExternalTwoByteString::Resource* resource) {
3293  *reinterpret_cast<const Resource**>(
3294  FIELD_ADDR(this, kResourceOffset)) = resource;
3295  if (resource != NULL) update_data_cache();
3296 }
3297 
3298 
3300  return resource()->data();
3301 }
3302 
3303 
3305  ASSERT(index >= 0 && index < length());
3306  return GetChars()[index];
3307 }
3308 
3309 
3311  unsigned start) {
3312  return GetChars() + start;
3313 }
3314 
3315 
// Null iteration strategy: always reports that there is no further string.
String* ConsStringNullOp::Operate(String*, unsigned*, int32_t*, unsigned*) {
  return NULL;
}
3319 
3320 
// Maps a depth onto its slot in the fixed-size circular frame buffer.
unsigned ConsStringIteratorOp::OffsetForDepth(unsigned depth) {
  return depth & kDepthMask;
}
3324 
3325 
// Pushes a cons string onto the circular frame buffer, growing the depth.
void ConsStringIteratorOp::PushLeft(ConsString* string) {
  frames_[depth_++ & kDepthMask] = string;
}
3329 
3330 
// Replaces the current top frame without changing the depth.
void ConsStringIteratorOp::PushRight(ConsString* string) {
  // Inplace update.
  frames_[(depth_-1) & kDepthMask] = string;
}
3335 
3336 
// Tracks the deepest stack depth reached during iteration.
void ConsStringIteratorOp::AdjustMaximumDepth() {
  if (depth_ > maximum_depth_) maximum_depth_ = depth_;
}
3340 
3341 
// Pops the top frame; the slot itself need not be cleared.
void ConsStringIteratorOp::Pop() {
  ASSERT(depth_ > 0);
  ASSERT(depth_ <= maximum_depth_);
  depth_--;
}
3347 
3348 
3350  return depth_ != 0;
3351 }
3352 
3353 
3355  depth_ = 0;
3356 }
3357 
3358 
3360  unsigned* length_out) {
3361  bool blew_stack = false;
3362  String* string = NextLeaf(&blew_stack, type_out, length_out);
3363  // String found.
3364  if (string != NULL) {
3365  // Verify output.
3366  ASSERT(*length_out == static_cast<unsigned>(string->length()));
3367  ASSERT(*type_out == string->map()->instance_type());
3368  return string;
3369  }
3370  // Traversal complete.
3371  if (!blew_stack) return NULL;
3372  // Restart search from root.
3373  unsigned offset_out;
3374  string = Search(&offset_out, type_out, length_out);
3375  // Verify output.
3376  ASSERT(string == NULL || offset_out == 0);
3377  ASSERT(string == NULL ||
3378  *length_out == static_cast<unsigned>(string->length()));
3379  ASSERT(string == NULL || *type_out == string->map()->instance_type());
3380  return string;
3381 }
3382 
3383 
3385  ASSERT(buffer8_ != NULL && end_ != NULL);
3386  // Advance cursor if needed.
3387  // TODO(dcarney): Ensure uses of the api call HasMore first and avoid this.
3388  if (buffer8_ == end_) HasMore();
3389  ASSERT(buffer8_ < end_);
3390  return is_one_byte_ ? *buffer8_++ : *buffer16_++;
3391 }
3392 
3393 
3396  unsigned offset)
3397  : is_one_byte_(false),
3398  op_(op) {
3399  Reset(string, offset);
3400 }
3401 
3402 
// Repositions the stream at `offset` within `string`: resets the cons
// string iterator and lets String::Visit refill the buffer window (the
// Visit callback sets buffer8_/buffer16_ and end_).
void StringCharacterStream::Reset(String* string, unsigned offset) {
  op_->Reset();
  buffer8_ = NULL;
  end_ = NULL;
  int32_t type = string->map()->instance_type();
  unsigned length = string->length();
  String::Visit(string, offset, *this, *op_, type, length);
}
3411 
3412 
3414  if (buffer8_ != end_) return true;
3415  if (!op_->HasMore()) return false;
3416  unsigned length;
3417  int32_t type;
3418  String* string = op_->ContinueOperation(&type, &length);
3419  if (string == NULL) return false;
3420  ASSERT(!string->IsConsString());
3421  ASSERT(string->length() != 0);
3422  ConsStringNullOp null_op;
3423  String::Visit(string, 0, *this, null_op, type, length);
3424  ASSERT(buffer8_ != end_);
3425  return true;
3426 }
3427 
3428 
3430  const uint8_t* chars, unsigned length) {
3431  is_one_byte_ = true;
3432  buffer8_ = chars;
3433  end_ = chars + length;
3434 }
3435 
3436 
3438  const uint16_t* chars, unsigned length) {
3439  is_one_byte_ = false;
3440  buffer16_ = chars;
3441  end_ = reinterpret_cast<const uint8_t*>(chars + length);
3442 }
3443 
3444 
3448 }
3449 
3450 
3452  int cache_size = size();
3453  Object** entries_start = RawFieldOfElementAt(kEntriesIndex);
3454  MemsetPointer(entries_start,
3455  GetHeap()->the_hole_value(),
3456  cache_size - kEntriesIndex);
3457  MakeZeroSize();
3458 }
3459 
3460 
3462  return Smi::cast(get(kCacheSizeIndex))->value();
3463 }
3464 
3465 
3468 }
3469 
3470 
3472  return Smi::cast(get(kFingerIndex))->value();
3473 }
3474 
3475 
3477  set(kFingerIndex, Smi::FromInt(finger_index));
3478 }
3479 
3480 
// Reads the byte at index.
byte ByteArray::get(int index) {
  ASSERT(index >= 0 && index < this->length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}
3485 
3486 
// Writes the byte at index.
void ByteArray::set(int index, byte value) {
  ASSERT(index >= 0 && index < this->length());
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
}
3491 
3492 
// Reads the index-th int-sized word.
// NOTE(review): the bound check only requires index*kIntSize < length(),
// so the last word read may extend past length() -- confirm callers
// guarantee int-aligned lengths.
int ByteArray::get_int(int index) {
  ASSERT(index >= 0 && (index * kIntSize) < this->length());
  return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
}
3497 
3498 
3500  ASSERT_TAG_ALIGNED(address);
3501  return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
3502 }
3503 
3504 
3506  return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
3507 }
3508 
3509 
3511  return reinterpret_cast<uint8_t*>(external_pointer());
3512 }
3513 
3514 
3516  ASSERT((index >= 0) && (index < this->length()));
3517  uint8_t* ptr = external_uint8_clamped_pointer();
3518  return ptr[index];
3519 }
3520 
3521 
// Boxes the element as a Smi (uint8 always fits).
MaybeObject* ExternalUint8ClampedArray::get(int index) {
  return Smi::FromInt(static_cast<int>(get_scalar(index)));
}
3525 
3526 
// Stores a (pre-clamped) byte into the external backing store.
void ExternalUint8ClampedArray::set(int index, uint8_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  uint8_t* ptr = external_uint8_clamped_pointer();
  ptr[index] = value;
}
3532 
3533 
// Returns the off-heap backing-store pointer (stored as an intptr field).
void* ExternalArray::external_pointer() {
  intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
  return reinterpret_cast<void*>(ptr);
}
3538 
3539 
3540 void ExternalArray::set_external_pointer(void* value, WriteBarrierMode mode) {
3541  intptr_t ptr = reinterpret_cast<intptr_t>(value);
3543 }
3544 
3545 
3547  ASSERT((index >= 0) && (index < this->length()));
3548  int8_t* ptr = static_cast<int8_t*>(external_pointer());
3549  return ptr[index];
3550 }
3551 
3552 
// Boxes the element as a Smi (int8 always fits).
MaybeObject* ExternalInt8Array::get(int index) {
  return Smi::FromInt(static_cast<int>(get_scalar(index)));
}
3556 
3557 
// Stores an int8 into the external backing store.
void ExternalInt8Array::set(int index, int8_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  int8_t* ptr = static_cast<int8_t*>(external_pointer());
  ptr[index] = value;
}
3563 
3564 
3566  ASSERT((index >= 0) && (index < this->length()));
3567  uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
3568  return ptr[index];
3569 }
3570 
3571 
// Boxes the element as a Smi (uint8 always fits).
MaybeObject* ExternalUint8Array::get(int index) {
  return Smi::FromInt(static_cast<int>(get_scalar(index)));
}
3575 
3576 
3577 void ExternalUint8Array::set(int index, uint8_t value) {
3578  ASSERT((index >= 0) && (index < this->length()));
3579  uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
3580  ptr[index] = value;
3581 }
3582 
3583 
3585  ASSERT((index >= 0) && (index < this->length()));
3586  int16_t* ptr = static_cast<int16_t*>(external_pointer());
3587  return ptr[index];
3588 }
3589 
3590 
3591 MaybeObject* ExternalInt16Array::get(int index) {
3592  return Smi::FromInt(static_cast<int>(get_scalar(index)));
3593 }
3594 
3595 
3596 void ExternalInt16Array::set(int index, int16_t value) {
3597  ASSERT((index >= 0) && (index < this->length()));
3598  int16_t* ptr = static_cast<int16_t*>(external_pointer());
3599  ptr[index] = value;
3600 }
3601 
3602 
3604  ASSERT((index >= 0) && (index < this->length()));
3605  uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
3606  return ptr[index];
3607 }
3608 
3609 
3610 MaybeObject* ExternalUint16Array::get(int index) {
3611  return Smi::FromInt(static_cast<int>(get_scalar(index)));
3612 }
3613 
3614 
3615 void ExternalUint16Array::set(int index, uint16_t value) {
3616  ASSERT((index >= 0) && (index < this->length()));
3617  uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
3618  ptr[index] = value;
3619 }
3620 
3621 
3623  ASSERT((index >= 0) && (index < this->length()));
3624  int32_t* ptr = static_cast<int32_t*>(external_pointer());
3625  return ptr[index];
3626 }
3627 
3628 
3629 MaybeObject* ExternalInt32Array::get(int index) {
3630  return GetHeap()->NumberFromInt32(get_scalar(index));
3631 }
3632 
3633 
3634 void ExternalInt32Array::set(int index, int32_t value) {
3635  ASSERT((index >= 0) && (index < this->length()));
3636  int32_t* ptr = static_cast<int32_t*>(external_pointer());
3637  ptr[index] = value;
3638 }
3639 
3640 
3641 uint32_t ExternalUint32Array::get_scalar(int index) {
3642  ASSERT((index >= 0) && (index < this->length()));
3643  uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
3644  return ptr[index];
3645 }
3646 
3647 
3648 MaybeObject* ExternalUint32Array::get(int index) {
3649  return GetHeap()->NumberFromUint32(get_scalar(index));
3650 }
3651 
3652 
3653 void ExternalUint32Array::set(int index, uint32_t value) {
3654  ASSERT((index >= 0) && (index < this->length()));
3655  uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
3656  ptr[index] = value;
3657 }
3658 
3659 
3661  ASSERT((index >= 0) && (index < this->length()));
3662  float* ptr = static_cast<float*>(external_pointer());
3663  return ptr[index];
3664 }
3665 
3666 
3667 MaybeObject* ExternalFloat32Array::get(int index) {
3668  return GetHeap()->NumberFromDouble(get_scalar(index));
3669 }
3670 
3671 
3672 void ExternalFloat32Array::set(int index, float value) {
3673  ASSERT((index >= 0) && (index < this->length()));
3674  float* ptr = static_cast<float*>(external_pointer());
3675  ptr[index] = value;
3676 }
3677 
3678 
3680  ASSERT((index >= 0) && (index < this->length()));
3681  double* ptr = static_cast<double*>(external_pointer());
3682  return ptr[index];
3683 }
3684 
3685 
3686 MaybeObject* ExternalFloat64Array::get(int index) {
3687  return GetHeap()->NumberFromDouble(get_scalar(index));
3688 }
3689 
3690 
3691 void ExternalFloat64Array::set(int index, double value) {
3692  ASSERT((index >= 0) && (index < this->length()));
3693  double* ptr = static_cast<double*>(external_pointer());
3694  ptr[index] = value;
3695 }
3696 
3697 
3699  return FIELD_ADDR(this, kDataOffset);
3700 }
3701 
3702 
3704  InstanceType instance_type = map()->instance_type();
3705  int element_size;
3706  switch (instance_type) {
3707 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
3708  case FIXED_##TYPE##_ARRAY_TYPE: \
3709  element_size = size; \
3710  break;
3711 
3713 #undef TYPED_ARRAY_CASE
3714  default:
3715  UNREACHABLE();
3716  return 0;
3717  }
3718  return length() * element_size;
3719 }
3720 
3721 
3724 }
3725 
3726 
// Default element values used when a stored value cannot be converted
// (see FixedTypedArray<Traits>::SetValue): zero for all integral element
// kinds, NaN for the floating point kinds.
uint8_t Uint8ArrayTraits::defaultValue() { return 0; }


uint8_t Uint8ClampedArrayTraits::defaultValue() { return 0; }


int8_t Int8ArrayTraits::defaultValue() { return 0; }


uint16_t Uint16ArrayTraits::defaultValue() { return 0; }


int16_t Int16ArrayTraits::defaultValue() { return 0; }


uint32_t Uint32ArrayTraits::defaultValue() { return 0; }


int32_t Int32ArrayTraits::defaultValue() { return 0; }


float Float32ArrayTraits::defaultValue() {
  return static_cast<float>(OS::nan_value());
}


double Float64ArrayTraits::defaultValue() { return OS::nan_value(); }
3754 
3755 
3756 template <class Traits>
3757 typename Traits::ElementType FixedTypedArray<Traits>::get_scalar(int index) {
3758  ASSERT((index >= 0) && (index < this->length()));
3759  ElementType* ptr = reinterpret_cast<ElementType*>(
3760  FIELD_ADDR(this, kDataOffset));
3761  return ptr[index];
3762 }
3763 
3764 
3765 template<> inline
3768  ASSERT((index >= 0) && (index < this->length()));
3769  return READ_DOUBLE_FIELD(this, ElementOffset(index));
3770 }
3771 
3772 
3773 template <class Traits>
3775  ASSERT((index >= 0) && (index < this->length()));
3776  ElementType* ptr = reinterpret_cast<ElementType*>(
3777  FIELD_ADDR(this, kDataOffset));
3778  ptr[index] = value;
3779 }
3780 
3781 
3782 template<> inline
3784  int index, Float64ArrayTraits::ElementType value) {
3785  ASSERT((index >= 0) && (index < this->length()));
3786  WRITE_DOUBLE_FIELD(this, ElementOffset(index), value);
3787 }
3788 
3789 
3790 template <class Traits>
3791 typename Traits::ElementType FixedTypedArray<Traits>::from_int(int value) {
3792  return static_cast<ElementType>(value);
3793 }
3794 
3795 
3796 template <> inline
3798  if (value < 0) return 0;
3799  if (value > 0xFF) return 0xFF;
3800  return static_cast<uint8_t>(value);
3801 }
3802 
3803 
3804 template <class Traits>
3805 typename Traits::ElementType FixedTypedArray<Traits>::from_double(
3806  double value) {
3807  return static_cast<ElementType>(DoubleToInt32(value));
3808 }
3809 
3810 
3811 template<> inline
3813  if (value < 0) return 0;
3814  if (value > 0xFF) return 0xFF;
3815  return static_cast<uint8_t>(lrint(value));
3816 }
3817 
3818 
3819 template<> inline
3821  return static_cast<float>(value);
3822 }
3823 
3824 
3825 template<> inline
3827  return value;
3828 }
3829 
3830 
3831 template <class Traits>
3832 MaybeObject* FixedTypedArray<Traits>::get(int index) {
3833  return Traits::ToObject(GetHeap(), get_scalar(index));
3834 }
3835 
// Converts |value| (a Smi, HeapNumber, or undefined) to the element type and
// stores it at |index|. Out-of-bounds stores are silently ignored, matching
// typed array semantics. Returns the stored value boxed via the traits class.
template <class Traits>
MaybeObject* FixedTypedArray<Traits>::SetValue(uint32_t index, Object* value) {
  ElementType cast_value = Traits::defaultValue();
  if (index < static_cast<uint32_t>(length())) {
    if (value->IsSmi()) {
      int int_value = Smi::cast(value)->value();
      cast_value = from_int(int_value);
    } else if (value->IsHeapNumber()) {
      double double_value = HeapNumber::cast(value)->value();
      cast_value = from_double(double_value);
    } else {
      // Clamp undefined to the default value. All other types have been
      // converted to a number type further up in the call chain.
      ASSERT(value->IsUndefined());
    }
    set(index, cast_value);
  }
  return Traits::ToObject(GetHeap(), cast_value);
}
3855 
3856 template <class Traits>
3859  uint32_t index,
3860  Handle<Object> value) {
3861  CALL_HEAP_FUNCTION(array->GetIsolate(),
3862  array->SetValue(index, *value),
3863  Object);
3864 }
3865 
3866 
// The 8- and 16-bit element kinds always fit in a Smi, so boxing them never
// allocates and the Heap* argument is intentionally unused.
MaybeObject* Uint8ArrayTraits::ToObject(Heap*, uint8_t scalar) {
  return Smi::FromInt(scalar);
}


MaybeObject* Uint8ClampedArrayTraits::ToObject(Heap*, uint8_t scalar) {
  return Smi::FromInt(scalar);
}


MaybeObject* Int8ArrayTraits::ToObject(Heap*, int8_t scalar) {
  return Smi::FromInt(scalar);
}


MaybeObject* Uint16ArrayTraits::ToObject(Heap*, uint16_t scalar) {
  return Smi::FromInt(scalar);
}


MaybeObject* Int16ArrayTraits::ToObject(Heap*, int16_t scalar) {
  return Smi::FromInt(scalar);
}
3890 
3891 
// 32-bit and floating point element kinds may not fit in a Smi; boxing goes
// through the heap's number constructors and can therefore fail to allocate,
// hence the MaybeObject return.
MaybeObject* Uint32ArrayTraits::ToObject(Heap* heap, uint32_t scalar) {
  return heap->NumberFromUint32(scalar);
}


MaybeObject* Int32ArrayTraits::ToObject(Heap* heap, int32_t scalar) {
  return heap->NumberFromInt32(scalar);
}


MaybeObject* Float32ArrayTraits::ToObject(Heap* heap, float scalar) {
  return heap->NumberFromDouble(scalar);
}


MaybeObject* Float64ArrayTraits::ToObject(Heap* heap, double scalar) {
  return heap->NumberFromDouble(scalar);
}
3910 
3911 
3913  return READ_BYTE_FIELD(this, kVisitorIdOffset);
3914 }
3915 
3916 
3917 void Map::set_visitor_id(int id) {
3918  ASSERT(0 <= id && id < 256);
3919  WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
3920 }
3921 
3922 
3925 }
3926 
3927 
3930 }
3931 
3932 
3935 }
3936 
3937 
3939  // Adjust for the number of properties stored in the object.
3940  index -= inobject_properties();
3941  ASSERT(index < 0);
3942  return instance_size() + (index * kPointerSize);
3943 }
3944 
3945 
3947  int instance_size = map->instance_size();
3948  if (instance_size != kVariableSizeSentinel) return instance_size;
3949  // Only inline the most frequent cases.
3950  int instance_type = static_cast<int>(map->instance_type());
3951  if (instance_type == FIXED_ARRAY_TYPE) {
3952  return FixedArray::BodyDescriptor::SizeOf(map, this);
3953  }
3954  if (instance_type == ASCII_STRING_TYPE ||
3955  instance_type == ASCII_INTERNALIZED_STRING_TYPE) {
3957  reinterpret_cast<SeqOneByteString*>(this)->length());
3958  }
3959  if (instance_type == BYTE_ARRAY_TYPE) {
3960  return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
3961  }
3962  if (instance_type == FREE_SPACE_TYPE) {
3963  return reinterpret_cast<FreeSpace*>(this)->size();
3964  }
3965  if (instance_type == STRING_TYPE ||
3966  instance_type == INTERNALIZED_STRING_TYPE) {
3968  reinterpret_cast<SeqTwoByteString*>(this)->length());
3969  }
3970  if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
3972  reinterpret_cast<FixedDoubleArray*>(this)->length());
3973  }
3974  if (instance_type == CONSTANT_POOL_ARRAY_TYPE) {
3976  reinterpret_cast<ConstantPoolArray*>(this)->count_of_int64_entries(),
3977  reinterpret_cast<ConstantPoolArray*>(this)->count_of_code_ptr_entries(),
3978  reinterpret_cast<ConstantPoolArray*>(this)->count_of_heap_ptr_entries(),
3979  reinterpret_cast<ConstantPoolArray*>(this)->count_of_int32_entries());
3980  }
3981  if (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
3982  instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE) {
3983  return reinterpret_cast<FixedTypedArrayBase*>(this)->size();
3984  }
3985  ASSERT(instance_type == CODE_TYPE);
3986  return reinterpret_cast<Code*>(this)->CodeSize();
3987 }
3988 
3989 
3990 void Map::set_instance_size(int value) {
3991  ASSERT_EQ(0, value & (kPointerSize - 1));
3992  value >>= kPointerSizeLog2;
3993  ASSERT(0 <= value && value < 256);
3994  WRITE_BYTE_FIELD(this, kInstanceSizeOffset, static_cast<byte>(value));
3995 }
3996 
3997 
3999  ASSERT(0 <= value && value < 256);
4000  WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value));
4001 }
4002 
4003 
4005  ASSERT(0 <= value && value < 256);
4006  WRITE_BYTE_FIELD(this,
4008  static_cast<byte>(value));
4009 }
4010 
4011 
4013  return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
4014 }
4015 
4016 
4018  WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
4019 }
4020 
4021 
4024 }
4025 
4026 
4028  WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
4029 }
4030 
4031 
4033  return READ_BYTE_FIELD(this, kBitFieldOffset);
4034 }
4035 
4036 
4038  WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
4039 }
4040 
4041 
4043  return READ_BYTE_FIELD(this, kBitField2Offset);
4044 }
4045 
4046 
4048  WRITE_BYTE_FIELD(this, kBitField2Offset, value);
4049 }
4050 
4051 
4053  if (value) {
4055  } else {
4057  }
4058 }
4059 
4060 
4062  return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
4063 }
4064 
4065 
4068 }
4069 
4070 
4073 }
4074 
4075 
4076 void Map::set_is_access_check_needed(bool access_check_needed) {
4077  if (access_check_needed) {
4079  } else {
4081  }
4082 }
4083 
4084 
4086  return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
4087 }
4088 
4089 
4090 void Map::set_is_extensible(bool value) {
4091  if (value) {
4093  } else {
4095  }
4096 }
4097 
4099  return ((1 << kIsExtensible) & bit_field2()) != 0;
4100 }
4101 
4102 
4104  if (value) {
4106  } else {
4108  }
4109 }
4110 
4112  return ((1 << kAttachedToSharedFunctionInfo) & bit_field2()) != 0;
4113 }
4114 
4115 
4116 void Map::set_is_shared(bool value) {
4118 }
4119 
4120 
4122  return IsShared::decode(bit_field3()); }
4123 
4124 
// Flags this map as (non-)dictionary mode. Note that the IsUnstable bit is
// updated to the same value in the same write.
void Map::set_dictionary_map(bool value) {
  uint32_t new_bit_field3 = DictionaryMap::update(bit_field3(), value);
  new_bit_field3 = IsUnstable::update(new_bit_field3, value);
  set_bit_field3(new_bit_field3);
}
4130 
4131 
4134 }
4135 
4136 
4138  return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
4139 }
4140 
4141 
4142 void Map::set_owns_descriptors(bool is_shared) {
4144 }
4145 
4146 
4149 }
4150 
4151 
4154 }
4155 
4156 
4159 }
4160 
4161 
4164 }
4165 
4166 
4168  return Deprecated::decode(bit_field3());
4169 }
4170 
4171 
4172 void Map::set_migration_target(bool value) {
4174 }
4175 
4176 
4179 }
4180 
4181 
4182 void Map::freeze() {
4184 }
4185 
4186 
4188  return IsFrozen::decode(bit_field3());
4189 }
4190 
4191 
4194 }
4195 
4196 
4198  return !IsUnstable::decode(bit_field3());
4199 }
4200 
4201 
4203  return code_cache() != GetIsolate()->heap()->empty_fixed_array();
4204 }
4205 
4206 
4208  int descriptor = LastAdded();
4209  for (int i = 0; i <= descriptor; i++) {
4210  PropertyDetails details = instance_descriptors()->GetDetails(i);
4211  if (details.representation().IsNone()) return true;
4212  if (details.representation().IsSmi()) return true;
4213  if (details.representation().IsDouble()) return true;
4214  if (details.representation().IsHeapObject()) return true;
4215  if (details.type() == CONSTANT) return true;
4216  }
4217  return false;
4218 }
4219 
4220 
4222  if (is_stable()) {
4223  mark_unstable();
4224  dependent_code()->DeoptimizeDependentCodeGroup(
4225  GetIsolate(),
4227  }
4228 }
4229 
4230 
4232  return is_stable() && FLAG_omit_map_checks_for_leaf_maps;
4233 }
4234 
4235 
4237  if (length() == 0) return 0;
4238  return Smi::cast(get(group))->value();
4239 }
4240 
4241 
4243  set(group, Smi::FromInt(value));
4244 }
4245 
4246 
4248  return get(kCodesStartIndex + i)->IsCode();
4249 }
4250 
4252  return Code::cast(get(kCodesStartIndex + i));
4253 }
4254 
4255 
4257  return reinterpret_cast<CompilationInfo*>(
4258  Foreign::cast(get(kCodesStartIndex + i))->foreign_address());
4259 }
4260 
4261 
4262 void DependentCode::set_object_at(int i, Object* object) {
4263  set(kCodesStartIndex + i, object);
4264 }
4265 
4266 
4268  return get(kCodesStartIndex + i);
4269 }
4270 
4271 
4273  return RawFieldOfElementAt(kCodesStartIndex + i);
4274 }
4275 
4276 
4278  set_undefined(kCodesStartIndex + i);
4279 }
4280 
4281 
4282 void DependentCode::copy(int from, int to) {
4283  set(kCodesStartIndex + to, get(kCodesStartIndex + from));
4284 }
4285 
4286 
// Makes room for one more entry in |group|: walking the later groups from the
// back, each non-empty group has its first element copied to the slot just
// past its end, which shifts that group right by one slot overall.
void DependentCode::ExtendGroup(DependencyGroup group) {
  GroupStartIndexes starts(this);
  for (int g = kGroupCount - 1; g > group; g--) {
    if (starts.at(g) < starts.at(g + 1)) {
      copy(starts.at(g), starts.at(g + 1));
    }
  }
}
4295 
4296 
4299  WRITE_INT_FIELD(this, kFlagsOffset, flags);
4300 }
4301 
4302 
4304  return ExtractKindFromFlags(flags());
4305 }
4306 
4307 
4310  // Only allow uninitialized or debugger states for non-IC code
4311  // objects. This is used in the debugger to determine whether or not
4312  // a call to code object has been replaced with a debug break call.
4314  result == UNINITIALIZED ||
4315  result == DEBUG_STUB);
4316  return result;
4317 }
4318 
4319 
4323 }
4324 
4325 
4327  return ExtractTypeFromFlags(flags());
4328 }
4329 
4330 
4331 // For initialization.
4334 }
4335 
4336 
4339 }
4340 
4341 
4342 inline bool Code::is_crankshafted() {
4345 }
4346 
4347 
4348 inline void Code::set_is_crankshafted(bool value) {
4349  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4350  int updated = IsCrankshaftedField::update(previous, value);
4352 }
4353 
4354 
4356  ASSERT(has_major_key());
4359 }
4360 
4361 
4362 void Code::set_major_key(int major) {
4363  ASSERT(has_major_key());
4364  ASSERT(0 <= major && major < 256);
4365  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4366  int updated = StubMajorKeyField::update(previous, major);
4368 }
4369 
4370 
4372  return kind() == STUB ||
4373  kind() == HANDLER ||
4374  kind() == BINARY_OP_IC ||
4375  kind() == COMPARE_IC ||
4376  kind() == COMPARE_NIL_IC ||
4377  kind() == LOAD_IC ||
4378  kind() == KEYED_LOAD_IC ||
4379  kind() == STORE_IC ||
4380  kind() == KEYED_STORE_IC ||
4381  kind() == TO_BOOLEAN_IC;
4382 }
4383 
4384 
4386  ASSERT_EQ(FUNCTION, kind());
4387  return READ_BYTE_FIELD(this, kOptimizableOffset) == 1;
4388 }
4389 
4390 
4391 void Code::set_optimizable(bool value) {
4392  ASSERT_EQ(FUNCTION, kind());
4393  WRITE_BYTE_FIELD(this, kOptimizableOffset, value ? 1 : 0);
4394 }
4395 
4396 
4398  ASSERT_EQ(FUNCTION, kind());
4401 }
4402 
4403 
4405  ASSERT_EQ(FUNCTION, kind());
4408  WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
4409 }
4410 
4411 
4413  ASSERT_EQ(FUNCTION, kind());
4416 }
4417 
4418 
4420  ASSERT_EQ(FUNCTION, kind());
4422  flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
4423  WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
4424 }
4425 
4426 
4428  ASSERT_EQ(FUNCTION, kind());
4431 }
4432 
4433 
4435  ASSERT_EQ(FUNCTION, kind());
4437  flags = FullCodeFlagsIsCompiledOptimizable::update(flags, value);
4438  WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
4439 }
4440 
4441 
4443  ASSERT_EQ(FUNCTION, kind());
4445 }
4446 
4447 
4449  ASSERT_EQ(FUNCTION, kind());
4450  ASSERT(level >= 0 && level <= kMaxLoopNestingMarker);
4452 }
4453 
4454 
4456  ASSERT_EQ(FUNCTION, kind());
4457  return READ_BYTE_FIELD(this, kProfilerTicksOffset);
4458 }
4459 
4460 
4461 void Code::set_profiler_ticks(int ticks) {
4462  ASSERT_EQ(FUNCTION, kind());
4463  ASSERT(ticks < 256);
4464  WRITE_BYTE_FIELD(this, kProfilerTicksOffset, ticks);
4465 }
4466 
4467 
4468 unsigned Code::stack_slots() {
4470  return StackSlotsField::decode(
4472 }
4473 
4474 
4475 void Code::set_stack_slots(unsigned slots) {
4476  CHECK(slots <= (1 << kStackSlotsBitCount));
4478  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4479  int updated = StackSlotsField::update(previous, slots);
4481 }
4482 
4483 
4488 }
4489 
4490 
4491 void Code::set_safepoint_table_offset(unsigned offset) {
4492  CHECK(offset <= (1 << kSafepointTableOffsetBitCount));
4494  ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
4495  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4496  int updated = SafepointTableOffsetField::update(previous, offset);
4498 }
4499 
4500 
4502  ASSERT_EQ(FUNCTION, kind());
4505 }
4506 
4507 
4508 void Code::set_back_edge_table_offset(unsigned offset) {
4509  ASSERT_EQ(FUNCTION, kind());
4510  ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
4511  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4512  int updated = BackEdgeTableOffsetField::update(previous, offset);
4514 }
4515 
4516 
4518  ASSERT_EQ(FUNCTION, kind());
4521 }
4522 
4523 
4525  ASSERT_EQ(FUNCTION, kind());
4526  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4527  int updated = BackEdgesPatchedForOSRField::update(previous, value);
4529 }
4530 
4531 
4532 
4534  return extra_ic_state();
4535 }
4536 
4537 
4539  ASSERT(kind() == STUB);
4542 }
4543 
4544 
4546  ASSERT(kind() == STUB);
4547  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4548  int updated = HasFunctionCacheField::update(previous, flag);
4550 }
4551 
4552 
4554  ASSERT(kind() == OPTIMIZED_FUNCTION);
4557 }
4558 
4559 
4561  ASSERT(kind() == OPTIMIZED_FUNCTION);
4562  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4563  int updated = MarkedForDeoptimizationField::update(previous, flag);
4565 }
4566 
4567 
4569  Kind kind = this->kind();
4570  switch (kind) {
4571 #define CASE(name) case name: return true;
4573 #undef CASE
4574  default: return false;
4575  }
4576 }
4577 
4578 
4581 }
4582 
4583 
4585  return ic_state() == DEBUG_STUB;
4586 }
4587 
4588 
4591 }
4592 
4593 
4595  ASSERT(value->IsConstantPoolArray());
4596  WRITE_FIELD(this, kConstantPoolOffset, value);
4597  WRITE_BARRIER(GetHeap(), this, kConstantPoolOffset, value);
4598 }
4599 
4600 
4602  InlineCacheState ic_state,
4603  ExtraICState extra_ic_state,
4604  StubType type,
4605  InlineCacheHolderFlag holder) {
4606  // Compute the bit mask.
4607  unsigned int bits = KindField::encode(kind)
4608  | ICStateField::encode(ic_state)
4609  | TypeField::encode(type)
4610  | ExtraICStateField::encode(extra_ic_state)
4611  | CacheHolderField::encode(holder);
4612  return static_cast<Flags>(bits);
4613 }
4614 
4615 
4617  ExtraICState extra_ic_state,
4618  InlineCacheHolderFlag holder,
4619  StubType type) {
4620  return ComputeFlags(kind, MONOMORPHIC, extra_ic_state, type, holder);
4621 }
4622 
4623 
4625  StubType type,
4626  InlineCacheHolderFlag holder) {
4627  return ComputeFlags(Code::HANDLER, MONOMORPHIC, handler_kind, type, holder);
4628 }
4629 
4630 
4632  return KindField::decode(flags);
4633 }
4634 
4635 
4637  return ICStateField::decode(flags);
4638 }
4639 
4640 
4642  return ExtraICStateField::decode(flags);
4643 }
4644 
4645 
4647  return TypeField::decode(flags);
4648 }
4649 
4650 
4652  return CacheHolderField::decode(flags);
4653 }
4654 
4655 
4657  int bits = flags & ~TypeField::kMask;
4658  return static_cast<Flags>(bits);
4659 }
4660 
4661 
4664  // GetCodeFromTargetAddress might be called when marking objects during mark
4665  // sweep. reinterpret_cast is therefore used instead of the more appropriate
4666  // Code::cast. Code::cast does not work when the object's map is
4667  // marked.
4668  Code* result = reinterpret_cast<Code*>(code);
4669  return result;
4670 }
4671 
4672 
4674  return HeapObject::
4675  FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
4676 }
4677 
4678 
4681  if (object->IsMap()) {
4682  return Map::cast(object)->CanTransition() &&
4683  FLAG_collect_maps &&
4684  FLAG_weak_embedded_maps_in_optimized_code;
4685  }
4686  if (object->IsJSObject() ||
4687  (object->IsCell() && Cell::cast(object)->value()->IsJSObject())) {
4688  return FLAG_weak_embedded_objects_in_optimized_code;
4689  }
4690  return false;
4691 }
4692 
4693 
4695  public:
4696  FindAndReplacePattern() : count_(0) { }
4697  void Add(Handle<Map> map_to_find, Handle<Object> obj_to_replace) {
4698  ASSERT(count_ < kMaxCount);
4699  find_[count_] = map_to_find;
4700  replace_[count_] = obj_to_replace;
4701  ++count_;
4702  }
4703  private:
4704  static const int kMaxCount = 4;
4705  int count_;
4706  Handle<Map> find_[kMaxCount];
4707  Handle<Object> replace_[kMaxCount];
4708  friend class Code;
4709 };
4710 
4711 
// The [[Prototype]] installed on objects created from this map.
Object* Map::prototype() {
  return READ_FIELD(this, kPrototypeOffset);
}


// Sets the prototype; only null or a JSReceiver is accepted. The write
// barrier is emitted conditionally per |mode|.
void Map::set_prototype(Object* value, WriteBarrierMode mode) {
  ASSERT(value->IsNull() || value->IsJSReceiver());
  WRITE_FIELD(this, kPrototypeOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
}
4722 
4723 
4724 // If the descriptor is using the empty transition array, install a new empty
4725 // transition array that will have place for an element transition.
static MaybeObject* EnsureHasTransitionArray(Map* map) {
  TransitionArray* transitions;
  MaybeObject* maybe_transitions;
  if (!map->HasTransitionArray()) {
    // No transition array yet: allocate an empty one and move the map's back
    // pointer into it, since the array now owns that storage.
    maybe_transitions = TransitionArray::Allocate(map->GetIsolate(), 0);
    if (!maybe_transitions->To(&transitions)) return maybe_transitions;
    transitions->set_back_pointer_storage(map->GetBackPointer());
  } else if (!map->transitions()->IsFullTransitionArray()) {
    // A simple transition is installed: widen it to a full transition array.
    maybe_transitions = map->transitions()->ExtendToFullTransitionArray();
    if (!maybe_transitions->To(&transitions)) return maybe_transitions;
  } else {
    // Already a full transition array; nothing to allocate.
    return map;
  }
  map->set_transitions(transitions);
  return transitions;
}
4742 
4743 
4745  int len = descriptors->number_of_descriptors();
4746  set_instance_descriptors(descriptors);
4748 }
4749 
4750 
4751 ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)
4752 
4753 
// Stores the 31 usable bits of |bits| as a Smi in the bit_field3 slot.
void Map::set_bit_field3(uint32_t bits) {
  // Ensure the upper 2 bits have the same value by sign extending it. This is
  // necessary to be able to use the 31st bit.
  // (Shift bit 31 out, then shift back arithmetically so bits 30 and 31 of
  // the stored int agree.)
  int value = bits << 1;
  WRITE_FIELD(this, kBitField3Offset, Smi::FromInt(value >> 1));
}


// Reads bit_field3 back out of its Smi slot.
uint32_t Map::bit_field3() {
  Object* value = READ_FIELD(this, kBitField3Offset);
  return Smi::cast(value)->value();
}
4766 
4767 
4769  Object* back_pointer = GetBackPointer();
4770 
4772  ZapTransitions();
4773  }
4774 
4775  WRITE_FIELD(this, kTransitionsOrBackPointerOffset, back_pointer);
4777  heap, this, kTransitionsOrBackPointerOffset, back_pointer, mode);
4778 }
4779 
4780 
// Appends |desc| to this map's descriptor array and bumps the number of own
// descriptors. Requires that the array currently holds exactly this map's
// own descriptors.
void Map::AppendDescriptor(Descriptor* desc,
                           const DescriptorArray::WhitenessWitness& witness) {
  DescriptorArray* descriptors = instance_descriptors();
  int number_of_own_descriptors = NumberOfOwnDescriptors();
  ASSERT(descriptors->number_of_descriptors() == number_of_own_descriptors);
  descriptors->Append(desc, witness);
  SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);
}
4789 
4790 
4793  if (object->IsDescriptorArray()) {
4794  return TransitionArray::cast(object)->back_pointer_storage();
4795  } else {
4796  ASSERT(object->IsMap() || object->IsUndefined());
4797  return object;
4798  }
4799 }
4800 
4801 
4803  return HasTransitionArray() && transitions()->HasElementsTransition();
4804 }
4805 
4806 
4809  return object->IsTransitionArray();
4810 }
4811 
4812 
4814  int index = transitions()->Search(GetHeap()->elements_transition_symbol());
4815  return transitions()->GetTarget(index);
4816 }
4817 
4818 
4820  if (!HasTransitionArray()) return true;
4821  return FixedArray::SizeFor(transitions()->length() +
4824 }
4825 
4826 
4827 MaybeObject* Map::AddTransition(Name* key,
4828  Map* target,
4830  if (HasTransitionArray()) return transitions()->CopyInsert(key, target);
4831  return TransitionArray::NewWith(flag, key, target, GetBackPointer());
4832 }
4833 
4834 
4835 void Map::SetTransition(int transition_index, Map* target) {
4836  transitions()->SetTarget(transition_index, target);
4837 }
4838 
4839 
4840 Map* Map::GetTransition(int transition_index) {
4841  return transitions()->GetTarget(transition_index);
4842 }
4843 
4844 
// Registers |transitioned_map| as this map's elements-kind transition,
// keyed by the heap's elements_transition_symbol. Returns the transition
// array on success, or the allocation failure from AddTransition.
MaybeObject* Map::set_elements_transition_map(Map* transitioned_map) {
  TransitionArray* transitions;
  MaybeObject* maybe_transitions = AddTransition(
      GetHeap()->elements_transition_symbol(),
      transitioned_map,
      FULL_TRANSITION);
  if (!maybe_transitions->To(&transitions)) return maybe_transitions;
  set_transitions(transitions);
  return transitions;
}
4855 
4856 
4858  if (!HasTransitionArray()) return GetHeap()->empty_fixed_array();
4859  if (!transitions()->HasPrototypeTransitions()) {
4860  return GetHeap()->empty_fixed_array();
4861  }
4862  return transitions()->GetPrototypeTransitions();
4863 }
4864 
4865 
4866 MaybeObject* Map::SetPrototypeTransitions(FixedArray* proto_transitions) {
4867  MaybeObject* allow_prototype = EnsureHasTransitionArray(this);
4868  if (allow_prototype->IsFailure()) return allow_prototype;
4869  int old_number_of_transitions = NumberOfProtoTransitions();
4870 #ifdef DEBUG
4871  if (HasPrototypeTransitions()) {
4872  ASSERT(GetPrototypeTransitions() != proto_transitions);
4874  }
4875 #endif
4876  transitions()->SetPrototypeTransitions(proto_transitions);
4877  SetNumberOfProtoTransitions(old_number_of_transitions);
4878  return this;
4879 }
4880 
4881 
4883  return HasTransitionArray() && transitions()->HasPrototypeTransitions();
4884 }
4885 
4886 
4887 TransitionArray* Map::transitions() {
4890  return TransitionArray::cast(object);
4891 }
4892 
4893 
4894 void Map::set_transitions(TransitionArray* transition_array,
4895  WriteBarrierMode mode) {
4896  // Transition arrays are not shared. When one is replaced, it should not
4897  // keep referenced objects alive, so we zap it.
4898  // When there is another reference to the array somewhere (e.g. a handle),
4899  // not zapping turns from a waste of memory into a source of crashes.
4900  if (HasTransitionArray()) {
4901 #ifdef DEBUG
4902  for (int i = 0; i < transitions()->number_of_transitions(); i++) {
4903  Map* target = transitions()->GetTarget(i);
4904  if (target->instance_descriptors() == instance_descriptors()) {
4905  Name* key = transitions()->GetKey(i);
4906  int new_target_index = transition_array->Search(key);
4907  ASSERT(new_target_index != TransitionArray::kNotFound);
4908  ASSERT(transition_array->GetTarget(new_target_index) == target);
4909  }
4910  }
4911 #endif
4912  ASSERT(transitions() != transition_array);
4913  ZapTransitions();
4914  }
4915 
4916  WRITE_FIELD(this, kTransitionsOrBackPointerOffset, transition_array);
4918  GetHeap(), this, kTransitionsOrBackPointerOffset, transition_array, mode);
4919 }
4920 
4921 
// Seeds the shared transitions/back-pointer slot with undefined during map
// initialization. NOTE(review): no write barrier is emitted here —
// presumably safe because undefined is a root object; confirm if modifying.
void Map::init_back_pointer(Object* undefined) {
  ASSERT(undefined->IsUndefined());
  WRITE_FIELD(this, kTransitionsOrBackPointerOffset, undefined);
}
4926 
4927 
4930  ASSERT((value->IsUndefined() && GetBackPointer()->IsMap()) ||
4931  (value->IsMap() && GetBackPointer()->IsUndefined()));
4933  if (object->IsTransitionArray()) {
4935  } else {
4938  GetHeap(), this, kTransitionsOrBackPointerOffset, value, mode);
4939  }
4940 }
4941 
4942 
4943 // Can either be Smi (no transitions), normal transition array, or a transition
4944 // array with the header overwritten as a Smi (thus iterating).
4946  Object* object = *HeapObject::RawField(this,
4948  TransitionArray* transition_array = static_cast<TransitionArray*>(object);
4949  return transition_array;
4950 }
4951 
4952 
4957 }
4958 
4959 
4960 ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
4961 ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset)
4962 ACCESSORS(Map, constructor, Object, kConstructorOffset)
4963 
4964 ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
4965 ACCESSORS(JSFunction, literals_or_bindings, FixedArray, kLiteralsOffset)
4966 ACCESSORS(JSFunction, next_function_link, Object, kNextFunctionLinkOffset)
4967 
4968 ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
4969 ACCESSORS(GlobalObject, native_context, Context, kNativeContextOffset)
4970 ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
4971 ACCESSORS(GlobalObject, global_receiver, JSObject, kGlobalReceiverOffset)
4972 
4973 ACCESSORS(JSGlobalProxy, native_context, Object, kNativeContextOffset)
4974 
4975 ACCESSORS(AccessorInfo, name, Object, kNameOffset)
4976 ACCESSORS_TO_SMI(AccessorInfo, flag, kFlagOffset)
4977 ACCESSORS(AccessorInfo, expected_receiver_type, Object,
4978  kExpectedReceiverTypeOffset)
4979 
4981  kSerializedDataOffset)
4982 
// Macro-generated accessors for the callback/interceptor info structs
// (DeclaredAccessorInfo through TemplateInfo).
ACCESSORS(DeclaredAccessorInfo, descriptor, DeclaredAccessorDescriptor,
          kDescriptorOffset)

ACCESSORS(ExecutableAccessorInfo, getter, Object, kGetterOffset)
ACCESSORS(ExecutableAccessorInfo, setter, Object, kSetterOffset)
ACCESSORS(ExecutableAccessorInfo, data, Object, kDataOffset)

ACCESSORS(Box, value, Object, kValueOffset)

ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
ACCESSORS(AccessorPair, setter, Object, kSetterOffset)
ACCESSORS_TO_SMI(AccessorPair, access_flags, kAccessFlagsOffset)

ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)

ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
ACCESSORS(InterceptorInfo, data, Object, kDataOffset)

ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)

ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
ACCESSORS(TemplateInfo, property_accessors, Object, kPropertyAccessorsOffset)
// Macro-generated accessors for FunctionTemplateInfo, ObjectTemplateInfo,
// SignatureInfo and TypeSwitchInfo.
ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
          kPrototypeTemplateOffset)
ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
          kNamedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
          kIndexedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, instance_template, Object,
          kInstanceTemplateOffset)
ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
          kInstanceCallHandlerOffset)
ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
          kAccessCheckInfoOffset)
// flag is a packed smi of boolean bits; see the BOOL_ACCESSORS below.
ACCESSORS_TO_SMI(FunctionTemplateInfo, flag, kFlagOffset)

ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
          kInternalFieldCountOffset)

ACCESSORS(SignatureInfo, receiver, Object, kReceiverOffset)
ACCESSORS(SignatureInfo, args, Object, kArgsOffset)

ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)
// Macro-generated accessors for AllocationSite, AllocationMemento and
// Script.
ACCESSORS(AllocationSite, transition_info, Object, kTransitionInfoOffset)
ACCESSORS(AllocationSite, nested_site, Object, kNestedSiteOffset)
ACCESSORS_TO_SMI(AllocationSite, pretenure_data, kPretenureDataOffset)
ACCESSORS_TO_SMI(AllocationSite, pretenure_create_count,
                 kPretenureCreateCountOffset)
ACCESSORS(AllocationSite, dependent_code, DependentCode,
          kDependentCodeOffset)
ACCESSORS(AllocationSite, weak_next, Object, kWeakNextOffset)
ACCESSORS(AllocationMemento, allocation_site, Object, kAllocationSiteOffset)

ACCESSORS(Script, source, Object, kSourceOffset)
ACCESSORS(Script, name, Object, kNameOffset)
ACCESSORS(Script, id, Smi, kIdOffset)
ACCESSORS_TO_SMI(Script, line_offset, kLineOffsetOffset)
ACCESSORS_TO_SMI(Script, column_offset, kColumnOffsetOffset)
ACCESSORS(Script, context_data, Object, kContextOffset)
ACCESSORS(Script, wrapper, Foreign, kWrapperOffset)
ACCESSORS_TO_SMI(Script, type, kTypeOffset)
ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
ACCESSORS_TO_SMI(Script, eval_from_instructions_offset,
                 kEvalFrominstructionsOffsetOffset)
// flags is a packed smi of boolean bits; individual bits are exposed by
// the BOOL_ACCESSORS below and the compilation type/state helpers.
ACCESSORS_TO_SMI(Script, flags, kFlagsOffset)
BOOL_ACCESSORS(Script, flags, is_shared_cross_origin, kIsSharedCrossOriginBit)
5066 
5067 Script::CompilationType Script::compilation_type() {
5068  return BooleanBit::get(flags(), kCompilationTypeBit) ?
5069  COMPILATION_TYPE_EVAL : COMPILATION_TYPE_HOST;
5070 }
5072  set_flags(BooleanBit::set(flags(), kCompilationTypeBit,
5073  type == COMPILATION_TYPE_EVAL));
5074 }
5076  return BooleanBit::get(flags(), kCompilationStateBit) ?
5078 }
5080  set_flags(BooleanBit::set(flags(), kCompilationStateBit,
5081  state == COMPILATION_STATE_COMPILED));
5082 }
5083 
5084 
#ifdef ENABLE_DEBUGGER_SUPPORT
// Macro-generated accessors for the debugger support objects.
ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
ACCESSORS(DebugInfo, original_code, Code, kOriginalCodeIndex)
ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex)
ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)

ACCESSORS_TO_SMI(BreakPointInfo, code_position, kCodePositionIndex)
ACCESSORS_TO_SMI(BreakPointInfo, source_position, kSourcePositionIndex)
ACCESSORS_TO_SMI(BreakPointInfo, statement_position, kStatementPositionIndex)
ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
#endif
5096 
// Macro-generated tagged-field and smi accessors for SharedFunctionInfo.
ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
ACCESSORS(SharedFunctionInfo, optimized_code_map, Object,
          kOptimizedCodeMapOffset)
ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
ACCESSORS(SharedFunctionInfo, initial_map, Object, kInitialMapOffset)
ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
          kInstanceClassNameOffset)
// function_data holds a FunctionTemplateInfo for API functions or a smi
// builtin id (see IsApiFunction()/get_api_func_data() below).
ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
5109 
5110 
SMI_ACCESSORS(FunctionTemplateInfo, length, kLengthOffset)
// Boolean bits packed into FunctionTemplateInfo::flag.
BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
               kHiddenPrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
               kNeedsAccessCheckBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
               kReadOnlyPrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, remove_prototype,
               kRemovePrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, do_not_cache,
               kDoNotCacheBit)
5124  kIsExpressionBit)
5125 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
5126  kIsTopLevelBit)
5127 
5128 BOOL_ACCESSORS(SharedFunctionInfo,
5131  kAllowLazyCompilation)
5132 BOOL_ACCESSORS(SharedFunctionInfo,
5134  allows_lazy_compilation_without_context,
5135  kAllowLazyCompilationWithoutContext)
5136 BOOL_ACCESSORS(SharedFunctionInfo,
5137  compiler_hints,
5139  kUsesArguments)
// Compiler-hint bit: the function declares duplicate parameter names.
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               has_duplicate_parameters,
               kHasDuplicateParameters)
5144 
5145 
// On 32-bit hosts the int fields of SharedFunctionInfo are plain smis.
// On 64-bit hosts pairs of ints share one pointer-size slot: the LO half
// (at a pointer-aligned offset) is stored shifted left by one with a
// clear tag bit so the GC sees a smi; the HI half is a raw int.
#if V8_HOST_ARCH_32_BIT
SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
SMI_ACCESSORS(SharedFunctionInfo, formal_parameter_count,
              kFormalParameterCountOffset)
SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
              kExpectedNofPropertiesOffset)
SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
              kStartPositionAndTypeOffset)
SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
              kFunctionTokenPositionOffset)
SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
              kCompilerHintsOffset)
SMI_ACCESSORS(SharedFunctionInfo, opt_count_and_bailout_reason,
              kOptCountAndBailoutReasonOffset)
SMI_ACCESSORS(SharedFunctionInfo, counters, kCountersOffset)

#else

// LO half: pointer-aligned (STATIC_ASSERT), stored shifted by one like a
// smi so the field never looks like a heap pointer to the GC.  The
// setter asserts the value survives the one-bit shift (top two bits
// equal).
#define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset)             \
  STATIC_ASSERT(holder::offset % kPointerSize == 0);              \
  int holder::name() {                                            \
    int value = READ_INT_FIELD(this, offset);                     \
    ASSERT(kHeapObjectTag == 1);                                  \
    ASSERT((value & kHeapObjectTag) == 0);                        \
    return value >> 1;                                            \
  }                                                               \
  void holder::set_##name(int value) {                            \
    ASSERT(kHeapObjectTag == 1);                                  \
    ASSERT((value & 0xC0000000) == 0xC0000000 ||                  \
           (value & 0xC0000000) == 0x000000000);                  \
    WRITE_INT_FIELD(this,                                         \
                    offset,                                       \
                    (value << 1) & ~kHeapObjectTag);              \
  }

// HI half: sits in the upper half of the slot (offset % kPointerSize ==
// kIntSize) so it is never scanned as a pointer and can be a raw int.
#define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset)             \
  STATIC_ASSERT(holder::offset % kPointerSize == kIntSize);       \
  INT_ACCESSORS(holder, name, offset)


PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        formal_parameter_count,
                        kFormalParameterCountOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        expected_nof_properties,
                        kExpectedNofPropertiesOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        start_position_and_type,
                        kStartPositionAndTypeOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        function_token_position,
                        kFunctionTokenPositionOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        compiler_hints,
                        kCompilerHintsOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        opt_count_and_bailout_reason,
                        kOptCountAndBailoutReasonOffset)

PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, counters, kCountersOffset)

#endif
5217 
5218 
5221 }
5222 
5223 
5225  ASSERT(0 <= value && value < 256);
5226  WRITE_BYTE_FIELD(this, kConstructionCountOffset, static_cast<byte>(value));
5227 }
5228 
5229 
// Compiler-hint bit: live instances of this function may exist.
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               live_objects_may_exist,
               kLiveObjectsMayExist)
5234 
5235 
5236 bool SharedFunctionInfo::IsInobjectSlackTrackingInProgress() {
5237  return initial_map() != GetHeap()->undefined_value();
5238 }
5239 
5240 
// Getter for the optimization-disabled bit; the setter is hand-written
// below because it must also update the code object.
BOOL_GETTER(SharedFunctionInfo,
            compiler_hints,
            optimization_disabled,
            kOptimizationDisabled)


// Sets or clears the optimization-disabled compiler hint.  When
// disabling, the unoptimized code object is marked non-optimizable too.
void SharedFunctionInfo::set_optimization_disabled(bool disable) {
  set_compiler_hints(BooleanBit::set(compiler_hints(),
                                     kOptimizationDisabled,
                                     disable));
  // If disabling optimizations we reflect that in the code object so
  // it will not be counted as optimizable code.
  if ((code()->kind() == Code::FUNCTION) && disable) {
    code()->set_optimizable(false);
  }
}
5257 
5258 
5260  if (code()->kind() != Code::FUNCTION) return 0;
5261  return code()->profiler_ticks();
5262 }
5263 
5264 
5267  ? STRICT : SLOPPY;
5268 }
5269 
5270 
5272  // We only allow mode transitions from sloppy to strict.
5273  ASSERT(this->strict_mode() == SLOPPY || this->strict_mode() == strict_mode);
5274  int hints = compiler_hints();
5275  hints = BooleanBit::set(hints, kStrictModeFunction, strict_mode == STRICT);
5276  set_compiler_hints(hints);
5277 }
5278 
5279 
// More boolean bits packed into compiler_hints.
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, inline_builtin,
               kInlineBuiltin)
5283 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
5285  kNameShouldPrintAsAnonymous)
// Remaining compiler_hints boolean bits.
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_optimize,
               kDontOptimize)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_inline, kDontInline)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_cache, kDontCache)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_flush, kDontFlush)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_generator, kIsGenerator)
5295 
5296 void SharedFunctionInfo::BeforeVisitingPointers() {
5297  if (IsInobjectSlackTrackingInProgress()) DetachInitialMap();
5298 }
5299 
5300 
// Macro-generated accessors for the code caches.
ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)

ACCESSORS(PolymorphicCodeCache, cache, Object, kCacheOffset)
5305 
5306 bool Script::HasValidSource() {
5307  Object* src = this->source();
5308  if (!src->IsString()) return true;
5309  String* src_str = String::cast(src);
5310  if (!StringShape(src_str).IsExternal()) return true;
5311  if (src_str->IsOneByteRepresentation()) {
5312  return ExternalAsciiString::cast(src)->resource() != NULL;
5313  } else if (src_str->IsTwoByteRepresentation()) {
5314  return ExternalTwoByteString::cast(src)->resource() != NULL;
5315  }
5316  return true;
5317 }
5318 
5319 
5321  ASSERT(code()->kind() == Code::BUILTIN);
5323 }
5324 
5325 
5328 }
5329 
5330 
5331 void SharedFunctionInfo::set_start_position(int start_position) {
5334 }
5335 
5336 
// Returns the code object stored in this SharedFunctionInfo.
Code* SharedFunctionInfo::code() {
  return Code::cast(READ_FIELD(this, kCodeOffset));
}


// Stores a new code object.  Optimized code is never stored here: the
// ASSERT rejects OPTIMIZED_FUNCTION kinds.
void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
  ASSERT(value->kind() != Code::OPTIMIZED_FUNCTION);
  WRITE_FIELD(this, kCodeOffset, value);
  CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
}
5347 
5348 
5350  // If the GC metadata field is already used then the function was
5351  // enqueued as a code flushing candidate and we remove it now.
5352  if (code()->gc_metadata() != NULL) {
5354  flusher->EvictCandidate(this);
5355  }
5356 
5357  ASSERT(code()->gc_metadata() == NULL && value->gc_metadata() == NULL);
5358  set_code(value);
5359 }
5360 
5361 
// Returns the function's ScopeInfo; the field is read raw and
// reinterpret_cast because it is known to hold a ScopeInfo.
ScopeInfo* SharedFunctionInfo::scope_info() {
  return reinterpret_cast<ScopeInfo*>(READ_FIELD(this, kScopeInfoOffset));
}
5365 
5366 
5367 void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
5368  WriteBarrierMode mode) {
5369  WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
5371  this,
5373  reinterpret_cast<Object*>(value),
5374  mode);
5375 }
5376 
5377 
5379  return code() !=
5380  GetIsolate()->builtins()->builtin(Builtins::kCompileUnoptimized);
5381 }
5382 
5383 
5385  return function_data()->IsFunctionTemplateInfo();
5386 }
5387 
5388 
// Returns the FunctionTemplateInfo backing an API function.  Only valid
// when IsApiFunction() holds, i.e. function_data is a
// FunctionTemplateInfo (see the ASSERT).
FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
  ASSERT(IsApiFunction());
  return FunctionTemplateInfo::cast(function_data());
}
5393 
5394 
5396  return function_data()->IsSmi();
5397 }
5398 
5399 
5402  return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value());
5403 }
5404 
5405 
5407  return ICAgeBits::decode(counters());
5408 }
5409 
5410 
5413 }
5414 
5415 
5417  return DeoptCountBits::decode(counters());
5418 }
5419 
5420 
5422  set_counters(DeoptCountBits::update(counters(), deopt_count));
5423 }
5424 
5425 
5427  int value = counters();
5428  int deopt_count = DeoptCountBits::decode(value);
5429  deopt_count = (deopt_count + 1) & DeoptCountBits::kMax;
5430  set_counters(DeoptCountBits::update(value, deopt_count));
5431 }
5432 
5433 
5436 }
5437 
5438 
5441 }
5442 
5443 
5446 }
5447 
5448 
5452 }
5453 
5454 
5456  BailoutReason reason = static_cast<BailoutReason>(
5458  return reason;
5459 }
5460 
5461 
5463  Code* code = this->code();
5464  return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
5465 }
5466 
5467 
5469  int tries = opt_reenable_tries();
5471  // We reenable optimization whenever the number of tries is a large
5472  // enough power of 2.
5473  if (tries >= 16 && (((tries - 1) & tries) == 0)) {
5474  set_optimization_disabled(false);
5475  set_opt_count(0);
5476  set_deopt_count(0);
5477  code()->set_optimizable(true);
5478  }
5479 }
5480 
5481 
5483  return context()->global_object()->IsJSBuiltinsObject();
5484 }
5485 
5486 
5488  return shared()->formal_parameter_count() !=
5490 }
5491 
5492 
5494  return code()->kind() == Code::OPTIMIZED_FUNCTION;
5495 }
5496 
5497 
5499  return code()->kind() == Code::FUNCTION && code()->optimizable();
5500 }
5501 
5502 
5504  return code() == GetIsolate()->builtins()->builtin(
5505  Builtins::kCompileOptimized);
5506 }
5507 
5508 
5510  return code() == GetIsolate()->builtins()->builtin(
5511  Builtins::kCompileOptimizedConcurrent);
5512 }
5513 
5514 
5516  return code() == GetIsolate()->builtins()->builtin(
5517  Builtins::kInOptimizationQueue);
5518 }
5519 
5520 
5522  return Code::cast(
5524 }
5525 
5526 
5527 void JSFunction::set_code(Code* value) {
5528  ASSERT(!GetHeap()->InNewSpace(value));
5529  Address entry = value->entry();
5530  WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
5531  GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
5532  this,
5534  value);
5535 }
5536 
5537 
5539  ASSERT(!GetHeap()->InNewSpace(value));
5540  Address entry = value->entry();
5541  WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
5542 }
5543 
5544 
5545 void JSFunction::ReplaceCode(Code* code) {
5546  bool was_optimized = IsOptimized();
5547  bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;
5548 
5549  if (was_optimized && is_optimized) {
5550  shared()->EvictFromOptimizedCodeMap(this->code(),
5551  "Replacing with another optimized code");
5552  }
5553 
5554  set_code(code);
5555 
5556  // Add/remove the function from the list of optimized functions for this
5557  // context based on the state change.
5558  if (!was_optimized && is_optimized) {
5560  }
5561  if (was_optimized && !is_optimized) {
5562  // TODO(titzer): linear in the number of optimized functions; fix!
5564  }
5565 }
5566 
5567 
5569  return Context::cast(READ_FIELD(this, kContextOffset));
5570 }
5571 
5572 
// Sets the function's context; undefined is also accepted (see ASSERT).
void JSFunction::set_context(Object* value) {
  ASSERT(value->IsUndefined() || value->IsContext());
  WRITE_FIELD(this, kContextOffset, value);
  WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
}

// Slot shared between the initial map (when one exists) and the
// prototype; see initial_map()/instance_prototype() below.
ACCESSORS(JSFunction, prototype_or_initial_map, Object,
          kPrototypeOrInitialMapOffset)
5581 
5582 
5583 Map* JSFunction::initial_map() {
5584  return Map::cast(prototype_or_initial_map());
5585 }
5586 
5587 
5589  set_prototype_or_initial_map(value);
5590 }
5591 
5592 
5594  return prototype_or_initial_map()->IsMap();
5595 }
5596 
5597 
5599  return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
5600 }
5601 
5602 
5605 }
5606 
5607 
5610  if (has_initial_map()) return initial_map()->prototype();
5611  // When there is no initial map and the prototype is a JSObject, the
5612  // initial map field is used for the prototype field.
5613  return prototype_or_initial_map();
5614 }
5615 
5616 
5618  ASSERT(has_prototype());
5619  // If the function's prototype property has been set to a non-JSObject
5620  // value, that value is stored in the constructor field of the map.
5621  if (map()->has_non_instance_prototype()) return map()->constructor();
5622  return instance_prototype();
5623 }
5624 
5625 
5627  return map()->function_with_prototype();
5628 }
5629 
5630 
5632  return code() !=
5633  GetIsolate()->builtins()->builtin(Builtins::kCompileUnoptimized);
5634 }
5635 
5636 
// Returns the literals array.  Only valid for non-bound functions: bound
// functions reuse the same slot for their bindings (see
// function_bindings()).
FixedArray* JSFunction::literals() {
  ASSERT(!shared()->bound());
  return literals_or_bindings();
}
5641 
5642 
5644  ASSERT(!shared()->bound());
5645  set_literals_or_bindings(literals);
5646 }
5647 
5648 
5650  ASSERT(shared()->bound());
5651  return literals_or_bindings();
5652 }
5653 
5654 
// Installs the bindings array of a bound function.  The array must
// either be the empty fixed array (initial state) or a COW fixed array
// (see ASSERT).
void JSFunction::set_function_bindings(FixedArray* bindings) {
  ASSERT(shared()->bound());
  // Bound function literal may be initialized to the empty fixed array
  // before the bindings are set.
  ASSERT(bindings == GetHeap()->empty_fixed_array() ||
         bindings->map() == GetHeap()->fixed_cow_array_map());
  set_literals_or_bindings(bindings);
}
5663 
5664 
5666  ASSERT(!shared()->bound());
5667  return literals()->length();
5668 }
5669 
5670 
5672  ASSERT(id < kJSBuiltinsCount); // id is unsigned.
5673  return READ_FIELD(this, OffsetOfFunctionWithId(id));
5674 }
5675 
5676 
5678  Object* value) {
5679  ASSERT(id < kJSBuiltinsCount); // id is unsigned.
5680  WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
5681  WRITE_BARRIER(GetHeap(), this, OffsetOfFunctionWithId(id), value);
5682 }
5683 
5684 
5686  ASSERT(id < kJSBuiltinsCount); // id is unsigned.
5687  return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id)));
5688 }
5689 
5690 
5692  Code* value) {
5693  ASSERT(id < kJSBuiltinsCount); // id is unsigned.
5694  WRITE_FIELD(this, OffsetOfCodeWithId(id), value);
5695  ASSERT(!GetHeap()->InNewSpace(value));
5696 }
5697 
5698 
// Macro-generated accessors for JSProxy and JSFunctionProxy.
ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
ACCESSORS(JSProxy, hash, Object, kHashOffset)
ACCESSORS(JSFunctionProxy, call_trap, Object, kCallTrapOffset)
ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset)
5703 
5704 
5705 void JSProxy::InitializeBody(int object_size, Object* value) {
5706  ASSERT(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
5707  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
5708  WRITE_FIELD(this, offset, value);
5709  }
5710 }
5711 
5712 
// Backing-table fields of the collection objects.
ACCESSORS(JSSet, table, Object, kTableOffset)
ACCESSORS(JSMap, table, Object, kTableOffset)
ACCESSORS(JSWeakCollection, table, Object, kTableOffset)
ACCESSORS(JSWeakCollection, next, Object, kNextOffset)


// The wrapped address of a Foreign is stored as an untagged
// intptr-sized field, not a tagged pointer.
Address Foreign::foreign_address() {
  return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset));
}
5722 
5723 
5726 }
5727 
5728 
// Macro-generated accessors for JSGeneratorObject fields.
ACCESSORS(JSGeneratorObject, function, JSFunction, kFunctionOffset)
ACCESSORS(JSGeneratorObject, context, Context, kContextOffset)
ACCESSORS(JSGeneratorObject, receiver, Object, kReceiverOffset)
SMI_ACCESSORS(JSGeneratorObject, continuation, kContinuationOffset)
ACCESSORS(JSGeneratorObject, operand_stack, FixedArray, kOperandStackOffset)
SMI_ACCESSORS(JSGeneratorObject, stack_handler_index, kStackHandlerIndexOffset)
5735 
5736 
5737 JSGeneratorObject* JSGeneratorObject::cast(Object* obj) {
5738  ASSERT(obj->IsJSGeneratorObject());
5740  return reinterpret_cast<JSGeneratorObject*>(obj);
5741 }
5742 
5743 
ACCESSORS(JSModule, context, Object, kContextOffset)
ACCESSORS(JSModule, scope_info, ScopeInfo, kScopeInfoOffset)


// Debug-checked downcast; also verifies the fixed object size.
JSModule* JSModule::cast(Object* obj) {
  ASSERT(obj->IsJSModule());
  ASSERT(HeapObject::cast(obj)->Size() == JSModule::kSize);
  return reinterpret_cast<JSModule*>(obj);
}
5753 
5754 
ACCESSORS(JSValue, value, Object, kValueOffset)


// Debug-checked downcast; also verifies the fixed object size.
JSValue* JSValue::cast(Object* obj) {
  ASSERT(obj->IsJSValue());
  ASSERT(HeapObject::cast(obj)->Size() == JSValue::kSize);
  return reinterpret_cast<JSValue*>(obj);
}
5763 
5764 
// JSDate fields: the raw time value plus cached date components keyed by
// cache_stamp.
ACCESSORS(JSDate, value, Object, kValueOffset)
ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
ACCESSORS(JSDate, year, Object, kYearOffset)
ACCESSORS(JSDate, month, Object, kMonthOffset)
ACCESSORS(JSDate, day, Object, kDayOffset)
ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
ACCESSORS(JSDate, hour, Object, kHourOffset)
ACCESSORS(JSDate, min, Object, kMinOffset)
ACCESSORS(JSDate, sec, Object, kSecOffset)


// Debug-checked downcast; also verifies the fixed object size.
JSDate* JSDate::cast(Object* obj) {
  ASSERT(obj->IsJSDate());
  ASSERT(HeapObject::cast(obj)->Size() == JSDate::kSize);
  return reinterpret_cast<JSDate*>(obj);
}
5781 
5782 
// Macro-generated accessors for JSMessageObject fields.
ACCESSORS(JSMessageObject, type, String, kTypeOffset)
ACCESSORS(JSMessageObject, arguments, JSArray, kArgumentsOffset)
ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)
5789 
5790 
5791 JSMessageObject* JSMessageObject::cast(Object* obj) {
5792  ASSERT(obj->IsJSMessageObject());
5794  return reinterpret_cast<JSMessageObject*>(obj);
5795 }
5796 
5797 
// Macro-generated accessors for Code header fields.
INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
INT_ACCESSORS(Code, prologue_offset, kPrologueOffset)
ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
// Raw slot: holds either a TypeFeedbackInfo or a smi (stub minor key);
// use type_feedback_info()/stub_info() for the checked views.
ACCESSORS(Code, raw_type_feedback_info, Object, kTypeFeedbackInfoOffset)
ACCESSORS(Code, next_code_link, Object, kNextCodeLinkOffset)
5805 
5806 
// Nulls out the heap-object pointer fields of the Code header.  A
// smi-valued type feedback slot is left intact.
void Code::WipeOutHeader() {
  WRITE_FIELD(this, kRelocationInfoOffset, NULL);
  WRITE_FIELD(this, kHandlerTableOffset, NULL);
  WRITE_FIELD(this, kDeoptimizationDataOffset, NULL);
  WRITE_FIELD(this, kConstantPoolOffset, NULL);
  // Do not wipe out e.g. a minor key.
  if (!READ_FIELD(this, kTypeFeedbackInfoOffset)->IsSmi()) {
    WRITE_FIELD(this, kTypeFeedbackInfoOffset, NULL);
  }
}
5817 
5818 
5820  ASSERT(kind() == FUNCTION);
5821  return raw_type_feedback_info();
5822 }
5823 
5824 
5826  ASSERT(kind() == FUNCTION);
5827  set_raw_type_feedback_info(value, mode);
5829  value, mode);
5830 }
5831 
5832 
5834  ASSERT(kind() == COMPARE_IC || kind() == COMPARE_NIL_IC ||
5835  kind() == BINARY_OP_IC || kind() == LOAD_IC);
5836  return Smi::cast(raw_type_feedback_info())->value();
5837 }
5838 
5839 
// Stores a stub-specific integer (as a smi) in the type feedback slot.
// Only valid for the stub/IC kinds listed in the ASSERT.
void Code::set_stub_info(int value) {
  ASSERT(kind() == COMPARE_IC ||
         kind() == COMPARE_NIL_IC ||
         kind() == BINARY_OP_IC ||
         kind() == STUB ||
         kind() == LOAD_IC ||
         kind() == KEYED_LOAD_IC ||
         kind() == STORE_IC ||
         kind() == KEYED_STORE_IC);
  set_raw_type_feedback_info(Smi::FromInt(value));
}
5851 
5852 
ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
INT_ACCESSORS(Code, ic_age, kICAgeOffset)


// The instruction stream begins immediately after the Code header.
byte* Code::instruction_start() {
  return FIELD_ADDR(this, kHeaderSize);
}
5860 
5861 
5863  return instruction_start() + instruction_size();
5864 }
5865 
5866 
5869 }
5870 
5871 
5873  return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
5874 }
5875 
5876 
5879 }
5880 
5881 
5883  return unchecked_relocation_info()->length();
5884 }
5885 
5886 
5888  return instruction_start();
5889 }
5890 
5891 
5892 bool Code::contains(byte* inner_pointer) {
5893  return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
5894 }
5895 
5896 
ACCESSORS(JSArray, length, Object, kLengthOffset)


// Returns the externally allocated backing store; the pointer is kept in
// an untagged intptr-sized field.
void* JSArrayBuffer::backing_store() {
  intptr_t ptr = READ_INTPTR_FIELD(this, kBackingStoreOffset);
  return reinterpret_cast<void*>(ptr);
}
5904 
5905 
5906 void JSArrayBuffer::set_backing_store(void* value, WriteBarrierMode mode) {
5907  intptr_t ptr = reinterpret_cast<intptr_t>(value);
5909 }
5910 
5911 
ACCESSORS(JSArrayBuffer, byte_length, Object, kByteLengthOffset)
// flag is a packed smi of boolean bits (is_external, should_be_freed).
ACCESSORS_TO_SMI(JSArrayBuffer, flag, kFlagOffset)


// Whether the buffer's memory is externally owned (kIsExternalBit).
bool JSArrayBuffer::is_external() {
  return BooleanBit::get(flag(), kIsExternalBit);
}
5919 
5920 
5922  set_flag(BooleanBit::set(flag(), kIsExternalBit, value));
5923 }
5924 
5925 
5927  return BooleanBit::get(flag(), kShouldBeFreed);
5928 }
5929 
5930 
5932  set_flag(BooleanBit::set(flag(), kShouldBeFreed, value));
5933 }
5934 
5935 
// Weak-list links and JSArrayBufferView/JSTypedArray/JSRegExp fields.
ACCESSORS(JSArrayBuffer, weak_next, Object, kWeakNextOffset)
ACCESSORS(JSArrayBuffer, weak_first_view, Object, kWeakFirstViewOffset)


ACCESSORS(JSArrayBufferView, buffer, Object, kBufferOffset)
ACCESSORS(JSArrayBufferView, byte_offset, Object, kByteOffsetOffset)
ACCESSORS(JSArrayBufferView, byte_length, Object, kByteLengthOffset)
ACCESSORS(JSArrayBufferView, weak_next, Object, kWeakNextOffset)
ACCESSORS(JSTypedArray, length, Object, kLengthOffset)

// data holds undefined (not compiled) or a FixedArray; see TypeTag().
ACCESSORS(JSRegExp, data, Object, kDataOffset)
5947 
5948 
5949 JSRegExp::Type JSRegExp::TypeTag() {
5950  Object* data = this->data();
5951  if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
5952  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
5953  return static_cast<JSRegExp::Type>(smi->value());
5954 }
5955 
5956 
5958  switch (TypeTag()) {
5959  case ATOM:
5960  return 0;
5961  case IRREGEXP:
5963  default:
5964  UNREACHABLE();
5965  return -1;
5966  }
5967 }
5968 
5969 
5971  ASSERT(this->data()->IsFixedArray());
5972  Object* data = this->data();
5973  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
5974  return Flags(smi->value());
5975 }
5976 
5977 
5979  ASSERT(this->data()->IsFixedArray());
5980  Object* data = this->data();
5981  String* pattern= String::cast(FixedArray::cast(data)->get(kSourceIndex));
5982  return pattern;
5983 }
5984 
5985 
// Reads an implementation-data entry from a compiled regexp's data
// array.
Object* JSRegExp::DataAt(int index) {
  ASSERT(TypeTag() != NOT_COMPILED);
  return FixedArray::cast(data())->get(index);
}


// Writes an implementation-data entry; indices below kDataIndex (tag,
// source, flags, ...) may not be set through this helper.
void JSRegExp::SetDataAt(int index, Object* value) {
  ASSERT(TypeTag() != NOT_COMPILED);
  ASSERT(index >= kDataIndex);  // Only implementation data can be set this way.
  FixedArray::cast(data())->set(index, value);
}
5997 
5998 
6000  ElementsKind kind = map()->elements_kind();
6001 #if DEBUG
6002  FixedArrayBase* fixed_array =
6003  reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));
6004 
6005  // If a GC was caused while constructing this object, the elements
6006  // pointer may point to a one pointer filler map.
6007  if (ElementsAreSafeToExamine()) {
6008  Map* map = fixed_array->map();
6010  (map == GetHeap()->fixed_array_map() ||
6011  map == GetHeap()->fixed_cow_array_map())) ||
6012  (IsFastDoubleElementsKind(kind) &&
6013  (fixed_array->IsFixedDoubleArray() ||
6014  fixed_array == GetHeap()->empty_fixed_array())) ||
6015  (kind == DICTIONARY_ELEMENTS &&
6016  fixed_array->IsFixedArray() &&
6017  fixed_array->IsDictionary()) ||
6018  (kind > DICTIONARY_ELEMENTS));
6019  ASSERT((kind != SLOPPY_ARGUMENTS_ELEMENTS) ||
6020  (elements()->IsFixedArray() && elements()->length() >= 2));
6021  }
6022 #endif
6023  return kind;
6024 }
6025 
6026 
6029 }
6030 
6031 
6034 }
6035 
6036 
6039 }
6040 
6041 
6044 }
6045 
6046 
6049 }
6050 
6051 
6054 }
6055 
6056 
6059 }
6060 
6061 
6064 }
6065 
6066 
6069 }
6070 
6071 
6073  HeapObject* array = elements();
6074  ASSERT(array != NULL);
6075  return array->IsExternalArray();
6076 }
6077 
6078 
// Generates JSObject::HasExternal<Type>Elements() predicates that test
// the elements array's instance type against the matching external
// array type.
#define EXTERNAL_ELEMENTS_CHECK(Type, type, TYPE, ctype, size) \
bool JSObject::HasExternal##Type##Elements() { \
  HeapObject* array = elements(); \
  ASSERT(array != NULL); \
  if (!array->IsHeapObject()) \
    return false; \
  return array->map()->instance_type() == EXTERNAL_##TYPE##_ARRAY_TYPE; \
}
6087 
6089 
6090 #undef EXTERNAL_ELEMENTS_CHECK
6091 
6092 
6094  HeapObject* array = elements();
6095  ASSERT(array != NULL);
6096  return array->IsFixedTypedArrayBase();
6097 }
6098 
6099 
// Generates JSObject::HasFixed<Type>Elements() predicates that test the
// elements array's instance type against the matching fixed typed-array
// type.
#define FIXED_TYPED_ELEMENTS_CHECK(Type, type, TYPE, ctype, size) \
bool JSObject::HasFixed##Type##Elements() { \
  HeapObject* array = elements(); \
  ASSERT(array != NULL); \
  if (!array->IsHeapObject()) \
    return false; \
  return array->map()->instance_type() == FIXED_##TYPE##_ARRAY_TYPE; \
}
6108 
6110 
6111 #undef FIXED_TYPED_ELEMENTS_CHECK
6112 
6113 
6115  return map()->has_named_interceptor();
6116 }
6117 
6118 
6120  return map()->has_indexed_interceptor();
6121 }
6122 
6123 
6126  FixedArray* elems = FixedArray::cast(elements());
6127  Isolate* isolate = GetIsolate();
6128  if (elems->map() != isolate->heap()->fixed_cow_array_map()) return elems;
6129  Object* writable_elems;
6130  { MaybeObject* maybe_writable_elems = isolate->heap()->CopyFixedArrayWithMap(
6131  elems, isolate->heap()->fixed_array_map());
6132  if (!maybe_writable_elems->ToObject(&writable_elems)) {
6133  return maybe_writable_elems;
6134  }
6135  }
6136  set_elements(FixedArray::cast(writable_elems));
6137  isolate->counters()->cow_arrays_converted()->Increment();
6138  return writable_elems;
6139 }
6140 
6141 
6144  return NameDictionary::cast(properties());
6145 }
6146 
6147 
6150  return SeededNumberDictionary::cast(elements());
6151 }
6152 
6153 
6154 bool Name::IsHashFieldComputed(uint32_t field) {
6155  return (field & kHashNotComputedMask) == 0;
6156 }
6157 
6158 
6160  return IsHashFieldComputed(hash_field());
6161 }
6162 
6163 
6164 uint32_t Name::Hash() {
6165  // Fast case: has hash code already been computed?
6166  uint32_t field = hash_field();
6167  if (IsHashFieldComputed(field)) return field >> kHashShift;
6168  // Slow case: compute hash code and set it. Has to be a string.
6169  return String::cast(this)->ComputeAndSetHash();
6170 }
6171 
6172 
// Starts an incremental string hash over |length| characters, seeding the
// running Jenkins hash with |seed|. Array-index tracking begins enabled
// only when the length is in the valid array-index range
// (1..String::kMaxArrayIndexSize).
StringHasher::StringHasher(int length, uint32_t seed)
  : length_(length),
    raw_running_hash_(seed),
    array_index_(0),
    is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
    is_first_char_(true) {
  // Without hash randomization the seed must be the default zero.
  ASSERT(FLAG_randomize_hashes || raw_running_hash_ == 0);
}
6181 
6182 
6184  return length_ > String::kMaxHashCalcLength;
6185 }
6186 
6187 
6188 uint32_t StringHasher::AddCharacterCore(uint32_t running_hash, uint16_t c) {
6189  running_hash += c;
6190  running_hash += (running_hash << 10);
6191  running_hash ^= (running_hash >> 6);
6192  return running_hash;
6193 }
6194 
6195 
6196 uint32_t StringHasher::GetHashCore(uint32_t running_hash) {
6197  running_hash += (running_hash << 3);
6198  running_hash ^= (running_hash >> 11);
6199  running_hash += (running_hash << 15);
6200  if ((running_hash & String::kHashBitMask) == 0) {
6201  return kZeroHash;
6202  }
6203  return running_hash;
6204 }
6205 
6206 
// Mixes one UTF-16 code unit into the running hash state
// (see AddCharacterCore for the mixing function).
void StringHasher::AddCharacter(uint16_t c) {
  // Use the Jenkins one-at-a-time hash function to update the hash
  // for the given character.
  raw_running_hash_ = AddCharacterCore(raw_running_hash_, c);
}
6212 
6213 
// Feeds the next character into array-index tracking. Returns false (and
// permanently disables index tracking) as soon as the prefix seen so far
// cannot be a valid array index: a non-digit, a leading zero in a
// multi-character string, or overflow of the uint32 index value.
bool StringHasher::UpdateIndex(uint16_t c) {
  ASSERT(is_array_index_);
  if (c < '0' || c > '9') {
    is_array_index_ = false;
    return false;
  }
  int d = c - '0';
  if (is_first_char_) {
    is_first_char_ = false;
    // "0" by itself is a valid index, but any longer string starting
    // with '0' is not (no leading zeros).
    if (c == '0' && length_ > 1) {
      is_array_index_ = false;
      return false;
    }
  }
  // Overflow guard for array_index_ * 10 + d staying within uint32:
  // 429496729 == 2^32 / 10, and ((d + 2) >> 3) evaluates to 1 exactly
  // when d >= 6, tightening the bound by one for digits that would push
  // 429496729 * 10 + d past 2^32 - 1 (= 4294967295).
  if (array_index_ > 429496729U - ((d + 2) >> 3)) {
    is_array_index_ = false;
    return false;
  }
  array_index_ = array_index_ * 10 + d;
  return true;
}
6235 
6236 
// Hashes |length| code units from |chars| (one- or two-byte). While the
// prefix still looks like an array index, each character is also fed to
// UpdateIndex(); once that fails, the remainder is only hashed. Note the
// first loop advances |i| past the failing character before breaking, so
// every character is hashed exactly once across the two loops.
template<typename Char>
inline void StringHasher::AddCharacters(const Char* chars, int length) {
  ASSERT(sizeof(Char) == 1 || sizeof(Char) == 2);
  int i = 0;
  if (is_array_index_) {
    for (; i < length; i++) {
      AddCharacter(chars[i]);
      if (!UpdateIndex(chars[i])) {
        i++;
        break;
      }
    }
  }
  for (; i < length; i++) {
    ASSERT(!is_array_index_);
    AddCharacter(chars[i]);
  }
}
6255 
6256 
6257 template <typename schar>
6258 uint32_t StringHasher::HashSequentialString(const schar* chars,
6259  int length,
6260  uint32_t seed) {
6261  StringHasher hasher(length, seed);
6262  if (!hasher.has_trivial_hash()) hasher.AddCharacters(chars, length);
6263  return hasher.GetHashField();
6264 }
6265 
6266 
6267 bool Name::AsArrayIndex(uint32_t* index) {
6268  return IsString() && String::cast(this)->AsArrayIndex(index);
6269 }
6270 
6271 
6272 bool String::AsArrayIndex(uint32_t* index) {
6273  uint32_t field = hash_field();
6274  if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
6275  return false;
6276  }
6277  return SlowAsArrayIndex(index);
6278 }
6279 
6280 
6282  return map()->prototype();
6283 }
6284 
6285 
6287  return map()->constructor();
6288 }
6289 
6290 
6292  Handle<Name> name) {
6293  if (object->IsJSProxy()) {
6294  Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
6295  return JSProxy::HasPropertyWithHandler(proxy, name);
6296  }
6297  return GetPropertyAttribute(object, name) != ABSENT;
6298 }
6299 
6300 
6302  Handle<Name> name) {
6303  if (object->IsJSProxy()) {
6304  Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
6305  return JSProxy::HasPropertyWithHandler(proxy, name);
6306  }
6307  return GetLocalPropertyAttribute(object, name) != ABSENT;
6308 }
6309 
6310 
6312  Handle<Name> key) {
6313  uint32_t index;
6314  if (object->IsJSObject() && key->AsArrayIndex(&index)) {
6315  return GetElementAttribute(object, index);
6316  }
6317  return GetPropertyAttributeWithReceiver(object, object, key);
6318 }
6319 
6320 
6322  uint32_t index) {
6323  if (object->IsJSProxy()) {
6325  Handle<JSProxy>::cast(object), object, index);
6326  }
6328  Handle<JSObject>::cast(object), object, index, true);
6329 }
6330 
6331 
6333  return JSGlobalProxy::cast(global_receiver())->IsDetachedFrom(this);
6334 }
6335 
6336 
6338  return GetPrototype() != global;
6339 }
6340 
6341 
6343  return object->IsJSProxy()
6344  ? JSProxy::GetOrCreateIdentityHash(Handle<JSProxy>::cast(object))
6345  : JSObject::GetOrCreateIdentityHash(Handle<JSObject>::cast(object));
6346 }
6347 
6348 
6350  return IsJSProxy()
6351  ? JSProxy::cast(this)->GetIdentityHash()
6352  : JSObject::cast(this)->GetIdentityHash();
6353 }
6354 
6355 
6356 bool JSReceiver::HasElement(Handle<JSReceiver> object, uint32_t index) {
6357  if (object->IsJSProxy()) {
6358  Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
6359  return JSProxy::HasElementWithHandler(proxy, index);
6360  }
6362  Handle<JSObject>::cast(object), object, index, true) != ABSENT;
6363 }
6364 
6365 
6366 bool JSReceiver::HasLocalElement(Handle<JSReceiver> object, uint32_t index) {
6367  if (object->IsJSProxy()) {
6368  Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
6369  return JSProxy::HasElementWithHandler(proxy, index);
6370  }
6372  Handle<JSObject>::cast(object), object, index, false) != ABSENT;
6373 }
6374 
6375 
6377  Handle<JSReceiver> object, uint32_t index) {
6378  if (object->IsJSProxy()) {
6380  Handle<JSProxy>::cast(object), object, index);
6381  }
6383  Handle<JSObject>::cast(object), object, index, false);
6384 }
6385 
6386 
6388  return BooleanBit::get(flag(), kAllCanReadBit);
6389 }
6390 
6391 
6393  set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
6394 }
6395 
6396 
6398  return BooleanBit::get(flag(), kAllCanWriteBit);
6399 }
6400 
6401 
6403  set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
6404 }
6405 
6406 
6408  return BooleanBit::get(flag(), kProhibitsOverwritingBit);
6409 }
6410 
6411 
6413  set_flag(BooleanBit::set(flag(), kProhibitsOverwritingBit, value));
6414 }
6415 
6416 
6418  return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
6419 }
6420 
6421 
6423  set_flag(Smi::FromInt(AttributesField::update(flag()->value(), attributes)));
6424 }
6425 
6426 
6427 bool AccessorInfo::IsCompatibleReceiver(Object* receiver) {
6428  Object* function_template = expected_receiver_type();
6429  if (!function_template->IsFunctionTemplateInfo()) return true;
6430  return FunctionTemplateInfo::cast(function_template)->IsTemplateFor(receiver);
6431 }
6432 
6433 
6435  int current = access_flags()->value();
6436  current = BooleanBit::set(current,
6437  kProhibitsOverwritingBit,
6438  access_control & PROHIBITS_OVERWRITING);
6439  current = BooleanBit::set(current,
6440  kAllCanReadBit,
6441  access_control & ALL_CAN_READ);
6442  current = BooleanBit::set(current,
6443  kAllCanWriteBit,
6444  access_control & ALL_CAN_WRITE);
6445  set_access_flags(Smi::FromInt(current));
6446 }
6447 
6448 
6450  return BooleanBit::get(access_flags(), kAllCanReadBit);
6451 }
6452 
6453 
6455  return BooleanBit::get(access_flags(), kAllCanWriteBit);
6456 }
6457 
6458 
6460  return BooleanBit::get(access_flags(), kProhibitsOverwritingBit);
6461 }
6462 
6463 
6464 template<typename Shape, typename Key>
6466  Object* key,
6467  Object* value) {
6468  SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
6469 }
6470 
6471 
6472 template<typename Shape, typename Key>
6474  Object* key,
6475  Object* value,
6476  PropertyDetails details) {
6477  ASSERT(!key->IsName() ||
6478  details.IsDeleted() ||
6479  details.dictionary_index() > 0);
6480  int index = HashTable<Shape, Key>::EntryToIndex(entry);
6481  DisallowHeapAllocation no_gc;
6483  FixedArray::set(index, key, mode);
6484  FixedArray::set(index+1, value, mode);
6485  FixedArray::set(index+2, details.AsSmi());
6486 }
6487 
6488 
6489 bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
6490  ASSERT(other->IsNumber());
6491  return key == static_cast<uint32_t>(other->Number());
6492 }
6493 
6494 
// Unseeded numeric keys hash with the shared integer hash and a zero seed.
uint32_t UnseededNumberDictionaryShape::Hash(uint32_t key) {
  return ComputeIntegerHash(key, 0);
}
6498 
6499 
6501  Object* other) {
6502  ASSERT(other->IsNumber());
6503  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), 0);
6504 }
6505 
// Seeded numeric keys mix the caller-supplied seed into the integer hash.
uint32_t SeededNumberDictionaryShape::SeededHash(uint32_t key, uint32_t seed) {
  return ComputeIntegerHash(key, seed);
}
6509 
6511  uint32_t seed,
6512  Object* other) {
6513  ASSERT(other->IsNumber());
6514  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), seed);
6515 }
6516 
// Boxes the uint32 key as a number object for storage in the table
// (may allocate, hence the MaybeObject return).
MaybeObject* NumberDictionaryShape::AsObject(Heap* heap, uint32_t key) {
  return heap->NumberFromUint32(key);
}
6520 
6521 
6522 bool NameDictionaryShape::IsMatch(Name* key, Object* other) {
6523  // We know that all entries in a hash table had their hash keys created.
6524  // Use that knowledge to have fast failure.
6525  if (key->Hash() != Name::cast(other)->Hash()) return false;
6526  return key->Equals(Name::cast(other));
6527 }
6528 
6529 
6531  return key->Hash();
6532 }
6533 
6534 
// Hash of an already-stored key object; stored keys are always Names.
uint32_t NameDictionaryShape::HashForObject(Name* key, Object* other) {
  return Name::cast(other)->Hash();
}
6538 
6539 
// Name keys are stored directly in the table; they must already be
// unique (internalized) names, as the assert enforces.
MaybeObject* NameDictionaryShape::AsObject(Heap* heap, Name* key) {
  ASSERT(key->IsUniqueName());
  return key;
}
6544 
6545 
// Object hash table keys compare with SameValue semantics.
template <int entrysize>
bool ObjectHashTableShape<entrysize>::IsMatch(Object* key, Object* other) {
  return key->SameValue(other);
}
6550 
6551 
6552 template <int entrysize>
6554  return Smi::cast(key->GetHash())->value();
6555 }
6556 
6557 
6558 template <int entrysize>
6560  Object* other) {
6561  return Smi::cast(other->GetHash())->value();
6562 }
6563 
6564 
6565 template <int entrysize>
6567  Object* key) {
6568  return key;
6569 }
6570 
6571 
// Weak hash table keys also compare with SameValue semantics.
template <int entrysize>
bool WeakHashTableShape<entrysize>::IsMatch(Object* key, Object* other) {
  return key->SameValue(other);
}
6576 
6577 
6578 template <int entrysize>
6580  intptr_t hash = reinterpret_cast<intptr_t>(key);
6581  return (uint32_t)(hash & 0xFFFFFFFF);
6582 }
6583 
6584 
6585 template <int entrysize>
6587  Object* other) {
6588  intptr_t hash = reinterpret_cast<intptr_t>(other);
6589  return (uint32_t)(hash & 0xFFFFFFFF);
6590 }
6591 
6592 
6593 template <int entrysize>
6595  Object* key) {
6596  return key;
6597 }
6598 
6599 
6601  // No write barrier is needed since empty_fixed_array is not in new space.
6602  // Please note this function is used during marking:
6603  // - MarkCompactCollector::MarkUnmarkedObject
6604  // - IncrementalMarking::Step
6605  ASSERT(!heap->InNewSpace(heap->empty_fixed_array()));
6606  WRITE_FIELD(this, kCodeCacheOffset, heap->empty_fixed_array());
6607 }
6608 
6609 
6610 void JSArray::EnsureSize(Handle<JSArray> array, int required_size) {
6611  ASSERT(array->HasFastSmiOrObjectElements());
6612  Handle<FixedArray> elts = handle(FixedArray::cast(array->elements()));
6613  const int kArraySizeThatFitsComfortablyInNewSpace = 128;
6614  if (elts->length() < required_size) {
6615  // Doubling in size would be overkill, but leave some slack to avoid
6616  // constantly growing.
6617  Expand(array, required_size + (required_size >> 3));
6618  // It's a performance benefit to keep a frequently used array in new-space.
6619  } else if (!array->GetHeap()->new_space()->Contains(*elts) &&
6620  required_size < kArraySizeThatFitsComfortablyInNewSpace) {
6621  // Expand will allocate a new backing store in new space even if the size
6622  // we asked for isn't larger than what we had before.
6623  Expand(array, required_size);
6624  }
6625 }
6626 
6627 
// Smi-specialized length setter: a Smi is not a heap pointer, so the
// write barrier can be safely skipped.
void JSArray::set_length(Smi* length) {
  // Don't need a write barrier for a Smi.
  set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
}
6632 
6633 
6635  bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
6636  ASSERT(result == !HasExternalArrayElements());
6637  return result;
6638 }
6639 
6640 
6642  Handle<FixedArrayBase> storage) {
6643  EnsureCanContainElements(array, storage, storage->length(),
6645 
6646  ASSERT((storage->map() == array->GetHeap()->fixed_double_array_map() &&
6647  IsFastDoubleElementsKind(array->GetElementsKind())) ||
6648  ((storage->map() != array->GetHeap()->fixed_double_array_map()) &&
6649  (IsFastObjectElementsKind(array->GetElementsKind()) ||
6650  (IsFastSmiElementsKind(array->GetElementsKind()) &&
6651  Handle<FixedArray>::cast(storage)->ContainsOnlySmisOrHoles()))));
6652  array->set_elements(*storage);
6653  array->set_length(Smi::FromInt(storage->length()));
6654 }
6655 
6656 
6657 MaybeObject* FixedArray::Copy() {
6658  if (length() == 0) return this;
6659  return GetHeap()->CopyFixedArray(this);
6660 }
6661 
6662 
6663 MaybeObject* FixedDoubleArray::Copy() {
6664  if (length() == 0) return this;
6665  return GetHeap()->CopyFixedDoubleArray(this);
6666 }
6667 
6668 
6669 MaybeObject* ConstantPoolArray::Copy() {
6670  if (length() == 0) return this;
6671  return GetHeap()->CopyConstantPoolArray(this);
6672 }
6673 
6674 
6676  return isolate->factory()->uninitialized_symbol();
6677 }
6678 
6679 
6681  return isolate->factory()->megamorphic_symbol();
6682 }
6683 
6684 
6686  ElementsKind elements_kind) {
6687  return Handle<Object>(Smi::FromInt(static_cast<int>(elements_kind)), isolate);
6688 }
6689 
6690 
6692  return heap->uninitialized_symbol();
6693 }
6694 
6695 
6697  int current = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
6698  return ICTotalCountField::decode(current);
6699 }
6700 
6701 
6703  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
6704  value = ICTotalCountField::update(value,
6705  ICTotalCountField::decode(count));
6706  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
6707 }
6708 
6709 
6711  int current = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
6712  return ICsWithTypeInfoCountField::decode(current);
6713 }
6714 
6715 
6717  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
6718  int new_count = ICsWithTypeInfoCountField::decode(value) + delta;
6719  // We can get negative count here when the type-feedback info is
6720  // shared between two code objects. The can only happen when
6721  // the debugger made a shallow copy of code object (see Heap::CopyCode).
6722  // Since we do not optimize when the debugger is active, we can skip
6723  // this counter update.
6724  if (new_count >= 0) {
6725  new_count &= ICsWithTypeInfoCountField::kMask;
6726  value = ICsWithTypeInfoCountField::update(value, new_count);
6727  WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
6728  }
6729 }
6730 
6731 
6735 }
6736 
6737 
6739  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
6740  int checksum = OwnTypeChangeChecksum::decode(value);
6741  checksum = (checksum + 1) % (1 << kTypeChangeChecksumBits);
6742  value = OwnTypeChangeChecksum::update(value, checksum);
6743  // Ensure packed bit field is in Smi range.
6744  if (value > Smi::kMaxValue) value |= Smi::kMinValue;
6745  if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
6746  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
6747 }
6748 
6749 
6751  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
6752  int mask = (1 << kTypeChangeChecksumBits) - 1;
6753  value = InlinedTypeChangeChecksum::update(value, checksum & mask);
6754  // Ensure packed bit field is in Smi range.
6755  if (value > Smi::kMaxValue) value |= Smi::kMinValue;
6756  if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
6757  WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
6758 }
6759 
6760 
6762  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
6763  return OwnTypeChangeChecksum::decode(value);
6764 }
6765 
6766 
6768  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
6769  int mask = (1 << kTypeChangeChecksumBits) - 1;
6770  return InlinedTypeChangeChecksum::decode(value) == (checksum & mask);
6771 }
6772 
6773 
6774 ACCESSORS(TypeFeedbackInfo, feedback_vector, FixedArray,
6775  kFeedbackVectorOffset)
6776 
6777 
6778 SMI_ACCESSORS(AliasedArgumentsEntry, aliased_context_slot, kAliasedContextSlot)
6779 
6780 
// Pushes this object onto the isolate's LIFO list of live Relocatables
// (threaded through prev_), so it can be visited/relocated later.
Relocatable::Relocatable(Isolate* isolate) {
  isolate_ = isolate;
  prev_ = isolate->relocatable_top();
  isolate->set_relocatable_top(this);
}
6786 
6787 
// Pops this object from the isolate's Relocatable list. Destruction must
// happen in strict LIFO order, which the assert enforces.
Relocatable::~Relocatable() {
  ASSERT_EQ(isolate_->relocatable_top(), this);
  isolate_->set_relocatable_top(prev_);
}
6792 
6793 
6795  return map->instance_size();
6796 }
6797 
6798 
// Presents the foreign-address slot to the visitor as an external
// reference (it holds a non-heap pointer, not a tagged object).
void Foreign::ForeignIterateBody(ObjectVisitor* v) {
  v->VisitExternalReference(
      reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
}
6803 
6804 
6805 template<typename StaticVisitor>
6807  StaticVisitor::VisitExternalReference(
6808  reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
6809 }
6810 
6811 
6814  v->VisitExternalAsciiString(
6815  reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
6816 }
6817 
6818 
6819 template<typename StaticVisitor>
6822  StaticVisitor::VisitExternalAsciiString(
6823  reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
6824 }
6825 
6826 
6829  v->VisitExternalTwoByteString(
6830  reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
6831 }
6832 
6833 
6834 template<typename StaticVisitor>
6837  StaticVisitor::VisitExternalTwoByteString(
6838  reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
6839 }
6840 
6841 
6842 template<int start_offset, int end_offset, int size>
6844  HeapObject* obj,
6845  ObjectVisitor* v) {
6846  v->VisitPointers(HeapObject::RawField(obj, start_offset),
6847  HeapObject::RawField(obj, end_offset));
6848 }
6849 
6850 
6851 template<int start_offset>
6853  int object_size,
6854  ObjectVisitor* v) {
6855  v->VisitPointers(HeapObject::RawField(obj, start_offset),
6856  HeapObject::RawField(obj, object_size));
6857 }
6858 
6859 
6860 #undef TYPE_CHECKER
6861 #undef CAST_ACCESSOR
6862 #undef INT_ACCESSORS
6863 #undef ACCESSORS
6864 #undef ACCESSORS_TO_SMI
6865 #undef SMI_ACCESSORS
6866 #undef BOOL_GETTER
6867 #undef BOOL_ACCESSORS
6868 #undef FIELD_ADDR
6869 #undef READ_FIELD
6870 #undef WRITE_FIELD
6871 #undef WRITE_BARRIER
6872 #undef CONDITIONAL_WRITE_BARRIER
6873 #undef READ_DOUBLE_FIELD
6874 #undef WRITE_DOUBLE_FIELD
6875 #undef READ_INT_FIELD
6876 #undef WRITE_INT_FIELD
6877 #undef READ_INTPTR_FIELD
6878 #undef WRITE_INTPTR_FIELD
6879 #undef READ_UINT32_FIELD
6880 #undef WRITE_UINT32_FIELD
6881 #undef READ_SHORT_FIELD
6882 #undef WRITE_SHORT_FIELD
6883 #undef READ_BYTE_FIELD
6884 #undef WRITE_BYTE_FIELD
6885 
6886 } } // namespace v8::internal
6887 
6888 #endif // V8_OBJECTS_INL_H_
String * Operate(String *string, unsigned *, int32_t *, unsigned *)
Definition: objects-inl.h:3115
MUST_USE_RESULT MaybeObject * CopyConstantPoolArray(ConstantPoolArray *src)
Definition: heap-inl.h:212
byte * Address
Definition: globals.h:186
static int SizeOf(Map *map, HeapObject *object)
Definition: objects-inl.h:6794
#define WRITE_BYTE_FIELD(p, offset, value)
Definition: objects-inl.h:1195
void FastPropertyAtPut(int index, Object *value)
Definition: objects-inl.h:1977
static const double kPretenureRatio
Definition: objects.h:8261
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter NULL
Definition: flags.cc:269
Object * type_feedback_info()
Definition: objects-inl.h:5819
static Handle< Object > GetElementWithReceiver(Isolate *isolate, Handle< Object > object, Handle< Object > receiver, uint32_t index)
Definition: objects.cc:965
#define HAS_FAILURE_TAG(value)
Definition: v8globals.h:382
#define CHECK_NOT_EMPTY_HANDLE(isolate, call)
Definition: isolate.h:145
void SetBackPointer(Object *value, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
Definition: objects-inl.h:4928
static const int kBitFieldOffset
Definition: objects.h:6461
#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode)
Definition: objects-inl.h:1108
STATIC_CHECK((kStringRepresentationMask|kStringEncodingMask)==Internals::kFullStringRepresentationMask)
static bool IsMatch(uint32_t key, Object *other)
Definition: objects-inl.h:6489
void set_prohibits_overwriting(bool value)
Definition: objects-inl.h:6412
static void EnsureCanContainElements(Handle< JSObject > object, Object **elements, uint32_t count, EnsureElementsMode mode)
Definition: objects-inl.h:1603
Code * builtin(Name name)
Definition: builtins.h:322
static const int kTypeOffset
Definition: objects.h:9597
#define IC_KIND_LIST(V)
Definition: objects.h:5194
bool IsDetachedFrom(GlobalObject *global)
Definition: objects-inl.h:6337
#define SLOW_ASSERT(condition)
Definition: checks.h:306
int allow_osr_at_loop_nesting_level()
Definition: objects-inl.h:4442
const intptr_t kSmiTagMask
Definition: v8.h:5480
void set_deopt_dependent_code(bool deopt)
Definition: objects.h:8326
static ElementType from_double(double value)
Definition: objects-inl.h:3805
static const int kVisitorIdOffset
Definition: objects.h:6456
static const int kExternalAsciiRepresentationTag
Definition: v8.h:5566
static const int kCodeOffset
Definition: objects.h:7103
void AddCharacters(const Char *chars, int len)
Definition: objects-inl.h:6238
static bool is_the_hole_nan(double value)
Definition: objects-inl.h:2156
bool IsTrue() const
Definition: api.cc:2347
FixedArray * function_bindings()
Definition: objects-inl.h:5649
static ConstantPoolArray * cast(Object *obj)
MUST_USE_RESULT MaybeObject * get(int index)
Definition: objects-inl.h:3610
static const int kCodeEntryOffset
Definition: objects.h:7518
bool HasElementsTransition()
Definition: objects-inl.h:4802
static V8_INLINE int SmiValue(internal::Object *value)
Definition: v8.h:5607
static int EntryToIndex(int entry)
Definition: objects.h:3754
void EvictCandidate(SharedFunctionInfo *shared_info)
static ByteArray * FromDataStartAddress(Address address)
Definition: objects-inl.h:3499
void set_all_can_write(bool value)
Definition: objects-inl.h:6402
static const int kCacheSizeIndex
Definition: objects.h:4327
void set_constant_pool(Object *constant_pool)
Definition: objects-inl.h:4594
static const int kValueOffset
Definition: objects.h:9547
void set_has_deoptimization_support(bool value)
Definition: objects-inl.h:4404
static uint32_t Hash(uint32_t key)
Definition: objects-inl.h:6495
Object ** RawFieldOfElementAt(int index)
Definition: objects.h:3073
Utf8StringKey(Vector< const char > string, uint32_t seed)
Definition: objects-inl.h:564
MUST_USE_RESULT MaybeObject * CopyFixedDoubleArray(FixedDoubleArray *src)
Definition: heap-inl.h:207
void set(int index, Object *value)
Definition: objects-inl.h:2147
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths true
Definition: flags.cc:208
int GetInternalFieldOffset(int index)
Definition: objects-inl.h:1918
static ElementType from_int(int value)
Definition: objects-inl.h:3791
static bool get(Smi *smi, int bit_position)
Definition: objects.h:10803
uint32_t bit_field3()
Definition: objects-inl.h:4762
void PrintF(const char *format,...)
Definition: v8utils.cc:40
static const int kSize
Definition: objects.h:7671
#define ASSERT_TAG_ALIGNED(address)
Definition: v8checks.h:57
bool IsOneByteEqualTo(Vector< const uint8_t > str)
Definition: objects.cc:9002
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf map
Definition: flags.cc:350
void set(int index, uint16_t value)
Definition: objects-inl.h:3615
void set_all_can_read(bool value)
Definition: objects-inl.h:6392
virtual MaybeObject * AsObject(Heap *heap)
void set_function_with_prototype(bool value)
Definition: objects-inl.h:4066
static double hole_nan_as_double()
Definition: objects-inl.h:2161
bool InNewSpace(Object *object)
Definition: heap-inl.h:307
unsigned stack_slots()
Definition: objects-inl.h:4468
kInstanceClassNameOffset kNeedsAccessCheckBit kRemovePrototypeBit is_expression
Definition: objects-inl.h:5123
static const int kTransitionsOrBackPointerOffset
Definition: objects.h:6433
static String * cast(Object *obj)
#define FATAL(msg)
Definition: checks.h:48
#define READ_DOUBLE_FIELD(p, offset)
Definition: objects-inl.h:1118
#define READ_INTPTR_FIELD(p, offset)
Definition: objects-inl.h:1162
static const int kAllowOSRAtLoopNestingLevelOffset
Definition: objects.h:5616
void copy(int from, int to)
Definition: objects-inl.h:4282
kInstanceClassNameOffset needs_access_check
Definition: objects-inl.h:5115
bool Equals(Name *other)
Definition: objects-inl.h:2954
MaybeObject * TryFlatten(PretenureFlag pretenure=NOT_TENURED)
Definition: objects-inl.h:2978
const uint32_t kTwoByteStringTag
Definition: objects.h:610
const int kFailureTypeTagSize
Definition: objects.h:1712
static const uint32_t kExponentMask
Definition: objects.h:1981
void set_access_flags(v8::AccessControl access_control)
Definition: objects-inl.h:6434
bool function_with_prototype()
Definition: objects-inl.h:4071
void set_opt_count(int opt_count)
Definition: objects-inl.h:5449
static uint32_t Hash(Object *key)
Definition: objects-inl.h:6579
static DescriptorArray * cast(Object *obj)
static Failure * InternalError()
Definition: objects-inl.h:1239
virtual MaybeObject * AsObject(Heap *heap)
Definition: objects.cc:13755
static int SizeOf(Map *map, HeapObject *object)
Definition: objects.h:3107
static const int kFlagsOffset
Definition: objects.h:5592
Isolate * isolate()
Definition: heap-inl.h:624
int unused_property_fields()
Definition: objects-inl.h:4022
void set_length(Smi *length)
Definition: objects-inl.h:6628
void set_javascript_builtin(Builtins::JavaScript id, Object *value)
Definition: objects-inl.h:5677
Object * InObjectPropertyAt(int index)
Definition: objects-inl.h:1996
static const int kStorage2Offset
Definition: objects.h:8210
static Smi * FromInt(int value)
Definition: objects-inl.h:1209
bool IsFastObjectElementsKind(ElementsKind kind)
void IteratePointer(ObjectVisitor *v, int offset)
Definition: objects-inl.h:1391
void set_number_of_entries(DependencyGroup group, int value)
Definition: objects-inl.h:4242
MUST_USE_RESULT MaybeObject * ToSmi()
Definition: objects-inl.h:1042
static Flags ComputeHandlerFlags(Kind handler_kind, StubType type=NORMAL, InlineCacheHolderFlag holder=OWN_MAP)
Definition: objects-inl.h:4624
Map * elements_transition_map()
Definition: objects-inl.h:4813
void set_second(String *second, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
Definition: objects-inl.h:3233
static Object * GetObjectFromEntryAddress(Address location_of_address)
Definition: objects-inl.h:4673
void set_memento_create_count(int count)
Definition: objects.h:8344
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size)
Object * GetHash()
Definition: objects.cc:1070
kInstanceClassNameOffset kNeedsAccessCheckBit remove_prototype
Definition: objects-inl.h:5119
int NumberOfOwnDescriptors()
Definition: objects.h:6174
kInstanceClassNameOffset kNeedsAccessCheckBit kRemovePrototypeBit start_position_and_type
Definition: objects-inl.h:5123
static MemoryChunk * FromAddress(Address a)
Definition: spaces.h:305
static const int kDataOffset
Definition: objects.h:4970
void VerifyApiCallResultType()
Definition: objects-inl.h:2104
static HeapObject * cast(Object *obj)
static bool HasLocalElement(Handle< JSReceiver > object, uint32_t index)
Definition: objects-inl.h:6366
MaybeObject * AllocateNewStorageFor(Heap *heap, Representation representation)
Definition: objects-inl.h:279
#define READ_UINT32_FIELD(p, offset)
Definition: objects-inl.h:1168
static Handle< T > cast(Handle< S > that)
Definition: handles.h:75
void set_function_bindings(FixedArray *bindings)
Definition: objects-inl.h:5655
static const byte kArgumentMarker
Definition: objects.h:9506
static const int kMaxHashCalcLength
Definition: objects.h:8926
bool is_access_check_needed()
Definition: objects-inl.h:4085
void set_pre_allocated_property_fields(int value)
Definition: objects-inl.h:4004
static const byte kUndefined
Definition: objects.h:9507
static const int kConstructionCountOffset
Definition: objects.h:7189
T Max(T a, T b)
Definition: utils.h:227
String * ContinueOperation(int32_t *type_out, unsigned *length_out)
Definition: objects-inl.h:3359
const int kVariableSizeSentinel
Definition: objects.h:314
static void IterateBody(HeapObject *obj, int object_size, ObjectVisitor *v)
Definition: objects-inl.h:6852
void Get(int descriptor_number, Descriptor *desc)
Definition: objects-inl.h:2688
static const int kFastPropertiesSoftLimit
Definition: objects.h:2746
void ZapPrototypeTransitions()
Definition: objects.cc:11487
PropertyAttributes property_attributes()
Definition: objects-inl.h:6417
static const int kSize
Definition: objects.h:9950
static const int kStackSlotsBitCount
Definition: objects.h:5630
HeapObject * UncheckedPrototypeTransitions()
static const int kJSBuiltinsCount
Definition: objects.h:7667
static ExternalTwoByteString * cast(Object *obj)
static const int kFullCodeFlags
Definition: objects.h:5610
void ZapTransitions()
Definition: objects.cc:11474
int32_t uc32
Definition: globals.h:310
SeededNumberDictionary * element_dictionary()
Definition: objects-inl.h:6148
static Map * cast(Object *obj)
void set_has_debug_break_slots(bool value)
Definition: objects-inl.h:4419
static void EnsureCanContainHeapObjectElements(Handle< JSObject > obj)
Definition: objects-inl.h:1590
bool has_non_instance_prototype()
Definition: objects-inl.h:4061
kSerializedDataOffset Object
Definition: objects-inl.h:5016
SubStringKey(Handle< String > string, int from, int length)
Definition: objects-inl.h:504
static StubType ExtractTypeFromFlags(Flags flags)
Definition: objects-inl.h:4646
static const byte kTheHole
Definition: objects.h:9504
int32_t get_int32_entry(int index)
Definition: objects-inl.h:2306
static const int kExponentBias
Definition: objects.h:1985
bool attached_to_shared_function_info()
Definition: objects-inl.h:4111
bool is_optimized_code()
Definition: objects.h:5311
Builtins * builtins()
Definition: isolate.h:948
int int32_t
Definition: unicode.cc:47
static Object * RawUninitializedSentinel(Heap *heap)
Definition: objects-inl.h:6691
void set_context(Object *context)
Definition: objects-inl.h:5573
#define READ_FIELD(p, offset)
Definition: objects-inl.h:1095
static void EnsureSize(Handle< JSArray > array, int minimum_size_of_backing_fixed_array)
Definition: objects-inl.h:6610
void set(int index, double value)
Definition: objects-inl.h:3691
uint32_t get_scalar(int index)
Definition: objects-inl.h:3641
void Set(int descriptor_number, Descriptor *desc, const WhitenessWitness &)
Definition: objects-inl.h:2695
static const int kSize
Definition: objects.h:9882
bool SameValue(Object *other)
Definition: objects.cc:1102
void Add(Handle< Map > map_to_find, Handle< Object > obj_to_replace)
Definition: objects-inl.h:4697
static SeqOneByteString * cast(Object *obj)
#define MAKE_STRUCT_CAST(NAME, Name, name)
Definition: objects-inl.h:2923
void set_is_crankshafted(bool value)
Definition: objects-inl.h:4348
void set_object_at(int i, Object *object)
Definition: objects-inl.h:4262
WriteBarrierMode GetWriteBarrierMode(const DisallowHeapAllocation &promise)
Definition: objects-inl.h:2350
static Failure * Exception()
Definition: objects-inl.h:1244
static const int kExternalPointerOffset
Definition: objects.h:4702
static MUST_USE_RESULT MaybeObject * AsObject(Heap *heap, Name *key)
Definition: objects-inl.h:6540
static const int kSize
Definition: objects.h:7568
static Foreign * cast(Object *obj)
MUST_USE_RESULT MaybeObject * GetElementsTransitionMapSlow(ElementsKind elements_kind)
Definition: objects.cc:3322
bool marked_for_deoptimization()
Definition: objects-inl.h:4553
void set_map(Map *value)
Definition: objects-inl.h:1341
const uint32_t kIsNotInternalizedMask
Definition: objects.h:603
FixedArray * literals()
Definition: objects-inl.h:5637
static bool IsMatch(Object *key, Object *other)
Definition: objects-inl.h:6573
static Handle< Object > GetElementNoExceptionThrown(Isolate *isolate, Handle< Object > object, uint32_t index)
Definition: objects-inl.h:1071
virtual bool IsMatch(Object *string)
Definition: objects-inl.h:567
static V8_INLINE bool HasHeapObjectTag(internal::Object *value)
Definition: v8.h:5602
#define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset)
static const int kIsAccessCheckNeeded
Definition: objects.h:6474
uint32_t Flags
Definition: objects.h:5184
void set(int index, uint32_t value)
Definition: objects-inl.h:3653
#define TYPED_ARRAY_TYPE_CHECKER(Type, type, TYPE, ctype, size)
Definition: objects-inl.h:623
bool IsTwoByteEqualTo(Vector< const uc16 > str)
Definition: objects.cc:9018
byte * instruction_end()
Definition: objects-inl.h:5862
uint16_t SlicedStringGet(int index)
Definition: objects.cc:8629
static Smi * FromIntptr(intptr_t value)
Definition: objects-inl.h:1215
#define READ_BYTE_FIELD(p, offset)
Definition: objects-inl.h:1192
static const int kSize
Definition: objects.h:7922
bool is_migration_target()
Definition: objects-inl.h:4177
void change_ic_with_type_info_count(int count)
Definition: objects-inl.h:6716
#define ASSERT(condition)
Definition: checks.h:329
bool TooManyFastProperties(StoreFromKeyed store_mode=MAY_BE_STORE_FROM_KEYED)
Definition: objects-inl.h:2044
void set(int index, int16_t value)
Definition: objects-inl.h:3596
void set_profiler_ticks(int ticks)
Definition: objects-inl.h:4461
#define READ_INT32_FIELD(p, offset)
Definition: objects-inl.h:1174
static Handle< Object > GetPropertyWithReceiver(Handle< Object > object, Handle< Object > receiver, Handle< Name > name, PropertyAttributes *attributes)
Definition: objects.cc:159
static Handle< Object > MegamorphicSentinel(Isolate *isolate)
Definition: objects-inl.h:6680
const int kPointerSizeLog2
Definition: globals.h:281
static const int kKindSpecificFlags2Offset
Definition: objects.h:5594
void set_start_position(int start_position)
Definition: objects-inl.h:5331
#define WRITE_INT_FIELD(p, offset, value)
Definition: objects-inl.h:1159
static const int kInstanceSizeOffset
Definition: objects.h:6448
unsigned short uint16_t
Definition: unicode.cc:46
void set_optimizable(bool value)
Definition: objects-inl.h:4391
#define READ_INT64_FIELD(p, offset)
Definition: objects-inl.h:1180
#define WRITE_UINT32_FIELD(p, offset, value)
Definition: objects-inl.h:1171
static Context * cast(Object *context)
Definition: contexts.h:244
static int OffsetOfFunctionWithId(Builtins::JavaScript id)
Definition: objects.h:7674
static uint32_t HashForObject(Object *key, Object *object)
Definition: objects-inl.h:6559
static const int kSafepointTableOffsetBitCount
Definition: objects.h:5660
bool back_edges_patched_for_osr()
Definition: objects-inl.h:4517
static const int kSourceIndex
Definition: objects.h:7926
static const int kForeignAddressOffset
Definition: objects.h:10009
static V8_INLINE bool IsValidSmi(intptr_t value)
Definition: v8.h:5615
#define WRITE_INTPTR_FIELD(p, offset, value)
Definition: objects-inl.h:1165
const uint32_t kStringRepresentationMask
Definition: objects.h:615
static const int kProfilerTicksOffset
Definition: objects.h:5617
bool NonFailureIsHeapObject()
Definition: objects-inl.h:173
int SizeFromMap(Map *map)
Definition: objects-inl.h:3946
void set_compiled_optimizable(bool value)
Definition: objects-inl.h:4434
Object * DataAt(int index)
Definition: objects-inl.h:5986
Handle< Object > NewNumber(double value, PretenureFlag pretenure=NOT_TENURED)
Definition: factory.cc:998
#define CHECK(condition)
Definition: checks.h:75
static const int kSize
Definition: objects.h:9789
Object ** GetKeySlot(int descriptor_number)
Definition: objects-inl.h:2585
bool IsInternalError() const
Definition: objects-inl.h:1227
ACCESSORS(AccessorInfo, expected_receiver_type, Object, kExpectedReceiverTypeOffset) ACCESSORS(DeclaredAccessorDescriptor
Name * GetSortedKey(int descriptor_number)
Definition: objects-inl.h:2612
bool HasSpecificClassOf(String *name)
Definition: objects-inl.h:1055
Name * GetKey(int transition_number)
static const int kUnusedPropertyFieldsOffset
Definition: objects.h:6460
const bool FLAG_enable_slow_asserts
Definition: checks.h:307
int number_of_entries(DependencyGroup group)
Definition: objects-inl.h:4236
void set_first(String *first, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
Definition: objects-inl.h:3217
void ReplaceCode(Code *code)
Definition: objects-inl.h:5545
void SetRepresentation(int descriptor_number, Representation representation)
Definition: objects-inl.h:2623
void set_map_and_elements(Map *map, FixedArrayBase *value, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
Definition: objects-inl.h:1701
Factory * factory()
Definition: isolate.h:995
bool IsFastElementsKind(ElementsKind kind)
static ExternalAsciiString * cast(Object *obj)
MUST_USE_RESULT MaybeObject * EnsureWritableFastElements()
Definition: objects-inl.h:6124
static MUST_USE_RESULT MaybeObject * NewWith(SimpleTransitionFlag flag, Name *key, Map *target, Object *back_pointer)
Definition: transitions.cc:72
PropertyAttributes
void set_the_hole(int index)
Definition: objects-inl.h:2413
void init_back_pointer(Object *undefined)
Definition: objects-inl.h:4922
void set_foreign_address(Address value)
Definition: objects-inl.h:5724
MUST_USE_RESULT MaybeObject * Copy()
Definition: objects-inl.h:6657
static const int kContextOffset
Definition: objects.h:7523
MUST_USE_RESULT MaybeObject * get(int index)
Definition: objects-inl.h:3522
void SeqTwoByteStringSet(int index, uint16_t value)
Definition: objects-inl.h:3176
static PropertyAttributes GetElementAttribute(Handle< JSReceiver > object, uint32_t index)
Definition: objects-inl.h:6321
static Handle< Map > ExpectedTransitionTarget(Handle< Map > map)
Definition: objects-inl.h:1798
static Code * cast(Object *obj)
#define CAST_ACCESSOR(type)
Definition: objects-inl.h:83
const uint32_t kShortExternalStringMask
Definition: objects.h:643
#define WRITE_INT64_FIELD(p, offset, value)
Definition: objects-inl.h:1183
Handle< Object > get_as_handle(int index)
Definition: objects-inl.h:2198
bool AsArrayIndex(uint32_t *index)
Definition: objects-inl.h:6272
kInstanceClassNameOffset BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype, kHiddenPrototypeBit) BOOL_ACCESSORS(FunctionTemplateInfo
void set_compilation_state(CompilationState state)
Definition: objects-inl.h:5079
kSerializedDataOffset kPrototypeTemplateOffset kIndexedPropertyHandlerOffset kInstanceCallHandlerOffset internal_field_count
Definition: objects-inl.h:5034
Object * GetValue(int descriptor_number)
Definition: objects-inl.h:2646
static bool HasElement(Handle< JSReceiver > object, uint32_t index)
Definition: objects-inl.h:6356
static const int kPretenureMinimumCreated
Definition: objects.h:8262
BOOL_GETTER(SharedFunctionInfo, compiler_hints, optimization_disabled, kOptimizationDisabled) void SharedFunctionInfo
Definition: objects-inl.h:5241
static const int kSize
Definition: objects.h:7371
static Object ** RawField(HeapObject *obj, int offset)
Definition: objects-inl.h:1199
const int kIntSize
Definition: globals.h:263
TransitionArray * unchecked_transition_array()
Definition: objects-inl.h:4945
ConstantPoolArray * constant_pool()
Definition: objects-inl.h:4589
static Smi * cast(Object *object)
void set_literals(FixedArray *literals)
Definition: objects-inl.h:5643
static void IterateBody(HeapObject *obj, ObjectVisitor *v)
Definition: objects-inl.h:6843
static uint32_t Hash(Object *key)
Definition: objects-inl.h:6553
void ClearCodeCache(Heap *heap)
Definition: objects-inl.h:6600
Object ** GetDescriptorStartSlot(int descriptor_number)
Definition: objects-inl.h:2591
static const int kZeroHash
Definition: objects.h:8520
static const int kHeaderSize
Definition: objects.h:1891
FixedTypedArrayBase * EmptyFixedTypedArrayForMap(Map *map)
Definition: heap.cc:3813
Code * javascript_builtin_code(Builtins::JavaScript id)
Definition: objects-inl.h:5685
int GetInObjectPropertyOffset(int index)
Definition: objects-inl.h:1991
kInstanceClassNameOffset flag
Definition: objects-inl.h:5115
bool contains(byte *pc)
Definition: objects-inl.h:5892
int isnan(double x)
#define FIXED_TYPED_ELEMENTS_CHECK(Type, type, TYPE, ctype, size)
Definition: objects-inl.h:6100
Object * GetInternalField(int index)
Definition: objects-inl.h:1924
void set_dictionary_map(bool value)
Definition: objects-inl.h:4125
static void TransitionElementsKind(Handle< JSObject > object, ElementsKind to_kind)
Definition: objects.cc:12779
static const int kSize
Definition: objects.h:10077
bool has_instance_call_handler()
Definition: objects-inl.h:4157
void set_opt_count_and_bailout_reason(int value)
MUST_USE_RESULT MaybeObject * CopyFixedArrayWithMap(FixedArray *src, Map *map)
Definition: heap.cc:5198
void set_has_instance_call_handler()
Definition: objects-inl.h:4152
uint8_t byte
Definition: globals.h:185
GlobalObject * global_object()
Definition: contexts.h:388
Object * InObjectPropertyAtPut(int index, Object *value, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
Definition: objects-inl.h:2002
uint16_t ExternalTwoByteStringGet(int index)
Definition: objects-inl.h:3304
Map * GetTransition(int transition_index)
Definition: objects-inl.h:4840
MUST_USE_RESULT MaybeObject * AllocateHeapNumber(double value, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:2969
static void SetContent(Handle< JSArray > array, Handle< FixedArrayBase > storage)
Definition: objects-inl.h:6641
static const int kFirstOffset
Definition: objects.h:9165
void IterateNextCodeLink(ObjectVisitor *v, int offset)
Definition: objects-inl.h:1396
void set_null(int index)
Definition: objects-inl.h:2404
ByteArray * unchecked_relocation_info()
Definition: objects-inl.h:5872
static PropertyAttributes GetLocalElementAttribute(Handle< JSReceiver > object, uint32_t index)
Definition: objects-inl.h:6376
void LookupTransition(JSObject *holder, Name *name, LookupResult *result)
Definition: objects-inl.h:2570
MUST_USE_RESULT MaybeObject * get(int index)
Definition: objects-inl.h:3572
static const int kKindOffset
Definition: objects.h:9498
V8_INLINE bool IsNull() const
Definition: v8.h:6247
static void NoWriteBarrierSet(FixedArray *array, int index, Object *value)
Definition: objects-inl.h:2384
#define EXTERNAL_ELEMENTS_CHECK(Type, type, TYPE, ctype, size)
Definition: objects-inl.h:6079
static const int kParentOffset
Definition: objects.h:9209
static Handle< String > ExpectedTransitionKey(Handle< Map > map)
Definition: objects-inl.h:1783
Object ** GetDescriptorEndSlot(int descriptor_number)
Definition: objects-inl.h:2596
void set_back_edges_patched_for_osr(bool value)
Definition: objects-inl.h:4524
static const int kTransitionSize
Definition: transitions.h:168
const uint64_t kHoleNanInt64
Definition: v8globals.h:458
#define READ_SHORT_FIELD(p, offset)
Definition: objects-inl.h:1186
#define FIELD_ADDR(p, offset)
Definition: objects-inl.h:1092
void set_opt_reenable_tries(int value)
Definition: objects-inl.h:5439
#define UNREACHABLE()
Definition: checks.h:52
static const int kStartPositionShift
Definition: objects.h:7207
void SetEntryCounts(int number_of_int64_entries, int number_of_code_ptr_entries, int number_of_heap_ptr_entries, int number_of_int32_entries)
Definition: objects-inl.h:2263
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes number of stack frames inspected by the profiler percentage of ICs that must have type info to allow optimization extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long mode(MIPS only)") DEFINE_string(expose_natives_as
MUST_USE_RESULT MaybeObject * get(int index)
Definition: objects-inl.h:3553
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object size
Definition: flags.cc:211
T * start() const
Definition: utils.h:426
Object * get_heap_ptr_entry(int index)
Definition: objects-inl.h:2299
static uint32_t ComputeUtf8Hash(Vector< const char > chars, uint32_t seed, int *utf16_length_out)
Definition: objects.cc:9214
bool IsUtf8EqualTo(Vector< const char > str, bool allow_prefix_match=false)
Definition: objects.cc:8972
static SeededNumberDictionary * cast(Object *obj)
Definition: objects.h:4104
static JSGlobalProxy * cast(Object *obj)
void Append(Descriptor *desc, const WhitenessWitness &)
Definition: objects-inl.h:2723
virtual void Validate(JSObject *obj)=0
void set_ic_total_count(int count)
Definition: objects-inl.h:6702
static const int kDescriptorLengthOffset
Definition: objects.h:3498
Vector< const Char > string_
Definition: objects-inl.h:482
STATIC_ASSERT(sizeof(CPURegister)==sizeof(Register))
MUST_USE_RESULT MaybeObject * SetValue(uint32_t index, Object *value)
Definition: objects-inl.h:3837
static const int kExponentShift
Definition: objects.h:1986
MUST_USE_RESULT MaybeObject * get(int index)
Definition: objects-inl.h:3591
static Cell * cast(Object *obj)
bool IsStringObjectWithCharacterAt(uint32_t index)
Definition: objects-inl.h:2091
static const int kValueOffset
Definition: objects.h:1971
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes number of stack frames inspected by the profiler percentage of ICs that must have type info to allow optimization extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long expose natives in global object expose freeBuffer extension expose gc extension under the specified name expose externalize string extension number of stack frames to capture disable builtin natives files print name of functions for which code is generated use random jit cookie to mask large constants trace lazy optimization use adaptive optimizations always try to OSR functions trace optimize function deoptimization minimum length for automatic enable preparsing maximum number of optimization attempts before giving up cache prototype transitions trace debugging JSON request response trace out of bounds accesses to external arrays trace_js_array_abuse automatically set the debug break flag when debugger commands are in the queue abort by crashing maximum length of function source code printed in a stack trace max size of the new max size of the old max size of executable always perform global GCs print one trace line following each garbage collection do not print trace line after scavenger collection print statistics of the maximum memory committed for the heap in only print modified registers Don t break for ASM_UNIMPLEMENTED_BREAK macros print stack trace when an illegal exception is thrown randomize hashes to avoid predictable hash 
Fixed seed to use to hash property Print the time it takes to deserialize the snapshot testing_bool_flag testing_int_flag string flag tmp file in which to serialize heap Print the time it takes to lazily compile hydrogen code stubs concurrent_recompilation concurrent_sweeping Print usage including flags
Definition: flags.cc:665
const int kFailureTagSize
Definition: v8globals.h:63
String * GetUnderlying()
Definition: objects-inl.h:3036
const uint32_t kHoleNanUpper32
Definition: v8globals.h:454
static InlineCacheHolderFlag ExtractCacheHolderFromFlags(Flags flags)
Definition: objects-inl.h:4651
const int kDoubleSize
Definition: globals.h:266
void set_undefined(int index)
Definition: objects-inl.h:2394
void set_migration_target(bool value)
Definition: objects-inl.h:4172
static SlicedString * cast(Object *obj)
static const int kHashNotComputedMask
Definition: objects.h:8637
MUST_USE_RESULT MaybeObject * SetPrototypeTransitions(FixedArray *prototype_transitions)
Definition: objects-inl.h:4866
MUST_USE_RESULT MaybeObject * get(int index)
Definition: objects-inl.h:3667
static const int kDataIndex
Definition: objects.h:7928
static const int kDontAdaptArgumentsSentinel
Definition: objects.h:7098
int pre_allocated_property_fields()
Definition: objects-inl.h:3933
static uint32_t SeededHash(uint32_t key, uint32_t seed)
Definition: objects-inl.h:6506
static const int kScopeInfoOffset
Definition: objects.h:7105
void SetNumberOfProtoTransitions(int value)
Definition: objects.h:6144
#define WRITE_BARRIER(heap, object, offset, value)
Definition: objects-inl.h:1101
double get_int64_entry_as_double(int index)
Definition: objects-inl.h:2284
static ExtraICState ExtractExtraICStateFromFlags(Flags flags)
Definition: objects-inl.h:4641
#define HAS_SMI_TAG(value)
Definition: v8globals.h:379
virtual uint32_t Hash()
Definition: objects-inl.h:512
static uint32_t update(uint32_tprevious, intvalue)
Definition: utils.h:296
Context * native_context()
Definition: contexts.cc:67
void InitializeBody(int object_size)
Definition: objects-inl.h:2064
#define MAKE_STRUCT_PREDICATE(NAME, Name, name)
Definition: objects-inl.h:971
int64_t get_int64_entry(int index)
Definition: objects-inl.h:2278
static const int kFirstOffset
Definition: objects.h:3500
static Failure * RetryAfterGC()
Definition: objects-inl.h:1255
void IteratePointers(ObjectVisitor *v, int start, int end)
Definition: objects-inl.h:1385
int SeqTwoByteStringSize(InstanceType instance_type)
Definition: objects-inl.h:3182
Object * GetConstant(int descriptor_number)
Definition: objects-inl.h:2670
static const int kNotFound
Definition: transitions.h:145
static bool IsValid(intptr_t value)
Definition: objects-inl.h:1278
void set_resource(const Resource *buffer)
Definition: objects-inl.h:3258
static Failure * cast(MaybeObject *object)
Definition: objects-inl.h:667
#define CALL_HEAP_FUNCTION(ISOLATE, FUNCTION_CALL, TYPE)
Definition: heap-inl.h:679
const uint32_t kIsIndirectStringMask
Definition: objects.h:622
void set_inlined_type_change_checksum(int checksum)
Definition: objects-inl.h:6750
#define READ_INT_FIELD(p, offset)
Definition: objects-inl.h:1156
static const int kMinValue
Definition: objects.h:1679
bool ToArrayIndex(uint32_t *index)
Definition: objects-inl.h:2072
int get_int(int index)
Definition: objects-inl.h:3493
MUST_USE_RESULT MaybeObject * ResetElements()
Definition: objects-inl.h:1755
int SeqOneByteStringSize(InstanceType instance_type)
Definition: objects-inl.h:3187
ElementsKind GetElementsKind()
Definition: objects-inl.h:5999
byte * instruction_start()
Definition: objects-inl.h:5857
static Handle< Map > GetElementsTransitionMap(Handle< JSObject > object, ElementsKind to_kind)
Definition: objects.cc:3313
const int kPointerSize
Definition: globals.h:268
#define TYPE_CHECKER(type, instancetype)
Definition: objects-inl.h:76
static Oddball * cast(Object *obj)
static Address & Address_at(Address addr)
Definition: v8memory.h:79
intptr_t OffsetFrom(T x)
Definition: utils.h:120
int GetInObjectPropertyOffset(int index)
Definition: objects-inl.h:3938
int GetFieldIndex(int descriptor_number)
Definition: objects-inl.h:2664
void VisitTwoByteString(const uint16_t *chars, unsigned length)
Definition: objects-inl.h:3437
static Handle< Map > FindTransitionToField(Handle< Map > map, Handle< Name > key)
Definition: objects-inl.h:1805
const int kHeapObjectTag
Definition: v8.h:5473
bool IsAligned(T value, U alignment)
Definition: utils.h:211
static PropertyAttributes GetElementAttributeWithHandler(Handle< JSProxy > proxy, Handle< JSReceiver > receiver, uint32_t index)
Definition: objects.cc:3766
static DependentCode * cast(Object *object)
virtual uint32_t Hash()
Definition: objects-inl.h:571
void set_inobject_properties(int value)
Definition: objects-inl.h:3998
unsigned safepoint_table_offset()
Definition: objects-inl.h:4484
Object * RawFastPropertyAt(int index)
Definition: objects-inl.h:1964
const uint16_t * ExternalTwoByteStringGetData(unsigned start)
Definition: objects-inl.h:3310
static const int kBackingStoreOffset
Definition: objects.h:9877
void set(int index, float value)
Definition: objects-inl.h:3672
#define WRITE_SHORT_FIELD(p, offset, value)
Definition: objects-inl.h:1189
bool IsMoreGeneralElementsKindTransition(ElementsKind from_kind, ElementsKind to_kind)
AllocationSpace allocation_space() const
Definition: objects-inl.h:1232
STATIC_ASSERT(NUMBER_OF_KINDS<=16)
virtual uint32_t HashForObject(Object *other)
Definition: objects-inl.h:478
static const int kMaxRegularHeapObjectSize
Definition: spaces.h:820
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes number of stack frames inspected by the profiler percentage of ICs that must have type info to allow optimization extra verbose compilation tracing generate extra code(assertions) for debugging") DEFINE_bool(code_comments
static MUST_USE_RESULT MaybeObject * Allocate(Heap *heap, int at_least_space_for, PretenureFlag pretenure=NOT_TENURED)
const uint32_t kOneByteDataHintMask
Definition: objects.h:638
static Handle< Object > GetOrCreateIdentityHash(Handle< JSReceiver > object)
Definition: objects-inl.h:6342
int Search(T *array, Name *name, int valid_entries)
Definition: objects-inl.h:2510
OneByteStringKey(Vector< const uint8_t > str, uint32_t seed)
Definition: objects-inl.h:490
bool IsTwoByteRepresentationUnderneath()
Definition: objects-inl.h:349
static FunctionTemplateInfo * cast(Object *obj)
static const int kIsNotArrayIndexMask
Definition: objects.h:8638
static PropertyAttributes GetPropertyAttribute(Handle< JSReceiver > object, Handle< Name > name)
Definition: objects-inl.h:6311
virtual bool IsMatch(Object *string)
bool IsOneByteRepresentationUnderneath()
Definition: objects-inl.h:333
virtual uint32_t HashForObject(Object *other)
Definition: objects-inl.h:579
ExternalArray * EmptyExternalArrayForMap(Map *map)
Definition: heap.cc:3807
int length() const
Definition: utils.h:420
static const int kFirstDeoptEntryIndex
Definition: objects.h:5056
static const int kPropertiesOffset
Definition: objects.h:2755
static const byte kUninitialized
Definition: objects.h:9508
static const int kStorage1Offset
Definition: objects.h:8209
MUST_USE_RESULT MaybeObject * get(int index)
Definition: objects-inl.h:3629
T RoundUp(T x, intptr_t m)
Definition: utils.h:144
static bool IsMatch(Name *key, Object *other)
Definition: objects-inl.h:6522
PretenureFlag GetPretenureMode()
Definition: objects.cc:12650
static PropertyAttributes GetPropertyAttributeWithReceiver(Handle< JSReceiver > object, Handle< JSReceiver > receiver, Handle< Name > name)
Definition: objects.cc:4327
bool IsTwoByteRepresentation()
Definition: objects-inl.h:327
static NameDictionary * cast(Object *obj)
Definition: objects.h:4049
MUST_USE_RESULT MaybeObject * get(int index)
Definition: objects-inl.h:3686
uint16_t ExternalAsciiStringGet(int index)
Definition: objects-inl.h:3272
static Code * GetCodeFromTargetAddress(Address address)
Definition: objects-inl.h:4662
bool is_inline_cache_stub()
Definition: objects-inl.h:4568
static const int kInObjectPropertiesOffset
Definition: objects.h:6450
bool IsFastSmiElementsKind(ElementsKind kind)
virtual MaybeObject * AsObject(Heap *heap)
Definition: objects-inl.h:583
const uint32_t kShortExternalStringTag
Definition: objects.h:644
void SetNumberOfOwnDescriptors(int number)
Definition: objects.h:6178
ElementsKind FastSmiToObjectElementsKind(ElementsKind from_kind)
static String * Operate(String *, unsigned *, int32_t *, unsigned *)
Definition: objects-inl.h:3316
void RemoveOptimizedFunction(JSFunction *function)
Definition: contexts.cc:298
MUST_USE_RESULT MaybeObject * FastPropertyAt(Representation representation, int index)
Definition: objects-inl.h:1954
void set_kind(byte kind)
Definition: objects-inl.h:1827
void NotifyLeafMapLayoutChange()
Definition: objects-inl.h:4221
void set(int index, uint8_t value)
Definition: objects-inl.h:3577
int BinarySearch(T *array, Name *name, int low, int high, int valid_entries)
Definition: objects-inl.h:2450
StringRepresentationTag
Definition: objects.h:616
HeapObject * UncheckedPrototypeTransitions()
Definition: objects-inl.h:4953
static int SizeFor(int length)
Definition: objects.h:3152
static const int kElementsOffset
Definition: objects.h:2756
void set_start_position_and_type(int value)
void set_resource(const Resource *buffer)
Definition: objects-inl.h:3291
static Handle< Object > MonomorphicArraySentinel(Isolate *isolate, ElementsKind elements_kind)
Definition: objects-inl.h:6685
PropertyDetails GetDetails(int descriptor_number)
Definition: objects-inl.h:2652
static const int kIrregexpCaptureCountIndex
Definition: objects.h:7956
Object ** GetFirstElementAddress()
Definition: objects-inl.h:1425
bool Is(Object *obj)
kSerializedDataOffset kPrototypeTemplateOffset kIndexedPropertyHandlerOffset kInstanceCallHandlerOffset kInternalFieldCountOffset DependentCode
Definition: objects-inl.h:5047
static uint32_t HashForObject(uint32_t key, Object *object)
Definition: objects-inl.h:6500
void set_type_feedback_info(Object *value, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
Definition: objects-inl.h:5825
unsigned back_edge_table_offset()
Definition: objects-inl.h:4501
BuiltinFunctionId builtin_function_id()
Definition: objects-inl.h:5400
MUST_USE_RESULT MaybeObject * Copy()
Definition: objects-inl.h:6663
const uint32_t kStringTag
Definition: objects.h:598
byte * relocation_start()
Definition: objects-inl.h:5877
InlineCacheState ic_state()
Definition: objects-inl.h:4308
void Reset(String *string, unsigned offset=0)
Definition: objects-inl.h:3403
V8_INLINE bool IsUndefined() const
Definition: v8.h:6229
static const int kSize
Definition: objects.h:9807
void set_construction_count(int value)
Definition: objects-inl.h:5224
double get_scalar(int index)
Definition: objects-inl.h:2173
MUST_USE_RESULT MaybeObject * Copy()
Definition: objects-inl.h:6669
static const int kTypeFeedbackInfoOffset
Definition: objects.h:5586
uint16_t ConsStringGet(int index)
Definition: objects.cc:8598
StringCharacterStream(String *string, ConsStringIteratorOp *op, unsigned offset=0)
Definition: objects-inl.h:3394
DescriptorLookupCache * descriptor_lookup_cache()
Definition: isolate.h:896
const uint32_t kInternalizedTag
Definition: objects.h:605
void set_map_no_write_barrier(Map *value)
Definition: objects-inl.h:1352
static const int kRelocationInfoOffset
Definition: objects.h:5582
MUST_USE_RESULT MaybeObject * GetProperty(Name *key)
Definition: objects-inl.h:1081
void set(int index, ElementType value)
Definition: objects-inl.h:3774
static const int kSimpleTransitionIndex
Definition: transitions.h:156
virtual bool IsMatch(Object *string)
Definition: objects-inl.h:493
static const int kSize
Definition: objects.h:9768
void SetTransition(int transition_index, Map *target)
Definition: objects-inl.h:4835
BailoutReason DisableOptimizationReason()
Definition: objects-inl.h:5455
virtual bool IsMatch(Object *string)
Definition: objects-inl.h:553
static int SizeFor(int length)
Definition: objects.h:3067
#define T(name, string, precedence)
Definition: token.cc:48
static MUST_USE_RESULT MaybeObject * Allocate(Isolate *isolate, int number_of_transitions)
Definition: transitions.cc:47
static TransitionArray * cast(Object *obj)
Context * context()
Definition: isolate.h:557
static const int kMaxLoopNestingMarker
Definition: objects.h:5578
static bool HasProperty(Handle< JSReceiver > object, Handle< Name > name)
Definition: objects-inl.h:6291
bool IsFastSmiOrObjectElementsKind(ElementsKind kind)
static ElementsAccessor * ForKind(ElementsKind elements_kind)
Definition: elements.h:178
static SeqTwoByteString * cast(Object *obj)
bool HasTransitionArray()
Definition: objects-inl.h:4807
void SetDataAt(int index, Object *value)
Definition: objects-inl.h:5992
static bool IsMatch(Object *key, Object *other)
Definition: objects-inl.h:6547
static const int kHeaderSize
Definition: objects.h:3016
void set(int index, double value)
Definition: objects-inl.h:2207
void SetElementsKind(ElementsKind kind)
Definition: objects.h:8366
MUST_USE_RESULT MaybeObject * NumberFromDouble(double value, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:3819
SimpleTransitionFlag
Definition: objects.h:282
static InlineCacheState ExtractICStateFromFlags(Flags flags)
Definition: objects-inl.h:4636
CompilationState compilation_state()
Definition: objects-inl.h:5075
static const int kSize
Definition: objects.h:7702
bool has_deoptimization_support()
Definition: objects-inl.h:4397
bool AsArrayIndex(uint32_t *index)
Definition: objects-inl.h:6267
static Kind ExtractKindFromFlags(Flags flags)
Definition: objects-inl.h:4631
static const int kMapOffset
Definition: objects.h:1890
bool has_named_interceptor()
Definition: objects.h:5898
void set_memento_found_count(int count)
Definition: objects-inl.h:1525
bool HasPrototypeTransitions()
Definition: objects-inl.h:4882
int32_t DoubleToInt32(double x)
bool is_the_hole(int index)
Definition: objects-inl.h:2133
void set_instance_type(InstanceType value)
Definition: objects-inl.h:4017
const uint32_t kIsNotStringMask
Definition: objects.h:597
virtual uint32_t HashForObject(Object *other)
Definition: objects-inl.h:523
static HeapNumber * cast(Object *obj)
#define CASE(name)
bool CanHaveMoreTransitions()
Definition: objects-inl.h:4819
static PropertyAttributes GetElementAttributeWithReceiver(Handle< JSObject > object, Handle< JSReceiver > receiver, uint32_t index, bool continue_search)
Definition: objects.cc:4398
NameDictionary * property_dictionary()
Definition: objects-inl.h:6142
byte get(int index)
Definition: objects-inl.h:3481
bool CanTransition()
Definition: objects.h:6357
static uint32_t HashSequentialString(const schar *chars, int length, uint32_t seed)
Definition: objects-inl.h:6258
void InitializeRepresentations(Representation representation)
Definition: objects-inl.h:2632
void set_value(double value)
Definition: objects-inl.h:1406
MUST_USE_RESULT MaybeObject * CopyFixedArray(FixedArray *src)
Definition: heap-inl.h:202
const uint32_t kNotInternalizedTag
Definition: objects.h:604
static const int kLengthOffset
Definition: objects.h:3015
static double nan_value()
void set_raw_kind_specific_flags1(int value)
Definition: objects-inl.h:4332
static Handle< Object > UninitializedSentinel(Isolate *isolate)
Definition: objects-inl.h:6675
const int kSpaceTagMask
Definition: v8globals.h:194
void set_code_no_write_barrier(Code *code)
Definition: objects-inl.h:5538
uint32_t ComputeIntegerHash(uint32_t key, uint32_t seed)
Definition: utils.h:322
Handle< T > handle(T *t, Isolate *isolate)
Definition: handles.h:103
#define STRUCT_LIST(V)
Definition: objects.h:590
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function info
Definition: flags.cc:317
AccessorDescriptor * GetCallbacks(int descriptor_number)
Definition: objects-inl.h:2681
static const int kOptimizableOffset
Definition: objects.h:5608
void set_bit_field3(uint32_t bits)
Definition: objects-inl.h:4754
bool IsNumber() const
Definition: api.cc:2416
#define OBJECT_POINTER_ALIGN(value)
Definition: v8globals.h:386
Map * GetTarget(int transition_number)
const intptr_t kObjectAlignment
Definition: v8globals.h:44
bool IsWeakObjectInOptimizedCode(Object *object)
Definition: objects-inl.h:4679
static const int kHasNonInstancePrototype
Definition: objects.h:6468
void SetInternalField(int index, Object *value)
Definition: objects-inl.h:1933
PropertyType GetType(int descriptor_number)
Definition: objects-inl.h:2659
name_should_print_as_anonymous
Definition: objects-inl.h:5283
MUST_USE_RESULT MaybeObject * NumberFromUint32(uint32_t value, PretenureFlag pretenure=NOT_TENURED)
Definition: heap-inl.h:280
IncrementalMarking * incremental_marking()
Definition: heap.h:1781
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes number of stack frames inspected by the profiler percentage of ICs that must have type info to allow optimization extra verbose compilation tracing generate extra code(assertions) for debugging") DEFINE_bool(code_comments
MUST_USE_RESULT MaybeObject * get(int index)
Definition: objects-inl.h:2189
bool has_indexed_interceptor()
Definition: objects.h:5907
ElementsKind GetInitialFastElementsKind()
kInstanceClassNameOffset kNeedsAccessCheckBit kRemovePrototypeBit kIsExpressionBit kAllowLazyCompilation kUsesArguments kFormalParameterCountOffset PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, expected_nof_properties, kExpectedNofPropertiesOffset) PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo
void set_back_edge_table_offset(unsigned offset)
Definition: objects-inl.h:4508
void SeqOneByteStringSet(int index, uint16_t value)
Definition: objects-inl.h:3143
uint16_t uc16
Definition: globals.h:309
void SetNumberOfDescriptors(int number_of_descriptors)
Definition: objects-inl.h:2440
Object * GetBackPointer()
Definition: objects-inl.h:4791
virtual MaybeObject * AsObject(Heap *heap)
Definition: objects.cc:13761
static Flags ComputeFlags(Kind kind, InlineCacheState ic_state=UNINITIALIZED, ExtraICState extra_ic_state=kNoExtraICState, StubType type=NORMAL, InlineCacheHolderFlag holder=OWN_MAP)
Definition: objects-inl.h:4601
static const int kBitField3Offset
Definition: objects.h:6439
TwoByteStringKey(Vector< const uc16 > str, uint32_t seed)
Definition: objects-inl.h:550
Traits::ElementType ElementType
Definition: objects.h:4987
static const uint32_t kSignMask
Definition: objects.h:1980
static ConsString * VisitFlat(Visitor *visitor, String *string, int offset, int length, int32_t type)
Definition: objects-inl.h:3124
void set_bit_field(byte value)
Definition: objects-inl.h:4037
#define WRITE_INT32_FIELD(p, offset, value)
Definition: objects-inl.h:1177
static int SizeFor(int length)
Definition: objects.h:9118
static const int kSize
Definition: objects.h:7845
static uint32_t HashForObject(Name *key, Object *object)
Definition: objects-inl.h:6535
kSerializedDataOffset kPrototypeTemplateOffset kIndexedPropertyHandlerOffset instance_call_handler
Definition: objects-inl.h:5027
const int kSmiShiftSize
Definition: v8.h:5539
const uint32_t kOneByteDataHintTag
Definition: objects.h:639
static JSValue * cast(Object *obj)
Definition: objects-inl.h:5758
const int kSmiTagSize
Definition: v8.h:5479
const int kInt64Size
Definition: globals.h:265
static const int kHeaderSize
Definition: objects.h:5604
FunctionTemplateInfo * get_api_func_data()
Definition: objects-inl.h:5389
void set_back_pointer_storage(Object *back_pointer, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
void set_strict_mode(StrictMode strict_mode)
Definition: objects-inl.h:5271
static AllocationSiteMode GetMode(ElementsKind boilerplate_elements_kind)
Definition: objects-inl.h:1477
static void Visit(String *string, unsigned offset, Visitor &visitor, ConsOp &cons_op, int32_t type, unsigned length)
Definition: objects-inl.h:3048
#define TYPED_ARRAYS(V)
Definition: objects.h:4663
static Handle< T > null()
Definition: handles.h:80
bool is_undetectable()
Definition: objects.h:5921
#define WRITE_FIELD(p, offset, value)
Definition: objects-inl.h:1098
static const int kFullStringRepresentationMask
Definition: v8.h:5563
void MemsetPointer(T **dest, U *value, int counter)
Definition: v8utils.h:198
bool is_keyed_store_stub()
Definition: objects.h:5305
static int SizeFor(int number_of_int64_entries, int number_of_code_ptr_entries, int number_of_heap_ptr_entries, int number_of_int32_entries)
Definition: objects.h:3232
void set_major_key(int value)
Definition: objects-inl.h:4362
void Set(int index, uint16_t value)
Definition: objects-inl.h:3020
static void NoIncrementalWriteBarrierSet(FixedArray *array, int index, Object *value)
Definition: objects-inl.h:2370
bool IsFalse() const
Definition: api.cc:2352
void set_is_access_check_needed(bool access_check_needed)
Definition: objects-inl.h:4076
static const int kSize
Definition: objects.h:7788
static void Expand(Handle< JSArray > array, int minimum_size_of_backing_fixed_array)
Definition: objects.cc:11251
#define ASSERT_EQ(v1, v2)
Definition: checks.h:330
bool is_dictionary_map()
Definition: objects-inl.h:4132
static const byte kNull
Definition: objects.h:9505
void set_owns_descriptors(bool is_shared)
Definition: objects-inl.h:4142
const int kShortSize
Definition: globals.h:262
static PropertyAttributes GetLocalPropertyAttribute(Handle< JSReceiver > object, Handle< Name > name)
Definition: objects.cc:4384
InstanceType instance_type()
Definition: objects-inl.h:4012
static JSProxy * cast(Object *obj)
static const int kMaxFastProperties
Definition: objects.h:2747
static bool ShouldZapGarbage()
Definition: heap.h:1486
kSerializedDataOffset kPrototypeTemplateOffset kIndexedPropertyHandlerOffset kInstanceCallHandlerOffset kInternalFieldCountOffset ACCESSORS_TO_SMI(AllocationSite, pretenure_create_count, kPretenureCreateCountOffset) ACCESSORS(AllocationSite
static HeapObject * FromAddress(Address address)
Definition: objects-inl.h:1369
static MUST_USE_RESULT MaybeObject * AsObject(Heap *heap, Object *key)
Definition: objects-inl.h:6566
kInstanceClassNameOffset kNeedsAccessCheckBit kRemovePrototypeBit kIsExpressionBit kAllowLazyCompilation kUsesArguments formal_parameter_count
Definition: objects-inl.h:5190
int NumberOfProtoTransitions()
Definition: objects.h:6137
MUST_USE_RESULT MaybeObject * get(int index)
Definition: objects-inl.h:3648
SequentialStringKey(Vector< const Char > string, uint32_t seed)
Definition: objects-inl.h:464
const uint32_t kOneByteStringTag
Definition: objects.h:611
Counters * counters()
Definition: isolate.h:859
void set(int index, byte value)
Definition: objects-inl.h:3487
Object ** slot_at(int i)
Definition: objects-inl.h:4272
void VisitOneByteString(const uint8_t *chars, unsigned length)
Definition: objects-inl.h:3429
PropertyDetails GetLastDescriptorDetails()
Definition: objects-inl.h:2555
static double canonical_not_the_hole_nan_as_double()
Definition: objects-inl.h:2166
void set(int index, Address value)
Definition: objects-inl.h:2313
#define INT_ACCESSORS(holder, name, offset)
Definition: objects-inl.h:97
const int kSmiTag
Definition: v8.h:5478
ElementType get_scalar(int index)
Definition: objects-inl.h:3757
bool IsTemplateFor(Object *object)
Definition: objects.cc:219
static FixedArray * cast(Object *obj)
bool end_
MUST_USE_RESULT MaybeObject * AllocateInternalizedStringFromUtf8(Vector< const char > str, int chars, uint32_t hash_field)
Definition: heap-inl.h:118
void AppendDescriptor(Descriptor *desc, const DescriptorArray::WhitenessWitness &)
Definition: objects-inl.h:4781
StringHasher(int length, uint32_t seed)
Definition: objects-inl.h:6173
static const int kHeaderSize
Definition: objects.h:2757
static Smi * set(Smi *smi, int bit_position, bool v)
Definition: objects.h:10811
bool IsCompatibleReceiver(Object *receiver)
Definition: objects-inl.h:6427
kSerializedDataOffset kPrototypeTemplateOffset kIndexedPropertyHandlerOffset kInstanceCallHandlerOffset kInternalFieldCountOffset dependent_code
Definition: objects-inl.h:5047
static Flags ComputeMonomorphicFlags(Kind kind, ExtraICState extra_ic_state=kNoExtraICState, InlineCacheHolderFlag holder=OWN_MAP, StubType type=NORMAL)
Definition: objects-inl.h:4616
static Handle< TransitionArray > AddTransition(Handle< Map > map, Handle< Name > key, Handle< Map > target, SimpleTransitionFlag flag)
Definition: objects.cc:2458
static HashTable * cast(Object *obj)
Definition: objects-inl.h:2929
void set_is_extensible(bool value)
Definition: objects-inl.h:4090
ElementsKind elements_kind()
Definition: objects.h:5945
void ClearTransitions(Heap *heap, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
Definition: objects-inl.h:4768
void set_is_shared(bool value)
Definition: objects-inl.h:4116
void set_is_external(bool value)
Definition: objects-inl.h:5921
const int kFailureTag
Definition: v8globals.h:62
static const int kKindSpecificFlags1Offset
Definition: objects.h:5593
void set_stub_info(int info)
Definition: objects-inl.h:5840
void set_attached_to_shared_function_info(bool value)
Definition: objects-inl.h:4103
bool IsOneByteRepresentation()
Definition: objects-inl.h:321
void set_stack_slots(unsigned slots)
Definition: objects-inl.h:4475
int ExtraICState
Definition: objects.h:310
double FastI2D(int x)
Definition: conversions.h:81
void AddOptimizedFunction(JSFunction *function)
Definition: contexts.cc:260
int OffsetOfElementAt(int index)
Definition: objects.h:3244
MUST_USE_RESULT MaybeObject * NumberFromInt32(int32_t value, PretenureFlag pretenure=NOT_TENURED)
Definition: heap-inl.h:272
const uint32_t kIsIndirectStringTag
Definition: objects.h:623
void SetEntry(int entry, Object *key, Object *value)
Definition: objects-inl.h:6465
kSerializedDataOffset kPrototypeTemplateOffset indexed_property_handler
Definition: objects-inl.h:5021
static const int kFlagsIndex
Definition: objects.h:7927
Object * GetCallbacksObject(int descriptor_number)
Definition: objects-inl.h:2675
static const uint32_t kHashBitMask
Definition: objects.h:8646
static const int kStringEncodingMask
Definition: v8.h:5564
void set_instance_size(int value)
Definition: objects-inl.h:3990
Object * get(int index)
Definition: objects-inl.h:2127
static const int kPrototypeOffset
Definition: objects.h:6427
bool IsFastHoleyElementsKind(ElementsKind kind)
#define BUILTIN(name)
Definition: builtins.cc:143
static Handle< Object > GetElement(Isolate *isolate, Handle< Object > object, uint32_t index)
Definition: objects-inl.h:1060
void set_formal_parameter_count(int value)
static uint32_t HashForObject(Object *key, Object *object)
Definition: objects-inl.h:6586
void set_javascript_builtin_code(Builtins::JavaScript id, Code *value)
Definition: objects-inl.h:5691
static const int kSize
Definition: objects.h:7527
ElementsAccessor * GetElementsAccessor()
Definition: objects-inl.h:6027
static const int kAttachedToSharedFunctionInfo
Definition: objects.h:6479
String * TryFlattenGetString(PretenureFlag pretenure=NOT_TENURED)
Definition: objects-inl.h:2986
HeapObject * obj
void set_bit_field2(byte value)
Definition: objects-inl.h:4047
static MUST_USE_RESULT MaybeObject * AsObject(Heap *heap, Object *key)
Definition: objects-inl.h:6594
kInstanceClassNameOffset kNeedsAccessCheckBit kRemovePrototypeBit kIsExpressionBit kAllowLazyCompilation uses_arguments
Definition: objects-inl.h:5137
void set_marked_for_deoptimization(bool flag)
Definition: objects-inl.h:4560
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric literals(0o77, 0b11)") DEFINE_bool(harmony_strings
static const int kHashShift
Definition: objects.h:8642
uint16_t get_scalar(int index)
Definition: objects-inl.h:3603
void set_finger_index(int finger_index)
Definition: objects-inl.h:3476
void set_map_word(MapWord map_word)
Definition: objects-inl.h:1362
static Name * cast(Object *obj)
bool has_debug_break_slots()
Definition: objects-inl.h:4412
static uint32_t Hash(Name *key)
Definition: objects-inl.h:6530
static const byte kNotBooleanMask
Definition: objects.h:9503
MUST_USE_RESULT MaybeObject * get(int index)
Definition: objects-inl.h:3832
int GetSortedKeyIndex(int descriptor_number)
Definition: objects-inl.h:2607
static const int kExternalTwoByteRepresentationTag
Definition: v8.h:5565
const int kFailureTypeTagMask
Definition: objects.h:1713
static const byte kFalse
Definition: objects.h:9501
void set_compilation_type(CompilationType type)
Definition: objects-inl.h:5071
Definition: objects.h:8475
Type type() const
Definition: objects-inl.h:1222
static Flags RemoveTypeFromFlags(Flags flags)
Definition: objects-inl.h:4656
kInstanceClassNameOffset kNeedsAccessCheckBit kRemovePrototypeBit kIsExpressionBit compiler_hints
Definition: objects-inl.h:5129
void set_visitor_id(int visitor_id)
Definition: objects-inl.h:3917
void set_should_be_freed(bool value)
Definition: objects-inl.h:5931
MUST_USE_RESULT MaybeObject * set_elements_transition_map(Map *transitioned_map)
Definition: objects-inl.h:4845
uint32_t hash_field()
Definition: objects-inl.h:2941
kSerializedDataOffset prototype_template
Definition: objects-inl.h:5016
kInstanceClassNameOffset kNeedsAccessCheckBit kRemovePrototypeBit kIsExpressionBit allows_lazy_compilation
Definition: objects-inl.h:5129
static int SizeFor(int length)
Definition: objects.h:9078
T Min(T a, T b)
Definition: utils.h:234
void set_property_attributes(PropertyAttributes attributes)
Definition: objects-inl.h:6422
signed short int16_t
Definition: unicode.cc:45
static const int kSize
Definition: objects.h:7638
void set_code(Code *code)
Definition: objects-inl.h:5527
static ConsString * cast(Object *obj)
void set_pretenure_decision(PretenureDecision decision)
Definition: objects.h:8314
void set_safepoint_table_offset(unsigned offset)
Definition: objects-inl.h:4491
static FixedArrayBase * cast(Object *object)
Definition: objects-inl.h:2121
bool is_compiled_optimizable()
Definition: objects-inl.h:4427
static V8_INLINE internal::Object * IntToSmi(int value)
Definition: v8.h:5611
void set_flags(Flags flags)
Definition: objects-inl.h:4297
static bool CanTrack(InstanceType type)
Definition: objects-inl.h:1500
static const int kMaxValue
Definition: objects.h:1681
bool IsMarkedForConcurrentOptimization()
Definition: objects-inl.h:5509
static const int kCodeCacheOffset
Definition: objects.h:6437
static const int kBitField2Offset
Definition: objects.h:6462
static const int kConstantPoolOffset
Definition: objects.h:5598
#define WRITE_DOUBLE_FIELD(p, offset, value)
Definition: objects-inl.h:1136
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes number of stack frames inspected by the profiler percentage of ICs that must have type info to allow optimization extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long expose natives in global object expose freeBuffer extension expose gc extension under the specified name expose externalize string extension number of stack frames to capture disable builtin natives files print name of functions for which code is generated use random jit cookie to mask large constants trace lazy optimization use adaptive optimizations always try to OSR functions trace optimize function deoptimization minimum length for automatic enable preparsing maximum number of optimization attempts before giving up cache prototype transitions trace debugging JSON request response trace out of bounds accesses to external arrays trace_js_array_abuse automatically set the debug break flag when debugger commands are in the queue abort by crashing maximum length of function source code printed in a stack trace max size of the new max size of the old max size of executable always perform global GCs print one trace line following each garbage collection do not print trace line after scavenger collection print statistics of the maximum memory committed for the heap in name
Definition: flags.cc:505
static const int kSize
Definition: objects.h:9975
static const int kNotFound
Definition: objects.h:3486
void set_non_instance_prototype(bool value)
Definition: objects-inl.h:4052
void set(int index, int8_t value)
Definition: objects-inl.h:3558
uint16_t SeqTwoByteStringGet(int index)
Definition: objects-inl.h:3170
Object ** GetValueSlot(int descriptor_number)
Definition: objects-inl.h:2640
static uint32_t SeededHashForObject(uint32_t key, uint32_t seed, Object *object)
Definition: objects-inl.h:6510
const int kCharSize
Definition: globals.h:261
ElementsKind GetHoleyElementsKind(ElementsKind packed_kind)
Vector< const char > string_
Definition: objects-inl.h:590
static const byte kTrue
Definition: objects.h:9502
int LinearSearch(T *array, Name *name, int len, int valid_entries)
Definition: objects-inl.h:2487
static const int kExponentOffset
Definition: objects.h:1977
static MUST_USE_RESULT MaybeObject * AsObject(Heap *heap, uint32_t key)
Definition: objects-inl.h:6517
Address get_code_ptr_entry(int index)
Definition: objects-inl.h:2292
void SetSortedKey(int pointer, int descriptor_number)
Definition: objects-inl.h:2617
void set(int index, uint8_t value)
Definition: objects-inl.h:3527
void InitializeDescriptors(DescriptorArray *descriptors)
Definition: objects-inl.h:4744
AccessControl
Definition: v8.h:2165
void set_hash_field(uint32_t value)
Definition: objects-inl.h:2946
FixedArray * GetPrototypeTransitions()
Definition: objects-inl.h:4857
void set_allow_osr_at_loop_nesting_level(int level)
Definition: objects-inl.h:4448
ExtraICState extra_ic_state()
Definition: objects-inl.h:4320
static JSObject * cast(Object *obj)
uint32_t RoundUpToPowerOf2(uint32_t x)
Definition: utils.h:191
int64_t get_representation(int index)
Definition: objects-inl.h:2182
bool matches_inlined_type_change_checksum(int checksum)
Definition: objects-inl.h:6767
#define MAKE_STRUCT_CASE(NAME, Name, name)
Object * javascript_builtin(Builtins::JavaScript id)
Definition: objects-inl.h:5671
MarkCompactCollector * mark_compact_collector()
Definition: heap.h:1769
void set_raw_kind_specific_flags2(int value)
Definition: objects-inl.h:4337
static bool IsHashFieldComputed(uint32_t field)
Definition: objects-inl.h:6154
int Lookup(Map *source, Name *name)
Definition: heap.h:2811
int FastD2I(double x)
Definition: conversions.h:74
static int OffsetOfCodeWithId(Builtins::JavaScript id)
Definition: objects.h:7678
void set(int index, int32_t value)
Definition: objects-inl.h:3634
CompilationInfo * compilation_info_at(int i)
Definition: objects-inl.h:4256
PropertyDetails GetTargetDetails(int transition_number)
void set_initial_map(Map *value)
Definition: objects-inl.h:5588
bool IsFastDoubleElementsKind(ElementsKind kind)
void set_has_function_cache(bool flag)
Definition: objects-inl.h:4545
static const int kFirstIndex
Definition: objects.h:3490
void set_unused_property_fields(int value)
Definition: objects-inl.h:4027
bool is_keyed_load_stub()
Definition: objects.h:5303
const uint32_t kStringEncodingMask
Definition: objects.h:609
Name * GetKey(int descriptor_number)
Definition: objects-inl.h:2601
void LookupDescriptor(JSObject *holder, Name *name, LookupResult *result)
Definition: objects-inl.h:2560
uint16_t SeqOneByteStringGet(int index)
Definition: objects-inl.h:3137
static const int kIsExtensible
Definition: objects.h:6477
SMI_ACCESSORS(ConstantPoolArray, first_code_ptr_index, kFirstCodePointerIndexOffset) SMI_ACCESSORS(ConstantPoolArray
static const int kInstanceTypeOffset
Definition: objects.h:6459
static int ComputeCapacity(int at_least_space_for)
Definition: objects-inl.h:2783
static const int kPreAllocatedPropertyFieldsOffset
Definition: objects.h:6453
void InitializeBody(Map *map, Object *pre_allocated_value, Object *filler_value)
Definition: objects-inl.h:2014
static const int kStartPositionMask
Definition: objects.h:7208
void set_parent(String *parent, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
Definition: objects-inl.h:3197
static bool HasLocalProperty(Handle< JSReceiver >, Handle< Name > name)
Definition: objects-inl.h:6301