35 #ifndef V8_OBJECTS_INL_H_
36 #define V8_OBJECTS_INL_H_
55 PropertyDetails::PropertyDetails(Smi* smi) {
56 value_ = smi->value();
60 Smi* PropertyDetails::AsSmi() {
65 PropertyDetails PropertyDetails::AsDeleted() {
66 Smi* smi =
Smi::FromInt(value_ | DeletedField::encode(1));
67 return PropertyDetails(smi);
// Defines Object::Is<type>(): true iff the receiver is a heap object
// whose map has exactly the given instance type.
#define TYPE_CHECKER(type, instancetype)                                \
  bool Object::Is##type() {                                             \
    return Object::IsHeapObject() &&                                    \
        HeapObject::cast(this)->map()->instance_type() == instancetype; \
  }
// Defines type::cast(): downcast from Object*, checked only in debug
// builds via the Is##type() predicate.
#define CAST_ACCESSOR(type)                     \
  type* type::cast(Object* object) {            \
    ASSERT(object->Is##type());                 \
    return reinterpret_cast<type*>(object);     \
  }
// Defines a raw (untagged) int getter/setter pair for a fixed-offset field.
// No write barrier: the stored value is a plain int, not a heap pointer.
#define INT_ACCESSORS(holder, name, offset)                             \
  int holder::name() { return READ_INT_FIELD(this, offset); }           \
  void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }
// Defines a tagged-pointer getter/setter pair.  The setter stores a heap
// pointer and therefore emits a write barrier, conditional on |mode|.
#define ACCESSORS(holder, name, type, offset)                           \
  type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(type* value, WriteBarrierMode mode) {         \
    WRITE_FIELD(this, offset, value);                                   \
    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);    \
  }
// Like ACCESSORS, but for Smi-valued fields.  No write barrier is emitted
// here -- presumably because a Smi is not a heap pointer and needs none;
// |mode| appears to be accepted only for signature symmetry (TODO confirm).
#define ACCESSORS_TO_SMI(holder, name, offset)                          \
  Smi* holder::name() { return Smi::cast(READ_FIELD(this, offset)); }   \
  void holder::set_##name(Smi* value, WriteBarrierMode mode) {          \
    WRITE_FIELD(this, offset, value);                                   \
  }
// Defines an int getter/setter for a field stored as a Smi.  The getter
// untags via Smi::cast, the setter retags via Smi::FromInt.  No write
// barrier: a Smi store never introduces a heap pointer.
#define SMI_ACCESSORS(holder, name, offset)             \
  int holder::name() {                                  \
    Object* value = READ_FIELD(this, offset);           \
    return Smi::cast(value)->value();                   \
  }                                                     \
  void holder::set_##name(int value) {                  \
    WRITE_FIELD(this, offset, Smi::FromInt(value));     \
  }
// Defines a bool getter that reads bit |offset| of the value returned by
// the existing |field|() accessor (getter only, no setter).
#define BOOL_GETTER(holder, field, name, offset)        \
  bool holder::name() {                                 \
    return BooleanBit::get(field(), offset);            \
  }
// Defines a bool getter/setter pair operating on bit |offset| of the value
// behind the existing |field|() / set_##field() accessors.
#define BOOL_ACCESSORS(holder, field, name, offset)       \
  bool holder::name() {                                   \
    return BooleanBit::get(field(), offset);              \
  }                                                       \
  void holder::set_##name(bool value) {                   \
    set_##field(BooleanBit::set(field(), offset, value)); \
  }
133 return IsFixedArray() || IsFixedDoubleArray();
139 if (!this->IsJSObject())
return false;
142 if (!cons_obj->IsJSFunction())
return false;
146 for (
Object* type = fun->shared()->function_data();
147 type->IsFunctionTemplateInfo();
149 if (type == expected)
return true;
156 bool Object::IsSmi() {
161 bool Object::IsHeapObject() {
167 ASSERT(!this->IsFailure());
168 return (reinterpret_cast<intptr_t>(
this) &
kSmiTagMask) != 0;
176 return Object::IsHeapObject()
181 bool Object::IsSpecObject() {
182 return Object::IsHeapObject()
187 bool Object::IsSpecFunction() {
188 if (!Object::IsHeapObject())
return false;
194 bool Object::IsSymbol() {
195 if (!this->IsHeapObject())
return false;
206 bool Object::IsConsString() {
207 if (!IsString())
return false;
212 bool Object::IsSlicedString() {
213 if (!IsString())
return false;
218 bool Object::IsSeqString() {
219 if (!IsString())
return false;
224 bool Object::IsSeqAsciiString() {
225 if (!IsString())
return false;
226 return StringShape(
String::cast(
this)).IsSequential() &&
231 bool Object::IsSeqTwoByteString() {
232 if (!IsString())
return false;
233 return StringShape(
String::cast(
this)).IsSequential() &&
238 bool Object::IsExternalString() {
239 if (!IsString())
return false;
244 bool Object::IsExternalAsciiString() {
245 if (!IsString())
return false;
251 bool Object::IsExternalTwoByteString() {
252 if (!IsString())
return false;
259 return IsFixedArray() || IsFixedDoubleArray() || IsExternalArray();
262 StringShape::StringShape(
String* str)
263 : type_(str->map()->instance_type()) {
269 StringShape::StringShape(Map* map)
270 : type_(map->instance_type()) {
277 : type_(static_cast<uint32_t>(t)) {
283 bool StringShape::IsSymbol() {
341 bool StringShape::IsCons() {
346 bool StringShape::IsSliced() {
351 bool StringShape::IsIndirect() {
356 bool StringShape::IsExternal() {
361 bool StringShape::IsSequential() {
372 uint32_t StringShape::encoding_tag() {
377 uint32_t StringShape::full_representation_tag() {
389 bool StringShape::IsSequentialAscii() {
394 bool StringShape::IsSequentialTwoByte() {
399 bool StringShape::IsExternalAscii() {
410 bool StringShape::IsExternalTwoByte() {
421 ASSERT(0 <= index && index <= length_);
423 return static_cast<const byte*
>(start_)[index];
425 return static_cast<const uc16*
>(start_)[index];
431 return IsSmi() || IsHeapNumber();
440 if (!Object::IsHeapObject())
return false;
449 bool
Object::IsExternalArray() {
450 if (!Object::IsHeapObject())
469 bool MaybeObject::IsFailure() {
474 bool MaybeObject::IsRetryAfterGC() {
480 bool MaybeObject::IsOutOfMemory() {
486 bool MaybeObject::IsException() {
491 bool MaybeObject::IsTheHole() {
492 return !IsFailure() && ToObjectUnchecked()->IsTheHole();
498 return reinterpret_cast<Failure*
>(obj);
502 bool Object::IsJSReceiver() {
504 return IsHeapObject() &&
509 bool Object::IsJSObject() {
511 return IsHeapObject() &&
516 bool Object::IsJSProxy() {
517 if (!Object::IsHeapObject())
return false;
bool Object::IsDescriptorArray() {
  // There is no dedicated instance type: any FixedArray can serve as a
  // descriptor array, so this check is intentionally loose.
  return IsFixedArray();
}


bool Object::IsTransitionArray() {
  // Intentionally loose for the same reason: transition arrays are
  // represented as plain FixedArrays.
  return IsFixedArray();
}
543 bool Object::IsDeoptimizationInputData() {
545 if (!IsFixedArray())
return false;
552 if (length == 0)
return true;
555 return length >= 0 &&
560 bool Object::IsDeoptimizationOutputData() {
561 if (!IsFixedArray())
return false;
570 bool Object::IsTypeFeedbackCells() {
571 if (!IsFixedArray())
return false;
580 bool Object::IsContext() {
581 if (!Object::IsHeapObject())
return false;
584 return (map == heap->function_context_map() ||
585 map == heap->catch_context_map() ||
586 map == heap->with_context_map() ||
587 map == heap->native_context_map() ||
588 map == heap->block_context_map() ||
589 map == heap->module_context_map() ||
590 map == heap->global_context_map());
594 bool Object::IsNativeContext() {
595 return Object::IsHeapObject() &&
601 bool Object::IsScopeInfo() {
602 return Object::IsHeapObject() &&
612 return obj->IsJSFunction();
626 bool Object::IsStringWrapper() {
627 return IsJSValue() &&
JSValue::cast(
this)->value()->IsString();
634 bool
Object::IsBoolean() {
635 return IsOddball() &&
645 return obj->IsJSArray();
649 bool Object::IsHashTable() {
650 return Object::IsHeapObject() &&
656 bool Object::IsDictionary() {
657 return IsHashTable() &&
662 bool Object::IsSymbolTable() {
663 return IsHashTable() &&
this ==
668 bool Object::IsJSFunctionResultCache() {
669 if (!IsFixedArray())
return false;
671 int length =
self->length();
678 if (FLAG_verify_heap) {
679 reinterpret_cast<JSFunctionResultCache*
>(
this)->
680 JSFunctionResultCacheVerify();
687 bool Object::IsNormalizedMapCache() {
688 if (!IsFixedArray())
return false;
693 if (FLAG_verify_heap) {
694 reinterpret_cast<NormalizedMapCache*
>(
this)->NormalizedMapCacheVerify();
bool Object::IsCompilationCacheTable() {
  // No dedicated instance type: any hash table qualifies, so this and the
  // following predicates are loose structural checks.
  return IsHashTable();
}


bool Object::IsCodeCacheHashTable() {
  return IsHashTable();
}


bool Object::IsPolymorphicCodeCacheHashTable() {
  return IsHashTable();
}


bool Object::IsMapCache() {
  return IsHashTable();
}


bool Object::IsPrimitive() {
  // Primitive values: oddballs (undefined/null/true/false/etc.), numbers,
  // and strings.
  return IsOddball() || IsNumber() || IsString();
}
726 bool Object::IsJSGlobalProxy() {
727 bool result = IsHeapObject() &&
730 ASSERT(!result || IsAccessCheckNeeded());
735 bool Object::IsGlobalObject() {
736 if (!IsHeapObject())
return false;
748 bool Object::IsUndetectableObject() {
749 return IsHeapObject()
754 bool Object::IsAccessCheckNeeded() {
755 return IsHeapObject()
761 if (!IsHeapObject())
return false;
763 #define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
765 #undef MAKE_STRUCT_CASE
766 default:
return false;
771 #define MAKE_STRUCT_PREDICATE(NAME, Name, name) \
772 bool Object::Is##Name() { \
773 return Object::IsHeapObject() \
774 && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
777 #undef MAKE_STRUCT_PREDICATE
790 bool Object::IsTheHole() {
813 ?
static_cast<double>(
reinterpret_cast<Smi*
>(
this)->value())
814 : reinterpret_cast<HeapNumber*>(
this)->value();
824 if (IsSmi())
return this;
825 if (IsHeapNumber()) {
827 int int_value =
FastD2I(value);
852 ASSERT(!maybe->IsFailure());
854 maybe->ToObject(&result);
// Computes the raw address of the field at |offset| inside the tagged
// heap-object pointer |p|: kHeapObjectTag is subtracted to undo the tag.
// |offset| is parenthesized so that an offset expression containing
// operators of lower precedence than +/- still expands correctly
// (standard macro-argument hygiene; the old form expanded |offset|
// unparenthesized).
#define FIELD_ADDR(p, offset) \
  (reinterpret_cast<byte*>(p) + (offset) - kHeapObjectTag)
// Reads the tagged Object* stored at |offset| inside object |p|.
#define READ_FIELD(p, offset) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)))

// Writes a tagged value at |offset|.  Callers are responsible for emitting
// the appropriate write barrier (see WRITE_BARRIER below).
#define WRITE_FIELD(p, offset, value) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)
// Records a pointer store for incremental marking and, when |value| lives
// in new space, for the old-to-new store buffer as well.
// NOTE(review): multi-statement macro not wrapped in do { ... } while
// (false); used as the body of an unbraced `if`, only the first statement
// would be guarded -- confirm all call sites brace it.
#define WRITE_BARRIER(heap, object, offset, value)                      \
  heap->incremental_marking()->RecordWrite(                             \
      object, HeapObject::RawField(object, offset), value);             \
  if (heap->InNewSpace(value)) {                                        \
    heap->RecordWrite(object->address(), offset);                       \
  }
// Same as WRITE_BARRIER, but only when |mode| == UPDATE_WRITE_BARRIER
// (SKIP_WRITE_BARRIER callers guarantee the store needs no barrier).
// NOTE(review): expands to a bare `if` with no `else`; at a call site of
// the form `if (c) CONDITIONAL_WRITE_BARRIER(...); else ...` the `else`
// would bind to the macro's `if` -- confirm call sites avoid this shape.
#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode)    \
  if (mode == UPDATE_WRITE_BARRIER) {                                   \
    heap->incremental_marking()->RecordWrite(                           \
        object, HeapObject::RawField(object, offset), value);           \
    if (heap->InNewSpace(value)) {                                      \
      heap->RecordWrite(object->address(), offset);                     \
    }                                                                   \
  }
895 #ifndef V8_TARGET_ARCH_MIPS
// Non-MIPS targets: doubles may be read with a single 8-byte access.
#define READ_DOUBLE_FIELD(p, offset) \
  (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)))
898 #else // V8_TARGET_ARCH_MIPS
901 static inline double read_double_field(
void* p,
int offset) {
906 c.u[0] = (*
reinterpret_cast<uint32_t*
>(
FIELD_ADDR(p, offset)));
907 c.u[1] = (*
reinterpret_cast<uint32_t*
>(
FIELD_ADDR(p, offset + 4)));
910 #define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset)
911 #endif // V8_TARGET_ARCH_MIPS
913 #ifndef V8_TARGET_ARCH_MIPS
// Non-MIPS targets: doubles may be written with a single 8-byte access.
#define WRITE_DOUBLE_FIELD(p, offset, value) \
  (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
916 #else // V8_TARGET_ARCH_MIPS
919 static inline void write_double_field(
void* p,
int offset,
926 (*
reinterpret_cast<uint32_t*
>(
FIELD_ADDR(p, offset))) = c.u[0];
927 (*
reinterpret_cast<uint32_t*
>(
FIELD_ADDR(p, offset + 4))) = c.u[1];
929 #define WRITE_DOUBLE_FIELD(p, offset, value) \
930 write_double_field(p, offset, value)
931 #endif // V8_TARGET_ARCH_MIPS
// Raw (untagged) field accessors for the remaining scalar widths.  None of
// these emit a write barrier; they must only be used for fields that never
// hold heap pointers.

#define READ_INT_FIELD(p, offset) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)))

#define WRITE_INT_FIELD(p, offset, value) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)

#define READ_INTPTR_FIELD(p, offset) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)))

#define WRITE_INTPTR_FIELD(p, offset, value) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT32_FIELD(p, offset) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)))

#define WRITE_UINT32_FIELD(p, offset, value) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT64_FIELD(p, offset) \
  (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)))

#define WRITE_INT64_FIELD(p, offset, value) \
  (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_SHORT_FIELD(p, offset) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)))

#define WRITE_SHORT_FIELD(p, offset, value) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_BYTE_FIELD(p, offset) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)))

#define WRITE_BYTE_FIELD(p, offset, value) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)
984 intptr_t tagged_value =
985 (
static_cast<intptr_t
>(
value) << smi_shift_bits) |
kSmiTag;
986 return reinterpret_cast<Smi*
>(tagged_value);
993 return reinterpret_cast<Smi*
>((value << smi_shift_bits) |
kSmiTag);
1034 intptr_t Failure::value()
const {
1035 return static_cast<intptr_t
>(
1051 Failure* Failure::Construct(Type type, intptr_t value) {
1064 #ifdef V8_TARGET_ARCH_X64
1079 bool result = (
static_cast<uintptr_t
>(value + 0x40000000
U) < 0x80000000U);
1081 ASSERT(result == in_range);
// Packs a Map pointer into a MapWord (the first word of every HeapObject).
MapWord MapWord::FromMap(Map* map) {
  return MapWord(reinterpret_cast<uintptr_t>(map));
}


// Unpacks the stored Map pointer; only valid when the word currently holds
// a map (see IsForwardingAddress below).
Map* MapWord::ToMap() {
  return reinterpret_cast<Map*>(value_);
}


bool MapWord::IsForwardingAddress() {
  // During GC the map word can be overwritten with a forwarding pointer;
  // this checks for the smi tag that distinguishes that state from a map.
  return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
}
1101 MapWord MapWord::FromForwardingAddress(HeapObject*
object) {
1103 return MapWord(reinterpret_cast<uintptr_t>(raw));
1107 HeapObject* MapWord::ToForwardingAddress() {
1108 ASSERT(IsForwardingAddress());
1114 void HeapObject::VerifyObjectField(
int offset) {
1118 void HeapObject::VerifySmiField(
int offset) {
1145 if (value !=
NULL) {
1188 v->VisitPointers(reinterpret_cast<Object**>(
FIELD_ADDR(
this, start)),
1189 reinterpret_cast<Object**>(
FIELD_ADDR(
this, end)));
1194 v->VisitPointer(reinterpret_cast<Object**>(
FIELD_ADDR(
this, offset)));
1230 for (
int i = 0; i <
length(); ++i) {
1231 Object* candidate = *current++;
1232 if (!candidate->IsSmi() && candidate != the_hole)
return false;
1277 Object* the_hole = heap->the_hole_value();
1278 for (uint32_t i = 0; i < count; ++i) {
1279 Object* current = *objects++;
1280 if (current == the_hole) {
1283 }
else if (!current->IsSmi()) {
1292 }
else if (is_holey) {
1301 if (target_kind != current_kind) {
1311 if (elements->
map() !=
GetHeap()->fixed_double_array_map()) {
1313 elements->
map() ==
GetHeap()->fixed_cow_array_map());
1326 for (uint32_t i = 0; i < length; ++i) {
1340 Map* current_map =
map();
1342 if (from_kind == to_kind)
return current_map;
1345 Object* maybe_array_maps = native_context->js_array_maps();
1346 if (maybe_array_maps->IsFixedArray()) {
1348 if (array_maps->
get(from_kind) == current_map) {
1349 Object* maybe_transitioned_map = array_maps->
get(to_kind);
1350 if (maybe_transitioned_map->IsMap()) {
1351 return Map::cast(maybe_transitioned_map);
1364 if (new_map !=
NULL) {
1372 ASSERT((
map()->has_fast_smi_or_object_elements() ||
1373 (value ==
GetHeap()->empty_fixed_array())) ==
1374 (value->
map() ==
GetHeap()->fixed_array_map() ||
1375 value->
map() ==
GetHeap()->fixed_cow_array_map()));
1377 (
map()->has_fast_double_elements() == value->IsFixedDoubleArray()));
1388 void JSObject::initialize_properties() {
1395 ASSERT(
map()->has_fast_smi_or_object_elements() ||
1396 map()->has_fast_double_elements());
1405 if (!FLAG_smi_only_arrays) {
1410 if (!maybe_obj->ToObject(&obj))
return maybe_obj;
1418 ASSERT(this->
map()->NumberOfOwnDescriptors() + 1 ==
1423 MaybeObject* maybe_properties = properties()->
CopySize(new_size);
1424 if (!maybe_properties->To(&new_properties))
return maybe_properties;
1425 set_properties(new_properties);
1434 if (!object->map()->HasTransitionArray())
return false;
1436 int transition = transitions->Search(*key);
1438 PropertyDetails target_details = transitions->GetTargetDetails(transition);
1439 if (target_details.type() !=
FIELD)
return false;
1440 if (target_details.attributes() !=
NONE)
return false;
1441 Handle<Map> target(transitions->GetTarget(transition));
1450 return map->instance_descriptors()->GetFieldIndex(last_added);
1468 Object* JSGlobalPropertyCell::value() {
1475 ASSERT(!val->IsJSGlobalPropertyCell());
1573 ASSERT(index < properties()->length());
1574 return properties()->get(index);
1587 ASSERT(index < properties()->length());
1588 properties()->set(index, value);
1626 Object* pre_allocated_value,
1628 ASSERT(!filler_value->IsHeapObject() ||
1630 ASSERT(!pre_allocated_value->IsHeapObject() ||
1634 if (filler_value != pre_allocated_value) {
1637 for (
int i = 0; i < pre_allocated; i++) {
1642 while (offset < size) {
1650 ASSERT(properties()->IsDictionary() ==
map()->is_dictionary_map());
1651 return !properties()->IsDictionary();
1670 return properties > limit;
1685 if (value < 0)
return false;
1689 if (IsHeapNumber()) {
1691 uint32_t uint_value =
static_cast<uint32_t
>(value);
1692 if (value == static_cast<double>(uint_value)) {
1693 *index = uint_value;
1702 if (!this->IsJSValue())
return false;
1705 if (!js_value->value()->IsString())
return false;
1708 if (index >= (uint32_t)str->
length())
return false;
1716 #if ENABLE_EXTRA_CHECKS
1725 FATAL(
"API call returned invalid object");
1727 #endif // ENABLE_EXTRA_CHECKS
1732 ASSERT(object->IsFixedArray() ||
object->IsFixedDoubleArray());
1744 return get(index) ==
GetHeap()->the_hole_value();
1751 ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
1785 map() !=
HEAP->fixed_array_map());
1794 map() !=
HEAP->fixed_array_map());
1801 return GetHeap()->the_hole_value();
1810 map() !=
HEAP->fixed_array_map());
1819 map() !=
HEAP->fixed_array_map());
1853 ASSERT(array->
map() !=
HEAP->raw_unchecked_fixed_cow_array_map());
1867 ASSERT(array->
map() !=
HEAP->raw_unchecked_fixed_cow_array_map());
1884 heap->undefined_value());
1911 ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
1941 this ==
HEAP->empty_descriptor_array());
1955 template<SearchMode search_mode,
typename T>
1957 uint32_t hash = name->
Hash();
1962 while (low != high) {
1963 int mid = (low + high) / 2;
1964 String* mid_name = array->GetSortedKey(mid);
1965 uint32_t mid_hash = mid_name->
Hash();
1967 if (mid_hash >= hash) {
1974 for (; low <= limit; ++low) {
1975 int sort_index = array->GetSortedKeyIndex(low);
1976 String* entry = array->GetKey(sort_index);
1977 if (entry->
Hash() != hash)
break;
1978 if (entry->
Equals(name)) {
1979 if (search_mode ==
ALL_ENTRIES || sort_index < valid_entries) {
1982 return T::kNotFound;
1986 return T::kNotFound;
1992 template<SearchMode search_mode,
typename T>
1994 uint32_t hash = name->
Hash();
1996 for (
int number = 0; number < len; number++) {
1997 int sorted_index = array->GetSortedKeyIndex(number);
1998 String* entry = array->GetKey(sorted_index);
1999 uint32_t current_hash = entry->
Hash();
2000 if (current_hash > hash)
break;
2001 if (current_hash == hash && entry->
Equals(name))
return sorted_index;
2004 ASSERT(len >= valid_entries);
2005 for (
int number = 0; number < valid_entries; number++) {
2006 String* entry = array->GetKey(number);
2007 uint32_t current_hash = entry->
Hash();
2008 if (current_hash == hash && entry->
Equals(name))
return number;
2011 return T::kNotFound;
2015 template<SearchMode search_mode,
typename T>
2018 SLOW_ASSERT(array->IsSortedNoDuplicates(valid_entries));
2023 int nof = array->number_of_entries();
2024 if (nof == 0)
return T::kNotFound;
2027 const int kMaxElementsForLinearSearch = 8;
2029 nof <= kMaxElementsForLinearSearch) ||
2031 valid_entries <= (kMaxElementsForLinearSearch * 3))) {
2032 return LinearSearch<search_mode>(array, name, nof, valid_entries);
2036 return BinarySearch<search_mode>(array, name, 0, nof - 1, valid_entries);
2041 return internal::Search<VALID_ENTRIES>(
this, name, valid_descriptors);
2045 int DescriptorArray::SearchWithCache(String* name, Map* map) {
2046 int number_of_own_descriptors = map->NumberOfOwnDescriptors();
2047 if (number_of_own_descriptors == 0)
return kNotFound;
2050 int number = cache->
Lookup(map, name);
2053 number =
Search(name, number_of_own_descriptors);
2054 cache->Update(map, name, number);
2063 LookupResult* result) {
2065 int number = descriptors->SearchWithCache(name,
this);
2067 result->DescriptorResult(holder, descriptors->
GetDetails(number), number);
2073 LookupResult* result) {
2076 int number = transition_array->
Search(name);
2078 return result->TransitionResult(holder, number);
2088 reinterpret_cast<HeapObject*>(
this),
2095 return String::cast(
get(ToKeyIndex(descriptor_number)));
2100 return GetDetails(descriptor_number).pointer();
2110 PropertyDetails details =
GetDetails(descriptor_index);
2111 set(ToDetailsIndex(descriptor_index), details.set_pointer(pointer).AsSmi());
2118 reinterpret_cast<HeapObject*>(
this),
2125 return get(ToValueIndex(descriptor_number));
2131 Object* details =
get(ToDetailsIndex(descriptor_number));
2132 return PropertyDetails(
Smi::cast(details));
2142 return Descriptor::IndexFromValue(
GetValue(descriptor_number));
2153 return GetValue(descriptor_number);
2165 desc->Init(
GetKey(descriptor_number),
2176 ASSERT(desc->GetDetails().descriptor_index() <=
2178 ASSERT(desc->GetDetails().descriptor_index() > 0);
2181 ToKeyIndex(descriptor_number),
2184 ToValueIndex(descriptor_number),
2187 ToDetailsIndex(descriptor_number),
2188 desc->GetDetails().AsSmi());
2195 ASSERT(desc->GetDetails().descriptor_index() <=
2197 ASSERT(desc->GetDetails().descriptor_index() > 0);
2199 set(ToKeyIndex(descriptor_number), desc->GetKey());
2200 set(ToValueIndex(descriptor_number), desc->GetValue());
2201 set(ToDetailsIndex(descriptor_number), desc->GetDetails().AsSmi());
2208 int enumeration_index = descriptor_number + 1;
2210 desc->SetEnumerationIndex(enumeration_index);
2211 Set(descriptor_number, desc, witness);
2213 uint32_t hash = desc->GetKey()->Hash();
2217 for (insertion = descriptor_number; insertion > 0; --insertion) {
2219 if (key->
Hash() <= hash)
break;
2229 int enumeration_index = descriptor_number + 1;
2231 desc->SetEnumerationIndex(enumeration_index);
2232 Set(descriptor_number, desc);
2234 uint32_t hash = desc->GetKey()->Hash();
2238 for (insertion = descriptor_number; insertion > 0; --insertion) {
2240 if (key->
Hash() <= hash)
break;
2248 void DescriptorArray::SwapSortedKeys(
int first,
int second) {
2256 : marking_(array->GetHeap()->incremental_marking()) {
2258 ASSERT(Marking::Color(array) == Marking::WHITE_OBJECT);
2263 marking_->LeaveNoMarkingScope();
2267 template<
typename Shape,
typename Key>
2269 const int kMinCapacity = 32;
2271 if (capacity < kMinCapacity) {
2272 capacity = kMinCapacity;
2278 template<
typename Shape,
typename Key>
2285 template<
typename Shape,
typename Key>
2287 uint32_t capacity = Capacity();
2292 Object* element = KeyAt(entry);
2294 if (element == isolate->
heap()->raw_unchecked_undefined_value())
break;
2295 if (element != isolate->
heap()->raw_unchecked_the_hole_value() &&
2296 Shape::IsMatch(key, element))
return entry;
2297 entry = NextProbe(entry, count++, capacity);
2304 Object* max_index_object =
get(kMaxNumberKeyIndex);
2305 if (!max_index_object->IsSmi())
return false;
2307 (
Smi::cast(max_index_object)->
value() & kRequiresSlowElementsMask);
2311 ASSERT(!requires_slow_elements());
2312 Object* max_index_object =
get(kMaxNumberKeyIndex);
2313 if (!max_index_object->IsSmi())
return 0;
2314 uint32_t value =
static_cast<uint32_t
>(
Smi::cast(max_index_object)->
value());
2315 return value >> kRequiresSlowElementsTagSize;
2388 #define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
2390 #undef MAKE_STRUCT_CAST
2393 template <
typename Shape,
typename Key>
2395 ASSERT(obj->IsHashTable());
2396 return reinterpret_cast<HashTable*
>(obj);
2406 uint32_t String::hash_field() {
2413 #if V8_HOST_ARCH_64_BIT
2420 if (other ==
this)
return true;
2421 if (StringShape(
this).IsSymbol() && StringShape(other).IsSymbol()) {
2424 return SlowEquals(other);
2429 if (!StringShape(
this).IsCons())
return this;
2432 return SlowTryFlatten(pretenure);
2437 MaybeObject* flat = TryFlatten(pretenure);
2438 Object* successfully_flattened;
2439 if (!flat->ToObject(&successfully_flattened))
return this;
2446 switch (StringShape(
this).full_representation_tag()) {
2472 ASSERT(StringShape(
this).IsSequential());
2474 return this->IsAsciiRepresentation()
2481 if (!StringShape(
this).IsCons())
return true;
2490 ASSERT(StringShape(
this).IsIndirect());
2506 static_cast<byte>(value));
2516 return reinterpret_cast<char*
>(GetCharsAddress());
2558 ASSERT(parent->IsSeqString() || parent->IsExternalString());
2611 if (is_short())
return;
2612 const char** data_field =
2613 reinterpret_cast<const char**
>(
FIELD_ADDR(
this, kResourceDataOffset));
2614 *data_field = resource()->data();
2620 *
reinterpret_cast<const Resource**
>(
2621 FIELD_ADDR(
this, kResourceOffset)) = resource;
2622 if (resource !=
NULL) update_data_cache();
2627 return resource()->data();
2633 return GetChars()[index];
2643 if (is_short())
return;
2646 *data_field = resource()->data();
2652 *
reinterpret_cast<const Resource**
>(
2653 FIELD_ADDR(
this, kResourceOffset)) = resource;
2654 if (resource !=
NULL) update_data_cache();
2659 return resource()->data();
2665 return GetChars()[index];
2671 return GetChars() + start;
2676 set_finger_index(kEntriesIndex);
2677 set_size(kEntriesIndex);
2682 int cache_size = size();
2686 cache_size - kEntriesIndex);
2741 return reinterpret_cast<uint8_t*
>(external_pointer());
2747 uint8_t* ptr = external_pixel_pointer();
2753 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2759 uint8_t* ptr = external_pixel_pointer();
2764 void* ExternalArray::external_pointer() {
2766 return reinterpret_cast<void*
>(ptr);
2770 void ExternalArray::set_external_pointer(
void* value,
WriteBarrierMode mode) {
2771 intptr_t ptr =
reinterpret_cast<intptr_t
>(value);
2778 int8_t* ptr =
static_cast<int8_t*
>(external_pointer());
2784 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2790 int8_t* ptr =
static_cast<int8_t*
>(external_pointer());
2797 uint8_t* ptr =
static_cast<uint8_t*
>(external_pointer());
2803 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2809 uint8_t* ptr =
static_cast<uint8_t*
>(external_pointer());
2822 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2841 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2873 uint32_t* ptr =
static_cast<uint32_t*
>(external_pointer());
2885 uint32_t* ptr =
static_cast<uint32_t*
>(external_pointer());
2892 float* ptr =
static_cast<float*
>(external_pointer());
2904 float* ptr =
static_cast<float*
>(external_pointer());
2911 double* ptr =
static_cast<double*
>(external_pointer());
2923 double* ptr =
static_cast<double*
>(external_pointer());
2934 ASSERT(0 <=
id &&
id < 256);
2966 reinterpret_cast<SeqAsciiString*>(
this)->
length());
2969 return reinterpret_cast<ByteArray*
>(
this)->ByteArraySize();
2972 return reinterpret_cast<FreeSpace*
>(
this)->size();
2976 reinterpret_cast<SeqTwoByteString*>(
this)->
length());
2980 reinterpret_cast<FixedDoubleArray*>(
this)->
length());
2983 return reinterpret_cast<Code*
>(
this)->CodeSize();
2990 ASSERT(0 <= value && value < 256);
2996 ASSERT(0 <= value && value < 256);
2997 WRITE_BYTE_FIELD(
this, kInObjectPropertiesOffset, static_cast<byte>(value));
3002 ASSERT(0 <= value && value < 256);
3004 kPreAllocatedPropertyFieldsOffset,
3005 static_cast<byte>(value));
3051 set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
3053 set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
3059 return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
3064 set_bit_field3(FunctionWithPrototype::update(bit_field3(), value));
3069 return FunctionWithPrototype::decode(bit_field3());
3074 if (access_check_needed) {
3075 set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
3077 set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
3083 return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
3089 set_bit_field2(bit_field2() | (1 << kIsExtensible));
3091 set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
3096 return ((1 << kIsExtensible) & bit_field2()) != 0;
3102 set_bit_field2(bit_field2() | (1 << kAttachedToSharedFunctionInfo));
3104 set_bit_field2(bit_field2() & ~(1 << kAttachedToSharedFunctionInfo));
3109 return ((1 << kAttachedToSharedFunctionInfo) & bit_field2()) != 0;
3114 set_bit_field3(IsShared::update(bit_field3(), value));
3119 return IsShared::decode(bit_field3());
3124 set_bit_field3(DictionaryMap::update(bit_field3(), value));
3129 return DictionaryMap::decode(bit_field3());
3144 set_bit_field3(OwnsDescriptors::update(bit_field3(), is_shared));
3149 return OwnsDescriptors::decode(bit_field3());
3156 ASSERT((ExtractKindFromFlags(flags) != CALL_IC &&
3157 ExtractKindFromFlags(flags) != KEYED_CALL_IC) ||
3158 ExtractArgumentsCountFromFlags(flags) >= 0);
3164 return ExtractKindFromFlags(
flags());
3173 ASSERT(is_inline_cache_stub() ||
3182 ASSERT(is_inline_cache_stub());
3183 return ExtractExtraICStateFromFlags(
flags());
3188 return ExtractTypeFromFlags(
flags());
3193 ASSERT(is_call_stub() || is_keyed_call_stub() || kind() == STUB);
3194 return ExtractArgumentsCountFromFlags(
flags());
3200 kind() == UNARY_OP_IC ||
3201 kind() == BINARY_OP_IC ||
3202 kind() == COMPARE_IC ||
3203 kind() == TO_BOOLEAN_IC);
3204 return StubMajorKeyField::decode(
3211 kind() == UNARY_OP_IC ||
3212 kind() == BINARY_OP_IC ||
3213 kind() == COMPARE_IC ||
3214 kind() == TO_BOOLEAN_IC);
3215 ASSERT(0 <= major && major < 256);
3217 int updated = StubMajorKeyField::update(previous, major);
3223 return kind() == STUB && IsPregeneratedField::decode(
flags());
3230 f =
static_cast<Flags>(IsPregeneratedField::update(f, value));
3250 return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
3257 flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
3265 return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
3272 flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
3280 return FullCodeFlagsIsCompiledOptimizable::decode(flags);
3287 flags = FullCodeFlagsIsCompiledOptimizable::update(flags, value);
3300 ASSERT(level >= 0 && level <= kMaxLoopNestingMarker);
3319 ASSERT(kind() == OPTIMIZED_FUNCTION);
3320 return StackSlotsField::decode(
3326 CHECK(slots <= (1 << kStackSlotsBitCount));
3327 ASSERT(kind() == OPTIMIZED_FUNCTION);
3329 int updated = StackSlotsField::update(previous, slots);
3335 ASSERT(kind() == OPTIMIZED_FUNCTION);
3336 return SafepointTableOffsetField::decode(
3342 CHECK(offset <= (1 << kSafepointTableOffsetBitCount));
3343 ASSERT(kind() == OPTIMIZED_FUNCTION);
3346 int updated = SafepointTableOffsetField::update(previous, offset);
3353 return StackCheckTableOffsetField::decode(
3362 int updated = StackCheckTableOffsetField::update(previous, offset);
3368 ASSERT(is_call_stub() || is_keyed_call_stub());
3375 ASSERT(is_call_stub() || is_keyed_call_stub());
3381 ASSERT(is_unary_op_stub());
3382 return UnaryOpTypeField::decode(
3388 ASSERT(is_unary_op_stub());
3390 int updated = UnaryOpTypeField::update(previous, value);
3396 ASSERT(is_binary_op_stub());
3397 return BinaryOpTypeField::decode(
3403 ASSERT(is_binary_op_stub());
3405 int updated = BinaryOpTypeField::update(previous, value);
3411 ASSERT(is_binary_op_stub());
3412 return BinaryOpResultTypeField::decode(
3418 ASSERT(is_binary_op_stub());
3420 int updated = BinaryOpResultTypeField::update(previous, value);
3426 ASSERT(is_compare_ic_stub());
3427 return CompareStateField::decode(
3433 ASSERT(is_compare_ic_stub());
3435 int updated = CompareStateField::update(previous, value);
3441 ASSERT(is_compare_ic_stub());
3442 return CompareOperationField::decode(
3448 ASSERT(is_compare_ic_stub());
3450 int updated = CompareOperationField::update(previous, value);
3456 ASSERT(is_to_boolean_ic_stub());
3457 return ToBooleanStateField::decode(
3463 ASSERT(is_to_boolean_ic_stub());
3465 int updated = ToBooleanStateField::update(previous, value);
3472 return HasFunctionCacheField::decode(
3480 int updated = HasFunctionCacheField::update(previous, flag);
3486 Kind kind = this->kind();
3487 return kind >= FIRST_IC_KIND && kind <= LAST_IC_KIND;
3499 ASSERT(extra_ic_state == kNoExtraICState ||
3502 kind == KEYED_STORE_IC);
3504 int bits = KindField::encode(kind)
3505 | ICStateField::encode(ic_state)
3506 | TypeField::encode(type)
3507 | ExtraICStateField::encode(extra_ic_state)
3508 | (argc << kArgumentsCountShift)
3509 | CacheHolderField::encode(holder);
3510 return static_cast<Flags>(bits);
3519 return ComputeFlags(kind,
MONOMORPHIC, extra_ic_state, type, argc, holder);
3524 return KindField::decode(flags);
3529 return ICStateField::decode(flags);
3534 return ExtraICStateField::decode(flags);
3539 return TypeField::decode(flags);
3544 return (flags & kArgumentsCountMask) >> kArgumentsCountShift;
3549 return CacheHolderField::decode(flags);
3555 return static_cast<Flags>(bits);
3576 Object* Map::prototype() {
3590 static MaybeObject* EnsureHasTransitionArray(Map* map) {
3591 TransitionArray* transitions;
3592 MaybeObject* maybe_transitions;
3593 if (!map->HasTransitionArray()) {
3595 if (!maybe_transitions->To(&transitions))
return maybe_transitions;
3596 transitions->set_back_pointer_storage(map->GetBackPointer());
3597 }
else if (!map->transitions()->IsFullTransitionArray()) {
3598 maybe_transitions = map->transitions()->ExtendToFullTransitionArray();
3599 if (!maybe_transitions->To(&transitions))
return maybe_transitions;
3603 map->set_transitions(transitions);
3614 for (
int i = 0; i < len; ++i) used_indices[i] =
false;
3618 for (
int i = 0; i < len; ++i) {
3619 int enum_index = descriptors->
GetDetails(i).descriptor_index() -
3620 PropertyDetails::kInitialIndex;
3621 ASSERT(0 <= enum_index && enum_index < len);
3622 ASSERT(!used_indices[enum_index]);
3623 used_indices[enum_index] =
true;
3627 set_instance_descriptors(descriptors);
3628 SetNumberOfOwnDescriptors(len);
3637 Object* back_pointer = GetBackPointer();
3643 WRITE_FIELD(
this, kTransitionsOrBackPointerOffset, back_pointer);
3645 heap,
this, kTransitionsOrBackPointerOffset, back_pointer, mode);
3652 int number_of_own_descriptors = NumberOfOwnDescriptors();
3654 descriptors->
Append(desc, witness);
3655 SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);
3661 if (object->IsDescriptorArray()) {
3664 ASSERT(object->IsMap() ||
object->IsUndefined());
3671 return HasTransitionArray() && transitions()->HasElementsTransition();
3677 return object->IsTransitionArray();
3682 return transitions()->elements_transition();
3687 if (!HasTransitionArray())
return true;
3697 if (HasTransitionArray())
return transitions()->CopyInsert(key, target);
3703 transitions()->SetTarget(transition_index, target);
3708 return transitions()->GetTarget(transition_index);
3713 MaybeObject* allow_elements = EnsureHasTransitionArray(
this);
3714 if (allow_elements->IsFailure())
return allow_elements;
3715 transitions()->set_elements_transition(transitioned_map);
3721 if (!HasTransitionArray())
return GetHeap()->empty_fixed_array();
3722 if (!transitions()->HasPrototypeTransitions()) {
3723 return GetHeap()->empty_fixed_array();
3725 return transitions()->GetPrototypeTransitions();
3730 MaybeObject* allow_prototype = EnsureHasTransitionArray(
this);
3731 if (allow_prototype->IsFailure())
return allow_prototype;
3733 if (HasPrototypeTransitions()) {
3734 ASSERT(GetPrototypeTransitions() != proto_transitions);
3735 ZapPrototypeTransitions();
3738 transitions()->SetPrototypeTransitions(proto_transitions);
3744 return HasTransitionArray() && transitions()->HasPrototypeTransitions();
3749 ASSERT(HasTransitionArray());
3755 void Map::set_transitions(TransitionArray* transition_array,
3759 CHECK(transitions() != transition_array);
3763 WRITE_FIELD(
this, kTransitionsOrBackPointerOffset, transition_array);
3765 GetHeap(),
this, kTransitionsOrBackPointerOffset, transition_array, mode);
3770 ASSERT(undefined->IsUndefined());
3771 WRITE_FIELD(
this, kTransitionsOrBackPointerOffset, undefined);
3777 ASSERT((value->IsUndefined() && GetBackPointer()->IsMap()) ||
3778 (value->IsMap() && GetBackPointer()->IsUndefined()));
3780 if (object->IsTransitionArray()) {
3783 WRITE_FIELD(
this, kTransitionsOrBackPointerOffset, value);
3785 GetHeap(),
this, kTransitionsOrBackPointerOffset, value, mode);
3796 return transition_array;
3801 ASSERT(HasTransitionArray());
3802 ASSERT(unchecked_transition_array()->HasPrototypeTransitions());
3803 return unchecked_transition_array()->UncheckedPrototypeTransitions();
3812 ACCESSORS(JSFunction, next_function_link, Object, kNextFunctionLinkOffset)
3816 ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
3822 ACCESSORS(AccessorInfo, setter, Object, kSetterOffset)
3823 ACCESSORS(AccessorInfo, data, Object, kDataOffset)
3824 ACCESSORS(AccessorInfo, name, Object, kNameOffset)
3826 ACCESSORS(AccessorInfo, expected_receiver_type, Object,
3827 kExpectedReceiverTypeOffset)
3830 ACCESSORS(AccessorPair, setter, Object, kSetterOffset)
3833 ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
3834 ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)
3837 ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
3838 ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
3839 ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
3840 ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
3841 ACCESSORS(InterceptorInfo, data, Object, kDataOffset)
3844 ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)
3847 ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
3850 ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
3852 kPropertyAccessorsOffset)
3853 ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
3854 kPrototypeTemplateOffset)
3855 ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
3857 kNamedPropertyHandlerOffset)
3858 ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
3859 kIndexedPropertyHandlerOffset)
3861 kInstanceTemplateOffset)
3862 ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
3863 ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
3864 ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
3865 kInstanceCallHandlerOffset)
3867 kAccessCheckInfoOffset)
3871 ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
3872 kInternalFieldCountOffset)
3875 ACCESSORS(SignatureInfo, args, Object, kArgsOffset)
3880 ACCESSORS(Script, name, Object, kNameOffset)
3881 ACCESSORS(Script,
id, Object, kIdOffset)
3884 ACCESSORS(Script, data, Object, kDataOffset)
3885 ACCESSORS(Script, context_data, Object, kContextOffset)
3890 ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
3891 ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
3893 kEvalFrominstructionsOffsetOffset)
3895 #ifdef ENABLE_DEBUGGER_SUPPORT
3896 ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
3897 ACCESSORS(DebugInfo, original_code,
Code, kOriginalCodeIndex)
3899 ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)
3903 ACCESSORS_TO_SMI(BreakPointInfo, statement_position, kStatementPositionIndex)
3904 ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
3907 ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
3908 ACCESSORS(SharedFunctionInfo, optimized_code_map, Object,
3909 kOptimizedCodeMapOffset)
3910 ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
3911 ACCESSORS(SharedFunctionInfo, initial_map, Object, kInitialMapOffset)
3913 kInstanceClassNameOffset)
3914 ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
3915 ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
3916 ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
3917 ACCESSORS(SharedFunctionInfo, inferred_name,
String, kInferredNameOffset)
3918 ACCESSORS(SharedFunctionInfo, this_property_assignments, Object,
3919 kThisPropertyAssignmentsOffset)
3920 SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
3924 kHiddenPrototypeBit)
3925 BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
3927 kNeedsAccessCheckBit)
3929 kReadOnlyPrototypeBit)
3936 has_only_simple_this_property_assignments,
3937 kHasOnlySimpleThisPropertyAssignments)
3941 kAllowLazyCompilation)
3944 allows_lazy_compilation_without_context,
3945 kAllowLazyCompilationWithoutContext)
3952 has_duplicate_parameters,
3953 kHasDuplicateParameters)
3956 #if V8_HOST_ARCH_32_BIT
3959 kFormalParameterCountOffset)
3961 kExpectedNofPropertiesOffset)
3962 SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
3964 kStartPositionAndTypeOffset)
3965 SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
3967 kFunctionTokenPositionOffset)
3969 kCompilerHintsOffset)
3970 SMI_ACCESSORS(SharedFunctionInfo, this_property_assignments_count,
3971 kThisPropertyAssignmentsCountOffset)
3972 SMI_ACCESSORS(SharedFunctionInfo, opt_count, kOptCountOffset)
3973 SMI_ACCESSORS(SharedFunctionInfo, counters, kCountersOffset)
3976 kStressDeoptCounterOffset)
3979 #define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset) \
3980 STATIC_ASSERT(holder::offset % kPointerSize == 0); \
3981 int holder::name() { \
3982 int value = READ_INT_FIELD(this, offset); \
3983 ASSERT(kHeapObjectTag == 1); \
3984 ASSERT((value & kHeapObjectTag) == 0); \
3985 return value >> 1; \
3987 void holder::set_##name(int value) { \
3988 ASSERT(kHeapObjectTag == 1); \
3989 ASSERT((value & 0xC0000000) == 0xC0000000 || \
3990 (value & 0xC0000000) == 0x000000000); \
3991 WRITE_INT_FIELD(this, \
3993 (value << 1) & ~kHeapObjectTag); \
3996 #define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset) \
3997 STATIC_ASSERT(holder::offset % kPointerSize == kIntSize); \
3998 INT_ACCESSORS(holder, name, offset)
4003 formal_parameter_count,
4004 kFormalParameterCountOffset)
4007 expected_nof_properties,
4008 kExpectedNofPropertiesOffset)
4013 start_position_and_type,
4014 kStartPositionAndTypeOffset)
4017 function_token_position,
4018 kFunctionTokenPositionOffset)
4021 kCompilerHintsOffset)
4024 this_property_assignments_count,
4025 kThisPropertyAssignmentsCountOffset)
4030 stress_deopt_counter,
4031 kStressDeoptCounterOffset)
4041 ASSERT(0 <= value && value < 256);
4042 WRITE_BYTE_FIELD(
this, kConstructionCountOffset, static_cast<byte>(value));
4048 live_objects_may_exist,
4049 kLiveObjectsMayExist)
4052 bool SharedFunctionInfo::IsInobjectSlackTrackingInProgress() {
4053 return initial_map() !=
GetHeap()->undefined_value();
4059 optimization_disabled,
4060 kOptimizationDisabled)
4063 void SharedFunctionInfo::set_optimization_disabled(
bool disable) {
4065 kOptimizationDisabled,
4069 if ((
code()->kind() == Code::FUNCTION) && disable) {
4070 code()->set_optimizable(
false);
4076 if (
code()->kind() != Code::FUNCTION)
return 0;
4077 return code()->profiler_ticks();
4097 this->language_mode() == language_mode ||
4101 hints, kStrictModeFunction, language_mode !=
CLASSIC_MODE);
4103 hints, kExtendedModeFunction, language_mode ==
EXTENDED_MODE);
4104 set_compiler_hints(hints);
4112 BOOL_GETTER(SharedFunctionInfo, compiler_hints, is_extended_mode,
4113 kExtendedModeFunction)
4114 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
4117 kNameShouldPrintAsAnonymous)
4118 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
4119 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
4120 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction)
4126 void SharedFunctionInfo::BeforeVisitingPointers() {
4127 if (IsInobjectSlackTrackingInProgress()) DetachInitialMap();
4137 ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
4138 ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)
4140 ACCESSORS(PolymorphicCodeCache, cache, Object, kCacheOffset)
4142 bool Script::HasValidSource() {
4143 Object* src = this->source();
4144 if (!src->IsString())
return true;
4146 if (!StringShape(src_str).IsExternal())
return true;
4158 set_formal_parameter_count(kDontAdaptArgumentsSentinel);
4168 set_start_position_and_type((start_position << kStartPositionShift)
4179 return reinterpret_cast<Code*
>(
READ_FIELD(
this, kCodeOffset));
4189 ScopeInfo* SharedFunctionInfo::scope_info() {
4190 return reinterpret_cast<ScopeInfo*
>(
READ_FIELD(
this, kScopeInfoOffset));
4194 void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
4196 WRITE_FIELD(
this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
4200 reinterpret_cast<Object*>(value),
4207 Isolate::Current()->builtins()->builtin(Builtins::kLazyCompile);
4212 return function_data()->IsFunctionTemplateInfo();
4223 return function_data()->IsSmi();
4228 ASSERT(HasBuiltinFunctionId());
4240 set_compiler_hints(hints | ((code_age & kCodeAgeMask) << kCodeAgeShift));
4245 return ICAgeBits::decode(counters());
4250 set_counters(ICAgeBits::update(counters(), ic_age));
4255 return DeoptCountBits::decode(counters());
4260 set_counters(DeoptCountBits::update(counters(), deopt_count));
4265 int value = counters();
4266 int deopt_count = DeoptCountBits::decode(value);
4267 deopt_count = (deopt_count + 1) & DeoptCountBits::kMax;
4268 set_counters(DeoptCountBits::update(value, deopt_count));
4273 return OptReenableTriesBits::decode(counters());
4278 set_counters(OptReenableTriesBits::update(counters(), tries));
4283 Code* code = this->
code();
4289 int tries = opt_reenable_tries();
4290 set_opt_reenable_tries((tries + 1) & OptReenableTriesBits::kMax);
4293 if (tries >= 16 && (((tries - 1) & tries) == 0)) {
4294 set_optimization_disabled(
false);
4297 code()->set_optimizable(
true);
4303 return context()->global_object()->IsJSBuiltinsObject();
4308 return shared()->formal_parameter_count() !=
4314 return code()->kind() == Code::OPTIMIZED_FUNCTION;
4319 return code()->kind() == Code::FUNCTION &&
code()->optimizable();
4336 Builtins::kInRecompileQueue);
4346 return reinterpret_cast<Code*
>(
4363 bool was_optimized = IsOptimized();
4364 bool is_optimized = code->
kind() == Code::OPTIMIZED_FUNCTION;
4370 if (!was_optimized && is_optimized) {
4371 context()->native_context()->AddOptimizedFunction(
this);
4373 if (was_optimized && !is_optimized) {
4374 context()->native_context()->RemoveOptimizedFunction(
this);
4390 return reinterpret_cast<SharedFunctionInfo*
>(
4391 READ_FIELD(
this, kSharedFunctionInfoOffset));
4396 ASSERT(value->IsUndefined() || value->IsContext());
4402 kPrototypeOrInitialMapOffset)
4405 Map* JSFunction::initial_map() {
4406 return Map::cast(prototype_or_initial_map());
4411 set_prototype_or_initial_map(value);
4417 Context* native_context = context()->native_context();
4418 Object* array_function =
4420 if (array_function->IsJSFunction() &&
4425 MaybeObject* maybe_maps =
4428 if (!maybe_maps->To(&maps))
return maybe_maps;
4430 Map* current_map = initial_map;
4433 maps->
set(kind, current_map);
4438 MaybeObject* maybe_new_map =
4440 if (!maybe_new_map->To(&new_map))
return maybe_new_map;
4441 maps->
set(next_kind, new_map);
4442 current_map = new_map;
4444 native_context->set_js_array_maps(maps);
4446 set_initial_map(initial_map);
4452 return prototype_or_initial_map()->IsMap();
4457 return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
4467 ASSERT(has_instance_prototype());
4468 if (has_initial_map())
return initial_map()->prototype();
4471 return prototype_or_initial_map();
4479 if (
map()->has_non_instance_prototype())
return map()->constructor();
4480 return instance_prototype();
4495 ASSERT(!shared()->bound());
4496 return literals_or_bindings();
4501 ASSERT(!shared()->bound());
4502 set_literals_or_bindings(literals);
4507 ASSERT(shared()->bound());
4508 return literals_or_bindings();
4513 ASSERT(shared()->bound());
4517 bindings->
map() ==
GetHeap()->fixed_cow_array_map());
4518 set_literals_or_bindings(bindings);
4523 ASSERT(!shared()->bound());
4524 return literals()->length();
4529 ASSERT(
id < kJSBuiltinsCount);
4530 return READ_FIELD(
this, OffsetOfFunctionWithId(
id));
4536 ASSERT(
id < kJSBuiltinsCount);
4537 WRITE_FIELD(
this, OffsetOfFunctionWithId(
id), value);
4543 ASSERT(
id < kJSBuiltinsCount);
4550 ASSERT(
id < kJSBuiltinsCount);
4559 ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset)
4562 void JSProxy::InitializeBody(
int object_size, Object* value) {
4573 ACCESSORS(JSWeakMap, next, Object, kNextOffset)
4586 ACCESSORS(JSModule, context, Object, kContextOffset)
4590 JSModule* JSModule::
cast(Object* obj) {
4591 ASSERT(obj->IsJSModule());
4593 return reinterpret_cast<JSModule*
>(obj);
4597 ACCESSORS(JSValue, value, Object, kValueOffset)
4600 JSValue* JSValue::
cast(Object* obj) {
4601 ASSERT(obj->IsJSValue());
4603 return reinterpret_cast<JSValue*
>(obj);
4607 ACCESSORS(JSDate, value, Object, kValueOffset)
4608 ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
4609 ACCESSORS(JSDate, year, Object, kYearOffset)
4610 ACCESSORS(JSDate, month, Object, kMonthOffset)
4611 ACCESSORS(JSDate, day, Object, kDayOffset)
4612 ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
4613 ACCESSORS(JSDate, hour, Object, kHourOffset)
4614 ACCESSORS(JSDate, min, Object, kMinOffset)
4615 ACCESSORS(JSDate, sec, Object, kSecOffset)
4618 JSDate* JSDate::
cast(Object* obj) {
4621 return reinterpret_cast<JSDate*
>(obj);
4625 ACCESSORS(JSMessageObject, type, String, kTypeOffset)
4627 ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
4628 ACCESSORS(JSMessageObject, stack_trace, Object, kStackTraceOffset)
4629 ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
4630 SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
4631 SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)
4634 JSMessageObject* JSMessageObject::
cast(Object* obj) {
4635 ASSERT(obj->IsJSMessageObject());
4637 return reinterpret_cast<JSMessageObject*
>(obj);
4641 INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
4643 ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
4644 ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
4645 ACCESSORS(Code, type_feedback_info, Object, kTypeFeedbackInfoOffset)
4646 ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
4655 return instruction_start() + instruction_size();
4665 return reinterpret_cast<FixedArray*
>(
4666 READ_FIELD(
this, kDeoptimizationDataOffset));
4676 return unchecked_relocation_info()->GetDataStartAddress();
4681 return unchecked_relocation_info()->length();
4686 return instruction_start();
4701 JSRegExp::
Type JSRegExp::TypeTag() {
4702 Object* data = this->data();
4716 switch (TypeTag()) {
4729 ASSERT(this->data()->IsFixedArray());
4730 Object* data = this->data();
4737 ASSERT(this->data()->IsFixedArray());
4738 Object* data = this->data();
4745 ASSERT(TypeTag() != NOT_COMPILED);
4751 FixedArray* fa =
reinterpret_cast<FixedArray*
>(data());
4758 ASSERT(TypeTag() != NOT_COMPILED);
4759 ASSERT(index >= kDataIndex);
4765 ASSERT(index >= kDataIndex);
4766 FixedArray* fa =
reinterpret_cast<FixedArray*
>(data());
4767 if (value->IsSmi()) {
4781 Map* map = fixed_array->
map();
4783 (map ==
GetHeap()->fixed_array_map() ||
4784 map ==
GetHeap()->fixed_cow_array_map())) ||
4786 (fixed_array->IsFixedDoubleArray() ||
4787 fixed_array ==
GetHeap()->empty_fixed_array())) ||
4789 fixed_array->IsFixedArray() &&
4790 fixed_array->IsDictionary()) ||
4793 (elements()->IsFixedArray() && elements()->
length() >= 2));
4842 return array->IsExternalArray();
4846 #define EXTERNAL_ELEMENTS_CHECK(name, type) \
4847 bool JSObject::HasExternal##name##Elements() { \
4848 HeapObject* array = elements(); \
4849 ASSERT(array != NULL); \
4850 if (!array->IsHeapObject()) \
4852 return array->map()->instance_type() == type; \
4871 bool JSObject::HasNamedInterceptor() {
4882 ASSERT(HasFastSmiOrObjectElements());
4885 if (elems->
map() != isolate->
heap()->fixed_cow_array_map())
return elems;
4886 Object* writable_elems;
4888 elems, isolate->
heap()->fixed_array_map());
4889 if (!maybe_writable_elems->ToObject(&writable_elems)) {
4890 return maybe_writable_elems;
4894 isolate->
counters()->cow_arrays_converted()->Increment();
4895 return writable_elems;
4900 ASSERT(!HasFastProperties());
4906 ASSERT(HasDictionaryElements());
4911 bool String::IsHashFieldComputed(uint32_t field) {
4912 return (field & kHashNotComputedMask) == 0;
4917 return IsHashFieldComputed(hash_field());
4923 uint32_t field = hash_field();
4924 if (IsHashFieldComputed(field))
return field >> kHashShift;
4926 return ComputeAndSetHash();
4932 raw_running_hash_(seed),
4934 is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
4935 is_first_char_(
true) {
4936 ASSERT(FLAG_randomize_hashes || raw_running_hash_ == 0);
4945 uint32_t StringHasher::AddCharacterCore(uint32_t running_hash, uint32_t c) {
4947 running_hash += (running_hash << 10);
4948 running_hash ^= (running_hash >> 6);
4949 return running_hash;
4953 uint32_t StringHasher::GetHashCore(uint32_t running_hash) {
4954 running_hash += (running_hash << 3);
4955 running_hash ^= (running_hash >> 11);
4956 running_hash += (running_hash << 15);
4960 return running_hash;
4971 raw_running_hash_ = AddCharacterCore(raw_running_hash_, c);
4973 if (is_array_index_) {
4974 if (c < '0' || c >
'9') {
4975 is_array_index_ =
false;
4978 if (is_first_char_) {
4979 is_first_char_ =
false;
4980 if (c ==
'0' && length_ > 1) {
4981 is_array_index_ =
false;
4985 if (array_index_ > 429496729
U - ((d + 2) >> 3)) {
4986 is_array_index_ =
false;
4988 array_index_ = array_index_ * 10 + d;
5001 raw_running_hash_ = AddCharacterCore(raw_running_hash_, c);
5005 uint32_t StringHasher::GetHash() {
5008 return GetHashCore(raw_running_hash_);
5012 template <
typename s
char>
5020 for (; i < length; i++) {
5033 return SlowAsArrayIndex(index);
5038 return map()->prototype();
5043 return map()->constructor();
5070 if (IsJSGlobalProxy()) {
5072 if (proto->IsNull())
return GetHeap()->undefined_value();
5073 ASSERT(proto->IsJSGlobalObject());
5126 return AttributesField::decode(static_cast<uint32_t>(
flag()->value()));
5131 set_flag(
Smi::FromInt(AttributesField::update(
flag()->value(), attributes)));
5136 Object* function_template = expected_receiver_type();
5137 if (!function_template->IsFunctionTemplateInfo())
return true;
5142 template<
typename Shape,
typename Key>
5146 SetEntry(entry, key, value, PropertyDetails(
Smi::FromInt(0)));
5150 template<
typename Shape,
typename Key>
5154 PropertyDetails details) {
5155 ASSERT(!key->IsString() ||
5156 details.IsDeleted() ||
5157 details.dictionary_index() > 0);
5168 ASSERT(other->IsNumber());
5169 return key ==
static_cast<uint32_t
>(other->
Number());
5180 ASSERT(other->IsNumber());
5191 ASSERT(other->IsNumber());
5196 return Isolate::Current()->heap()->NumberFromUint32(key);
5223 template <
int entrysize>
5229 template <
int entrysize>
5232 return Smi::cast(maybe_hash->ToObjectChecked())->value();
5236 template <
int entrysize>
5240 return Smi::cast(maybe_hash->ToObjectChecked())->value();
5244 template <
int entrysize>
5263 const int kArraySizeThatFitsComfortablyInNewSpace = 128;
5264 if (elts->
length() < required_size) {
5267 Expand(required_size + (required_size >> 3));
5269 }
else if (!
GetHeap()->new_space()->Contains(elts) &&
5270 required_size < kArraySizeThatFitsComfortablyInNewSpace) {
5273 Expand(required_size);
5285 bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
5294 if (maybe_result->IsFailure())
return maybe_result;
5297 ((storage->
map() !=
GetHeap()->fixed_double_array_map()) &&
5301 set_elements(storage);
5308 if (
length() == 0)
return this;
5314 if (
length() == 0)
return this;
5330 set(index * 2, cell);
5340 return isolate->
factory()->the_hole_value();
5345 return isolate->
factory()->undefined_value();
5350 return heap->raw_unchecked_the_hole_value();
5356 return ICTotalCountField::decode(current);
5362 value = ICTotalCountField::update(value,
5363 ICTotalCountField::decode(count));
5370 return ICsWithTypeInfoCountField::decode(current);
5376 int new_count = ICsWithTypeInfoCountField::decode(value) + delta;
5382 if (new_count >= 0) {
5383 new_count &= ICsWithTypeInfoCountField::kMask;
5384 value = ICsWithTypeInfoCountField::update(value, new_count);
5398 int checksum = OwnTypeChangeChecksum::decode(value);
5399 checksum = (checksum + 1) % (1 << kTypeChangeChecksumBits);
5400 value = OwnTypeChangeChecksum::update(value, checksum);
5410 int mask = (1 << kTypeChangeChecksumBits) - 1;
5411 value = InlinedTypeChangeChecksum::update(value, checksum & mask);
5421 return OwnTypeChangeChecksum::decode(value);
5427 int mask = (1 << kTypeChangeChecksumBits) - 1;
5428 return InlinedTypeChangeChecksum::decode(value) == (checksum & mask);
5433 kTypeFeedbackCellsOffset)
5439 Relocatable::Relocatable(
Isolate* isolate) {
5440 ASSERT(isolate == Isolate::Current());
5442 prev_ = isolate->relocatable_top();
5443 isolate->set_relocatable_top(
this);
5447 Relocatable::~Relocatable() {
5448 ASSERT(isolate_ == Isolate::Current());
5449 ASSERT_EQ(isolate_->relocatable_top(),
this);
5450 isolate_->set_relocatable_top(prev_);
5460 v->VisitExternalReference(
5461 reinterpret_cast<Address*>(
FIELD_ADDR(
this, kForeignAddressOffset)));
5465 template<
typename StaticVisitor>
5467 StaticVisitor::VisitExternalReference(
5468 reinterpret_cast<Address*>(
FIELD_ADDR(
this, kForeignAddressOffset)));
5474 v->VisitExternalAsciiString(
5475 reinterpret_cast<Resource**>(
FIELD_ADDR(
this, kResourceOffset)));
5479 template<
typename StaticVisitor>
5482 StaticVisitor::VisitExternalAsciiString(
5483 reinterpret_cast<Resource**>(
FIELD_ADDR(
this, kResourceOffset)));
5489 v->VisitExternalTwoByteString(
5490 reinterpret_cast<Resource**>(
FIELD_ADDR(
this, kResourceOffset)));
5494 template<
typename StaticVisitor>
5497 StaticVisitor::VisitExternalTwoByteString(
5498 reinterpret_cast<Resource**>(
FIELD_ADDR(
this, kResourceOffset)));
5502 template<
int start_offset,
int end_offset,
int size>
5511 template<
int start_offset>
5521 #undef CAST_ACCESSOR
5522 #undef INT_ACCESSORS
5524 #undef ACCESSORS_TO_SMI
5525 #undef SMI_ACCESSORS
5527 #undef BOOL_ACCESSORS
5531 #undef WRITE_BARRIER
5532 #undef CONDITIONAL_WRITE_BARRIER
5533 #undef READ_DOUBLE_FIELD
5534 #undef WRITE_DOUBLE_FIELD
5535 #undef READ_INT_FIELD
5536 #undef WRITE_INT_FIELD
5537 #undef READ_INTPTR_FIELD
5538 #undef WRITE_INTPTR_FIELD
5539 #undef READ_UINT32_FIELD
5540 #undef WRITE_UINT32_FIELD
5541 #undef READ_SHORT_FIELD
5542 #undef WRITE_SHORT_FIELD
5543 #undef READ_BYTE_FIELD
5544 #undef WRITE_BYTE_FIELD
5549 #endif // V8_OBJECTS_INL_H_
static int SizeOf(Map *map, HeapObject *object)
MUST_USE_RESULT MaybeObject * GetElementWithReceiver(Object *receiver, uint32_t index)
bool FLAG_enable_slow_asserts
#define WRITE_BYTE_FIELD(p, offset, value)
float get_scalar(int index)
Object * unchecked_first()
kPropertyAccessorsOffset kNamedPropertyHandlerOffset kInstanceTemplateOffset kAccessCheckInfoOffset eval_from_instructions_offset
#define HAS_FAILURE_TAG(value)
uint8_t get_scalar(int index)
bool prohibits_overwriting()
void TryReenableOptimization()
void SetBackPointer(Object *value, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode)
void set_compare_state(byte value)
STATIC_CHECK((kStringRepresentationMask|kStringEncodingMask)==Internals::kFullStringRepresentationMask)
static bool IsMatch(uint32_t key, Object *other)
Address GetCharsAddress()
void set_prohibits_overwriting(bool value)
void set_null_unchecked(Heap *heap, int index)
Code * builtin(Name name)
PropertyAttributes GetPropertyAttribute(String *name)
void set_deopt_count(int value)
JSGlobalPropertyCell * Cell(int index)
#define SLOW_ASSERT(condition)
int allow_osr_at_loop_nesting_level()
const intptr_t kSmiTagMask
static const int kExternalAsciiRepresentationTag
static bool is_the_hole_nan(double value)
V8EXPORT bool IsTrue() const
FixedArray * function_bindings()
bool has_instance_prototype()
bool HasElementsTransition()
static const int kEntries
kPropertyAccessorsOffset kNamedPropertyHandlerOffset kInstanceTemplateOffset kAccessCheckInfoOffset kEvalFrominstructionsOffsetOffset kInstanceClassNameOffset kHiddenPrototypeBit read_only_prototype
static int EntryToIndex(int entry)
static ByteArray * FromDataStartAddress(Address address)
void AddCharacter(uint32_t c)
void set_all_can_write(bool value)
Object * DataAtUnchecked(int index)
int inobject_properties()
void set_has_deoptimization_support(bool value)
static uint32_t Hash(uint32_t key)
MUST_USE_RESULT MaybeObject * CopyFixedDoubleArray(FixedDoubleArray *src)
void set(int index, Object *value)
int GetInternalFieldOffset(int index)
void AddSurrogatePair(uc32 c)
static bool get(Smi *smi, int bit_position)
void RecordWrite(Address address, int offset)
#define ASSERT_TAG_ALIGNED(address)
void set_all_can_read(bool value)
FixedArray * unchecked_deoptimization_data()
void set_function_with_prototype(bool value)
static double hole_nan_as_double()
bool InNewSpace(Object *object)
static const int kTransitionsOrBackPointerOffset
static String * cast(Object *obj)
#define READ_DOUBLE_FIELD(p, offset)
#define READ_INTPTR_FIELD(p, offset)
MaybeObject * TryFlatten(PretenureFlag pretenure=NOT_TENURED)
static MUST_USE_RESULT MaybeObject * Allocate(int number_of_transitions)
const uint32_t kTwoByteStringTag
const int kFailureTypeTagSize
static const uint32_t kExponentMask
void set_language_mode(LanguageMode language_mode)
bool function_with_prototype()
static DescriptorArray * cast(Object *obj)
static Failure * InternalError()
static int SizeOf(Map *map, HeapObject *object)
void set_unary_op_type(byte value)
int unused_property_fields()
void set_length(Smi *length)
kPropertyAccessorsOffset kNamedPropertyHandlerOffset kInstanceTemplateOffset kAccessCheckInfoOffset kEvalFrominstructionsOffsetOffset kInstanceClassNameOffset kHiddenPrototypeBit BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check, kNeedsAccessCheckBit) BOOL_ACCESSORS(FunctionTemplateInfo
void set_javascript_builtin(Builtins::JavaScript id, Object *value)
Object * InObjectPropertyAt(int index)
static const int kStorage2Offset
static Smi * FromInt(int value)
void set_code_age(int age)
bool HasFastSmiElements()
bool IsFastObjectElementsKind(ElementsKind kind)
void IteratePointer(ObjectVisitor *v, int offset)
int BinarySearch(T *array, String *name, int low, int high, int valid_entries)
MUST_USE_RESULT MaybeObject * ToSmi()
unsigned stack_check_table_offset()
Map * elements_transition_map()
void set_second(String *second, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
void LookupTransition(JSObject *holder, String *name, LookupResult *result)
static Object * GetObjectFromEntryAddress(Address location_of_address)
void AddSurrogatePairNoIndex(uc32 c)
kPropertyAccessorsOffset kNamedPropertyHandlerOffset kInstanceTemplateOffset kAccessCheckInfoOffset kEvalFrominstructionsOffsetOffset kInstanceClassNameOffset kHiddenPrototypeBit kReadOnlyPrototypeBit kIsTopLevelBit allows_lazy_compilation
int NumberOfOwnDescriptors()
static MemoryChunk * FromAddress(Address a)
MUST_USE_RESULT MaybeObject * EnsureCanContainHeapObjectElements()
void VerifyApiCallResultType()
static HeapObject * cast(Object *obj)
MUST_USE_RESULT MaybeObject * get(int index)
#define READ_UINT32_FIELD(p, offset)
void set_function_bindings(FixedArray *bindings)
static const byte kArgumentMarker
static const int kMaxHashCalcLength
bool is_access_check_needed()
void set_pre_allocated_property_fields(int value)
static const byte kUndefined
const int kVariableSizeSentinel
static void IterateBody(HeapObject *obj, int object_size, ObjectVisitor *v)
void Get(int descriptor_number, Descriptor *desc)
static Failure * OutOfMemoryException()
JSFunction * GetConstantFunction(int descriptor_number)
static const int kFastPropertiesSoftLimit
PropertyAttributes property_attributes()
bool IsAsciiRepresentation()
static ExternalTwoByteString * cast(Object *obj)
SeededNumberDictionary * element_dictionary()
static Map * cast(Object *obj)
void set_has_debug_break_slots(bool value)
void SetDataAtUnchecked(int index, Object *value, Heap *heap)
bool has_non_instance_prototype()
#define SMI_ACCESSORS(holder, name, offset)
static StubType ExtractTypeFromFlags(Flags flags)
static const byte kTheHole
MUST_USE_RESULT MaybeObject * GetPropertyWithReceiver(Object *receiver, String *key, PropertyAttributes *attributes)
static const int kExponentBias
bool attached_to_shared_function_info()
void set_context(Object *context)
#define READ_FIELD(p, offset)
static bool TryTransitionToField(Handle< JSObject > object, Handle< String > key)
static Handle< Object > UninitializedSentinel(Isolate *isolate)
void Set(int descriptor_number, Descriptor *desc, const WhitenessWitness &)
bool SameValue(Object *other)
kPropertyAccessorsOffset kNamedPropertyHandlerOffset kInstanceTemplateOffset kAccessCheckInfoOffset kEvalFrominstructionsOffsetOffset kInstanceClassNameOffset kHiddenPrototypeBit kReadOnlyPrototypeBit is_toplevel
#define MAKE_STRUCT_CAST(NAME, Name, name)
static Failure * Exception()
uint8_t * external_pixel_pointer()
static Foreign * cast(Object *obj)
MUST_USE_RESULT MaybeObject * GetElementsTransitionMapSlow(ElementsKind elements_kind)
static bool IsMatch(String *key, Object *other)
byte binary_op_result_type()
ACCESSORS(AccessorInfo, expected_receiver_type, Object, kExpectedReceiverTypeOffset) ACCESSORS(FunctionTemplateInfo
#define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset)
uint16_t SlicedStringGet(int index)
static Smi * FromIntptr(intptr_t value)
static Handle< Object > TransitionElementsKind(Handle< JSObject > object, ElementsKind to_kind)
#define READ_BYTE_FIELD(p, offset)
Address GetCharsAddress()
void SetAstId(int index, TypeFeedbackId id)
void change_ic_with_type_info_count(int count)
#define ASSERT(condition)
void set_profiler_ticks(int ticks)
Object * instance_prototype()
static MUST_USE_RESULT MaybeObject * AsObject(String *key)
const int kPointerSizeLog2
void set_start_position(int start_position)
#define WRITE_INT_FIELD(p, offset, value)
void set_optimizable(bool value)
static Handle< Object > MegamorphicSentinel(Isolate *isolate)
Object * BypassGlobalProxy()
#define READ_INT64_FIELD(p, offset)
#define WRITE_UINT32_FIELD(p, offset, value)
static Context * cast(Object *context)
static uint32_t HashForObject(Object *key, Object *object)
bool IsMarkedForParallelRecompilation()
EXTERNAL_ELEMENTS_CHECK(UnsignedShort, EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE) EXTERNAL_ELEMENTS_CHECK(UnsignedInt
kPropertyAccessorsOffset kNamedPropertyHandlerOffset instance_template
#define WRITE_INTPTR_FIELD(p, offset, value)
const uint32_t kStringRepresentationMask
bool NonFailureIsHeapObject()
int SizeFromMap(Map *map)
void set_compiled_optimizable(bool value)
static MUST_USE_RESULT MaybeObject * AsObject(Object *key)
void set(int index, float value)
Object * DataAt(int index)
Object ** GetKeySlot(int descriptor_number)
bool IsInternalError() const
bool HasSpecificClassOf(String *name)
int GetInternalFieldCount()
void initialize_elements()
uint8_t get_scalar(int index)
TypeFeedbackId AstId(int index)
const Resource * resource()
MUST_USE_RESULT MaybeObject * get(int index)
const int kFastElementsKindCount
void set_first(String *first, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
void ReplaceCode(Code *code)
void set_map_and_elements(Map *map, FixedArrayBase *value, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
static ExternalAsciiString * cast(Object *obj)
MUST_USE_RESULT MaybeObject * EnsureWritableFastElements()
void set_the_hole(int index)
void init_back_pointer(Object *undefined)
void set_foreign_address(Address value)
MUST_USE_RESULT MaybeObject * Copy()
void SeqTwoByteStringSet(int index, uint16_t value)
bool IsMarkedForLazyRecompilation()
static Code * cast(Object *obj)
const uint32_t kAsciiDataHintTag
#define CAST_ACCESSOR(type)
const uint32_t kShortExternalStringMask
void set(int index, uint32_t value)
bool HasElementWithReceiver(JSReceiver *receiver, uint32_t index)
int GetSequenceIndexFromFastElementsKind(ElementsKind elements_kind)
bool AsArrayIndex(uint32_t *index)
Object * GetValue(int descriptor_number)
static Object ** RawField(HeapObject *obj, int offset)
void change_own_type_change_checksum()
TransitionArray * unchecked_transition_array()
static Smi * cast(Object *object)
void set_literals(FixedArray *literals)
WhitenessWitness(FixedArray *array)
static void IterateBody(HeapObject *obj, ObjectVisitor *v)
static uint32_t Hash(Object *key)
void set(int index, uint16_t value)
void ClearCodeCache(Heap *heap)
static const int kZeroHash
bool Equals(String *other)
static const int kHeaderSize
Code * javascript_builtin_code(Builtins::JavaScript id)
MUST_USE_RESULT MaybeObject * get(int index)
int GetInObjectPropertyOffset(int index)
Object * GetInternalField(int index)
void set_dictionary_map(bool value)
uint16_t SeqAsciiStringGet(int index)
void LookupDescriptor(JSObject *holder, String *name, LookupResult *result)
kPropertyAccessorsOffset kNamedPropertyHandlerOffset kInstanceTemplateOffset kAccessCheckInfoOffset kEvalFrominstructionsOffsetOffset kInstanceClassNameOffset kHiddenPrototypeBit kReadOnlyPrototypeBit kIsTopLevelBit kAllowLazyCompilation kUsesArguments kFormalParameterCountOffset kStartPositionAndTypeOffset kCompilerHintsOffset stress_deopt_counter
void set_binary_op_type(byte value)
MUST_USE_RESULT MaybeObject * CopyFixedArrayWithMap(FixedArray *src, Map *map)
void set(int index, int16_t value)
Object * InObjectPropertyAtPut(int index, Object *value, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
uint16_t ExternalTwoByteStringGet(int index)
Map * GetTransition(int transition_index)
kPropertyAccessorsOffset kNamedPropertyHandlerOffset kInstanceTemplateOffset kAccessCheckInfoOffset kEvalFrominstructionsOffsetOffset kInstanceClassNameOffset kHiddenPrototypeBit kReadOnlyPrototypeBit kIsTopLevelBit compiler_hints
static const int kFirstOffset
ByteArray * unchecked_relocation_info()
bool HasFastSmiOrObjectElements()
MUST_USE_RESULT MaybeObject * GetIdentityHash(CreationFlag flag)
static const int kKindOffset
const uint32_t kNotStringTag
static void NoWriteBarrierSet(FixedArray *array, int index, Object *value)
static const int kParentOffset
String * GetKey(int descriptor_number)
bool HasNonStrictArgumentsElements()
MUST_USE_RESULT MaybeObject * GetIdentityHash(CreationFlag flag)
static const int kTransitionSize
const uint64_t kHoleNanInt64
void set_the_hole(int index)
void set_is_pregenerated(bool value)
MUST_USE_RESULT MaybeObject * AddTransition(String *key, Map *target, SimpleTransitionFlag flag)
#define READ_SHORT_FIELD(p, offset)
#define FIELD_ADDR(p, offset)
void set_opt_reenable_tries(int value)
Object * GetElementNoExceptionThrown(uint32_t index)
STATIC_ASSERT((FixedDoubleArray::kHeaderSize &kDoubleAlignmentMask)==0)
static SeededNumberDictionary * cast(Object *obj)
void Append(Descriptor *desc, const WhitenessWitness &)
virtual void Validate(JSObject *obj)=0
void set_ic_total_count(int count)
static const int kDescriptorLengthOffset
MUST_USE_RESULT MaybeObject * SetContent(FixedArrayBase *storage)
const uint32_t kIsSymbolMask
void set_unchecked(int index, Smi *value)
MUST_USE_RESULT MaybeObject * get(int index)
static const int kExponentShift
bool IsStringObjectWithCharacterAt(uint32_t index)
static const int kValueOffset
const int kFailureTagSize
const uint32_t kHoleNanUpper32
static InlineCacheHolderFlag ExtractCacheHolderFromFlags(Flags flags)
void ExternalTwoByteStringIterateBody()
void set_undefined(int index)
static SlicedString * cast(Object *obj)
MUST_USE_RESULT MaybeObject * SetPrototypeTransitions(FixedArray *prototype_transitions)
static const int kDontAdaptArgumentsSentinel
int pre_allocated_property_fields()
kPropertyAccessorsOffset kNamedPropertyHandlerOffset kInstanceTemplateOffset kAccessCheckInfoOffset kEvalFrominstructionsOffsetOffset kInstanceClassNameOffset kHiddenPrototypeBit kReadOnlyPrototypeBit kIsTopLevelBit BOOL_GETTER(SharedFunctionInfo, compiler_hints, has_only_simple_this_property_assignments, kHasOnlySimpleThisPropertyAssignments) BOOL_ACCESSORS(SharedFunctionInfo
static uint32_t SeededHash(uint32_t key, uint32_t seed)
#define WRITE_BARRIER(heap, object, offset, value)
#define HAS_SMI_TAG(value)
Context * native_context()
void InitializeBody(int object_size)
#define MAKE_STRUCT_PREDICATE(NAME, Name, name)
static const int kFirstOffset
int LinearSearch(T *array, String *name, int len, int valid_entries)
bool IsAsciiRepresentationUnderneath()
static Failure * RetryAfterGC()
void IteratePointers(ObjectVisitor *v, int start, int end)
int SeqTwoByteStringSize(InstanceType instance_type)
static const uchar kMaxNonSurrogateCharCode
static const int kNotFound
static bool IsValid(intptr_t value)
void set_resource(const Resource *buffer)
static Failure * cast(MaybeObject *object)
const uint32_t kIsIndirectStringMask
void set_inlined_type_change_checksum(int checksum)
#define READ_INT_FIELD(p, offset)
static const int kMinValue
bool ToArrayIndex(uint32_t *index)
ElementsKind GetFastElementsKindFromSequenceIndex(int sequence_number)
MUST_USE_RESULT MaybeObject * ResetElements()
ElementsKind GetElementsKind()
SharedFunctionInfo * unchecked_shared()
static Object * RawUninitializedSentinel(Heap *heap)
static Handle< Map > GetElementsTransitionMap(Handle< JSObject > object, ElementsKind to_kind)
MUST_USE_RESULT MaybeObject * AllocateFixedArrayWithHoles(int length, PretenureFlag pretenure=NOT_TENURED)
static const int kIsNotArrayIndexMask
#define TYPE_CHECKER(type, instancetype)
static Oddball * cast(Object *obj)
static Address & Address_at(Address addr)
int GetFieldIndex(int descriptor_number)
MUST_USE_RESULT MaybeObject * get(int index)
bool IsAligned(T value, U alignment)
static SeqAsciiString * cast(Object *obj)
void set_inobject_properties(int value)
unsigned safepoint_table_offset()
void set_hash_field(uint32_t value)
const uint16_t * ExternalTwoByteStringGetData(unsigned start)
bool HasElement(uint32_t index)
#define WRITE_SHORT_FIELD(p, offset, value)
const uint32_t kAsciiDataHintMask
AllocationSpace allocation_space() const
bool HasBuiltinFunctionId()
MUST_USE_RESULT MaybeObject * set_initial_map_and_cache_transitions(Map *value)
bool IsTwoByteRepresentationUnderneath()
static FunctionTemplateInfo * cast(Object *obj)
kPropertyAccessorsOffset named_property_handler
static const int kPropertiesOffset
static const int kStorage1Offset
T RoundUp(T x, intptr_t m)
static Flags ComputeFlags(Kind kind, InlineCacheState ic_state=UNINITIALIZED, ExtraICState extra_ic_state=kNoExtraICState, StubType type=NORMAL, int argc=-1, InlineCacheHolderFlag holder=OWN_MAP)
static FixedDoubleArray * cast(Object *obj)
Object * FastPropertyAt(int index)
bool IsTwoByteRepresentation()
uint16_t ExternalAsciiStringGet(int index)
static Code * GetCodeFromTargetAddress(Address address)
bool is_inline_cache_stub()
kPropertyAccessorsOffset kNamedPropertyHandlerOffset kInstanceTemplateOffset kAccessCheckInfoOffset kEvalFrominstructionsOffsetOffset kInstanceClassNameOffset kHiddenPrototypeBit kReadOnlyPrototypeBit kIsTopLevelBit kAllowLazyCompilation kUsesArguments kFormalParameterCountOffset PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, expected_nof_properties, kExpectedNofPropertiesOffset) PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo
bool IsFastSmiElementsKind(ElementsKind kind)
static const int kMaxNonCodeHeapObjectSize
bool AllowsSetElementsLength()
Object * unchecked_context()
const uint32_t kShortExternalStringTag
static int SmiValue(internal::Object *value)
ElementsKind FastSmiToObjectElementsKind(ElementsKind from_kind)
int SeqAsciiStringSize(InstanceType instance_type)
Object * FastPropertyAtPut(int index, Object *value)
static int GetIdentityHash(Handle< JSObject > obj)
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra code(assertions) for debugging") DEFINE_bool(code_comments
HeapObject * UncheckedPrototypeTransitions()
static int SizeFor(int length)
static const int kElementsOffset
WriteBarrierMode GetWriteBarrierMode(const AssertNoAllocation &)
void set_resource(const Resource *buffer)
PropertyDetails GetDetails(int descriptor_number)
Object ** GetFirstElementAddress()
static uint32_t HashForObject(uint32_t key, Object *object)
BuiltinFunctionId builtin_function_id()
int8_t get_scalar(int index)
MUST_USE_RESULT MaybeObject * Copy()
const uint32_t kStringTag
byte * relocation_start()
InlineCacheState ic_state()
static uint32_t HashForObject(String *key, Object *object)
void set_construction_count(int value)
double get_scalar(int index)
uint16_t ConsStringGet(int index)
DescriptorLookupCache * descriptor_lookup_cache()
void set_map_no_write_barrier(Map *value)
void set_check_type(CheckType value)
Object * GetConstructor()
void initialize_storage()
void DontAdaptArguments()
bool has_function_cache()
void set_to_boolean_state(byte value)
kPropertyAccessorsOffset kNamedPropertyHandlerOffset kInstanceTemplateOffset kAccessCheckInfoOffset kEvalFrominstructionsOffsetOffset kInstanceClassNameOffset kHiddenPrototypeBit kReadOnlyPrototypeBit kIsTopLevelBit kAllowLazyCompilation kUsesArguments formal_parameter_count
static int OffsetOfElementAt(int index)
void SetTransition(int transition_index, Map *target)
void set(int index, uint8_t value)
PropertyAttributes GetPropertyAttributeWithReceiver(JSReceiver *receiver, String *name)
static int SizeFor(int length)
#define T(name, string, precedence)
static ExtraICState ExtractExtraICStateFromFlags(Flags flags)
static TransitionArray * cast(Object *obj)
bool IsFastSmiOrObjectElementsKind(ElementsKind kind)
void SetCell(int index, JSGlobalPropertyCell *cell)
static ElementsAccessor * ForKind(ElementsKind elements_kind)
static SeqTwoByteString * cast(Object *obj)
int16_t get_scalar(int index)
bool HasTransitionArray()
const int kElementsKindCount
void SetDataAt(int index, Object *value)
static bool IsMatch(Object *key, Object *other)
static const int kHeaderSize
bool HasElementWithHandler(uint32_t index)
void set(int index, double value)
MUST_USE_RESULT MaybeObject * NumberFromDouble(double value, PretenureFlag pretenure=NOT_TENURED)
kPropertyAccessorsOffset kNamedPropertyHandlerOffset kInstanceTemplateOffset kAccessCheckInfoOffset kEvalFrominstructionsOffsetOffset kInstanceClassNameOffset kHiddenPrototypeBit kReadOnlyPrototypeBit start_position_and_type
static InlineCacheState ExtractICStateFromFlags(Flags flags)
bool HasProperty(String *name)
bool has_deoptimization_support()
static Kind ExtractKindFromFlags(Flags flags)
static const int kMapOffset
bool has_named_interceptor()
static int ExtractArgumentsCountFromFlags(Flags flags)
bool HasPrototypeTransitions()
bool is_the_hole(int index)
void set_instance_type(InstanceType value)
const uint32_t kIsNotStringMask
bool IsOutOfMemoryException() const
static HeapNumber * cast(Object *obj)
static MUST_USE_RESULT MaybeObject * NewWith(SimpleTransitionFlag flag, String *key, Map *target, Object *back_pointer)
int32_t get_scalar(int index)
bool CanHaveMoreTransitions()
static StringDictionary * cast(Object *obj)
void set_value(double value)
MUST_USE_RESULT MaybeObject * CopyFixedArray(FixedArray *src)
LanguageMode language_mode()
bool should_have_prototype()
static const int kLengthOffset
static double nan_value()
bool has_deoptimization_support()
MUST_USE_RESULT MaybeObject * get(int index)
uint32_t get_scalar(int index)
uint32_t ComputeIntegerHash(uint32_t key, uint32_t seed)
bool is_the_hole(int index)
AccessorDescriptor * GetCallbacks(int descriptor_number)
V8EXPORT bool IsNumber() const
ExtraICState extra_ic_state()
static int SizeFor(int length)
const intptr_t kObjectAlignment
void SetInternalField(int index, Object *value)
PropertyType GetType(int descriptor_number)
static JSGlobalPropertyCell * cast(Object *obj)
name_should_print_as_anonymous
MUST_USE_RESULT MaybeObject * NumberFromUint32(uint32_t value, PretenureFlag pretenure=NOT_TENURED)
IncrementalMarking * incremental_marking()
MUST_USE_RESULT MaybeObject * get(int index)
bool has_indexed_interceptor()
ElementsKind GetInitialFastElementsKind()
void ForeignIterateBody()
static const uint32_t kHashBitMask
bool HasPropertyWithHandler(String *name)
void SetNumberOfDescriptors(int number_of_descriptors)
Object * GetBackPointer()
void AddCharacterNoIndex(uint32_t c)
static const int kEntriesIndex
static const uint32_t kSignMask
void set_bit_field(byte value)
static int SizeFor(int length)
static const int kMaxNumberOfDescriptors
static JSValue * cast(Object *obj)
static const int kHeaderSize
uint32_t HashSequentialString(const schar *chars, int length, uint32_t seed)
FunctionTemplateInfo * get_api_func_data()
kPropertyAccessorsOffset kNamedPropertyHandlerOffset kInstanceTemplateOffset kAccessCheckInfoOffset kEvalFrominstructionsOffsetOffset kInstanceClassNameOffset hidden_prototype
void set_back_pointer_storage(Object *back_pointer, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
void EnsureSize(int minimum_size_of_backing_fixed_array)
const Resource * resource()
int number_of_descriptors()
void set(int index, double value)
#define WRITE_FIELD(p, offset, value)
static const int kFullStringRepresentationMask
void MemsetPointer(T **dest, U *value, int counter)
MUST_USE_RESULT MaybeObject * CopyAsElementsKind(ElementsKind kind, TransitionFlag flag)
void set_major_key(int value)
int Search(T *array, String *name, int valid_entries)
bool NeedsArgumentsAdaption()
void Set(int index, uint16_t value)
static void NoIncrementalWriteBarrierSet(FixedArray *array, int index, Object *value)
V8EXPORT bool IsFalse() const
void set_is_access_check_needed(bool access_check_needed)
MUST_USE_RESULT MaybeObject * GetProperty(String *key)
#define ASSERT_EQ(v1, v2)
double get_scalar(int index)
void set_owns_descriptors(bool is_shared)
bool HasFastObjectElements()
kPropertyAccessorsOffset kNamedPropertyHandlerOffset kInstanceTemplateOffset kAccessCheckInfoOffset kEvalFrominstructionsOffsetOffset instance_class_name
MUST_USE_RESULT MaybeObject * get(int index)
int ic_with_type_info_count()
InstanceType instance_type()
static JSProxy * cast(Object *obj)
static const int kMaxFastProperties
static bool ShouldZapGarbage()
static HeapObject * FromAddress(Address address)
bool HasFastHoleyElements()
bool requires_slow_elements()
void set(int index, byte value)
int own_type_change_checksum()
static double canonical_not_the_hole_nan_as_double()
#define INT_ACCESSORS(holder, name, offset)
bool TooManyFastProperties(int properties, StoreFromKeyed store_mode)
static FixedArray * cast(Object *obj)
void AppendDescriptor(Descriptor *desc, const DescriptorArray::WhitenessWitness &)
StringHasher(int length, uint32_t seed)
static const int kHeaderSize
void set(int index, int8_t value)
static Smi * set(Smi *smi, int bit_position, bool v)
void SeqAsciiStringSet(int index, uint16_t value)
bool IsCompatibleReceiver(Object *receiver)
static HashTable * cast(Object *obj)
void set_is_extensible(bool value)
ElementsKind elements_kind()
Object * back_pointer_storage()
void set_is_shared(bool value)
uint16_t get_scalar(int index)
static Handle< Object > GetElement(Handle< Object > object, uint32_t index)
void set_compare_operation(byte value)
void set_attached_to_shared_function_info(bool value)
void set_stack_slots(unsigned slots)
MUST_USE_RESULT MaybeObject * NumberFromInt32(int32_t value, PretenureFlag pretenure=NOT_TENURED)
const uint32_t kIsIndirectStringTag
void SetEntry(int entry, Object *key, Object *value)
Object * GetCallbacksObject(int descriptor_number)
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 enable use of VFP2 instructions if available enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of MIPS FPU instructions if NULL
static const int kStringEncodingMask
void set_instance_size(int value)
JSFunction * unchecked_constructor()
void set(int index, int32_t value)
bool IsFastHoleyElementsKind(ElementsKind kind)
Address GetDataStartAddress()
static uint32_t Hash(String *key)
bool HasDictionaryElements()
void set_javascript_builtin_code(Builtins::JavaScript id, Code *value)
ElementsAccessor * GetElementsAccessor()
bool IsInstanceOf(FunctionTemplateInfo *type)
bool HasFastDoubleElements()
String * TryFlattenGetString(PretenureFlag pretenure=NOT_TENURED)
void set_bit_field2(byte value)
static MUST_USE_RESULT MaybeObject * AsObject(uint32_t key)
void set_finger_index(int finger_index)
void set_map_word(MapWord map_word)
void set_binary_op_result_type(byte value)
bool has_debug_break_slots()
void set(int index, uint8_t value)
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra code(assertions) for debugging") DEFINE_bool(code_comments
bool HasIndexedInterceptor()
Object * unchecked_second()
static const byte kNotBooleanMask
int GetSortedKeyIndex(int descriptor_number)
static const int kExternalTwoByteRepresentationTag
Address foreign_address()
const uint32_t kSymbolTag
static const int kEntrySize
bool HasLocalProperty(String *name)
const int kFailureTypeTagMask
MUST_USE_RESULT MaybeObject * get(int index)
static Flags RemoveTypeFromFlags(Flags flags)
void set_visitor_id(int visitor_id)
static bool HasHeapObjectTag(internal::Object *value)
int LastAddedFieldIndex()
const uint32_t kAsciiStringTag
MUST_USE_RESULT MaybeObject * set_elements_transition_map(Map *transitioned_map)
#define ACCESSORS_TO_SMI(holder, name, offset)
String * GetSortedKey(int descriptor_number)
void set_property_attributes(PropertyAttributes attributes)
void set_code(Code *code)
MUST_USE_RESULT MaybeObject * GetHash(CreationFlag flag)
static ConsString * cast(Object *obj)
void set_safepoint_table_offset(unsigned offset)
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 enable use of VFP2 instructions if available enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of MIPS FPU instructions if expose natives in global object expose gc extension number of stack frames to capture disable builtin 
natives files print a stack trace if an assertion failure occurs use random jit cookie to mask large constants trace lazy optimization use adaptive optimizations prepare for turning on always opt minimum length for automatic enable preparsing maximum number of optimization attempts before giving up cache prototype transitions automatically set the debug break flag when debugger commands are in the queue always cause a debug break before aborting maximum length of function source code printed in a stack trace max size of the new max size of the old max size of executable always perform global GCs print one trace line following each garbage collection do not print trace line after scavenger collection print more details following each garbage collection print amount of external allocated memory after each time it is adjusted flush code that we expect not to use again before full gc do incremental marking steps track object counts and memory usage use caching Perform compaction on every full GC Never perform compaction on full GC testing only Compact code space on full incremental collections Default seed for initializing random allows verbose printing trace parsing and preparsing Check icache flushes in ARM and MIPS simulator Stack alingment in bytes in print stack trace when throwing exceptions randomize hashes to avoid predictable hash Fixed seed to use to hash property activate a timer that switches between V8 threads testing_bool_flag float flag Seed used for threading test randomness A filename with extra code to be included in the Print usage including flags
static FixedArrayBase * cast(Object *object)
bool is_compiled_optimizable()
bool ContainsOnlySmisOrHoles()
void set_flags(Flags flags)
static const int kMaxValue
static const int kCodeCacheOffset
MUST_USE_RESULT MaybeObject * get(int index)
#define WRITE_DOUBLE_FIELD(p, offset, value)
static const int kNotFound
void set_non_instance_prototype(bool value)
void increment_deopt_count()
const uc32 kMaxAsciiCharCode
kPropertyAccessorsOffset kNamedPropertyHandlerOffset kInstanceTemplateOffset kAccessCheckInfoOffset kEvalFrominstructionsOffsetOffset kInstanceClassNameOffset kHiddenPrototypeBit kReadOnlyPrototypeBit kIsTopLevelBit kAllowLazyCompilation uses_arguments
uint16_t SeqTwoByteStringGet(int index)
Object ** GetValueSlot(int descriptor_number)
StringDictionary * property_dictionary()
static uint32_t SeededHashForObject(uint32_t key, uint32_t seed, Object *object)
int Lookup(Map *source, String *name)
ElementsKind GetHoleyElementsKind(ElementsKind packed_kind)
MUST_USE_RESULT MaybeObject * CopySize(int new_length)
static const int kExponentOffset
static const int kValueOffset
bool IsInRecompileQueue()
void SetSortedKey(int pointer, int descriptor_number)
void InitializeDescriptors(DescriptorArray *descriptors)
FixedArray * GetPrototypeTransitions()
void set_allow_osr_at_loop_nesting_level(int level)
static void AddFastPropertyUsingMap(Handle< JSObject > object, Handle< Map > map)
static JSObject * cast(Object *obj)
uint32_t RoundUpToPowerOf2(uint32_t x)
kPropertyAccessorsOffset kNamedPropertyHandlerOffset kInstanceTemplateOffset access_check_info
bool HasExternalArrayElements()
int64_t get_representation(int index)
bool matches_inlined_type_change_checksum(int checksum)
#define MAKE_STRUCT_CASE(NAME, Name, name)
Object * javascript_builtin(Builtins::JavaScript id)
PropertyAttributes GetLocalPropertyAttribute(String *name)
static Flags ComputeMonomorphicFlags(Kind kind, StubType type, ExtraICState extra_ic_state=kNoExtraICState, InlineCacheHolderFlag holder=OWN_MAP, int argc=-1)
uint32_t max_number_key()
void set_initial_map(Map *value)
bool IsFastDoubleElementsKind(ElementsKind kind)
void set_has_function_cache(bool flag)
MUST_USE_RESULT MaybeObject * EnsureCanContainElements(Object **elements, uint32_t count, EnsureElementsMode mode)
static const int kFirstIndex
void set_unused_property_fields(int value)
const uint32_t kStringEncodingMask
const uint16_t * GetChars()
void set_stack_check_table_offset(unsigned offset)
static int ComputeCapacity(int at_least_space_for)
void ExternalAsciiStringIterateBody()
void InitializeBody(Map *map, Object *pre_allocated_value, Object *filler_value)
void set_requires_slow_elements()
void EnterNoMarkingScope()
void set_parent(String *parent, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
kPropertyAccessorsOffset kNamedPropertyHandlerOffset kInstanceTemplateOffset kAccessCheckInfoOffset kEvalFrominstructionsOffsetOffset kInstanceClassNameOffset flag
static JSFunction * cast(Object *obj)