35 #ifndef V8_OBJECTS_INL_H_
36 #define V8_OBJECTS_INL_H_
54 PropertyDetails::PropertyDetails(Smi* smi) {
55 value_ = smi->value();
59 Smi* PropertyDetails::AsSmi() {
64 PropertyDetails PropertyDetails::AsDeleted() {
65 Smi* smi = Smi::FromInt(value_ | DeletedField::encode(1));
66 return PropertyDetails(smi);
70 #define TYPE_CHECKER(type, instancetype) \
71 bool Object::Is##type() { \
72 return Object::IsHeapObject() && \
73 HeapObject::cast(this)->map()->instance_type() == instancetype; \
77 #define CAST_ACCESSOR(type) \
78 type* type::cast(Object* object) { \
79 ASSERT(object->Is##type()); \
80 return reinterpret_cast<type*>(object); \
84 #define INT_ACCESSORS(holder, name, offset) \
85 int holder::name() { return READ_INT_FIELD(this, offset); } \
86 void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }
89 #define ACCESSORS(holder, name, type, offset) \
90 type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
91 void holder::set_##name(type* value, WriteBarrierMode mode) { \
92 WRITE_FIELD(this, offset, value); \
93 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \
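// Illustration (not part of the original header): given the JSDate accessors
// declared further down, a use like ACCESSORS(JSDate, value, Object,
// kValueOffset) would expand to roughly this pair of inline functions -- a
// tagged field read plus a write that also notifies the write barrier:
//
//   Object* JSDate::value() {
//     return Object::cast(READ_FIELD(this, kValueOffset));
//   }
//   void JSDate::set_value(Object* value, WriteBarrierMode mode) {
//     WRITE_FIELD(this, kValueOffset, value);
//     CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kValueOffset, value, mode);
//   }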
98 #define ACCESSORS_TO_SMI(holder, name, offset) \
99 Smi* holder::name() { return Smi::cast(READ_FIELD(this, offset)); } \
100 void holder::set_##name(Smi* value, WriteBarrierMode mode) { \
101 WRITE_FIELD(this, offset, value); \
106 #define SMI_ACCESSORS(holder, name, offset) \
107 int holder::name() { \
108 Object* value = READ_FIELD(this, offset); \
109 return Smi::cast(value)->value(); \
111 void holder::set_##name(int value) { \
112 WRITE_FIELD(this, offset, Smi::FromInt(value)); \
116 #define BOOL_GETTER(holder, field, name, offset) \
117 bool holder::name() { \
118 return BooleanBit::get(field(), offset); \
122 #define BOOL_ACCESSORS(holder, field, name, offset) \
123 bool holder::name() { \
124 return BooleanBit::get(field(), offset); \
126 void holder::set_##name(bool value) { \
127 set_##field(BooleanBit::set(field(), offset, value)); \
132 return IsFixedArray() || IsFixedDoubleArray();
138 if (!this->IsJSObject()) return false;
141 if (!cons_obj->IsJSFunction()) return false;
145 for (Object* type = fun->shared()->function_data();
146 type->IsFunctionTemplateInfo();
148 if (type == expected) return true;
155 bool Object::IsSmi() {
160 bool Object::IsHeapObject() {
166 ASSERT(!this->IsFailure());
167 return (reinterpret_cast<intptr_t>(this) & kSmiTagMask) != 0;
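// Standalone sketch (illustrative, not V8 code): the test above relies on the
// low tag bit of a tagged word -- kSmiTag (0) for small integers, kHeapObjectTag
// (1) for heap object pointers -- so "is this a Smi?" is a single mask.
#include <stdint.h>
static inline bool SketchIsSmi(intptr_t tagged_word) {
  return (tagged_word & 1) == 0;  // low bit clear => Smi
}
static inline bool SketchIsHeapObject(intptr_t tagged_word) {
  return (tagged_word & 1) != 0;  // low bit set => tagged heap pointer
}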
175 return Object::IsHeapObject()
180 bool Object::IsSpecObject() {
181 return Object::IsHeapObject()
186 bool Object::IsSpecFunction() {
187 if (!Object::IsHeapObject()) return false;
193 bool Object::IsSymbol() {
194 if (!this->IsHeapObject()) return false;
205 bool Object::IsConsString() {
206 if (!IsString()) return false;
211 bool Object::IsSlicedString() {
212 if (!IsString()) return false;
217 bool Object::IsSeqString() {
218 if (!IsString()) return false;
223 bool Object::IsSeqAsciiString() {
224 if (!IsString()) return false;
225 return StringShape(String::cast(this)).IsSequential() &&
230 bool Object::IsSeqTwoByteString() {
231 if (!IsString()) return false;
232 return StringShape(String::cast(this)).IsSequential() &&
237 bool Object::IsExternalString() {
238 if (!IsString()) return false;
243 bool Object::IsExternalAsciiString() {
244 if (!IsString()) return false;
250 bool Object::IsExternalTwoByteString() {
251 if (!IsString()) return false;
258 return IsFixedArray() || IsFixedDoubleArray() || IsExternalArray();
261 StringShape::StringShape(String* str)
262 : type_(str->map()->instance_type()) {
268 StringShape::StringShape(Map* map)
269 : type_(map->instance_type()) {
276 : type_(static_cast<uint32_t>(t)) {
282 bool StringShape::IsSymbol() {
340 bool StringShape::IsCons() {
345 bool StringShape::IsSliced() {
350 bool StringShape::IsIndirect() {
355 bool StringShape::IsExternal() {
360 bool StringShape::IsSequential() {
371 uint32_t StringShape::encoding_tag() {
376 uint32_t StringShape::full_representation_tag() {
385 bool StringShape::IsSequentialAscii() {
390 bool StringShape::IsSequentialTwoByte() {
395 bool StringShape::IsExternalAscii() {
400 bool StringShape::IsExternalTwoByte() {
410 ASSERT(0 <= index && index <= length_);
412 return static_cast<const byte*>(start_)[index];
414 return static_cast<const uc16*>(start_)[index];
420 return IsSmi() || IsHeapNumber();
429 if (!Object::IsHeapObject()) return false;
438 bool Object::IsExternalArray() {
439 if (!Object::IsHeapObject())
458 bool MaybeObject::IsFailure() {
463 bool MaybeObject::IsRetryAfterGC() {
469 bool MaybeObject::IsOutOfMemory() {
475 bool MaybeObject::IsException() {
480 bool MaybeObject::IsTheHole() {
481 return !IsFailure() && ToObjectUnchecked()->IsTheHole();
487 return reinterpret_cast<Failure*>(obj);
491 bool Object::IsJSReceiver() {
493 return IsHeapObject() &&
498 bool Object::IsJSObject() {
500 return IsHeapObject() &&
505 bool Object::IsJSProxy() {
506 if (!Object::IsHeapObject()) return false;
522 bool Object::IsDescriptorArray() {
523 return IsFixedArray();
527 bool Object::IsDeoptimizationInputData() {
529 if (!IsFixedArray()) return false;
536 if (length == 0) return true;
539 return length >= 0 &&
544 bool Object::IsDeoptimizationOutputData() {
545 if (!IsFixedArray()) return false;
554 bool Object::IsTypeFeedbackCells() {
555 if (!IsFixedArray()) return false;
564 bool Object::IsContext() {
565 if (Object::IsHeapObject()) {
568 return (map == heap->function_context_map() ||
569 map == heap->catch_context_map() ||
570 map == heap->with_context_map() ||
571 map == heap->global_context_map() ||
572 map == heap->block_context_map() ||
573 map == heap->module_context_map());
579 bool Object::IsGlobalContext() {
580 return Object::IsHeapObject() &&
586 bool Object::IsModuleContext() {
587 return Object::IsHeapObject() &&
593 bool Object::IsScopeInfo() {
594 return Object::IsHeapObject() &&
604 return obj->IsJSFunction();
618 bool Object::IsStringWrapper() {
619 return IsJSValue() && JSValue::cast(this)->value()->IsString();
626 bool Object::IsBoolean() {
627 return IsOddball() &&
637 return obj->IsJSArray();
641 bool Object::IsHashTable() {
642 return Object::IsHeapObject() &&
648 bool Object::IsDictionary() {
649 return IsHashTable() &&
654 bool Object::IsSymbolTable() {
655 return IsHashTable() && this ==
660 bool Object::IsJSFunctionResultCache() {
661 if (!IsFixedArray()) return false;
663 int length = self->length();
670 if (FLAG_verify_heap) {
671 reinterpret_cast<JSFunctionResultCache*>(this)->
672 JSFunctionResultCacheVerify();
679 bool Object::IsNormalizedMapCache() {
680 if (!IsFixedArray()) return false;
685 if (FLAG_verify_heap) {
686 reinterpret_cast<NormalizedMapCache*>(this)->NormalizedMapCacheVerify();
693 bool Object::IsCompilationCacheTable() {
694 return IsHashTable();
698 bool Object::IsCodeCacheHashTable() {
699 return IsHashTable();
703 bool Object::IsPolymorphicCodeCacheHashTable() {
704 return IsHashTable();
708 bool Object::IsMapCache() {
709 return IsHashTable();
713 bool Object::IsPrimitive() {
714 return IsOddball() || IsNumber() || IsString();
718 bool Object::IsJSGlobalProxy() {
719 bool result = IsHeapObject() &&
722 ASSERT(!result || IsAccessCheckNeeded());
727 bool Object::IsGlobalObject() {
728 if (!IsHeapObject()) return false;
740 bool Object::IsUndetectableObject() {
741 return IsHeapObject()
746 bool Object::IsAccessCheckNeeded() {
747 return IsHeapObject()
753 if (!IsHeapObject()) return false;
755 #define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
757 #undef MAKE_STRUCT_CASE
758 default: return false;
763 #define MAKE_STRUCT_PREDICATE(NAME, Name, name) \
764 bool Object::Is##Name() { \
765 return Object::IsHeapObject() \
766 && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
769 #undef MAKE_STRUCT_PREDICATE
782 bool Object::IsTheHole() {
805 ? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
806 : reinterpret_cast<HeapNumber*>(this)->value();
816 if (IsSmi()) return this;
817 if (IsHeapNumber()) {
819 int int_value = FastD2I(value);
844 ASSERT(!maybe->IsFailure());
846 maybe->ToObject(&result);
862 #define FIELD_ADDR(p, offset) \
863 (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)
865 #define READ_FIELD(p, offset) \
866 (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)))
868 #define WRITE_FIELD(p, offset, value) \
869 (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)
871 #define WRITE_BARRIER(heap, object, offset, value) \
872 heap->incremental_marking()->RecordWrite( \
873 object, HeapObject::RawField(object, offset), value); \
874 if (heap->InNewSpace(value)) { \
875 heap->RecordWrite(object->address(), offset); \
878 #define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode) \
879 if (mode == UPDATE_WRITE_BARRIER) { \
880 heap->incremental_marking()->RecordWrite( \
881 object, HeapObject::RawField(object, offset), value); \
882 if (heap->InNewSpace(value)) { \
883 heap->RecordWrite(object->address(), offset); \
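// Illustrative note (not part of the original header): both barrier macros do
// two jobs -- tell the incremental marker about the new edge, and record the
// slot for the scavenger when the stored value lives in new space. A toy,
// self-contained remembered set capturing just that second job:
#include <stdint.h>
#include <set>
struct ToyRememberedSet {
  std::set<uintptr_t> slots;  // addresses of fields that point into new space
  void RecordWrite(uintptr_t slot_address, bool value_in_new_space) {
    if (value_in_new_space) slots.insert(slot_address);
  }
};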
887 #ifndef V8_TARGET_ARCH_MIPS
888 #define READ_DOUBLE_FIELD(p, offset) \
889 (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)))
890 #else // V8_TARGET_ARCH_MIPS
893 static inline double read_double_field(void* p, int offset) {
898 c.u[0] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)));
899 c.u[1] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4)));
902 #define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset)
903 #endif // V8_TARGET_ARCH_MIPS
905 #ifndef V8_TARGET_ARCH_MIPS
906 #define WRITE_DOUBLE_FIELD(p, offset, value) \
907 (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
908 #else // V8_TARGET_ARCH_MIPS
911 static inline void write_double_field(void* p, int offset,
918 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset))) = c.u[0];
919 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4))) = c.u[1];
921 #define WRITE_DOUBLE_FIELD(p, offset, value) \
922 write_double_field(p, offset, value)
923 #endif // V8_TARGET_ARCH_MIPS
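// Standalone sketch (illustrative): the MIPS path above avoids a possibly
// unaligned 64-bit access by copying the double through two aligned 32-bit
// loads via a union, equivalent to:
#include <stdint.h>
static inline double sketch_read_double(const void* p) {
  union { double d; uint32_t u[2]; } c;
  c.u[0] = reinterpret_cast<const uint32_t*>(p)[0];
  c.u[1] = reinterpret_cast<const uint32_t*>(p)[1];
  return c.d;
}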
926 #define READ_INT_FIELD(p, offset) \
927 (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)))
929 #define WRITE_INT_FIELD(p, offset, value) \
930 (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)
932 #define READ_INTPTR_FIELD(p, offset) \
933 (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)))
935 #define WRITE_INTPTR_FIELD(p, offset, value) \
936 (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)
938 #define READ_UINT32_FIELD(p, offset) \
939 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)))
941 #define WRITE_UINT32_FIELD(p, offset, value) \
942 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)
944 #define READ_INT64_FIELD(p, offset) \
945 (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)))
947 #define WRITE_INT64_FIELD(p, offset, value) \
948 (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value)
950 #define READ_SHORT_FIELD(p, offset) \
951 (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)))
953 #define WRITE_SHORT_FIELD(p, offset, value) \
954 (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)
956 #define READ_BYTE_FIELD(p, offset) \
957 (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)))
959 #define WRITE_BYTE_FIELD(p, offset, value) \
960 (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)
976 intptr_t tagged_value =
977 (static_cast<intptr_t>(value) << smi_shift_bits) | kSmiTag;
978 return reinterpret_cast<Smi*>(tagged_value);
985 return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag);
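// Standalone sketch (illustrative): on the 32-bit layout (kSmiTagSize == 1,
// kSmiShiftSize == 0, kSmiTag == 0) the tagging above reduces to a shift and
// an OR, and untagging to an arithmetic shift right:
#include <stdint.h>
static inline intptr_t SketchSmiEncode(int value) {
  return (static_cast<intptr_t>(value) << 1) | 0;  // kSmiTag == 0
}
static inline int SketchSmiDecode(intptr_t tagged_word) {
  return static_cast<int>(tagged_word >> 1);
}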
1026 intptr_t Failure::value() const {
1027 return static_cast<intptr_t>(
1043 Failure* Failure::Construct(Type type, intptr_t value) {
1056 #ifdef V8_TARGET_ARCH_X64
1071 bool result = (static_cast<uintptr_t>(value + 0x40000000U) < 0x80000000U);
1073 ASSERT(result == in_range);
1078 MapWord MapWord::FromMap(Map* map) {
1079 return MapWord(reinterpret_cast<uintptr_t>(map));
1083 Map* MapWord::ToMap() {
1084 return reinterpret_cast<Map*>(value_);
1088 bool MapWord::IsForwardingAddress() {
1089 return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
1093 MapWord MapWord::FromForwardingAddress(HeapObject* object) {
1095 return MapWord(reinterpret_cast<uintptr_t>(raw));
1099 HeapObject* MapWord::ToForwardingAddress() {
1100 ASSERT(IsForwardingAddress());
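// Standalone sketch (illustrative): FromForwardingAddress() stores the target
// address minus kHeapObjectTag, which clears the low bit, so a map word whose
// low bit is zero (i.e. one that "looks like a Smi") is a forwarding pointer:
#include <stdint.h>
static inline bool SketchIsForwardingAddress(uintptr_t map_word) {
  return (map_word & 1) == 0;  // Smi-tagged map word => forwarded object
}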
1106 void HeapObject::VerifyObjectField(int offset) {
1110 void HeapObject::VerifySmiField(int offset) {
1137 if (value != NULL) {
1180 v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
1181 reinterpret_cast<Object**>(FIELD_ADDR(this, end)));
1186 v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
1222 for (int i = 0; i < length(); ++i) {
1223 Object* candidate = *current++;
1224 if (!candidate->IsSmi() && candidate != the_hole) return false;
1269 Object* the_hole = heap->the_hole_value();
1270 for (uint32_t i = 0; i < count; ++i) {
1271 Object* current = *objects++;
1272 if (current == the_hole) {
1275 } else if (!current->IsSmi()) {
1284 } else if (is_holey) {
1293 if (target_kind != current_kind) {
1303 if (elements->map() != GetHeap()->fixed_double_array_map()) {
1305 elements->map() == GetHeap()->fixed_cow_array_map());
1318 for (uint32_t i = 0; i < length; ++i) {
1332 Map* current_map = map();
1334 if (from_kind == to_kind) return current_map;
1337 Object* maybe_array_maps = global_context->js_array_maps();
1338 if (maybe_array_maps->IsFixedArray()) {
1340 if (array_maps->get(from_kind) == current_map) {
1341 Object* maybe_transitioned_map = array_maps->get(to_kind);
1342 if (maybe_transitioned_map->IsMap()) {
1343 return Map::cast(maybe_transitioned_map);
1356 if (new_map != NULL) {
1364 ASSERT((map()->has_fast_smi_or_object_elements() ||
1365 (value == GetHeap()->empty_fixed_array())) ==
1366 (value->map() == GetHeap()->fixed_array_map() ||
1367 value->map() == GetHeap()->fixed_cow_array_map()));
1369 (map()->has_fast_double_elements() == value->IsFixedDoubleArray()));
1380 void JSObject::initialize_properties() {
1387 ASSERT(map()->has_fast_smi_or_object_elements() ||
1388 map()->has_fast_double_elements());
1397 if (!FLAG_smi_only_arrays) {
1402 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1423 Object* JSGlobalPropertyCell::value() {
1430 ASSERT(!val->IsJSGlobalPropertyCell());
1528 ASSERT(index < properties()->length());
1529 return properties()->get(index);
1542 ASSERT(index < properties()->length());
1543 properties()->set(index, value);
1581 Object* pre_allocated_value,
1583 ASSERT(!filler_value->IsHeapObject() ||
1585 ASSERT(!pre_allocated_value->IsHeapObject() ||
1589 if (filler_value != pre_allocated_value) {
1592 for (int i = 0; i < pre_allocated; i++) {
1597 while (offset < size) {
1605 return !properties()->IsDictionary();
1620 map()->used_for_prototype()) {
1625 return properties > limit;
1640 if (value < 0) return false;
1644 if (IsHeapNumber()) {
1646 uint32_t uint_value = static_cast<uint32_t>(value);
1647 if (value == static_cast<double>(uint_value)) {
1648 *index = uint_value;
1657 if (!this->IsJSValue()) return false;
1660 if (!js_value->value()->IsString()) return false;
1663 if (index >= (uint32_t)str->length()) return false;
1670 ASSERT(object->IsFixedArray() || object->IsFixedDoubleArray());
1682 return get(index) == GetHeap()->the_hole_value();
1689 ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
1723 map() != HEAP->fixed_array_map());
1732 map() != HEAP->fixed_array_map());
1739 return GetHeap()->the_hole_value();
1748 map() != HEAP->fixed_array_map());
1757 map() != HEAP->fixed_array_map());
1791 ASSERT(array->map() != HEAP->raw_unchecked_fixed_cow_array_map());
1805 ASSERT(array->map() != HEAP->raw_unchecked_fixed_cow_array_map());
1822 heap->undefined_value());
1849 ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
1867 ASSERT(!HEAP->InNewSpace(heap->null_value()));
1880 this == HEAP->empty_descriptor_array());
1901 void DescriptorArray::NoIncrementalWriteBarrierSwap(FixedArray* array,
1918 const int kMaxElementsForLinearSearch = 8;
1919 if (StringShape(name).IsSymbol() && nof < kMaxElementsForLinearSearch) {
1928 int DescriptorArray::SearchWithCache(String* name) {
1931 number = Search(name);
1938 Map* DescriptorArray::elements_transition_map() {
1951 void DescriptorArray::set_elements_transition_map(
1965 reinterpret_cast<HeapObject*>(this),
1972 return String::cast(get(ToKeyIndex(descriptor_number)));
1979 reinterpret_cast<HeapObject*>(this),
1986 return get(ToValueIndex(descriptor_number));
1998 Object* details = get(ToDetailsIndex(descriptor_number));
1999 return PropertyDetails(Smi::cast(details));
2015 return Descriptor::IndexFromValue(GetValue(descriptor_number));
2026 return GetValue(descriptor_number);
2038 Entry entry(this, descriptor_number);
2044 switch (GetType(descriptor_number)) {
2050 if (!value->IsAccessorPair()) return false;
2052 return accessors->getter()->IsMap() && accessors->setter()->IsMap();
2073 desc->Init(GetKey(descriptor_number),
2086 ToKeyIndex(descriptor_number),
2089 ToValueIndex(descriptor_number),
2092 ToDetailsIndex(descriptor_number),
2093 desc->GetDetails().AsSmi());
2097 void DescriptorArray::NoIncrementalWriteBarrierSwapDescriptors(
2098 int first, int second) {
2099 NoIncrementalWriteBarrierSwap(this, ToKeyIndex(first), ToKeyIndex(second));
2100 NoIncrementalWriteBarrierSwap(this,
2101 ToValueIndex(first),
2102 ToValueIndex(second));
2103 NoIncrementalWriteBarrierSwap(this,
2104 ToDetailsIndex(first),
2105 ToDetailsIndex(second));
2110 : marking_(array->GetHeap()->incremental_marking()) {
2113 ASSERT(Marking::Color(array) == Marking::WHITE_OBJECT);
2119 marking_->LeaveNoMarkingScope();
2123 template<typename Shape, typename Key>
2125 const int kMinCapacity = 32;
2127 if (capacity < kMinCapacity) {
2128 capacity = kMinCapacity;
2134 template<typename Shape, typename Key>
2141 template<typename Shape, typename Key>
2143 uint32_t capacity = Capacity();
2148 Object* element = KeyAt(entry);
2150 if (element == isolate->heap()->raw_unchecked_undefined_value()) break;
2151 if (element != isolate->heap()->raw_unchecked_the_hole_value() &&
2152 Shape::IsMatch(key, element)) return entry;
2153 entry = NextProbe(entry, count++, capacity);
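// Standalone sketch (illustrative): FindEntry above is open addressing --
// start at hash & (capacity - 1), stop on the undefined sentinel, skip holes
// left by deletions, and reprobe with a growing step. A toy version over ints
// (with -1 as "undefined" and -2 as "the hole", capacity a power of two):
#include <vector>
static inline int SketchFindEntry(const std::vector<int>& table, int key) {
  const int kUndefined = -1;
  const int kTheHole = -2;
  size_t capacity = table.size();
  size_t entry = static_cast<size_t>(key) & (capacity - 1);
  for (size_t count = 1; count <= capacity; ++count) {
    int element = table[entry];
    if (element == kUndefined) return -1;  // free slot reached: not present
    if (element != kTheHole && element == key) return static_cast<int>(entry);
    entry = (entry + count) & (capacity - 1);  // quadratic-style reprobe
  }
  return -1;  // table exhausted
}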
2160 Object* max_index_object = get(kMaxNumberKeyIndex);
2161 if (!max_index_object->IsSmi()) return false;
2163 (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
2167 ASSERT(!requires_slow_elements());
2168 Object* max_index_object = get(kMaxNumberKeyIndex);
2169 if (!max_index_object->IsSmi()) return 0;
2170 uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
2171 return value >> kRequiresSlowElementsTagSize;
2244 #define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
2246 #undef MAKE_STRUCT_CAST
2249 template <typename Shape, typename Key>
2251 ASSERT(obj->IsHashTable());
2252 return reinterpret_cast<HashTable*>(obj);
2262 uint32_t String::hash_field() {
2269 #if V8_HOST_ARCH_64_BIT
2276 if (other == this) return true;
2277 if (StringShape(this).IsSymbol() && StringShape(other).IsSymbol()) {
2280 return SlowEquals(other);
2285 if (!StringShape(this).IsCons()) return this;
2288 return SlowTryFlatten(pretenure);
2293 MaybeObject* flat = TryFlatten(pretenure);
2294 Object* successfully_flattened;
2295 if (!flat->ToObject(&successfully_flattened)) return this;
2302 switch (StringShape(this).full_representation_tag()) {
2328 ASSERT(StringShape(this).IsSequential());
2330 return this->IsAsciiRepresentation()
2337 if (!StringShape(this).IsCons()) return true;
2346 ASSERT(StringShape(this).IsIndirect());
2362 static_cast<byte>(value));
2372 return reinterpret_cast<char*>(GetCharsAddress());
2414 ASSERT(parent->IsSeqString() || parent->IsExternalString());
2466 if (is_short()) return;
2467 const char** data_field =
2468 reinterpret_cast<const char**>(FIELD_ADDR(this, kResourceDataOffset));
2469 *data_field = resource()->data();
2475 *reinterpret_cast<const Resource**>(
2476 FIELD_ADDR(this, kResourceOffset)) = resource;
2477 if (resource != NULL) update_data_cache();
2482 return resource()->data();
2488 return GetChars()[index];
2498 if (is_short()) return;
2501 *data_field = resource()->data();
2507 *reinterpret_cast<const Resource**>(
2508 FIELD_ADDR(this, kResourceOffset)) = resource;
2509 if (resource != NULL) update_data_cache();
2514 return resource()->data();
2520 return GetChars()[index];
2526 return GetChars() + start;
2531 set_finger_index(kEntriesIndex);
2532 set_size(kEntriesIndex);
2537 int cache_size = size();
2541 cache_size - kEntriesIndex);
2596 return reinterpret_cast<uint8_t*>(external_pointer());
2602 uint8_t* ptr = external_pixel_pointer();
2608 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2614 uint8_t* ptr = external_pixel_pointer();
2619 void* ExternalArray::external_pointer() {
2621 return reinterpret_cast<void*>(ptr);
2625 void ExternalArray::set_external_pointer(void* value, WriteBarrierMode mode) {
2626 intptr_t ptr = reinterpret_cast<intptr_t>(value);
2633 int8_t* ptr = static_cast<int8_t*>(external_pointer());
2639 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2645 int8_t* ptr = static_cast<int8_t*>(external_pointer());
2652 uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
2658 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2664 uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
2677 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2696 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2728 uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
2740 uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
2747 float* ptr = static_cast<float*>(external_pointer());
2759 float* ptr = static_cast<float*>(external_pointer());
2766 double* ptr = static_cast<double*>(external_pointer());
2778 double* ptr = static_cast<double*>(external_pointer());
2789 ASSERT(0 <= id && id < 256);
2821 reinterpret_cast<SeqAsciiString*>(this)->length());
2824 return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
2827 return reinterpret_cast<FreeSpace*>(this)->size();
2831 reinterpret_cast<SeqTwoByteString*>(this)->length());
2835 reinterpret_cast<FixedDoubleArray*>(this)->length());
2838 return reinterpret_cast<Code*>(this)->CodeSize();
2845 ASSERT(0 <= value && value < 256);
2851 ASSERT(0 <= value && value < 256);
2852 WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value));
2857 ASSERT(0 <= value && value < 256);
2859 kPreAllocatedPropertyFieldsOffset,
2860 static_cast<byte>(value));
2906 set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
2908 set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
2914 return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
2920 set_bit_field3(bit_field3() | (1 << kFunctionWithPrototype));
2922 set_bit_field3(bit_field3() & ~(1 << kFunctionWithPrototype));
2928 return ((1 << kFunctionWithPrototype) & bit_field3()) != 0;
2933 if (access_check_needed) {
2934 set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
2936 set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
2942 return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
2948 set_bit_field2(bit_field2() | (1 << kIsExtensible));
2950 set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
2955 return ((1 << kIsExtensible) & bit_field2()) != 0;
2961 set_bit_field2(bit_field2() | (1 << kAttachedToSharedFunctionInfo));
2963 set_bit_field2(bit_field2() & ~(1 << kAttachedToSharedFunctionInfo));
2968 return ((1 << kAttachedToSharedFunctionInfo) & bit_field2()) != 0;
2974 set_bit_field3(bit_field3() | (1 << kIsShared));
2976 set_bit_field3(bit_field3() & ~(1 << kIsShared));
2981 return ((1 << kIsShared) & bit_field3()) != 0;
2987 set_bit_field3(bit_field3() | (1 << kUsedForPrototype));
2989 set_bit_field3(bit_field3() & ~(1 << kUsedForPrototype));
2995 return ((1 << kUsedForPrototype) & bit_field3()) != 0;
3012 ASSERT((ExtractKindFromFlags(flags) != CALL_IC &&
3013 ExtractKindFromFlags(flags) != KEYED_CALL_IC) ||
3014 ExtractArgumentsCountFromFlags(flags) >= 0);
3020 return ExtractKindFromFlags(flags());
3029 ASSERT(is_inline_cache_stub() ||
3038 ASSERT(is_inline_cache_stub());
3039 return ExtractExtraICStateFromFlags(flags());
3044 return ExtractTypeFromFlags(flags());
3049 ASSERT(is_call_stub() || is_keyed_call_stub() || kind() == STUB);
3050 return ExtractArgumentsCountFromFlags(flags());
3056 kind() == UNARY_OP_IC ||
3057 kind() == BINARY_OP_IC ||
3058 kind() == COMPARE_IC ||
3059 kind() == TO_BOOLEAN_IC);
3066 kind() == UNARY_OP_IC ||
3067 kind() == BINARY_OP_IC ||
3068 kind() == COMPARE_IC ||
3069 kind() == TO_BOOLEAN_IC);
3070 ASSERT(0 <= major && major < 256);
3076 return kind() == STUB && IsPregeneratedField::decode(flags());
3083 f = static_cast<Flags>(IsPregeneratedField::update(f, value));
3103 return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
3110 flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
3118 return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
3125 flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
3133 return FullCodeFlagsIsCompiledOptimizable::decode(flags);
3140 flags = FullCodeFlagsIsCompiledOptimizable::update(flags, value);
3153 ASSERT(level >= 0 && level <= kMaxLoopNestingMarker);
3172 ASSERT(kind() == OPTIMIZED_FUNCTION);
3178 ASSERT(kind() == OPTIMIZED_FUNCTION);
3184 ASSERT(kind() == OPTIMIZED_FUNCTION);
3190 ASSERT(kind() == OPTIMIZED_FUNCTION);
3210 ASSERT(is_call_stub() || is_keyed_call_stub());
3217 ASSERT(is_call_stub() || is_keyed_call_stub());
3223 ASSERT(is_unary_op_stub());
3229 ASSERT(is_unary_op_stub());
3235 ASSERT(is_binary_op_stub());
3241 ASSERT(is_binary_op_stub());
3247 ASSERT(is_binary_op_stub());
3253 ASSERT(is_binary_op_stub());
3259 ASSERT(is_compare_ic_stub());
3265 ASSERT(is_compare_ic_stub());
3271 ASSERT(is_compare_ic_stub());
3277 ASSERT(is_compare_ic_stub());
3283 ASSERT(is_to_boolean_ic_stub());
3289 ASSERT(is_to_boolean_ic_stub());
3307 Kind kind = this->kind();
3308 return kind >= FIRST_IC_KIND && kind <= LAST_IC_KIND;
3320 ASSERT(extra_ic_state == kNoExtraICState ||
3323 kind == KEYED_STORE_IC);
3325 int bits = KindField::encode(kind)
3326 | ICStateField::encode(ic_state)
3327 | TypeField::encode(type)
3328 | ExtraICStateField::encode(extra_ic_state)
3329 | (argc << kArgumentsCountShift)
3330 | CacheHolderField::encode(holder);
3331 return static_cast<Flags>(bits);
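// Standalone sketch (illustrative): ComputeFlags packs several small fields
// into one word with BitField-style shift-and-mask encoders. A minimal version
// of that pattern, assuming a 4-bit kind and a 3-bit IC state:
#include <stdint.h>
static inline uint32_t SketchEncodeFlags(uint32_t kind, uint32_t ic_state) {
  return (kind & 0xF) | ((ic_state & 0x7) << 4);
}
static inline uint32_t SketchDecodeKind(uint32_t flags) { return flags & 0xF; }
static inline uint32_t SketchDecodeICState(uint32_t flags) { return (flags >> 4) & 0x7; }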
3340 return ComputeFlags(kind, MONOMORPHIC, extra_ic_state, type, argc, holder);
3345 return KindField::decode(flags);
3350 return ICStateField::decode(flags);
3355 return ExtraICStateField::decode(flags);
3360 return TypeField::decode(flags);
3365 return (flags & kArgumentsCountMask) >> kArgumentsCountShift;
3370 return CacheHolderField::decode(flags);
3376 return static_cast<Flags>(bits);
3386 Code* result = reinterpret_cast<Code*>(code);
3397 Object* Map::prototype() {
3409 DescriptorArray* Map::instance_descriptors() {
3411 if (object->IsSmi()) {
3412 return GetHeap()->empty_descriptor_array();
3426 kInstanceDescriptorsOrBitField3Offset);
3427 if (!object->IsSmi()) {
3429 ZapInstanceDescriptors();
3433 kInstanceDescriptorsOrBitField3Offset,
3442 kInstanceDescriptorsOrBitField3Offset);
3444 if (value == heap->empty_descriptor_array()) {
3445 clear_instance_descriptors();
3448 if (object->IsSmi()) {
3457 if (value != instance_descriptors()) {
3458 ZapInstanceDescriptors();
3461 WRITE_FIELD(this, kInstanceDescriptorsOrBitField3Offset, value);
3463 heap, this, kInstanceDescriptorsOrBitField3Offset, value, mode);
3469 kInstanceDescriptorsOrBitField3Offset);
3470 if (object->IsSmi()) {
3481 kInstanceDescriptorsOrBitField3Offset);
3482 if (object->IsSmi()) {
3484 kInstanceDescriptorsOrBitField3Offset,
3493 Object* object = READ_FIELD(this, kPrototypeTransitionsOrBackPointerOffset);
3494 if (object->IsFixedArray()) {
3508 return instance_descriptors()->set_elements_transition_map(transitioned_map);
3515 ASSERT((value->IsUndefined() && GetBackPointer()->IsMap()) ||
3516 (value->IsMap() && GetBackPointer()->IsUndefined()));
3517 Object* object = READ_FIELD(this, kPrototypeTransitionsOrBackPointerOffset);
3518 if (object->IsFixedArray()) {
3520 kProtoTransitionBackPointerOffset, value, mode);
3522 WRITE_FIELD(this, kPrototypeTransitionsOrBackPointerOffset, value);
3524 heap, this, kPrototypeTransitionsOrBackPointerOffset, value, mode);
3530 Object* object = READ_FIELD(this, kPrototypeTransitionsOrBackPointerOffset);
3531 if (object->IsFixedArray()) {
3534 return GetHeap()->empty_fixed_array();
3539 void Map::set_prototype_transitions(FixedArray* value, WriteBarrierMode mode) {
3541 ASSERT(value != heap->empty_fixed_array());
3542 value->set(kProtoTransitionBackPointerOffset, GetBackPointer());
3544 if (value != prototype_transitions()) {
3545 ZapPrototypeTransitions();
3548 WRITE_FIELD(this, kPrototypeTransitionsOrBackPointerOffset, value);
3550 heap, this, kPrototypeTransitionsOrBackPointerOffset, value, mode);
3555 ASSERT(undefined->IsUndefined());
3556 WRITE_FIELD(this, kPrototypeTransitionsOrBackPointerOffset, undefined);
3561 Object* object = READ_FIELD(this, kPrototypeTransitionsOrBackPointerOffset);
3562 return reinterpret_cast<HeapObject*>(object);
3571 ACCESSORS(JSFunction, next_function_link, Object, kNextFunctionLinkOffset)
3580 ACCESSORS(AccessorInfo, setter, Object, kSetterOffset)
3581 ACCESSORS(AccessorInfo, data, Object, kDataOffset)
3582 ACCESSORS(AccessorInfo, name, Object, kNameOffset)
3584 ACCESSORS(AccessorInfo, expected_receiver_type, Object,
3585 kExpectedReceiverTypeOffset)
3588 ACCESSORS(AccessorPair, setter, Object, kSetterOffset)
3591 ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
3592 ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)
3595 ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
3596 ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
3597 ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
3598 ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
3599 ACCESSORS(InterceptorInfo, data, Object, kDataOffset)
3602 ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)
3605 ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
3608 ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
3610 kPropertyAccessorsOffset)
3611 ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
3612 kPrototypeTemplateOffset)
3613 ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
3615 kNamedPropertyHandlerOffset)
3616 ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
3617 kIndexedPropertyHandlerOffset)
3619 kInstanceTemplateOffset)
3620 ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
3621 ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
3622 ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
3623 kInstanceCallHandlerOffset)
3625 kAccessCheckInfoOffset)
3629 ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
3630 kInternalFieldCountOffset)
3633 ACCESSORS(SignatureInfo, args, Object, kArgsOffset)
3638 ACCESSORS(Script, name, Object, kNameOffset)
3639 ACCESSORS(Script, id, Object, kIdOffset)
3642 ACCESSORS(Script, data, Object, kDataOffset)
3643 ACCESSORS(Script, context_data, Object, kContextOffset)
3648 ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
3649 ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
3651 kEvalFrominstructionsOffsetOffset)
3653 #ifdef ENABLE_DEBUGGER_SUPPORT
3654 ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
3655 ACCESSORS(DebugInfo, original_code, Code, kOriginalCodeIndex)
3656 ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex)
3657 ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)
3661 ACCESSORS_TO_SMI(BreakPointInfo, statement_position, kStatementPositionIndex)
3662 ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
3665 ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
3666 ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
3667 ACCESSORS(SharedFunctionInfo, initial_map, Object, kInitialMapOffset)
3668 ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
3669 kInstanceClassNameOffset)
3670 ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
3671 ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
3672 ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
3673 ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
3675 kThisPropertyAssignmentsOffset)
3676 SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
3680 kHiddenPrototypeBit)
3681 BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
3683 kNeedsAccessCheckBit)
3685 kReadOnlyPrototypeBit)
3688 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
3693 kHasOnlySimpleThisPropertyAssignments)
3696 allows_lazy_compilation,
3697 kAllowLazyCompilation)
3704 has_duplicate_parameters,
3705 kHasDuplicateParameters)
3708 #if V8_HOST_ARCH_32_BIT
3711 kFormalParameterCountOffset)
3713 kExpectedNofPropertiesOffset)
3714 SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
3716 kStartPositionAndTypeOffset)
3717 SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
3719 kFunctionTokenPositionOffset)
3721 kCompilerHintsOffset)
3722 SMI_ACCESSORS(SharedFunctionInfo, this_property_assignments_count,
3723 kThisPropertyAssignmentsCountOffset)
3724 SMI_ACCESSORS(SharedFunctionInfo, opt_count, kOptCountOffset)
3725 SMI_ACCESSORS(SharedFunctionInfo, counters, kCountersOffset)
3728 kStressDeoptCounterOffset)
3731 #define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset) \
3732 STATIC_ASSERT(holder::offset % kPointerSize == 0); \
3733 int holder::name() { \
3734 int value = READ_INT_FIELD(this, offset); \
3735 ASSERT(kHeapObjectTag == 1); \
3736 ASSERT((value & kHeapObjectTag) == 0); \
3737 return value >> 1; \
3739 void holder::set_##name(int value) { \
3740 ASSERT(kHeapObjectTag == 1); \
3741 ASSERT((value & 0xC0000000) == 0xC0000000 || \
3742 (value & 0xC0000000) == 0x000000000); \
3743 WRITE_INT_FIELD(this, \
3745 (value << 1) & ~kHeapObjectTag); \
3748 #define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset) \
3749 STATIC_ASSERT(holder::offset % kPointerSize == kIntSize); \
3750 INT_ACCESSORS(holder, name, offset)
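// Illustrative note (not part of the original header): the PSEUDO_SMI
// accessors keep a plain int in a pointer-sized slot shifted left by one, so
// the stored word has a clear low bit and the GC reads the field as a Smi
// rather than as a heap pointer (kHeapObjectTag == 1). Standalone sketch:
#include <stdint.h>
static inline int32_t SketchPseudoSmiStore(int value) {
  return (value << 1) & ~1;  // clear the heap-object tag bit
}
static inline int SketchPseudoSmiLoad(int32_t raw) {
  return raw >> 1;
}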
3755 formal_parameter_count,
3756 kFormalParameterCountOffset)
3759 expected_nof_properties,
3760 kExpectedNofPropertiesOffset)
3765 start_position_and_type,
3766 kStartPositionAndTypeOffset)
3769 function_token_position,
3770 kFunctionTokenPositionOffset)
3773 kCompilerHintsOffset)
3776 this_property_assignments_count,
3777 kThisPropertyAssignmentsCountOffset)
3782 stress_deopt_counter,
3783 kStressDeoptCounterOffset)
3793 ASSERT(0 <= value && value < 256);
3794 WRITE_BYTE_FIELD(this, kConstructionCountOffset, static_cast<byte>(value));
3800 live_objects_may_exist,
3801 kLiveObjectsMayExist)
3804 bool SharedFunctionInfo::IsInobjectSlackTrackingInProgress() {
3805 return initial_map() != GetHeap()->undefined_value();
3811 optimization_disabled,
3812 kOptimizationDisabled)
3815 void SharedFunctionInfo::set_optimization_disabled(bool disable) {
3817 kOptimizationDisabled,
3822 code()->set_optimizable(false);
3829 return code()->profiler_ticks();
3849 this->language_mode() == language_mode ||
3853 hints, kStrictModeFunction, language_mode != CLASSIC_MODE);
3855 hints, kExtendedModeFunction, language_mode == EXTENDED_MODE);
3856 set_compiler_hints(hints);
3864 BOOL_GETTER(SharedFunctionInfo, compiler_hints, is_extended_mode,
3865 kExtendedModeFunction)
3866 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
3869 kNameShouldPrintAsAnonymous)
3870 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
3871 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
3872 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction)
3878 ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)
3882 bool Script::HasValidSource() {
3883 Object* src = this->source();
3884 if (!src->IsString()) return true;
3886 if (!StringShape(src_str).IsExternal()) return true;
3898 set_formal_parameter_count(kDontAdaptArgumentsSentinel);
3908 set_start_position_and_type((start_position << kStartPositionShift)
3913 Code* SharedFunctionInfo::code() {
3919 return reinterpret_cast<Code*>(READ_FIELD(this, kCodeOffset));
3929 ScopeInfo* SharedFunctionInfo::scope_info() {
3930 return reinterpret_cast<ScopeInfo*>(READ_FIELD(this, kScopeInfoOffset));
3934 void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
3936 WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
3940 reinterpret_cast<Object*>(value),
3947 Isolate::Current()->builtins()->builtin(Builtins::kLazyCompile);
3952 return function_data()->IsFunctionTemplateInfo();
3963 return function_data()->IsSmi();
3968 ASSERT(HasBuiltinFunctionId());
3980 set_compiler_hints(hints | ((code_age & kCodeAgeMask) << kCodeAgeShift));
3985 return ICAgeBits::decode(counters());
3990 set_counters(ICAgeBits::update(counters(), ic_age));
3995 return DeoptCountBits::decode(counters());
4000 set_counters(DeoptCountBits::update(counters(), deopt_count));
4005 int value = counters();
4006 int deopt_count = DeoptCountBits::decode(value);
4007 deopt_count = (deopt_count + 1) & DeoptCountBits::kMax;
4008 set_counters(DeoptCountBits::update(value, deopt_count));
4013 return OptReenableTriesBits::decode(counters());
4018 set_counters(OptReenableTriesBits::update(counters(), tries));
4023 Code* code = this->code();
4029 int tries = opt_reenable_tries();
4030 set_opt_reenable_tries((tries + 1) & OptReenableTriesBits::kMax);
4033 if (tries >= 16 && (((tries - 1) & tries) == 0)) {
4034 set_optimization_disabled(false);
4037 code()->set_optimizable(true);
4043 return context()->global()->IsJSBuiltinsObject();
4048 return shared()->formal_parameter_count() !=
4059 return code()->kind() == Code::FUNCTION && code()->optimizable();
4074 return reinterpret_cast<Code*>(
4091 bool was_optimized = IsOptimized();
4098 if (!was_optimized && is_optimized) {
4099 context()->global_context()->AddOptimizedFunction(this);
4101 if (was_optimized && !is_optimized) {
4102 context()->global_context()->RemoveOptimizedFunction(this);
4118 return reinterpret_cast<SharedFunctionInfo*>(
4119 READ_FIELD(this, kSharedFunctionInfoOffset));
4124 ASSERT(value->IsUndefined() || value->IsContext());
4130 kPrototypeOrInitialMapOffset)
4133 Map* JSFunction::initial_map() {
4134 return Map::cast(prototype_or_initial_map());
4139 set_prototype_or_initial_map(value);
4145 Context* global_context = context()->global_context();
4146 Object* array_function =
4148 if (array_function->IsJSFunction() &&
4153 MaybeObject* maybe_maps =
4156 if (!maybe_maps->To(&maps)) return maybe_maps;
4158 Map* current_map = initial_map;
4161 maps->set(kind, current_map);
4166 MaybeObject* maybe_new_map =
4168 if (!maybe_new_map->To(&new_map)) return maybe_new_map;
4169 maps->set(next_kind, new_map);
4170 current_map = new_map;
4172 global_context->set_js_array_maps(maps);
4174 set_initial_map(initial_map);
4180 return prototype_or_initial_map()->IsMap();
4185 return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
4195 ASSERT(has_instance_prototype());
4196 if (has_initial_map()) return initial_map()->prototype();
4199 return prototype_or_initial_map();
4207 if (map()->has_non_instance_prototype()) return map()->constructor();
4208 return instance_prototype();
4223 ASSERT(!shared()->bound());
4224 return literals_or_bindings();
4229 ASSERT(!shared()->bound());
4230 set_literals_or_bindings(literals);
4235 ASSERT(shared()->bound());
4236 return literals_or_bindings();
4241 ASSERT(shared()->bound());
4245 bindings->map() == GetHeap()->fixed_cow_array_map());
4246 set_literals_or_bindings(bindings);
4251 ASSERT(!shared()->bound());
4252 return literals()->length();
4257 ASSERT(id < kJSBuiltinsCount);
4258 return READ_FIELD(this, OffsetOfFunctionWithId(id));
4264 ASSERT(id < kJSBuiltinsCount);
4265 WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
4271 ASSERT(id < kJSBuiltinsCount);
4278 ASSERT(id < kJSBuiltinsCount);
4287 ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset)
4290 void JSProxy::InitializeBody(int object_size, Object* value) {
4301 ACCESSORS(JSWeakMap, next, Object, kNextOffset)
4314 ACCESSORS(JSModule, context, Object, kContextOffset)
4317 JSModule* JSModule::cast(Object* obj) {
4318 ASSERT(obj->IsJSModule());
4320 return reinterpret_cast<JSModule*>(obj);
4324 ACCESSORS(JSValue, value, Object, kValueOffset)
4327 JSValue* JSValue::cast(Object* obj) {
4328 ASSERT(obj->IsJSValue());
4330 return reinterpret_cast<JSValue*>(obj);
4334 ACCESSORS(JSDate, value, Object, kValueOffset)
4335 ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
4336 ACCESSORS(JSDate, year, Object, kYearOffset)
4337 ACCESSORS(JSDate, month, Object, kMonthOffset)
4338 ACCESSORS(JSDate, day, Object, kDayOffset)
4339 ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
4340 ACCESSORS(JSDate, hour, Object, kHourOffset)
4341 ACCESSORS(JSDate, min, Object, kMinOffset)
4342 ACCESSORS(JSDate, sec, Object, kSecOffset)
4345 JSDate* JSDate::cast(Object* obj) {
4348 return reinterpret_cast<JSDate*>(obj);
4352 ACCESSORS(JSMessageObject, type, String, kTypeOffset)
4354 ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
4355 ACCESSORS(JSMessageObject, stack_trace, Object, kStackTraceOffset)
4356 ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
4357 SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
4358 SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)
4361 JSMessageObject* JSMessageObject::cast(Object* obj) {
4362 ASSERT(obj->IsJSMessageObject());
4364 return reinterpret_cast<JSMessageObject*>(obj);
4368 INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
4370 ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
4371 ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
4372 ACCESSORS(Code, type_feedback_info, Object, kTypeFeedbackInfoOffset)
4373 ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
4382 return instruction_start() + instruction_size();
4392 return reinterpret_cast<FixedArray*>(
4393 READ_FIELD(this, kDeoptimizationDataOffset));
4403 return unchecked_relocation_info()->GetDataStartAddress();
4408 return unchecked_relocation_info()->length();
4413 return instruction_start();
4428 JSRegExp::Type JSRegExp::TypeTag() {
4429 Object* data = this->data();
4443 switch (TypeTag()) {
4456 ASSERT(this->data()->IsFixedArray());
4457 Object* data = this->data();
4464 ASSERT(this->data()->IsFixedArray());
4465 Object* data = this->data();
4472 ASSERT(TypeTag() != NOT_COMPILED);
4478 FixedArray* fa = reinterpret_cast<FixedArray*>(data());
4485 ASSERT(TypeTag() != NOT_COMPILED);
4486 ASSERT(index >= kDataIndex);
4492 ASSERT(index >= kDataIndex);
4493 FixedArray* fa = reinterpret_cast<FixedArray*>(data());
4494 if (value->IsSmi()) {
4508 Map* map = fixed_array->map();
4510 (map == GetHeap()->fixed_array_map() ||
4511 map == GetHeap()->fixed_cow_array_map())) ||
4513 (fixed_array->IsFixedDoubleArray() ||
4514 fixed_array == GetHeap()->empty_fixed_array())) ||
4516 fixed_array->IsFixedArray() &&
4517 fixed_array->IsDictionary()) ||
4520 (elements()->IsFixedArray() && elements()->length() >= 2));
4569 return array->IsExternalArray();
4573 #define EXTERNAL_ELEMENTS_CHECK(name, type) \
4574 bool JSObject::HasExternal##name##Elements() { \
4575 HeapObject* array = elements(); \
4576 ASSERT(array != NULL); \
4577 if (!array->IsHeapObject()) \
4579 return array->map()->instance_type() == type; \
4598 bool JSObject::HasNamedInterceptor() {
4609 ASSERT(HasFastSmiOrObjectElements());
4612 if (elems->map() != isolate->heap()->fixed_cow_array_map()) return elems;
4613 Object* writable_elems;
4615 elems, isolate->heap()->fixed_array_map());
4616 if (!maybe_writable_elems->ToObject(&writable_elems)) {
4617 return maybe_writable_elems;
4621 isolate->counters()->cow_arrays_converted()->Increment();
4622 return writable_elems;
4627 ASSERT(!HasFastProperties());
4633 ASSERT(HasDictionaryElements());
4638 bool String::IsHashFieldComputed(uint32_t field) {
4639 return (field & kHashNotComputedMask) == 0;
4644 return IsHashFieldComputed(hash_field());
4650 uint32_t field = hash_field();
4651 if (IsHashFieldComputed(field)) return field >> kHashShift;
4653 return ComputeAndSetHash();
4659 raw_running_hash_(seed),
4661 is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
4662 is_first_char_(true),
4664 ASSERT(FLAG_randomize_hashes || raw_running_hash_ == 0);
4680 raw_running_hash_ += c;
4681 raw_running_hash_ += (raw_running_hash_ << 10);
4682 raw_running_hash_ ^= (raw_running_hash_ >> 6);
4684 if (is_array_index_) {
4685 if (c < '0' || c > '9') {
4686 is_array_index_ = false;
4689 if (is_first_char_) {
4690 is_first_char_ = false;
4691 if (c == '0' && length_ > 1) {
4692 is_array_index_ = false;
4696 if (array_index_ > 429496729U - ((d + 2) >> 3)) {
4697 is_array_index_ = false;
4699 array_index_ = array_index_ * 10 + d;
4712 raw_running_hash_ += c;
4713 raw_running_hash_ += (raw_running_hash_ << 10);
4714 raw_running_hash_ ^= (raw_running_hash_ >> 6);
4718 uint32_t StringHasher::GetHash() {
4721 uint32_t result = raw_running_hash_;
4722 result += (result << 3);
4723 result ^= (result >> 11);
4724 result += (result << 15);
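// Standalone sketch (illustrative): AddCharacter()/GetHash() above implement
// the Jenkins one-at-a-time hash -- per-character mixing followed by a final
// avalanche. A self-contained version over a byte string, seeded like the
// running hash:
#include <stddef.h>
#include <stdint.h>
static inline uint32_t SketchOneAtATimeHash(const unsigned char* data,
                                            size_t length, uint32_t seed) {
  uint32_t hash = seed;
  for (size_t i = 0; i < length; ++i) {
    hash += data[i];
    hash += hash << 10;
    hash ^= hash >> 6;
  }
  hash += hash << 3;
  hash ^= hash >> 11;
  hash += hash << 15;
  return hash;
}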
4732 template <typename schar>
4740 for (; i < length; i++) {
4753 return SlowAsArrayIndex(index);
4785 if (IsJSGlobalProxy()) {
4787 if (proto->IsNull()) return GetHeap()->undefined_value();
4788 ASSERT(proto->IsJSGlobalObject());
4841 return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
4846 set_flag(Smi::FromInt(AttributesField::update(flag()->value(), attributes)));
4851 Object* function_template = expected_receiver_type();
4852 if (!function_template->IsFunctionTemplateInfo()) return true;
4857 template<typename Shape, typename Key>
4861 SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
4865 template<typename Shape, typename Key>
4869 PropertyDetails details) {
4870 ASSERT(!key->IsString() || details.IsDeleted() || details.index() > 0);
4881 ASSERT(other->IsNumber());
4882 return key == static_cast<uint32_t>(other->Number());
4893 ASSERT(other->IsNumber());
4904 ASSERT(other->IsNumber());
4909 return Isolate::Current()->heap()->NumberFromUint32(key);
4936 template <int entrysize>
4942 template <int entrysize>
4945 return Smi::cast(maybe_hash->ToObjectChecked())->value();
4949 template <int entrysize>
4953 return Smi::cast(maybe_hash->ToObjectChecked())->value();
4957 template <int entrysize>
4976 const int kArraySizeThatFitsComfortablyInNewSpace = 128;
4977 if (elts->length() < required_size) {
4980 Expand(required_size + (required_size >> 3));
4982 } else if (!GetHeap()->new_space()->Contains(elts) &&
4983 required_size < kArraySizeThatFitsComfortablyInNewSpace) {
4986 Expand(required_size);
4998 bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
5007 if (maybe_result->IsFailure()) return maybe_result;
5010 ((storage->map() != GetHeap()->fixed_double_array_map()) &&
5014 set_elements(storage);
5021 if (length() == 0) return this;
5027 if (length() == 0) return this;
5033 set(1 + index * 2, id);
5043 set(index * 2, cell);
5053 return isolate->factory()->the_hole_value();
5058 return isolate->factory()->undefined_value();
5063 return heap->raw_unchecked_the_hole_value();
5069 kIcWithTypeinfoCountOffset)
5071 kTypeFeedbackCellsOffset)
5077 Relocatable::Relocatable(Isolate* isolate) {
5078 ASSERT(isolate == Isolate::Current());
5080 prev_ = isolate->relocatable_top();
5081 isolate->set_relocatable_top(this);
5085 Relocatable::~Relocatable() {
5086 ASSERT(isolate_ == Isolate::Current());
5087 ASSERT_EQ(isolate_->relocatable_top(), this);
5088 isolate_->set_relocatable_top(prev_);
5098 v->VisitExternalReference(
5099 reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
5103 template<typename StaticVisitor>
5105 StaticVisitor::VisitExternalReference(
5106 reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
5112 v->VisitExternalAsciiString(
5113 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
5117 template<typename StaticVisitor>
5120 StaticVisitor::VisitExternalAsciiString(
5121 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
5127 v->VisitExternalTwoByteString(
5128 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
5132 template<typename StaticVisitor>
5135 StaticVisitor::VisitExternalTwoByteString(
5136 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
5139 #define SLOT_ADDR(obj, offset) \
5140 reinterpret_cast<Object**>((obj)->address() + offset)
5142 template<int start_offset, int end_offset, int size>
5150 template<int start_offset>
5160 #undef CAST_ACCESSOR
5161 #undef INT_ACCESSORS
5163 #undef ACCESSORS_TO_SMI
5164 #undef SMI_ACCESSORS
5166 #undef BOOL_ACCESSORS
5170 #undef WRITE_BARRIER
5171 #undef CONDITIONAL_WRITE_BARRIER
5172 #undef READ_DOUBLE_FIELD
5173 #undef WRITE_DOUBLE_FIELD
5174 #undef READ_INT_FIELD
5175 #undef WRITE_INT_FIELD
5176 #undef READ_INTPTR_FIELD
5177 #undef WRITE_INTPTR_FIELD
5178 #undef READ_UINT32_FIELD
5179 #undef WRITE_UINT32_FIELD
5180 #undef READ_SHORT_FIELD
5181 #undef WRITE_SHORT_FIELD
5182 #undef READ_BYTE_FIELD
5183 #undef WRITE_BYTE_FIELD
5188 #endif // V8_OBJECTS_INL_H_
static int SizeOf(Map *map, HeapObject *object)
MUST_USE_RESULT MaybeObject * GetElementWithReceiver(Object *receiver, uint32_t index)
bool FLAG_enable_slow_asserts
#define WRITE_BYTE_FIELD(p, offset, value)
float get_scalar(int index)
Object * unchecked_first()
kPropertyAccessorsOffset kNamedPropertyHandlerOffset kInstanceTemplateOffset kAccessCheckInfoOffset eval_from_instructions_offset
#define HAS_FAILURE_TAG(value)
uint8_t get_scalar(int index)
bool prohibits_overwriting()
void TryReenableOptimization()
void SetBackPointer(Object *value, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode)
void set_compare_state(byte value)
STATIC_CHECK((kStringRepresentationMask|kStringEncodingMask)==Internals::kFullStringRepresentationMask)
static bool IsMatch(uint32_t key, Object *other)
Address GetCharsAddress()
int LinearSearch(SearchMode mode, String *name, int len)
void set_prohibits_overwriting(bool value)
void set_null_unchecked(Heap *heap, int index)
Code * builtin(Name name)
PropertyAttributes GetPropertyAttribute(String *name)
void set_deopt_count(int value)
JSGlobalPropertyCell * Cell(int index)
#define SLOW_ASSERT(condition)
int allow_osr_at_loop_nesting_level()
const intptr_t kSmiTagMask
static bool is_the_hole_nan(double value)
V8EXPORT bool IsTrue() const
FixedArray * function_bindings()
bool has_instance_prototype()
static const int kEntries
static int EntryToIndex(int entry)
static ByteArray * FromDataStartAddress(Address address)
void AddCharacter(uint32_t c)
void set_all_can_write(bool value)
Object * DataAtUnchecked(int index)
int inobject_properties()
void set_has_deoptimization_support(bool value)
static uint32_t Hash(uint32_t key)
MUST_USE_RESULT MaybeObject * CopyFixedDoubleArray(FixedDoubleArray *src)
void set(int index, Object *value)
int GetInternalFieldOffset(int index)
void AddSurrogatePair(uc32 c)
kPropertyAccessorsOffset kNamedPropertyHandlerOffset kInstanceTemplateOffset kAccessCheckInfoOffset kEvalFrominstructionsOffsetOffset kThisPropertyAssignmentsOffset kNeedsAccessCheckBit kIsExpressionBit kHasOnlySimpleThisPropertyAssignments kUsesArguments kFormalParameterCountOffset PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, expected_nof_properties, kExpectedNofPropertiesOffset) PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo
static bool get(Smi *smi, int bit_position)
void RecordWrite(Address address, int offset)
#define ASSERT_TAG_ALIGNED(address)
void set_all_can_read(bool value)
FixedArray * unchecked_deoptimization_data()
void set_function_with_prototype(bool value)
static double hole_nan_as_double()
bool InNewSpace(Object *object)
static String * cast(Object *obj)
#define READ_DOUBLE_FIELD(p, offset)
#define READ_INTPTR_FIELD(p, offset)
int Lookup(DescriptorArray *array, String *name)
MaybeObject * TryFlatten(PretenureFlag pretenure=NOT_TENURED)
const uint32_t kTwoByteStringTag
const int kFailureTypeTagSize
static const uint32_t kExponentMask
void set_language_mode(LanguageMode language_mode)
bool function_with_prototype()
static DescriptorArray * cast(Object *obj)
static Failure * InternalError()
static int SizeOf(Map *map, HeapObject *object)
void set_unary_op_type(byte value)
void clear_instance_descriptors()
int unused_property_fields()
void set_length(Smi *length)
void set_javascript_builtin(Builtins::JavaScript id, Object *value)
Object * InObjectPropertyAt(int index)
static Smi * FromInt(int value)
void set_code_age(int age)
bool HasFastSmiElements()
bool IsFastObjectElementsKind(ElementsKind kind)
void IteratePointer(ObjectVisitor *v, int offset)
MUST_USE_RESULT MaybeObject * ToSmi()
unsigned stack_check_table_offset()
Map * elements_transition_map()
void set_second(String *second, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
static Object * GetObjectFromEntryAddress(Address location_of_address)
void AddSurrogatePairNoIndex(uc32 c)
void SetAstId(int index, Smi *id)
kPropertyAccessorsOffset kNamedPropertyHandlerOffset kInstanceTemplateOffset kAccessCheckInfoOffset kEvalFrominstructionsOffsetOffset kThisPropertyAssignmentsOffset kNeedsAccessCheckBit kIsExpressionBit kHasOnlySimpleThisPropertyAssignments kUsesArguments formal_parameter_count
static MemoryChunk * FromAddress(Address a)
MUST_USE_RESULT MaybeObject * EnsureCanContainHeapObjectElements()
value format" "after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false, "print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false, "print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false, "report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true, "garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true, "flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true, "use incremental marking") DEFINE_bool(incremental_marking_steps, true, "do incremental marking steps") DEFINE_bool(trace_incremental_marking, false, "trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true, "Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false, "Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true, "use inline caching") DEFINE_bool(native_code_counters, false, "generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false, "Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true, "Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false, "Never perform compaction on full GC-testing only") DEFINE_bool(compact_code_space, true, "Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true, "Flush inline caches prior to mark compact collection and" "flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0, "Default seed for initializing random generator" "(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true, "allows verbose printing") DEFINE_bool(allow_natives_syntax, false, "allow natives syntax") DEFINE_bool(trace_sim, false, "Trace simulator execution") DEFINE_bool(check_icache, false, "Check icache flushes in ARM and MIPS simulator") DEFINE_int(stop_sim_at, 0, "Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8, "Stack alingment in bytes in simulator(4 or 8, 8 is default)") DEFINE_bool(trace_exception, false, "print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false, "preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true, "randomize hashes to avoid predictable hash collisions" "(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0, "Fixed seed to use to hash property keys(0 means random)" "(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false, "activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true, "generate optimized regexp code") DEFINE_bool(testing_bool_flag, true, "testing_bool_flag") DEFINE_int(testing_int_flag, 13, "testing_int_flag") DEFINE_float(testing_float_flag, 2.5, "float-flag") DEFINE_string(testing_string_flag, "Hello, world!", "string-flag") DEFINE_int(testing_prng_seed, 42, "Seed used for threading test randomness") DEFINE_string(testing_serialization_file, "/tmp/serdes", "file in which to serialize heap") DEFINE_bool(help, false, "Print usage message, including flags, on console") DEFINE_bool(dump_counters, false, "Dump counters on exit") DEFINE_string(map_counters, "", "Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT, "Pass all remaining arguments to the script.Alias for\"--\".") DEFINE_bool(debug_compile_events, 
true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information 
(implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#43"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2#define FLAG_MODE_DEFINE_DEFAULTS#1"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flag-definitions.h"1#define FLAG_FULL(ftype, ctype, nam, def, cmt)#define FLAG_READONLY(ftype, ctype, nam, def, cmt)#define DEFINE_implication(whenflag, thenflag)#define DEFINE_bool(nam, def, cmt)#define DEFINE_int(nam, def, cmt)#define DEFINE_float(nam, def, cmt)#define DEFINE_string(nam, def, cmt)#define DEFINE_args(nam, def, cmt)#define FLAG DEFINE_bool(use_strict, false,"enforce strict mode") DEFINE_bool(es5_readonly, false,"activate correct semantics for inheriting readonliness") DEFINE_bool(es52_globals, false,"activate new semantics for global var declarations") DEFINE_bool(harmony_typeof, false,"enable harmony semantics for typeof") DEFINE_bool(harmony_scoping, false,"enable harmony block scoping") DEFINE_bool(harmony_modules, false,"enable harmony modules (implies block scoping)") DEFINE_bool(harmony_proxies, false,"enable harmony proxies") DEFINE_bool(harmony_collections, false,"enable harmony collections (sets, maps, and weak maps)") DEFINE_bool(harmony, false,"enable all harmony features (except typeof)") DEFINE_implication(harmony, harmony_scoping) DEFINE_implication(harmony, harmony_modules) DEFINE_implication(harmony, harmony_proxies) DEFINE_implication(harmony, harmony_collections) DEFINE_implication(harmony_modules, harmony_scoping) DEFINE_bool(packed_arrays, false,"optimizes arrays that have no holes") DEFINE_bool(smi_only_arrays, true,"tracks arrays with only smi values") DEFINE_bool(clever_optimizations, true,"Optimize object size, Array shift, DOM strings and string +") DEFINE_bool(unbox_double_arrays, true,"automatically unbox arrays of doubles") DEFINE_bool(string_slices, true,"use string slices") DEFINE_bool(crankshaft, true,"use crankshaft") DEFINE_string(hydrogen_filter,"","optimization filter") DEFINE_bool(use_range, true,"use hydrogen range analysis") DEFINE_bool(eliminate_dead_phis, true,"eliminate dead phis") DEFINE_bool(use_gvn, true,"use hydrogen global value numbering") DEFINE_bool(use_canonicalizing, true,"use hydrogen instruction canonicalizing") DEFINE_bool(use_inlining, true,"use function inlining") 
DEFINE_int(max_inlined_source_size, 600,"maximum source size in bytes considered for a single inlining") DEFINE_int(max_inlined_nodes, 196,"maximum number of AST nodes considered for a single inlining") DEFINE_int(max_inlined_nodes_cumulative, 196,"maximum cumulative number of AST nodes considered for inlining") DEFINE_bool(loop_invariant_code_motion, true,"loop invariant code motion") DEFINE_bool(collect_megamorphic_maps_from_stub_cache, true,"crankshaft harvests type feedback from stub cache") DEFINE_bool(hydrogen_stats, false,"print statistics for hydrogen") DEFINE_bool(trace_hydrogen, false,"trace generated hydrogen to file") DEFINE_string(trace_phase,"Z","trace generated IR for specified phases") DEFINE_bool(trace_inlining, false,"trace inlining decisions") DEFINE_bool(trace_alloc, false,"trace register allocator") DEFINE_bool(trace_all_uses, false,"trace all use positions") DEFINE_bool(trace_range, false,"trace range analysis") DEFINE_bool(trace_gvn, false,"trace global value numbering") DEFINE_bool(trace_representation, false,"trace representation types") DEFINE_bool(stress_pointer_maps, false,"pointer map for every instruction") DEFINE_bool(stress_environments, false,"environment for every instruction") DEFINE_int(deopt_every_n_times, 0,"deoptimize every n times a deopt point is passed") DEFINE_bool(trap_on_deopt, false,"put a break point before deoptimizing") DEFINE_bool(deoptimize_uncommon_cases, true,"deoptimize uncommon cases") DEFINE_bool(polymorphic_inlining, true,"polymorphic inlining") DEFINE_bool(use_osr, true,"use on-stack replacement") DEFINE_bool(array_bounds_checks_elimination, false,"perform array bounds checks elimination") DEFINE_bool(array_index_dehoisting, false,"perform array index dehoisting") DEFINE_bool(trace_osr, false,"trace on-stack replacement") DEFINE_int(stress_runs, 0,"number of stress runs") DEFINE_bool(optimize_closures, true,"optimize closures") DEFINE_bool(inline_construct, true,"inline constructor calls") DEFINE_bool(inline_arguments, true,"inline functions with arguments object") DEFINE_int(loop_weight, 1,"loop weight for representation inference") DEFINE_bool(optimize_for_in, true,"optimize functions containing for-in loops") DEFINE_bool(experimental_profiler, true,"enable all profiler experiments") DEFINE_bool(watch_ic_patching, false,"profiler considers IC stability") DEFINE_int(frame_count, 1,"number of stack frames inspected by the profiler") DEFINE_bool(self_optimization, false,"primitive functions trigger their own optimization") DEFINE_bool(direct_self_opt, false,"call recompile stub directly when self-optimizing") DEFINE_bool(retry_self_opt, false,"re-try self-optimization if it failed") DEFINE_bool(count_based_interrupts, false,"trigger profiler ticks based on counting instead of timing") DEFINE_bool(interrupt_at_exit, false,"insert an interrupt check at function exit") DEFINE_bool(weighted_back_edges, false,"weight back edges by jump distance for interrupt triggering") DEFINE_int(interrupt_budget, 5900,"execution budget before interrupt is triggered") DEFINE_int(type_info_threshold, 15,"percentage of ICs that must have type info to allow optimization") DEFINE_int(self_opt_count, 130,"call count before self-optimization") DEFINE_implication(experimental_profiler, watch_ic_patching) DEFINE_implication(experimental_profiler, self_optimization) DEFINE_implication(experimental_profiler, retry_self_opt) DEFINE_implication(experimental_profiler, count_based_interrupts) DEFINE_implication(experimental_profiler, interrupt_at_exit) 
DEFINE_implication(experimental_profiler, weighted_back_edges) DEFINE_bool(trace_opt_verbose, false,"extra verbose compilation tracing") DEFINE_implication(trace_opt_verbose, trace_opt) DEFINE_bool(debug_code, false,"generate extra code (assertions) for debugging") DEFINE_bool(code_comments, false,"emit comments in code disassembly") DEFINE_bool(enable_sse2, true,"enable use of SSE2 instructions if available") DEFINE_bool(enable_sse3, true,"enable use of SSE3 instructions if available") DEFINE_bool(enable_sse4_1, true,"enable use of SSE4.1 instructions if available") DEFINE_bool(enable_cmov, true,"enable use of CMOV instruction if available") DEFINE_bool(enable_rdtsc, true,"enable use of RDTSC instruction if available") DEFINE_bool(enable_sahf, true,"enable use of SAHF instruction if available (X64 only)") DEFINE_bool(enable_vfp3, true,"enable use of VFP3 instructions if available - this implies ""enabling ARMv7 instructions (ARM only)") DEFINE_bool(enable_armv7, true,"enable use of ARMv7 instructions if available (ARM only)") DEFINE_bool(enable_fpu, true,"enable use of MIPS FPU instructions if available (MIPS only)") DEFINE_string(expose_natives_as, NULL,"expose natives in global object") DEFINE_string(expose_debug_as, NULL,"expose debug in global object") DEFINE_bool(expose_gc, false,"expose gc extension") DEFINE_bool(expose_externalize_string, false,"expose externalize string extension") DEFINE_int(stack_trace_limit, 10,"number of stack frames to capture") DEFINE_bool(builtins_in_stack_traces, false,"show built-in functions in stack traces") DEFINE_bool(disable_native_files, false,"disable builtin natives files") DEFINE_bool(inline_new, true,"use fast inline allocation") DEFINE_bool(stack_trace_on_abort, true,"print a stack trace if an assertion failure occurs") DEFINE_bool(trace, false,"trace function calls") DEFINE_bool(mask_constants_with_cookie, true,"use random jit cookie to mask large constants") DEFINE_bool(lazy, true,"use lazy compilation") DEFINE_bool(trace_opt, false,"trace lazy optimization") DEFINE_bool(trace_opt_stats, false,"trace lazy optimization statistics") DEFINE_bool(opt, true,"use adaptive optimizations") DEFINE_bool(always_opt, false,"always try to optimize functions") DEFINE_bool(prepare_always_opt, false,"prepare for turning on always opt") DEFINE_bool(trace_deopt, false,"trace deoptimization") DEFINE_int(min_preparse_length, 1024,"minimum length for automatic enable preparsing") DEFINE_bool(always_full_compiler, false,"try to use the dedicated run-once backend for all code") DEFINE_bool(trace_bailout, false,"print reasons for falling back to using the classic V8 backend") DEFINE_bool(compilation_cache, true,"enable compilation cache") DEFINE_bool(cache_prototype_transitions, true,"cache prototype transitions") DEFINE_bool(trace_debug_json, false,"trace debugging JSON request/response") DEFINE_bool(debugger_auto_break, true,"automatically set the debug break flag when debugger commands are ""in the queue") DEFINE_bool(enable_liveedit, true,"enable liveedit experimental feature") DEFINE_bool(break_on_abort, true,"always cause a debug break before aborting") DEFINE_int(stack_size, kPointerSize *123,"default size of stack region v8 is allowed to use (in kBytes)") DEFINE_int(max_stack_trace_source_length, 300,"maximum length of function source code printed in a stack trace.") DEFINE_bool(always_inline_smi_code, false,"always inline smi code in non-opt code") DEFINE_int(max_new_space_size, 0,"max size of the new generation (in kBytes)") DEFINE_int(max_old_space_size, 
0,"max size of the old generation (in Mbytes)") DEFINE_int(max_executable_size, 0,"max size of executable memory (in Mbytes)") DEFINE_bool(gc_global, false,"always perform global GCs") DEFINE_int(gc_interval,-1,"garbage collect after <n> allocations") DEFINE_bool(trace_gc, false,"print one trace line following each garbage collection") DEFINE_bool(trace_gc_nvp, false,"print one detailed trace line in name=value format ""after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false,"print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false,"print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false,"report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true,"garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true,"flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true,"use incremental marking") DEFINE_bool(incremental_marking_steps, true,"do incremental marking steps") DEFINE_bool(trace_incremental_marking, false,"trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true,"Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false,"Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true,"use inline caching") DEFINE_bool(native_code_counters, false,"generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false,"Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true,"Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false,"Never perform compaction on full GC - testing only") DEFINE_bool(compact_code_space, true,"Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true,"Flush inline caches prior to mark compact collection and ""flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0,"Default seed for initializing random generator ""(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true,"allows verbose printing") DEFINE_bool(allow_natives_syntax, false,"allow natives syntax") DEFINE_bool(trace_sim, false,"Trace simulator execution") DEFINE_bool(check_icache, false,"Check icache flushes in ARM and MIPS simulator") DEFINE_int(stop_sim_at, 0,"Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8,"Stack alingment in bytes in simulator (4 or 8, 8 is default)") DEFINE_bool(trace_exception, false,"print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false,"preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true,"randomize hashes to avoid predictable hash collisions ""(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0,"Fixed seed to use to hash property keys (0 means random)""(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false,"activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true,"generate optimized regexp code") DEFINE_bool(testing_bool_flag, true,"testing_bool_flag") DEFINE_int(testing_int_flag, 13,"testing_int_flag") DEFINE_float(testing_float_flag, 2.5,"float-flag") DEFINE_string(testing_string_flag,"Hello, world!","string-flag") DEFINE_int(testing_prng_seed, 42,"Seed used for threading test randomness") DEFINE_string(testing_serialization_file,"/tmp/serdes","file 
in which to serialize heap") DEFINE_bool(help, false,"Print usage message, including flags, on console") DEFINE_bool(dump_counters, false,"Dump counters on exit") DEFINE_string(map_counters,"","Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT,"Pass all remaining arguments to the script. Alias for \"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") 
DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#47"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2 namespace{struct Flag{enum FlagType{TYPE_BOOL, TYPE_INT, TYPE_FLOAT, TYPE_STRING, TYPE_ARGS} name
void init_instance_descriptors()
static HeapObject * cast(Object *obj)
MUST_USE_RESULT MaybeObject * get(int index)
#define READ_UINT32_FIELD(p, offset)
void set_function_bindings(FixedArray *bindings)
static const byte kArgumentMarker
static const int kMaxHashCalcLength
bool is_access_check_needed()
static PropertyType ExtractTypeFromFlags(Flags flags)
void set_pre_allocated_property_fields(int value)
static const byte kUndefined
static AccessorPair * cast(Object *obj)
const int kVariableSizeSentinel
static void IterateBody(HeapObject *obj, int object_size, ObjectVisitor *v)
void Get(int descriptor_number, Descriptor *desc)
static Failure * OutOfMemoryException()
JSFunction * GetConstantFunction(int descriptor_number)
static const int kFastPropertiesSoftLimit
PropertyAttributes property_attributes()
bool IsAsciiRepresentation()
static ExternalTwoByteString * cast(Object *obj)
SeededNumberDictionary * element_dictionary()
static Map * cast(Object *obj)
void set_has_debug_break_slots(bool value)
void SetDataAtUnchecked(int index, Object *value, Heap *heap)
bool has_non_instance_prototype()
static const byte kTheHole
MUST_USE_RESULT MaybeObject * GetPropertyWithReceiver(Object *receiver, String *key, PropertyAttributes *attributes)
int BinarySearch(String *name, int low, int high)
static const int kExponentBias
bool attached_to_shared_function_info()
void set_context(Object *context)
#define READ_FIELD(p, offset)
static Handle< Object > UninitializedSentinel(Isolate *isolate)
bool IsTransitionOnly(int descriptor_number)
void Set(int descriptor_number, Descriptor *desc, const WhitenessWitness &)
bool SameValue(Object *other)
#define MAKE_STRUCT_CAST(NAME, Name, name)
static Failure * Exception()
uint8_t * external_pixel_pointer()
static Foreign * cast(Object *obj)
MUST_USE_RESULT MaybeObject * GetElementsTransitionMapSlow(ElementsKind elements_kind)
static bool IsMatch(String *key, Object *other)
static const int kTransitionsOffset
byte binary_op_result_type()
ACCESSORS(AccessorInfo, expected_receiver_type, Object, kExpectedReceiverTypeOffset)
#define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype, kHiddenPrototypeBit)
uint16_t SlicedStringGet(int index)
static Smi * FromIntptr(intptr_t value)
Context * global_context()
static Handle< Object > TransitionElementsKind(Handle< JSObject > object, ElementsKind to_kind)
#define READ_BYTE_FIELD(p, offset)
Address GetCharsAddress()
#define ASSERT(condition)
void set_profiler_ticks(int ticks)
Object * instance_prototype()
static MUST_USE_RESULT MaybeObject * AsObject(String *key)
const int kPointerSizeLog2
void set_start_position(int start_position)
#define WRITE_INT_FIELD(p, offset, value)
void set_optimizable(bool value)
void SetNullValueUnchecked(int descriptor_number, Heap *heap)
static Handle< Object > MegamorphicSentinel(Isolate *isolate)
Object * BypassGlobalProxy()
#define READ_INT64_FIELD(p, offset)
#define WRITE_UINT32_FIELD(p, offset, value)
static Context * cast(Object *context)
static Flags ComputeMonomorphicFlags(Kind kind, PropertyType type, ExtraICState extra_ic_state=kNoExtraICState, InlineCacheHolderFlag holder=OWN_MAP, int argc=-1)
static uint32_t HashForObject(Object *key, Object *object)
EXTERNAL_ELEMENTS_CHECK(UnsignedShort, EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE)
#define WRITE_INTPTR_FIELD(p, offset, value)
const uint32_t kStringRepresentationMask
bool NonFailureIsHeapObject()
int SizeFromMap(Map *map)
void set_compiled_optimizable(bool value)
static MUST_USE_RESULT MaybeObject * AsObject(Object *key)
void set(int index, float value)
Object * DataAt(int index)
Object ** GetKeySlot(int descriptor_number)
bool IsInternalError() const
bool HasSpecificClassOf(String *name)
int GetInternalFieldCount()
void initialize_elements()
uint8_t get_scalar(int index)
const Resource * resource()
MUST_USE_RESULT MaybeObject * get(int index)
const int kFastElementsKindCount
void set_first(String *first, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
void ReplaceCode(Code *code)
void set_map_and_elements(Map *map, FixedArrayBase *value, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
static ExternalAsciiString * cast(Object *obj)
MUST_USE_RESULT MaybeObject * EnsureWritableFastElements()
void set_the_hole(int index)
void set_foreign_address(Address value)
MUST_USE_RESULT MaybeObject * Copy()
bool MayContainTransitions()
void SeqTwoByteStringSet(int index, uint16_t value)
bool IsMarkedForLazyRecompilation()
static Code * cast(Object *obj)
const uint32_t kAsciiDataHintTag
#define CAST_ACCESSOR(type)
const uint32_t kShortExternalStringMask
void set(int index, uint32_t value)
bool HasElementWithReceiver(JSReceiver *receiver, uint32_t index)
int GetSequenceIndexFromFastElementsKind(ElementsKind elements_kind)
bool AsArrayIndex(uint32_t *index)
Object * GetValue(int descriptor_number)
BOOL_GETTER(SharedFunctionInfo, compiler_hints, optimization_disabled, kOptimizationDisabled)
static Object ** RawField(HeapObject *obj, int offset)
static Smi * cast(Object *object)
void set_literals(FixedArray *literals)
static void IterateBody(HeapObject *obj, ObjectVisitor *v)
static uint32_t Hash(Object *key)
void set(int index, uint16_t value)
void ClearCodeCache(Heap *heap)
bool Equals(String *other)
static const int kHeaderSize
void set_used_for_prototype(bool value)
Code * javascript_builtin_code(Builtins::JavaScript id)
MUST_USE_RESULT MaybeObject * get(int index)
int GetInObjectPropertyOffset(int index)
Object * GetInternalField(int index)
uint16_t SeqAsciiStringGet(int index)
void set_binary_op_type(byte value)
MUST_USE_RESULT MaybeObject * CopyFixedArrayWithMap(FixedArray *src, Map *map)
void set(int index, int16_t value)
Object * InObjectPropertyAtPut(int index, Object *value, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
uint16_t ExternalTwoByteStringGet(int index)
static const int kFirstOffset
bool IsPropertyDescriptor(T *desc)
ByteArray * unchecked_relocation_info()
bool HasFastSmiOrObjectElements()
MUST_USE_RESULT MaybeObject * GetIdentityHash(CreationFlag flag)
static const int kKindOffset
const uint32_t kNotStringTag
static void NoWriteBarrierSet(FixedArray *array, int index, Object *value)
static const int kParentOffset
String * GetKey(int descriptor_number)
bool HasNonStrictArgumentsElements()
MUST_USE_RESULT MaybeObject * GetIdentityHash(CreationFlag flag)
const uint64_t kHoleNanInt64
void set_the_hole(int index)
void set_is_pregenerated(bool value)
#define READ_SHORT_FIELD(p, offset)
#define FIELD_ADDR(p, offset)
void set_opt_reenable_tries(int value)
Object * GetElementNoExceptionThrown(uint32_t index)
STATIC_ASSERT((FixedDoubleArray::kHeaderSize & kDoubleAlignmentMask) == 0)
static SeededNumberDictionary * cast(Object *obj)
virtual void Validate(JSObject *obj)=0
MUST_USE_RESULT MaybeObject * SetContent(FixedArrayBase *storage)
const uint32_t kIsSymbolMask
void set_unchecked(int index, Smi *value)
MUST_USE_RESULT MaybeObject * get(int index)
static const int kExponentShift
bool IsStringObjectWithCharacterAt(uint32_t index)
static const int kValueOffset
const int kFailureTagSize
const uint32_t kHoleNanUpper32
static InlineCacheHolderFlag ExtractCacheHolderFromFlags(Flags flags)
void ExternalTwoByteStringIterateBody()
void set_undefined(int index)
static SlicedString * cast(Object *obj)
static const int kDontAdaptArgumentsSentinel
int pre_allocated_property_fields()
static uint32_t SeededHash(uint32_t key, uint32_t seed)
#define WRITE_BARRIER(heap, object, offset, value)
#define HAS_SMI_TAG(value)
void InitializeBody(int object_size)
#define MAKE_STRUCT_PREDICATE(NAME, Name, name)
static const int kFirstOffset
bool IsAsciiRepresentationUnderneath()
static Failure * RetryAfterGC()
void IteratePointers(ObjectVisitor *v, int start, int end)
int SeqTwoByteStringSize(InstanceType instance_type)
static const uchar kMaxNonSurrogateCharCode
static bool IsValid(intptr_t value)
void set_resource(const Resource *buffer)
static Failure * cast(MaybeObject *object)
const uint32_t kIsIndirectStringMask
#define READ_INT_FIELD(p, offset)
static const int kMinValue
bool ToArrayIndex(uint32_t *index)
ElementsKind GetFastElementsKindFromSequenceIndex(int sequence_number)
MUST_USE_RESULT MaybeObject * ResetElements()
ElementsKind GetElementsKind()
SharedFunctionInfo * unchecked_shared()
static Object * RawUninitializedSentinel(Heap *heap)
static Handle< Map > GetElementsTransitionMap(Handle< JSObject > object, ElementsKind to_kind)
MUST_USE_RESULT MaybeObject * AllocateFixedArrayWithHoles(int length, PretenureFlag pretenure=NOT_TENURED)
static const int kIsNotArrayIndexMask
#define TYPE_CHECKER(type, instancetype)
static Oddball * cast(Object *obj)
static Address & Address_at(Address addr)
int GetFieldIndex(int descriptor_number)
MUST_USE_RESULT MaybeObject * get(int index)
bool IsAligned(T value, U alignment)
static SeqAsciiString * cast(Object *obj)
void set_inobject_properties(int value)
unsigned safepoint_table_offset()
void set_hash_field(uint32_t value)
const uint16_t * ExternalTwoByteStringGetData(unsigned start)
bool HasElement(uint32_t index)
#define WRITE_SHORT_FIELD(p, offset, value)
const uint32_t kAsciiDataHintMask
AllocationSpace allocation_space() const
bool HasBuiltinFunctionId()
MUST_USE_RESULT MaybeObject * set_initial_map_and_cache_transitions(Map *value)
bool IsTwoByteRepresentationUnderneath()
static FunctionTemplateInfo * cast(Object *obj)
static const int kPropertiesOffset
T RoundUp(T x, intptr_t m)
void set_elements_transition_map(Map *transitioned_map)
static FixedDoubleArray * cast(Object *obj)
Object * FastPropertyAt(int index)
bool IsTwoByteRepresentation()
uint16_t ExternalAsciiStringGet(int index)
static Code * GetCodeFromTargetAddress(Address address)
bool is_inline_cache_stub()
bool IsFastSmiElementsKind(ElementsKind kind)
HeapObject * unchecked_prototype_transitions()
bool AllowsSetElementsLength()
Object * unchecked_context()
const uint32_t kShortExternalStringTag
static int SmiValue(internal::Object *value)
ElementsKind FastSmiToObjectElementsKind(ElementsKind from_kind)
int SeqAsciiStringSize(InstanceType instance_type)
Object * FastPropertyAtPut(int index, Object *value)
void Update(DescriptorArray *array, String *name, int result)
static int GetIdentityHash(Handle< JSObject > obj)
static int SizeFor(int length)
static const int kElementsOffset
WriteBarrierMode GetWriteBarrierMode(const AssertNoAllocation &)
void set_resource(const Resource *buffer)
PropertyDetails GetDetails(int descriptor_number)
Object ** GetFirstElementAddress()
static uint32_t HashForObject(uint32_t key, Object *object)
BuiltinFunctionId builtin_function_id()
int8_t get_scalar(int index)
MUST_USE_RESULT MaybeObject * Copy()
const uint32_t kStringTag
byte * relocation_start()
InlineCacheState ic_state()
static uint32_t HashForObject(String *key, Object *object)
void set_construction_count(int value)
double get_scalar(int index)
uint16_t ConsStringGet(int index)
DescriptorLookupCache * descriptor_lookup_cache()
void set_map_no_write_barrier(Map *value)
void set_check_type(CheckType value)
void DontAdaptArguments()
bool has_function_cache()
void set_to_boolean_state(byte value)
static int OffsetOfElementAt(int index)
void set(int index, uint8_t value)
PropertyAttributes GetPropertyAttributeWithReceiver(JSReceiver *receiver, String *name)
static int SizeFor(int length)
static ExtraICState ExtractExtraICStateFromFlags(Flags flags)
bool IsFastSmiOrObjectElementsKind(ElementsKind kind)
void SetCell(int index, JSGlobalPropertyCell *cell)
static ElementsAccessor * ForKind(ElementsKind elements_kind)
static SeqTwoByteString * cast(Object *obj)
int16_t get_scalar(int index)
const int kElementsKindCount
void SetDataAt(int index, Object *value)
static bool IsMatch(Object *key, Object *other)
static const int kHeaderSize
bool HasElementWithHandler(uint32_t index)
void set(int index, double value)
MUST_USE_RESULT MaybeObject * NumberFromDouble(double value, PretenureFlag pretenure=NOT_TENURED)
static InlineCacheState ExtractICStateFromFlags(Flags flags)
bool HasProperty(String *name)
bool has_deoptimization_support()
static Kind ExtractKindFromFlags(Flags flags)
static const int kMapOffset
bool has_named_interceptor()
static int ExtractArgumentsCountFromFlags(Flags flags)
bool is_the_hole(int index)
void set_instance_type(InstanceType value)
const uint32_t kIsNotStringMask
bool IsOutOfMemoryException() const
static HeapNumber * cast(Object *obj)
int32_t get_scalar(int index)
static StringDictionary * cast(Object *obj)
void set_value(double value)
MUST_USE_RESULT MaybeObject * CopyFixedArray(FixedArray *src)
LanguageMode language_mode()
bool should_have_prototype()
static const int kLengthOffset
static double nan_value()
bool has_deoptimization_support()
MUST_USE_RESULT MaybeObject * get(int index)
uint32_t get_scalar(int index)
uint32_t ComputeIntegerHash(uint32_t key, uint32_t seed)
bool is_the_hole(int index)
static const int kBitField3StorageOffset
AccessorDescriptor * GetCallbacks(int descriptor_number)
V8EXPORT bool IsNumber() const
ExtraICState extra_ic_state()
static int SizeFor(int length)
const intptr_t kObjectAlignment
void SetInternalField(int index, Object *value)
PropertyType GetType(int descriptor_number)
static JSGlobalPropertyCell * cast(Object *obj)
MUST_USE_RESULT MaybeObject * NumberFromUint32(uint32_t value, PretenureFlag pretenure=NOT_TENURED)
IncrementalMarking * incremental_marking()
MUST_USE_RESULT MaybeObject * get(int index)
bool has_indexed_interceptor()
ElementsKind GetInitialFastElementsKind()
void ForeignIterateBody()
static const uint32_t kHashBitMask
bool HasPropertyWithHandler(String *name)
Object * GetBackPointer()
static Flags ComputeFlags(Kind kind, InlineCacheState ic_state=UNINITIALIZED, ExtraICState extra_ic_state=kNoExtraICState, PropertyType type=NORMAL, int argc=-1, InlineCacheHolderFlag holder=OWN_MAP)
void AddCharacterNoIndex(uint32_t c)
static const int kEntriesIndex
static const uint32_t kSignMask
void set_bit_field(byte value)
static int SizeFor(int length)
static JSValue * cast(Object *obj)
static const int kHeaderSize
uint32_t HashSequentialString(const schar *chars, int length, uint32_t seed)
FunctionTemplateInfo * get_api_func_data()
void EnsureSize(int minimum_size_of_backing_fixed_array)
const Resource * resource()
int number_of_descriptors()
void set(int index, double value)
#define WRITE_FIELD(p, offset, value)
static const int kFullStringRepresentationMask
void MemsetPointer(T **dest, U *value, int counter)
void set_major_key(int value)
bool NeedsArgumentsAdaption()
void Set(int index, uint16_t value)
static void NoIncrementalWriteBarrierSet(FixedArray *array, int index, Object *value)
V8EXPORT bool IsFalse() const
void set_is_access_check_needed(bool access_check_needed)
MUST_USE_RESULT MaybeObject * CreateNextElementsTransition(ElementsKind elements_kind)
MUST_USE_RESULT MaybeObject * GetProperty(String *key)
#define ASSERT_EQ(v1, v2)
double get_scalar(int index)
bool HasFastObjectElements()
MUST_USE_RESULT MaybeObject * get(int index)
InstanceType instance_type()
static JSProxy * cast(Object *obj)
static const int kMaxFastProperties
static HeapObject * FromAddress(Address address)
bool HasFastHoleyElements()
bool requires_slow_elements()
void set(int index, byte value)
void SetDetailsUnchecked(int descriptor_number, Smi *value)
static double canonical_not_the_hole_nan_as_double()
#define INT_ACCESSORS(holder, name, offset)
bool TooManyFastProperties(int properties, StoreFromKeyed store_mode)
static FixedArray * cast(Object *obj)
bool IsProperty(int descriptor_number)
StringHasher(int length, uint32_t seed)
static const int kHeaderSize
void set(int index, int8_t value)
static Smi * set(Smi *smi, int bit_position, bool v)
bool used_for_prototype()
void SeqAsciiStringSet(int index, uint16_t value)
void set_parent(String *parent)
bool IsCompatibleReceiver(Object *receiver)
static HashTable * cast(Object *obj)
void set_is_extensible(bool value)
ElementsKind elements_kind()
void set_is_shared(bool value)
uint16_t get_scalar(int index)
static Handle< Object > GetElement(Handle< Object > object, uint32_t index)
void set_compare_operation(byte value)
void set_attached_to_shared_function_info(bool value)
void set_stack_slots(unsigned slots)
MUST_USE_RESULT MaybeObject * NumberFromInt32(int32_t value, PretenureFlag pretenure=NOT_TENURED)
const uint32_t kIsIndirectStringTag
void SetEntry(int entry, Object *key, Object *value)
Object * GetCallbacksObject(int descriptor_number)
void set_instance_size(int value)
JSFunction * unchecked_constructor()
void set(int index, int32_t value)
bool IsFastHoleyElementsKind(ElementsKind kind)
Address GetDataStartAddress()
static uint32_t Hash(String *key)
bool IsNullDescriptor(int descriptor_number)
bool HasDictionaryElements()
void set_javascript_builtin_code(Builtins::JavaScript id, Code *value)
ElementsAccessor * GetElementsAccessor()
bool IsInstanceOf(FunctionTemplateInfo *type)
bool HasFastDoubleElements()
String * TryFlattenGetString(PretenureFlag pretenure=NOT_TENURED)
void set_bit_field3_storage(int value)
void set_bit_field2(byte value)
static MUST_USE_RESULT MaybeObject * AsObject(uint32_t key)
void set_finger_index(int finger_index)
void set_map_word(MapWord map_word)
void set_binary_op_result_type(byte value)
#define SLOT_ADDR(obj, offset)
bool has_debug_break_slots()
void set(int index, uint8_t value)
bool HasIndexedInterceptor()
Object * unchecked_second()
static const byte kNotBooleanMask
static const int kExternalTwoByteRepresentationTag
Address foreign_address()
const uint32_t kSymbolTag
static const int kEntrySize
bool HasLocalProperty(String *name)
const int kFailureTypeTagMask
MUST_USE_RESULT MaybeObject * get(int index)
static Flags RemoveTypeFromFlags(Flags flags)
void set_visitor_id(int visitor_id)
static bool HasHeapObjectTag(internal::Object *value)
const uint32_t kAsciiStringTag
#define ACCESSORS_TO_SMI(holder, name, offset)
void set_property_attributes(PropertyAttributes attributes)
void set_code(Code *code)
SMI_ACCESSORS(TypeFeedbackInfo, ic_with_type_info_count, kIcWithTypeinfoCountOffset)
MUST_USE_RESULT MaybeObject * GetHash(CreationFlag flag)
static ConsString * cast(Object *obj)
void set_safepoint_table_offset(unsigned offset)
static FixedArrayBase * cast(Object *object)
bool is_compiled_optimizable()
bool ContainsOnlySmisOrHoles()
static const int kTransitionsIndex
void set_flags(Flags flags)
static const int kMaxValue
static const int kCodeCacheOffset
MUST_USE_RESULT MaybeObject * get(int index)
#define WRITE_DOUBLE_FIELD(p, offset, value)
static const int kNotFound
void set_non_instance_prototype(bool value)
void increment_deopt_count()
const uc32 kMaxAsciiCharCode
uint16_t SeqTwoByteStringGet(int index)
Object ** GetValueSlot(int descriptor_number)
StringDictionary * property_dictionary()
static uint32_t SeededHashForObject(uint32_t key, uint32_t seed, Object *object)
ElementsKind GetHoleyElementsKind(ElementsKind packed_kind)
static const int kExponentOffset
static const int kValueOffset
void set_allow_osr_at_loop_nesting_level(int level)
static JSObject * cast(Object *obj)
uint32_t RoundUpToPowerOf2(uint32_t x)
bool HasExternalArrayElements()
int64_t get_representation(int index)
#define MAKE_STRUCT_CASE(NAME, Name, name)
Object * javascript_builtin(Builtins::JavaScript id)
PropertyAttributes GetLocalPropertyAttribute(String *name)
uint32_t max_number_key()
void set_initial_map(Map *value)
bool IsFastDoubleElementsKind(ElementsKind kind)
void set_has_function_cache(bool flag)
MUST_USE_RESULT MaybeObject * EnsureCanContainElements(Object **elements, uint32_t count, EnsureElementsMode mode)
static const int kFirstIndex
void set_unused_property_fields(int value)
const uint32_t kStringEncodingMask
const uint16_t * GetChars()
void init_prototype_transitions(Object *undefined)
void set_stack_check_table_offset(unsigned offset)
void set_bit_field3(int value)
static int ComputeCapacity(int at_least_space_for)
void ExternalAsciiStringIterateBody()
WhitenessWitness(DescriptorArray *array)
void InitializeBody(Map *map, Object *pre_allocated_value, Object *filler_value)
void set_requires_slow_elements()
void EnterNoMarkingScope()
static JSFunction * cast(Object *obj)