35 #ifndef V8_OBJECTS_INL_H_
36 #define V8_OBJECTS_INL_H_
57 PropertyDetails::PropertyDetails(Smi* smi) {
58 value_ = smi->value();
62 Smi* PropertyDetails::AsSmi()
const {
65 int value = value_ << 1;
70 PropertyDetails PropertyDetails::AsDeleted()
const {
71 Smi* smi =
Smi::FromInt(value_ | DeletedField::encode(1));
72 return PropertyDetails(smi);
// Emits Object::Is<type>(): true iff the receiver is a HeapObject whose
// map()->instance_type() equals |instancetype|.
// NOTE(review): the macro's closing line(s) fall outside this excerpt.
76 #define TYPE_CHECKER(type, instancetype) \
77 bool Object::Is##type() { \
78 return Object::IsHeapObject() && \
79 HeapObject::cast(this)->map()->instance_type() == instancetype; \
// Emits type::cast(Object*): a reinterpret_cast downcast, guarded by a
// SLOW_ASSERT type check in debug builds only.
83 #define CAST_ACCESSOR(type) \
84 type* type::cast(Object* object) { \
85 SLOW_ASSERT(object->Is##type()); \
86 return reinterpret_cast<type*>(object); \
// Same shape as CAST_ACCESSOR, instantiated separately for the fixed
// typed-array classes.
90 #define FIXED_TYPED_ARRAY_CAST_ACCESSOR(type) \
92 type* type::cast(Object* object) { \
93 SLOW_ASSERT(object->Is##type()); \
94 return reinterpret_cast<type*>(object); \
// Emits a getter/setter pair for a raw (untagged) int field at |offset|.
// Raw ints are not heap pointers, so no write barrier is required.
97 #define INT_ACCESSORS(holder, name, offset) \
98 int holder::name() { return READ_INT_FIELD(this, offset); } \
99 void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }
// Emits a getter/setter pair for a tagged-pointer field. The setter applies
// CONDITIONAL_WRITE_BARRIER so the store is visible to incremental marking
// and new-space write recording (skipped when |mode| says so).
// NOTE(review): the macro's closing line(s) fall outside this excerpt.
102 #define ACCESSORS(holder, name, type, offset) \
103 type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
104 void holder::set_##name(type* value, WriteBarrierMode mode) { \
105 WRITE_FIELD(this, offset, value); \
106 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \
// Emits Smi-typed accessors. The setter is a plain field write with no
// write barrier -- Smis are immediates, not heap pointers.
111 #define ACCESSORS_TO_SMI(holder, name, offset) \
112 Smi* holder::name() { return Smi::cast(READ_FIELD(this, offset)); } \
113 void holder::set_##name(Smi* value, WriteBarrierMode mode) { \
114 WRITE_FIELD(this, offset, value); \
// Emits int accessors backed by a Smi-encoded tagged field: the getter
// decodes Smi -> int, the setter re-encodes via Smi::FromInt. No write
// barrier is needed for Smi stores.
119 #define SMI_ACCESSORS(holder, name, offset) \
120 int holder::name() { \
121 Object* value = READ_FIELD(this, offset); \
122 return Smi::cast(value)->value(); \
124 void holder::set_##name(int value) { \
125 WRITE_FIELD(this, offset, Smi::FromInt(value)); \
// BOOL_GETTER emits a read-only boolean accessor for bit |offset| of the
// Smi-valued |field|; BOOL_ACCESSORS additionally emits the setter, which
// rewrites the whole field through BooleanBit::set.
129 #define BOOL_GETTER(holder, field, name, offset) \
130 bool holder::name() { \
131 return BooleanBit::get(field(), offset); \
135 #define BOOL_ACCESSORS(holder, field, name, offset) \
136 bool holder::name() { \
137 return BooleanBit::get(field(), offset); \
139 void holder::set_##name(bool value) { \
140 set_##field(BooleanBit::set(field(), offset, value)); \
145 return IsFixedArray() || IsFixedDoubleArray() || IsConstantPoolArray() ||
146 IsFixedTypedArrayBase() || IsExternalArray();
152 return Object::IsHeapObject() &&
159 return IsExecutableAccessorInfo() || IsDeclaredAccessorInfo();
163 bool Object::IsSmi() {
168 bool Object::IsHeapObject() {
174 ASSERT(!this->IsFailure());
175 return (reinterpret_cast<intptr_t>(
this) &
kSmiTagMask) != 0;
184 return Object::IsHeapObject()
189 bool Object::IsName() {
190 return IsString() || IsSymbol();
194 bool Object::IsUniqueName() {
195 return IsInternalizedString() || IsSymbol();
199 bool Object::IsSpecObject() {
200 return Object::IsHeapObject()
205 bool Object::IsSpecFunction() {
206 if (!Object::IsHeapObject())
return false;
212 bool Object::IsInternalizedString() {
213 if (!this->IsHeapObject())
return false;
221 bool Object::IsConsString() {
222 if (!IsString())
return false;
227 bool Object::IsSlicedString() {
228 if (!IsString())
return false;
233 bool Object::IsSeqString() {
234 if (!IsString())
return false;
239 bool Object::IsSeqOneByteString() {
240 if (!IsString())
return false;
241 return StringShape(
String::cast(
this)).IsSequential() &&
246 bool Object::IsSeqTwoByteString() {
247 if (!IsString())
return false;
248 return StringShape(
String::cast(
this)).IsSequential() &&
253 bool Object::IsExternalString() {
254 if (!IsString())
return false;
259 bool Object::IsExternalAsciiString() {
260 if (!IsString())
return false;
266 bool Object::IsExternalTwoByteString() {
267 if (!IsString())
return false;
274 return IsFixedArray() || IsFixedDoubleArray() || IsExternalArray() ||
275 IsFixedTypedArrayBase();
281 if (representation.
IsSmi() && IsUninitialized()) {
284 if (!representation.
IsDouble())
return this;
285 if (IsUninitialized()) {
292 StringShape::StringShape(
String* str)
293 : type_(str->
map()->instance_type()) {
299 StringShape::StringShape(Map*
map)
300 : type_(map->instance_type()) {
307 : type_(static_cast<uint32_t>(t)) {
313 bool StringShape::IsInternalized() {
372 bool StringShape::IsCons() {
377 bool StringShape::IsSliced() {
382 bool StringShape::IsIndirect() {
387 bool StringShape::IsExternal() {
392 bool StringShape::IsSequential() {
403 uint32_t StringShape::encoding_tag() {
408 uint32_t StringShape::full_representation_tag() {
420 bool StringShape::IsSequentialAscii() {
425 bool StringShape::IsSequentialTwoByte() {
430 bool StringShape::IsExternalAscii() {
441 bool StringShape::IsExternalTwoByte() {
452 ASSERT(0 <= index && index <= length_);
454 return static_cast<const byte*
>(start_)[index];
456 return static_cast<const uc16*
>(start_)[index];
461 template <
typename Char>
505 : string_(string), from_(from), length_(length) {
506 if (string_->IsSlicedString()) {
509 ASSERT(string_->IsSeqString() ||
string->IsExternalString());
514 ASSERT(from_ + length_ <= string_->length());
515 const Char* chars = GetChars() + from_;
517 chars, length_, string_->GetHeap()->HashSeed());
531 const Char* GetChars();
533 while (string->IsSlicedString()) {
535 *offset += sliced->
offset();
536 string = sliced->
parent();
541 Handle<String> string_;
544 uint32_t hash_field_;
598 return IsSmi() || IsHeapNumber();
607 if (!Object::IsHeapObject())
return false;
613 bool Object::IsExternalArray() {
614 if (!Object::IsHeapObject())
// For each typed-array element type, instantiates TYPE_CHECKER for both the
// external (off-heap backing store) and the fixed (on-heap) array variants.
623 #define TYPED_ARRAY_TYPE_CHECKER(Type, type, TYPE, ctype, size) \
624 TYPE_CHECKER(External##Type##Array, EXTERNAL_##TYPE##_ARRAY_TYPE) \
625 TYPE_CHECKER(Fixed##Type##Array, FIXED_##TYPE##_ARRAY_TYPE)
628 #undef TYPED_ARRAY_TYPE_CHECKER
631 bool Object::IsFixedTypedArrayBase() {
632 if (!Object::IsHeapObject())
return false;
641 bool MaybeObject::IsFailure() {
646 bool MaybeObject::IsRetryAfterGC() {
652 bool MaybeObject::IsException() {
657 bool MaybeObject::IsTheHole() {
658 return !IsFailure() && ToObjectUnchecked()->IsTheHole();
662 bool MaybeObject::IsUninitialized() {
663 return !IsFailure() && ToObjectUnchecked()->IsUninitialized();
673 bool Object::IsJSReceiver() {
675 return IsHeapObject() &&
680 bool Object::IsJSObject() {
682 return IsHeapObject() &&
687 bool Object::IsJSProxy() {
688 if (!Object::IsHeapObject())
return false;
706 bool Object::IsJSWeakCollection() {
707 return IsJSWeakMap() || IsJSWeakSet();
711 bool Object::IsDescriptorArray() {
712 return IsFixedArray();
716 bool Object::IsTransitionArray() {
717 return IsFixedArray();
721 bool Object::IsDeoptimizationInputData() {
723 if (!IsFixedArray())
return false;
730 if (length == 0)
return true;
733 return length >= 0 &&
738 bool Object::IsDeoptimizationOutputData() {
739 if (!IsFixedArray())
return false;
748 bool Object::IsDependentCode() {
749 if (!IsFixedArray())
return false;
756 bool Object::IsContext() {
757 if (!Object::IsHeapObject())
return false;
760 return (map == heap->function_context_map() ||
761 map == heap->catch_context_map() ||
762 map == heap->with_context_map() ||
763 map == heap->native_context_map() ||
764 map == heap->block_context_map() ||
765 map == heap->module_context_map() ||
766 map == heap->global_context_map());
770 bool Object::IsNativeContext() {
771 return Object::IsHeapObject() &&
777 bool Object::IsScopeInfo() {
778 return Object::IsHeapObject() &&
788 return obj->IsJSFunction();
804 bool Object::IsStringWrapper() {
805 return IsJSValue() &&
JSValue::cast(
this)->value()->IsString();
812 bool
Object::IsBoolean() {
813 return IsOddball() &&
824 bool Object::IsJSArrayBufferView() {
825 return IsJSDataView() || IsJSTypedArray();
833 return obj->IsJSArray();
837 bool Object::IsHashTable() {
838 return Object::IsHeapObject() &&
844 bool Object::IsDictionary() {
845 return IsHashTable() &&
850 bool Object::IsStringTable() {
851 return IsHashTable() &&
856 bool Object::IsJSFunctionResultCache() {
857 if (!IsFixedArray())
return false;
859 int length =
self->length();
866 if (FLAG_verify_heap) {
867 reinterpret_cast<JSFunctionResultCache*
>(
this)->
868 JSFunctionResultCacheVerify();
875 bool Object::IsNormalizedMapCache() {
876 if (!IsFixedArray())
return false;
881 if (FLAG_verify_heap) {
882 reinterpret_cast<NormalizedMapCache*
>(
this)->NormalizedMapCacheVerify();
889 bool Object::IsCompilationCacheTable() {
890 return IsHashTable();
894 bool Object::IsCodeCacheHashTable() {
895 return IsHashTable();
899 bool Object::IsPolymorphicCodeCacheHashTable() {
900 return IsHashTable();
904 bool Object::IsMapCache() {
905 return IsHashTable();
909 bool Object::IsObjectHashTable() {
910 return IsHashTable();
914 bool Object::IsPrimitive() {
915 return IsOddball() || IsNumber() || IsString();
919 bool Object::IsJSGlobalProxy() {
920 bool result = IsHeapObject() &&
929 bool Object::IsGlobalObject() {
930 if (!IsHeapObject())
return false;
942 bool Object::IsUndetectableObject() {
943 return IsHeapObject()
948 bool Object::IsAccessCheckNeeded() {
949 if (!IsHeapObject())
return false;
950 if (IsJSGlobalProxy()) {
952 GlobalObject* global =
953 proxy->GetIsolate()->context()->global_object();
954 return proxy->IsDetachedFrom(global);
961 if (!IsHeapObject())
return false;
963 #define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
965 #undef MAKE_STRUCT_CASE
966 default:
return false;
971 #define MAKE_STRUCT_PREDICATE(NAME, Name, name) \
972 bool Object::Is##Name() { \
973 return Object::IsHeapObject() \
974 && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
977 #undef MAKE_STRUCT_PREDICATE
990 bool Object::IsTheHole() {
995 bool Object::IsUninitialized() {
1018 ?
static_cast<double>(
reinterpret_cast<Smi*
>(
this)->value())
1019 : reinterpret_cast<HeapNumber*>(
this)->value();
1029 if (object->IsSmi())
return object;
1030 if (object->IsHeapNumber()) {
1032 int int_value =
FastD2I(value);
1043 if (IsSmi())
return this;
1044 if (IsHeapNumber()) {
1046 int int_value =
FastD2I(value);
1066 ASSERT(AllowHeapAllocation::IsAllowed());
// FIELD_ADDR turns a tagged HeapObject pointer plus a byte offset into a raw
// address by subtracting the kHeapObjectTag bias.
1092 #define FIELD_ADDR(p, offset) \
1093 (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)
// Reads the tagged Object* slot at |offset|.
1095 #define READ_FIELD(p, offset) \
1096 (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)))
// Writes the tagged Object* slot at |offset|. No write barrier here --
// callers pair this with WRITE_BARRIER / CONDITIONAL_WRITE_BARRIER.
1098 #define WRITE_FIELD(p, offset, value) \
1099 (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)
// WRITE_BARRIER always notifies incremental marking of the store and, when
// |value| is in new space, records the slot (heap->RecordWrite) so the
// old-to-new remembered set stays correct. CONDITIONAL_WRITE_BARRIER does
// the same work only when |mode| == UPDATE_WRITE_BARRIER.
// NOTE(review): the closing lines of both macros fall outside this excerpt.
1101 #define WRITE_BARRIER(heap, object, offset, value) \
1102 heap->incremental_marking()->RecordWrite( \
1103 object, HeapObject::RawField(object, offset), value); \
1104 if (heap->InNewSpace(value)) { \
1105 heap->RecordWrite(object->address(), offset); \
1108 #define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode) \
1109 if (mode == UPDATE_WRITE_BARRIER) { \
1110 heap->incremental_marking()->RecordWrite( \
1111 object, HeapObject::RawField(object, offset), value); \
1112 if (heap->InNewSpace(value)) { \
1113 heap->RecordWrite(object->address(), offset); \
1117 #ifndef V8_TARGET_ARCH_MIPS
1118 #define READ_DOUBLE_FIELD(p, offset) \
1119 (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)))
1120 #else // V8_TARGET_ARCH_MIPS
// On MIPS the double is assembled from two separate 32-bit loads via the
// union |c| (declared on lines missing from this excerpt) -- presumably to
// avoid unaligned 64-bit accesses. TODO(review): confirm against full file.
1123 static inline double read_double_field(
void* p,
int offset) {
1128 c.u[0] = (*
reinterpret_cast<uint32_t*
>(
FIELD_ADDR(p, offset)));
1129 c.u[1] = (*
reinterpret_cast<uint32_t*
>(
FIELD_ADDR(p, offset + 4)));
1132 #define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset)
1133 #endif // V8_TARGET_ARCH_MIPS
1135 #ifndef V8_TARGET_ARCH_MIPS
1136 #define WRITE_DOUBLE_FIELD(p, offset, value) \
1137 (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
1138 #else // V8_TARGET_ARCH_MIPS
// Mirror of read_double_field: stores the two 32-bit halves separately.
1141 static inline void write_double_field(
void* p,
int offset,
1148 (*
reinterpret_cast<uint32_t*
>(
FIELD_ADDR(p, offset))) = c.u[0];
1149 (*
reinterpret_cast<uint32_t*
>(
FIELD_ADDR(p, offset + 4))) = c.u[1];
1151 #define WRITE_DOUBLE_FIELD(p, offset, value) \
1152 write_double_field(p, offset, value)
1153 #endif // V8_TARGET_ARCH_MIPS
// Raw typed accessors for untagged fields of each primitive width
// (int, intptr_t, uint32/int32/int64, uint16 "short", byte). None of these
// store heap pointers, so no write barriers are involved.
1156 #define READ_INT_FIELD(p, offset) \
1157 (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)))
1159 #define WRITE_INT_FIELD(p, offset, value) \
1160 (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)
1162 #define READ_INTPTR_FIELD(p, offset) \
1163 (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)))
1165 #define WRITE_INTPTR_FIELD(p, offset, value) \
1166 (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)
1168 #define READ_UINT32_FIELD(p, offset) \
1169 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)))
1171 #define WRITE_UINT32_FIELD(p, offset, value) \
1172 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)
1174 #define READ_INT32_FIELD(p, offset) \
1175 (*reinterpret_cast<int32_t*>(FIELD_ADDR(p, offset)))
1177 #define WRITE_INT32_FIELD(p, offset, value) \
1178 (*reinterpret_cast<int32_t*>(FIELD_ADDR(p, offset)) = value)
1180 #define READ_INT64_FIELD(p, offset) \
1181 (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)))
1183 #define WRITE_INT64_FIELD(p, offset, value) \
1184 (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value)
1186 #define READ_SHORT_FIELD(p, offset) \
1187 (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)))
1189 #define WRITE_SHORT_FIELD(p, offset, value) \
1190 (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)
1192 #define READ_BYTE_FIELD(p, offset) \
1193 (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)))
1195 #define WRITE_BYTE_FIELD(p, offset, value) \
1196 (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)
1218 return reinterpret_cast<Smi*
>((value << smi_shift_bits) |
kSmiTag);
1249 intptr_t Failure::value()
const {
1250 return static_cast<intptr_t
>(
1266 Failure* Failure::Construct(
Type type, intptr_t value) {
1272 static const int kFailureMagicPattern = 0x0BAD0000;
1273 return reinterpret_cast<Failure*
>(
1285 MapWord MapWord::FromMap(
Map* map) {
1286 return MapWord(reinterpret_cast<uintptr_t>(map));
1290 Map* MapWord::ToMap() {
1291 return reinterpret_cast<Map*
>(value_);
1295 bool MapWord::IsForwardingAddress() {
1296 return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
1300 MapWord MapWord::FromForwardingAddress(HeapObject*
object) {
1302 return MapWord(reinterpret_cast<uintptr_t>(raw));
1306 HeapObject* MapWord::ToForwardingAddress() {
1307 ASSERT(IsForwardingAddress());
1313 void HeapObject::VerifyObjectField(
int offset) {
1317 void HeapObject::VerifySmiField(
int offset) {
1343 if (value !=
NULL) {
1386 v->VisitPointers(reinterpret_cast<Object**>(
FIELD_ADDR(
this, start)),
1387 reinterpret_cast<Object**>(
FIELD_ADDR(
this, end)));
1392 v->VisitPointer(reinterpret_cast<Object**>(
FIELD_ADDR(
this, offset)));
1397 v->VisitNextCodeLink(reinterpret_cast<Object**>(
FIELD_ADDR(
this, offset)));
1433 for (
int i = 0; i <
length(); ++i) {
1434 Object* candidate = *current++;
1435 if (!candidate->IsSmi() && candidate != the_hole)
return false;
1448 #ifdef ENABLE_SLOW_ASSERTS
1479 if (FLAG_pretenuring_call_new ||
1490 if (FLAG_pretenuring_call_new ||
1501 if (FLAG_allocation_site_pretenuring) {
1526 int value = pretenure_data()->value();
1548 ASSERT(FLAG_allocation_site_pretenuring);
1555 bool decision_changed =
false;
1560 minimum_mementos_created || FLAG_trace_pretenuring_statistics ?
1561 static_cast<double>(found_count) / create_count : 0.0;
1564 if (minimum_mementos_created) {
1570 decision_changed =
true;
1575 if (FLAG_trace_pretenuring_statistics) {
1577 "AllocationSite(%p): (created, found, ratio) (%d, %d, %f) %s => %s\n",
1578 static_cast<void*>(
this), create_count, found_count, ratio,
1579 current_mode ==
TENURED ?
"tenured" :
"not tenured",
1586 return decision_changed;
1591 object->ValidateElements();
1592 ElementsKind elements_kind =
object->map()->elements_kind();
1607 ElementsKind current_kind =
object->map()->elements_kind();
1614 Heap* heap =
object->GetHeap();
1615 Object* the_hole = heap->the_hole_value();
1616 for (uint32_t i = 0; i < count; ++i) {
1617 Object* current = *objects++;
1618 if (current == the_hole) {
1621 }
else if (!current->IsSmi()) {
1630 }
else if (is_holey) {
1639 if (target_kind != current_kind) {
1649 Heap* heap =
object->GetHeap();
1650 if (elements->map() != heap->fixed_double_array_map()) {
1651 ASSERT(elements->map() == heap->fixed_array_map() ||
1652 elements->map() == heap->fixed_cow_array_map());
1668 for (uint32_t i = 0; i < length; ++i) {
1669 if (double_array->is_the_hole(i)) {
1681 Map* current_map =
map();
1683 if (from_kind == to_kind)
return current_map;
1686 Object* maybe_array_maps = native_context->js_array_maps();
1687 if (maybe_array_maps->IsFixedArray()) {
1689 if (array_maps->
get(from_kind) == current_map) {
1690 Object* maybe_transitioned_map = array_maps->
get(to_kind);
1691 if (maybe_transitioned_map->IsMap()) {
1692 return Map::cast(maybe_transitioned_map);
1705 if (new_map !=
NULL) {
1713 ASSERT((
map()->has_fast_smi_or_object_elements() ||
1714 (value ==
GetHeap()->empty_fixed_array())) ==
1715 (value->
map() ==
GetHeap()->fixed_array_map() ||
1716 value->
map() ==
GetHeap()->fixed_cow_array_map()));
1718 (
map()->has_fast_double_elements() == value->IsFixedDoubleArray()));
1729 void JSObject::initialize_properties() {
1736 if (
map()->has_fast_smi_or_object_elements() ||
1737 map()->has_fast_double_elements()) {
1740 }
else if (
map()->has_external_array_elements()) {
1744 }
else if (
map()->has_fixed_typed_array_elements()) {
1756 if (
map()->is_observed()) {
1760 if (!maybe->To(&dictionary))
return maybe;
1761 if (
map() ==
GetHeap()->sloppy_arguments_elements_map()) {
1764 set_elements(dictionary);
1770 if (!FLAG_smi_only_arrays) {
1775 if (!maybe->To(&map))
return maybe;
1809 int transition = transitions->
Search(*key);
1811 PropertyDetails target_details = transitions->
GetTargetDetails(transition);
1839 ASSERT(!val->IsPropertyCell() && !val->IsCell());
1845 Object* PropertyCell::type_raw() {
1971 ASSERT(index < properties()->length());
1972 return properties()->get(index);
1985 ASSERT(index < properties()->length());
1986 properties()->set(index, value);
2015 Object* pre_allocated_value,
2017 ASSERT(!filler_value->IsHeapObject() ||
2019 ASSERT(!pre_allocated_value->IsHeapObject() ||
2023 if (filler_value != pre_allocated_value) {
2026 for (
int i = 0; i < pre_allocated; i++) {
2031 while (offset < size) {
2039 ASSERT(properties()->IsDictionary() ==
map()->is_dictionary_map());
2040 return !properties()->IsDictionary();
2060 return properties()->length() > limit;
2075 if (value < 0)
return false;
2079 if (IsHeapNumber()) {
2081 uint32_t uint_value =
static_cast<uint32_t
>(value);
2082 if (value == static_cast<double>(uint_value)) {
2083 *index = uint_value;
2092 if (!this->IsJSValue())
return false;
2095 if (!js_value->value()->IsString())
return false;
2098 if (index >= static_cast<uint32_t>(str->
length()))
return false;
2105 #if ENABLE_EXTRA_CHECKS
2115 FATAL(
"API call returned invalid object");
2117 #endif // ENABLE_EXTRA_CHECKS
2134 return get(index) ==
GetHeap()->the_hole_value();
2141 ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
2191 return GetHeap()->the_hole_value();
2238 int ConstantPoolArray::first_int64_index() {
2264 int number_of_code_ptr_entries,
2265 int number_of_heap_ptr_entries,
2266 int number_of_int32_entries) {
2267 int current_index = number_of_int64_entries;
2268 set_first_code_ptr_index(current_index);
2269 current_index += number_of_code_ptr_entries;
2270 set_first_heap_ptr_index(current_index);
2271 current_index += number_of_heap_ptr_entries;
2272 set_first_int32_index(current_index);
2273 current_index += number_of_int32_entries;
2379 heap->RecordWrite(array->
address(), offset);
2400 GetHeap()->undefined_value());
2435 this ==
GetHeap()->empty_descriptor_array());
2449 template<SearchMode search_mode,
typename T>
2451 uint32_t hash = name->
Hash();
2456 while (low != high) {
2457 int mid = (low + high) / 2;
2458 Name* mid_name = array->GetSortedKey(mid);
2459 uint32_t mid_hash = mid_name->
Hash();
2461 if (mid_hash >= hash) {
2468 for (; low <= limit; ++low) {
2469 int sort_index = array->GetSortedKeyIndex(low);
2470 Name* entry = array->GetKey(sort_index);
2471 if (entry->
Hash() != hash)
break;
2472 if (entry->
Equals(name)) {
2473 if (search_mode ==
ALL_ENTRIES || sort_index < valid_entries) {
2476 return T::kNotFound;
2480 return T::kNotFound;
2486 template<SearchMode search_mode,
typename T>
2488 uint32_t hash = name->
Hash();
2490 for (
int number = 0; number < len; number++) {
2491 int sorted_index = array->GetSortedKeyIndex(number);
2492 Name* entry = array->GetKey(sorted_index);
2493 uint32_t current_hash = entry->
Hash();
2494 if (current_hash > hash)
break;
2495 if (current_hash == hash && entry->
Equals(name))
return sorted_index;
2498 ASSERT(len >= valid_entries);
2499 for (
int number = 0; number < valid_entries; number++) {
2500 Name* entry = array->GetKey(number);
2501 uint32_t current_hash = entry->
Hash();
2502 if (current_hash == hash && entry->
Equals(name))
return number;
2505 return T::kNotFound;
2509 template<SearchMode search_mode,
typename T>
2512 SLOW_ASSERT(array->IsSortedNoDuplicates(valid_entries));
2517 int nof = array->number_of_entries();
2518 if (nof == 0)
return T::kNotFound;
2521 const int kMaxElementsForLinearSearch = 8;
2523 nof <= kMaxElementsForLinearSearch) ||
2525 valid_entries <= (kMaxElementsForLinearSearch * 3))) {
2526 return LinearSearch<search_mode>(array,
name, nof, valid_entries);
2530 return BinarySearch<search_mode>(array,
name, 0, nof - 1, valid_entries);
2535 return internal::Search<VALID_ENTRIES>(
this,
name, valid_descriptors);
2539 int DescriptorArray::SearchWithCache(Name*
name, Map* map) {
2540 int number_of_own_descriptors = map->NumberOfOwnDescriptors();
2541 if (number_of_own_descriptors == 0)
return kNotFound;
2544 int number = cache->
Lookup(map, name);
2547 number =
Search(name, number_of_own_descriptors);
2548 cache->Update(map, name, number);
2556 return instance_descriptors()->GetDetails(
LastAdded());
2562 LookupResult* result) {
2564 int number = descriptors->SearchWithCache(name,
this);
2566 result->DescriptorResult(holder, descriptors->
GetDetails(number), number);
2572 LookupResult* result) {
2575 int number = transition_array->
Search(name);
2577 return result->TransitionResult(
2578 holder, transition_array->
GetTarget(number));
2603 return Name::cast(
get(ToKeyIndex(descriptor_number)));
2608 return GetDetails(descriptor_number).pointer();
2618 PropertyDetails details =
GetDetails(descriptor_index);
2619 set(ToDetailsIndex(descriptor_index), details.set_pointer(pointer).AsSmi());
2626 PropertyDetails details =
GetDetails(descriptor_index);
2627 set(ToDetailsIndex(descriptor_index),
2628 details.CopyWithRepresentation(representation).AsSmi());
2634 for (
int i = 0; i <
length; i++) {
2648 return get(ToValueIndex(descriptor_number));
2654 Object* details =
get(ToDetailsIndex(descriptor_number));
2655 return PropertyDetails(
Smi::cast(details));
2666 return GetDetails(descriptor_number).field_index();
2671 return GetValue(descriptor_number);
2677 return GetValue(descriptor_number);
2689 desc->Init(
GetKey(descriptor_number),
2702 ToKeyIndex(descriptor_number),
2705 ToValueIndex(descriptor_number),
2708 ToDetailsIndex(descriptor_number),
2709 desc->GetDetails().AsSmi());
2717 set(ToKeyIndex(descriptor_number), desc->GetKey());
2718 set(ToValueIndex(descriptor_number), desc->GetValue());
2719 set(ToDetailsIndex(descriptor_number), desc->GetDetails().AsSmi());
2727 Set(descriptor_number, desc, witness);
2729 uint32_t hash = desc->GetKey()->Hash();
2733 for (insertion = descriptor_number; insertion > 0; --insertion) {
2735 if (key->
Hash() <= hash)
break;
2746 Set(descriptor_number, desc);
2748 uint32_t hash = desc->GetKey()->Hash();
2752 for (insertion = descriptor_number; insertion > 0; --insertion) {
2754 if (key->
Hash() <= hash)
break;
2762 void DescriptorArray::SwapSortedKeys(
int first,
int second) {
2770 : marking_(array->GetHeap()->incremental_marking()) {
2772 ASSERT(!marking_->IsMarking() ||
2773 Marking::Color(array) == Marking::WHITE_OBJECT);
2778 marking_->LeaveNoMarkingScope();
2782 template<
typename Shape,
typename Key>
2784 const int kMinCapacity = 32;
2786 if (capacity < kMinCapacity) {
2787 capacity = kMinCapacity;
2793 template<
typename Shape,
typename Key>
2800 template<
typename Shape,
typename Key>
2802 uint32_t capacity = Capacity();
2807 Object* element = KeyAt(entry);
2810 if (element == isolate->
heap()->raw_unchecked_undefined_value())
break;
2811 if (element != isolate->
heap()->raw_unchecked_the_hole_value() &&
2812 Shape::IsMatch(key, element))
return entry;
2813 entry = NextProbe(entry, count++, capacity);
2820 Object* max_index_object =
get(kMaxNumberKeyIndex);
2821 if (!max_index_object->IsSmi())
return false;
2823 (
Smi::cast(max_index_object)->
value() & kRequiresSlowElementsMask);
2827 ASSERT(!requires_slow_elements());
2828 Object* max_index_object =
get(kMaxNumberKeyIndex);
2829 if (!max_index_object->IsSmi())
return 0;
2830 uint32_t value =
static_cast<uint32_t
>(
Smi::cast(max_index_object)->
value());
2831 return value >> kRequiresSlowElementsTagSize;
2914 template <class Traits>
2918 Traits::kInstanceType);
2923 #define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
2925 #undef MAKE_STRUCT_CAST
2928 template <
typename Shape,
typename Key>
2930 ASSERT(obj->IsHashTable());
2948 #if V8_HOST_ARCH_64_BIT
2955 if (other ==
this)
return true;
2956 if ((this->IsInternalizedString() && other->IsInternalizedString()) ||
2957 this->IsSymbol() || other->IsSymbol()) {
2970 if (other ==
this)
return true;
2971 if (this->IsInternalizedString() && other->IsInternalizedString()) {
2974 return SlowEquals(other);
2979 if (!StringShape(
this).IsCons())
return this;
2982 return SlowTryFlatten(pretenure);
2987 MaybeObject* flat = TryFlatten(pretenure);
2988 Object* successfully_flattened;
2989 if (!flat->ToObject(&successfully_flattened))
return this;
2996 switch (StringShape(
this).full_representation_tag()) {
3022 ASSERT(StringShape(
this).IsSequential());
3024 return this->IsOneByteRepresentation()
3031 if (!StringShape(
this).IsCons())
return true;
3040 ASSERT(StringShape(
this).IsIndirect());
3047 template<
class Visitor,
class ConsOp>
3055 ASSERT(length == static_cast<unsigned>(string->
length()));
3056 ASSERT(offset <= length);
3057 unsigned slice_offset = offset;
3063 visitor.VisitOneByteString(
3069 visitor.VisitTwoByteString(
3075 visitor.VisitOneByteString(
3081 visitor.VisitTwoByteString(
3089 slice_offset += slicedString->
offset();
3090 string = slicedString->
parent();
3097 string = cons_op.Operate(
string, &offset, &type, &length);
3098 if (
string ==
NULL)
return;
3099 slice_offset = offset;
3100 ASSERT(length == static_cast<unsigned>(string->
length()));
3123 template<
class Visitor>
3130 ASSERT(offset >= 0 && offset <= length);
3132 Visit(
string, offset, *visitor, op, type, static_cast<unsigned>(length));
3144 ASSERT(index >= 0 && index <
length() && value <= kMaxOneByteCharCode);
3146 static_cast<byte>(value));
3156 return reinterpret_cast<uint8_t*
>(GetCharsAddress());
3198 ASSERT(parent->IsSeqString() || parent->IsExternalString());
3251 if (is_short())
return;
3252 const char** data_field =
3253 reinterpret_cast<const char**
>(
FIELD_ADDR(
this, kResourceDataOffset));
3254 *data_field = resource()->data();
3261 *
reinterpret_cast<const Resource**
>(
3262 FIELD_ADDR(
this, kResourceOffset)) = resource;
3263 if (resource !=
NULL) update_data_cache();
3268 return reinterpret_cast<const uint8_t*
>(resource()->data());
3274 return GetChars()[index];
3284 if (is_short())
return;
3287 *data_field = resource()->data();
3293 *
reinterpret_cast<const Resource**
>(
3294 FIELD_ADDR(
this, kResourceOffset)) = resource;
3295 if (resource !=
NULL) update_data_cache();
3300 return resource()->data();
3306 return GetChars()[index];
3312 return GetChars() + start;
3321 unsigned ConsStringIteratorOp::OffsetForDepth(
unsigned depth) {
3322 return depth & kDepthMask;
3326 void ConsStringIteratorOp::PushLeft(ConsString*
string) {
3327 frames_[depth_++ & kDepthMask] = string;
3331 void ConsStringIteratorOp::PushRight(ConsString*
string) {
3333 frames_[(depth_-1) & kDepthMask] =
string;
3337 void ConsStringIteratorOp::AdjustMaximumDepth() {
3338 if (depth_ > maximum_depth_) maximum_depth_ = depth_;
3342 void ConsStringIteratorOp::Pop() {
3344 ASSERT(depth_ <= maximum_depth_);
3360 unsigned* length_out) {
3361 bool blew_stack =
false;
3362 String*
string = NextLeaf(&blew_stack, type_out, length_out);
3364 if (
string !=
NULL) {
3366 ASSERT(*length_out == static_cast<unsigned>(string->
length()));
3371 if (!blew_stack)
return NULL;
3373 unsigned offset_out;
3374 string =
Search(&offset_out, type_out, length_out);
3378 *length_out == static_cast<unsigned>(string->
length()));
3388 if (buffer8_ ==
end_) HasMore();
3390 return is_one_byte_ ? *buffer8_++ : *buffer16_++;
3397 : is_one_byte_(
false),
3399 Reset(
string, offset);
3407 int32_t type =
string->map()->instance_type();
3408 unsigned length =
string->length();
3415 if (!op_->
HasMore())
return false;
3419 if (
string ==
NULL)
return false;
3420 ASSERT(!string->IsConsString());
3430 const uint8_t* chars,
unsigned length) {
3431 is_one_byte_ =
true;
3433 end_ = chars + length;
3438 const uint16_t* chars,
unsigned length) {
3439 is_one_byte_ =
false;
3441 end_ =
reinterpret_cast<const uint8_t*
>(chars + length);
3452 int cache_size =
size();
3511 return reinterpret_cast<uint8_t*
>(external_pointer());
3534 void* ExternalArray::external_pointer() {
3536 return reinterpret_cast<void*
>(ptr);
3541 intptr_t ptr =
reinterpret_cast<intptr_t
>(value);
3548 int8_t* ptr =
static_cast<int8_t*
>(external_pointer());
3560 int8_t* ptr =
static_cast<int8_t*
>(external_pointer());
3567 uint8_t* ptr =
static_cast<uint8_t*
>(external_pointer());
3579 uint8_t* ptr =
static_cast<uint8_t*
>(external_pointer());
3643 uint32_t* ptr =
static_cast<uint32_t*
>(external_pointer());
3655 uint32_t* ptr =
static_cast<uint32_t*
>(external_pointer());
3662 float* ptr =
static_cast<float*
>(external_pointer());
3674 float* ptr =
static_cast<float*
>(external_pointer());
3681 double* ptr =
static_cast<double*
>(external_pointer());
3693 double* ptr =
static_cast<double*
>(external_pointer());
3706 switch (instance_type) {
// Per-type case arm for the enclosing switch that computes a fixed typed
// array's element size from its instance type.
// NOTE(review): the macro's continuation line(s) (e.g. the break) fall
// outside this excerpt.
3707 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
3708 case FIXED_##TYPE##_ARRAY_TYPE: \
3709 element_size = size; \
3713 #undef TYPED_ARRAY_CASE
3718 return length() * element_size;
3727 uint8_t Uint8ArrayTraits::defaultValue() {
return 0; }
3730 uint8_t Uint8ClampedArrayTraits::defaultValue() {
return 0; }
3733 int8_t Int8ArrayTraits::defaultValue() {
return 0; }
3736 uint16_t Uint16ArrayTraits::defaultValue() {
return 0; }
3739 int16_t Int16ArrayTraits::defaultValue() {
return 0; }
3742 uint32_t Uint32ArrayTraits::defaultValue() {
return 0; }
3745 int32_t Int32ArrayTraits::defaultValue() {
return 0; }
3748 float Float32ArrayTraits::defaultValue() {
3753 double Float64ArrayTraits::defaultValue() {
return OS::nan_value(); }
3756 template <
class Traits>
3758 ASSERT((index >= 0) && (index < this->length()));
3768 ASSERT((index >= 0) && (index < this->length()));
3773 template <
class Traits>
3775 ASSERT((index >= 0) && (index < this->length()));
3784 int index, Float64ArrayTraits::ElementType value) {
3785 ASSERT((index >= 0) && (index < this->length()));
3790 template <
class Traits>
3798 if (value < 0)
return 0;
3799 if (value > 0xFF)
return 0xFF;
3800 return static_cast<uint8_t
>(value);
3804 template <
class Traits>
3813 if (value < 0)
return 0;
3814 if (value > 0xFF)
return 0xFF;
3815 return static_cast<uint8_t
>(lrint(value));
3821 return static_cast<float>(value);
3831 template <
class Traits>
3833 return Traits::ToObject(GetHeap(), get_scalar(index));
3836 template <
class Traits>
3839 if (index < static_cast<uint32_t>(length())) {
3840 if (value->IsSmi()) {
3842 cast_value = from_int(int_value);
3843 }
else if (value->IsHeapNumber()) {
3845 cast_value = from_double(double_value);
3849 ASSERT(value->IsUndefined());
3851 set(index, cast_value);
3853 return Traits::ToObject(GetHeap(), cast_value);
3856 template <
class Traits>
3862 array->SetValue(index, *value),
3867 MaybeObject* Uint8ArrayTraits::ToObject(
Heap*, uint8_t scalar) {
3872 MaybeObject* Uint8ClampedArrayTraits::ToObject(Heap*, uint8_t scalar) {
3877 MaybeObject* Int8ArrayTraits::ToObject(Heap*, int8_t scalar) {
3882 MaybeObject* Uint16ArrayTraits::ToObject(Heap*,
uint16_t scalar) {
3887 MaybeObject* Int16ArrayTraits::ToObject(Heap*,
int16_t scalar) {
3892 MaybeObject* Uint32ArrayTraits::ToObject(Heap* heap, uint32_t scalar) {
3893 return heap->NumberFromUint32(scalar);
3897 MaybeObject* Int32ArrayTraits::ToObject(Heap* heap,
int32_t scalar) {
3898 return heap->NumberFromInt32(scalar);
3902 MaybeObject* Float32ArrayTraits::ToObject(Heap* heap,
float scalar) {
3903 return heap->NumberFromDouble(scalar);
3907 MaybeObject* Float64ArrayTraits::ToObject(Heap* heap,
double scalar) {
3908 return heap->NumberFromDouble(scalar);
3918 ASSERT(0 <=
id &&
id < 256);
3957 reinterpret_cast<SeqOneByteString*>(
this)->length());
3960 return reinterpret_cast<ByteArray*
>(
this)->ByteArraySize();
3968 reinterpret_cast<SeqTwoByteString*>(
this)->length());
3972 reinterpret_cast<FixedDoubleArray*>(
this)->length());
3976 reinterpret_cast<ConstantPoolArray*>(
this)->count_of_int64_entries(),
3977 reinterpret_cast<ConstantPoolArray*>(
this)->count_of_code_ptr_entries(),
3978 reinterpret_cast<ConstantPoolArray*>(
this)->count_of_heap_ptr_entries(),
3979 reinterpret_cast<ConstantPoolArray*>(
this)->count_of_int32_entries());
3986 return reinterpret_cast<Code*
>(
this)->CodeSize();
3993 ASSERT(0 <= value && value < 256);
3999 ASSERT(0 <= value && value < 256);
4005 ASSERT(0 <= value && value < 256);
4008 static_cast<byte>(value));
4077 if (access_check_needed) {
4203 return code_cache() !=
GetIsolate()->
heap()->empty_fixed_array();
4209 for (
int i = 0; i <= descriptor; i++) {
4210 PropertyDetails details = instance_descriptors()->GetDetails(i);
4211 if (details.representation().IsNone())
return true;
4212 if (details.representation().IsSmi())
return true;
4213 if (details.representation().IsDouble())
return true;
4214 if (details.representation().IsHeapObject())
return true;
4215 if (details.type() ==
CONSTANT)
return true;
4232 return is_stable() && FLAG_omit_map_checks_for_leaf_maps;
4237 if (
length() == 0)
return 0;
4248 return get(kCodesStartIndex + i)->IsCode();
4252 return Code::cast(
get(kCodesStartIndex + i));
4263 set(kCodesStartIndex + i,
object);
4268 return get(kCodesStartIndex + i);
4283 set(kCodesStartIndex + to,
get(kCodesStartIndex + from));
4287 void DependentCode::ExtendGroup(DependencyGroup group) {
4288 GroupStartIndexes starts(
this);
4290 if (starts.at(g) < starts.at(g + 1)) {
4291 copy(starts.at(g), starts.at(g + 1));
4364 ASSERT(0 <= major && major < 256);
4374 kind() == BINARY_OP_IC ||
4375 kind() == COMPARE_IC ||
4376 kind() == COMPARE_NIL_IC ||
4377 kind() == LOAD_IC ||
4378 kind() == KEYED_LOAD_IC ||
4379 kind() == STORE_IC ||
4380 kind() == KEYED_STORE_IC ||
4381 kind() == TO_BOOLEAN_IC;
4571 #define CASE(name) case name: return true;
4574 default:
return false;
4595 ASSERT(value->IsConstantPoolArray());
4612 return static_cast<Flags>(bits);
4658 return static_cast<Flags>(bits);
4681 if (object->IsMap()) {
4683 FLAG_collect_maps &&
4684 FLAG_weak_embedded_maps_in_optimized_code;
4686 if (object->IsJSObject() ||
4687 (
object->IsCell() &&
Cell::cast(
object)->value()->IsJSObject())) {
4688 return FLAG_weak_embedded_objects_in_optimized_code;
4698 ASSERT(count_ < kMaxCount);
4699 find_[count_] = map_to_find;
4700 replace_[count_] = obj_to_replace;
4704 static const int kMaxCount = 4;
4712 Object* Map::prototype() {
4718 ASSERT(value->IsNull() || value->IsJSReceiver());
4726 static MaybeObject* EnsureHasTransitionArray(Map* map) {
4727 TransitionArray* transitions;
4728 MaybeObject* maybe_transitions;
4729 if (!map->HasTransitionArray()) {
4731 if (!maybe_transitions->To(&transitions))
return maybe_transitions;
4732 transitions->set_back_pointer_storage(map->GetBackPointer());
4733 }
else if (!map->transitions()->IsFullTransitionArray()) {
4734 maybe_transitions = map->transitions()->ExtendToFullTransitionArray();
4735 if (!maybe_transitions->To(&transitions))
return maybe_transitions;
4739 map->set_transitions(transitions);
4746 set_instance_descriptors(descriptors);
4754 void
Map::set_bit_field3(uint32_t bits) {
4757 int value = bits << 1;
4786 descriptors->
Append(desc, witness);
4793 if (object->IsDescriptorArray()) {
4796 ASSERT(object->IsMap() ||
object->IsUndefined());
4809 return object->IsTransitionArray();
4814 int index = transitions()->Search(
GetHeap()->elements_transition_symbol());
4815 return transitions()->GetTarget(index);
4836 transitions()->SetTarget(transition_index, target);
4841 return transitions()->GetTarget(transition_index);
4848 GetHeap()->elements_transition_symbol(),
4851 if (!maybe_transitions->To(&transitions))
return maybe_transitions;
4852 set_transitions(transitions);
4860 return GetHeap()->empty_fixed_array();
4862 return transitions()->GetPrototypeTransitions();
4867 MaybeObject* allow_prototype = EnsureHasTransitionArray(
this);
4868 if (allow_prototype->IsFailure())
return allow_prototype;
4876 transitions()->SetPrototypeTransitions(proto_transitions);
4894 void Map::set_transitions(TransitionArray* transition_array,
4902 for (
int i = 0; i < transitions()->number_of_transitions(); i++) {
4903 Map* target = transitions()->GetTarget(i);
4904 if (target->instance_descriptors() == instance_descriptors()) {
4905 Name* key = transitions()->GetKey(i);
4906 int new_target_index = transition_array->Search(key);
4908 ASSERT(transition_array->GetTarget(new_target_index) == target);
4912 ASSERT(transitions() != transition_array);
4923 ASSERT(undefined->IsUndefined());
4933 if (object->IsTransitionArray()) {
4949 return transition_array;
4966 ACCESSORS(JSFunction, next_function_link, Object, kNextFunctionLinkOffset)
4970 ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
4977 ACCESSORS(AccessorInfo, expected_receiver_type, Object,
4978 kExpectedReceiverTypeOffset)
4981 kSerializedDataOffset)
4987 ACCESSORS(ExecutableAccessorInfo, setter, Object, kSetterOffset)
4988 ACCESSORS(ExecutableAccessorInfo, data, Object, kDataOffset)
4993 ACCESSORS(AccessorPair, setter, Object, kSetterOffset)
4997 ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
4998 ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)
5001 ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
5002 ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
5003 ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
5004 ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
5005 ACCESSORS(InterceptorInfo, data, Object, kDataOffset)
5008 ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)
5011 ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
5012 ACCESSORS(TemplateInfo, property_accessors, Object, kPropertyAccessorsOffset)
5015 ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
5017 kPrototypeTemplateOffset)
5018 ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
5019 ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
5020 kNamedPropertyHandlerOffset)
5022 kIndexedPropertyHandlerOffset)
5023 ACCESSORS(FunctionTemplateInfo, instance_template, Object,
5024 kInstanceTemplateOffset)
5025 ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
5026 ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
5028 kInstanceCallHandlerOffset)
5029 ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
5030 kAccessCheckInfoOffset)
5035 kInternalFieldCountOffset)
5038 ACCESSORS(SignatureInfo, args, Object, kArgsOffset)
5043 ACCESSORS(AllocationSite, nested_site, Object, kNestedSiteOffset)
5046 kPretenureCreateCountOffset)
5048 kDependentCodeOffset)
5049 ACCESSORS(AllocationSite, weak_next, Object, kWeakNextOffset)
5053 ACCESSORS(Script, name, Object, kNameOffset)
5057 ACCESSORS(Script, context_data, Object, kContextOffset)
5060 ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
5061 ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
5063 kEvalFrominstructionsOffsetOffset)
5065 BOOL_ACCESSORS(Script, flags, is_shared_cross_origin, kIsSharedCrossOriginBit)
5067 Script::CompilationType Script::compilation_type() {
5069 COMPILATION_TYPE_EVAL : COMPILATION_TYPE_HOST;
5085 #ifdef ENABLE_DEBUGGER_SUPPORT
5086 ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
5087 ACCESSORS(DebugInfo, original_code,
Code, kOriginalCodeIndex)
5093 ACCESSORS_TO_SMI(BreakPointInfo, statement_position, kStatementPositionIndex)
5094 ACCESSORS(BreakPointInfo, break_point_objects,
Object, kBreakPointObjectsIndex)
5097 ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
5098 ACCESSORS(SharedFunctionInfo, optimized_code_map, Object,
5099 kOptimizedCodeMapOffset)
5100 ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
5101 ACCESSORS(SharedFunctionInfo, initial_map, Object, kInitialMapOffset)
5103 kInstanceClassNameOffset)
5104 ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
5105 ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
5106 ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
5107 ACCESSORS(SharedFunctionInfo, inferred_name,
String, kInferredNameOffset)
5108 SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
5113 kHiddenPrototypeBit)
5114 BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
5116 kNeedsAccessCheckBit)
5118 kReadOnlyPrototypeBit)
5120 kRemovePrototypeBit)
5125 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
5131 kAllowLazyCompilation)
5134 allows_lazy_compilation_without_context,
5135 kAllowLazyCompilationWithoutContext)
5142 has_duplicate_parameters,
5143 kHasDuplicateParameters)
5146 #if V8_HOST_ARCH_32_BIT
5149 kFormalParameterCountOffset)
5151 kExpectedNofPropertiesOffset)
5152 SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
5154 kStartPositionAndTypeOffset)
5155 SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
5157 kFunctionTokenPositionOffset)
5159 kCompilerHintsOffset)
5160 SMI_ACCESSORS(SharedFunctionInfo, opt_count_and_bailout_reason,
5161 kOptCountAndBailoutReasonOffset)
5162 SMI_ACCESSORS(SharedFunctionInfo, counters, kCountersOffset)
5166 #define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset) \
5167 STATIC_ASSERT(holder::offset % kPointerSize == 0); \
5168 int holder::name() { \
5169 int value = READ_INT_FIELD(this, offset); \
5170 ASSERT(kHeapObjectTag == 1); \
5171 ASSERT((value & kHeapObjectTag) == 0); \
5172 return value >> 1; \
5174 void holder::set_##name(int value) { \
5175 ASSERT(kHeapObjectTag == 1); \
5176 ASSERT((value & 0xC0000000) == 0xC0000000 || \
5177 (value & 0xC0000000) == 0x000000000); \
5178 WRITE_INT_FIELD(this, \
5180 (value << 1) & ~kHeapObjectTag); \
5183 #define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset) \
5184 STATIC_ASSERT(holder::offset % kPointerSize == kIntSize); \
5185 INT_ACCESSORS(holder, name, offset)
5190 formal_parameter_count,
5191 kFormalParameterCountOffset)
5194 expected_nof_properties,
5195 kExpectedNofPropertiesOffset)
5200 start_position_and_type,
5201 kStartPositionAndTypeOffset)
5204 function_token_position,
5205 kFunctionTokenPositionOffset)
5208 kCompilerHintsOffset)
5211 opt_count_and_bailout_reason,
5212 kOptCountAndBailoutReasonOffset)
5225 ASSERT(0 <= value && value < 256);
5232 live_objects_may_exist,
5233 kLiveObjectsMayExist)
5236 bool SharedFunctionInfo::IsInobjectSlackTrackingInProgress() {
5237 return initial_map() != GetHeap()->undefined_value();
5243 optimization_disabled,
5244 kOptimizationDisabled)
5247 void SharedFunctionInfo::set_optimization_disabled(
bool disable) {
5249 kOptimizationDisabled,
5253 if ((
code()->kind() == Code::FUNCTION) && disable) {
5254 code()->set_optimizable(
false);
5260 if (
code()->kind() != Code::FUNCTION)
return 0;
5261 return code()->profiler_ticks();
5280 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
5281 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, inline_builtin,
5285 kNameShouldPrintAsAnonymous)
5286 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
5287 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
5288 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction)
5293 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_flush, kDontFlush)
5294 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_generator, kIsGenerator)
5296 void SharedFunctionInfo::BeforeVisitingPointers() {
5297 if (IsInobjectSlackTrackingInProgress()) DetachInitialMap();
5301 ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
5302 ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)
5304 ACCESSORS(PolymorphicCodeCache, cache, Object, kCacheOffset)
5307 Object* src = this->source();
5308 if (!src->IsString())
return true;
5310 if (!StringShape(src_str).IsExternal())
return true;
5343 ASSERT(value->kind() != Code::OPTIMIZED_FUNCTION);
5352 if (
code()->gc_metadata() !=
NULL) {
5362 ScopeInfo* SharedFunctionInfo::scope_info() {
5367 void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
5373 reinterpret_cast<Object*>(value),
5385 return function_data()->IsFunctionTemplateInfo();
5396 return function_data()->IsSmi();
5463 Code* code = this->
code();
5473 if (tries >= 16 && (((tries - 1) & tries) == 0)) {
5474 set_optimization_disabled(
false);
5477 code()->set_optimizable(
true);
5488 return shared()->formal_parameter_count() !=
5494 return code()->
kind() == Code::OPTIMIZED_FUNCTION;
5505 Builtins::kCompileOptimized);
5511 Builtins::kCompileOptimizedConcurrent);
5517 Builtins::kInOptimizationQueue);
5547 bool is_optimized = code->
kind() == Code::OPTIMIZED_FUNCTION;
5549 if (was_optimized && is_optimized) {
5550 shared()->EvictFromOptimizedCodeMap(this->
code(),
5551 "Replacing with another optimized code");
5558 if (!was_optimized && is_optimized) {
5561 if (was_optimized && !is_optimized) {
5574 ASSERT(value->IsUndefined() || value->IsContext());
5580 kPrototypeOrInitialMapOffset)
5584 return Map::cast(prototype_or_initial_map());
5589 set_prototype_or_initial_map(value);
5594 return prototype_or_initial_map()->IsMap();
5613 return prototype_or_initial_map();
5621 if (
map()->has_non_instance_prototype())
return map()->constructor();
5638 ASSERT(!shared()->bound());
5639 return literals_or_bindings();
5644 ASSERT(!shared()->bound());
5645 set_literals_or_bindings(literals);
5650 ASSERT(shared()->bound());
5651 return literals_or_bindings();
5656 ASSERT(shared()->bound());
5660 bindings->
map() ==
GetHeap()->fixed_cow_array_map());
5661 set_literals_or_bindings(bindings);
5666 ASSERT(!shared()->bound());
5702 ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset)
5705 void JSProxy::InitializeBody(
int object_size, Object* value) {
5706 ASSERT(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
5707 for (
int offset = kHeaderSize; offset < object_size; offset +=
kPointerSize) {
5716 ACCESSORS(JSWeakCollection, next, Object, kNextOffset)
5731 ACCESSORS(JSGeneratorObject, receiver, Object, kReceiverOffset)
5732 SMI_ACCESSORS(JSGeneratorObject, continuation, kContinuationOffset)
5733 ACCESSORS(JSGeneratorObject, operand_stack, FixedArray, kOperandStackOffset)
5734 SMI_ACCESSORS(JSGeneratorObject, stack_handler_index, kStackHandlerIndexOffset)
5737 JSGeneratorObject* JSGeneratorObject::cast(Object*
obj) {
5738 ASSERT(obj->IsJSGeneratorObject());
5740 return reinterpret_cast<JSGeneratorObject*
>(
obj);
5744 ACCESSORS(JSModule, context, Object, kContextOffset)
5748 JSModule* JSModule::cast(Object*
obj) {
5749 ASSERT(obj->IsJSModule());
5751 return reinterpret_cast<JSModule*
>(
obj);
5755 ACCESSORS(JSValue, value, Object, kValueOffset)
5758 JSValue* JSValue::cast(Object*
obj) {
5759 ASSERT(obj->IsJSValue());
5761 return reinterpret_cast<JSValue*
>(
obj);
5765 ACCESSORS(JSDate, value, Object, kValueOffset)
5766 ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
5767 ACCESSORS(JSDate, year, Object, kYearOffset)
5768 ACCESSORS(JSDate, month, Object, kMonthOffset)
5769 ACCESSORS(JSDate, day, Object, kDayOffset)
5770 ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
5771 ACCESSORS(JSDate, hour, Object, kHourOffset)
5772 ACCESSORS(JSDate, min, Object, kMinOffset)
5773 ACCESSORS(JSDate, sec, Object, kSecOffset)
5776 JSDate* JSDate::cast(Object*
obj) {
5779 return reinterpret_cast<JSDate*
>(
obj);
5783 ACCESSORS(JSMessageObject, type, String, kTypeOffset)
5785 ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
5786 ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
5787 SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
5788 SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)
5791 JSMessageObject* JSMessageObject::cast(Object*
obj) {
5792 ASSERT(obj->IsJSMessageObject());
5794 return reinterpret_cast<JSMessageObject*
>(
obj);
5798 INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
5801 ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
5802 ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
5803 ACCESSORS(Code, raw_type_feedback_info, Object, kTypeFeedbackInfoOffset)
5804 ACCESSORS(Code, next_code_link, Object, kNextCodeLinkOffset)
5807 void Code::WipeOutHeader() {
5813 if (!
READ_FIELD(
this, kTypeFeedbackInfoOffset)->IsSmi()) {
5821 return raw_type_feedback_info();
5827 set_raw_type_feedback_info(value, mode);
5835 kind() == BINARY_OP_IC ||
kind() == LOAD_IC);
5842 kind() == COMPARE_NIL_IC ||
5843 kind() == BINARY_OP_IC ||
5845 kind() == LOAD_IC ||
5846 kind() == KEYED_LOAD_IC ||
5847 kind() == STORE_IC ||
5848 kind() == KEYED_STORE_IC);
5853 ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
5902 return reinterpret_cast<void*
>(ptr);
5906 void JSArrayBuffer::set_backing_store(
void* value,
WriteBarrierMode mode) {
5907 intptr_t ptr =
reinterpret_cast<intptr_t
>(value);
5912 ACCESSORS(JSArrayBuffer, byte_length, Object, kByteLengthOffset)
5916 bool JSArrayBuffer::is_external() {
5941 ACCESSORS(JSArrayBufferView, byte_offset, Object, kByteOffsetOffset)
5942 ACCESSORS(JSArrayBufferView, byte_length, Object, kByteLengthOffset)
5943 ACCESSORS(JSArrayBufferView, weak_next, Object, kWeakNextOffset)
5949 JSRegExp::
Type JSRegExp::TypeTag() {
5950 Object* data = this->data();
5971 ASSERT(this->data()->IsFixedArray());
5972 Object* data = this->data();
5979 ASSERT(this->data()->IsFixedArray());
5980 Object* data = this->data();
6007 if (ElementsAreSafeToExamine()) {
6008 Map* map = fixed_array->
map();
6010 (map ==
GetHeap()->fixed_array_map() ||
6011 map ==
GetHeap()->fixed_cow_array_map())) ||
6013 (fixed_array->IsFixedDoubleArray() ||
6014 fixed_array ==
GetHeap()->empty_fixed_array())) ||
6016 fixed_array->IsFixedArray() &&
6017 fixed_array->IsDictionary()) ||
6020 (elements()->IsFixedArray() && elements()->length() >= 2));
6075 return array->IsExternalArray();
6079 #define EXTERNAL_ELEMENTS_CHECK(Type, type, TYPE, ctype, size) \
6080 bool JSObject::HasExternal##Type##Elements() { \
6081 HeapObject* array = elements(); \
6082 ASSERT(array != NULL); \
6083 if (!array->IsHeapObject()) \
6085 return array->map()->instance_type() == EXTERNAL_##TYPE##_ARRAY_TYPE; \
6090 #undef EXTERNAL_ELEMENTS_CHECK
6096 return array->IsFixedTypedArrayBase();
6100 #define FIXED_TYPED_ELEMENTS_CHECK(Type, type, TYPE, ctype, size) \
6101 bool JSObject::HasFixed##Type##Elements() { \
6102 HeapObject* array = elements(); \
6103 ASSERT(array != NULL); \
6104 if (!array->IsHeapObject()) \
6106 return array->map()->instance_type() == FIXED_##TYPE##_ARRAY_TYPE; \
6111 #undef FIXED_TYPED_ELEMENTS_CHECK
6128 if (elems->
map() != isolate->
heap()->fixed_cow_array_map())
return elems;
6129 Object* writable_elems;
6131 elems, isolate->
heap()->fixed_array_map());
6132 if (!maybe_writable_elems->ToObject(&writable_elems)) {
6133 return maybe_writable_elems;
6137 isolate->
counters()->cow_arrays_converted()->Increment();
6138 return writable_elems;
6175 raw_running_hash_(seed),
6177 is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
6178 is_first_char_(
true) {
6179 ASSERT(FLAG_randomize_hashes || raw_running_hash_ == 0);
6188 uint32_t StringHasher::AddCharacterCore(uint32_t running_hash,
uint16_t c) {
6190 running_hash += (running_hash << 10);
6191 running_hash ^= (running_hash >> 6);
6192 return running_hash;
6196 uint32_t StringHasher::GetHashCore(uint32_t running_hash) {
6197 running_hash += (running_hash << 3);
6198 running_hash ^= (running_hash >> 11);
6199 running_hash += (running_hash << 15);
6203 return running_hash;
6207 void StringHasher::AddCharacter(
uint16_t c) {
6210 raw_running_hash_ = AddCharacterCore(raw_running_hash_, c);
6214 bool StringHasher::UpdateIndex(
uint16_t c) {
6216 if (c < '0' || c >
'9') {
6217 is_array_index_ =
false;
6221 if (is_first_char_) {
6222 is_first_char_ =
false;
6223 if (c ==
'0' && length_ > 1) {
6224 is_array_index_ =
false;
6228 if (array_index_ > 429496729
U - ((d + 2) >> 3)) {
6229 is_array_index_ =
false;
6232 array_index_ = array_index_ * 10 + d;
6237 template<
typename Char>
6239 ASSERT(
sizeof(Char) == 1 ||
sizeof(Char) == 2);
6241 if (is_array_index_) {
6242 for (; i < length; i++) {
6243 AddCharacter(chars[i]);
6244 if (!UpdateIndex(chars[i])) {
6250 for (; i < length; i++) {
6251 ASSERT(!is_array_index_);
6252 AddCharacter(chars[i]);
6257 template <
typename s
char>
6277 return SlowAsArrayIndex(index);
6282 return map()->prototype();
6287 return map()->constructor();
6293 if (object->IsJSProxy()) {
6295 return JSProxy::HasPropertyWithHandler(proxy, name);
6303 if (object->IsJSProxy()) {
6305 return JSProxy::HasPropertyWithHandler(proxy, name);
6314 if (object->IsJSObject() && key->AsArrayIndex(&index)) {
6323 if (object->IsJSProxy()) {
6343 return object->IsJSProxy()
6357 if (object->IsJSProxy()) {
6359 return JSProxy::HasElementWithHandler(proxy, index);
6367 if (object->IsJSProxy()) {
6369 return JSProxy::HasElementWithHandler(proxy, index);
6378 if (object->IsJSProxy()) {
6418 return AttributesField::decode(static_cast<uint32_t>(
flag()->value()));
6423 set_flag(
Smi::FromInt(AttributesField::update(
flag()->value(), attributes)));
6428 Object* function_template = expected_receiver_type();
6429 if (!function_template->IsFunctionTemplateInfo())
return true;
6435 int current = access_flags()->value();
6437 kProhibitsOverwritingBit,
6464 template<
typename Shape,
typename Key>
6468 SetEntry(entry, key, value, PropertyDetails(
Smi::FromInt(0)));
6472 template<
typename Shape,
typename Key>
6476 PropertyDetails details) {
6478 details.IsDeleted() ||
6479 details.dictionary_index() > 0);
6490 ASSERT(other->IsNumber());
6491 return key ==
static_cast<uint32_t
>(other->
Number());
6502 ASSERT(other->IsNumber());
6513 ASSERT(other->IsNumber());
6541 ASSERT(key->IsUniqueName());
6546 template <
int entrysize>
6552 template <
int entrysize>
6558 template <
int entrysize>
6565 template <
int entrysize>
6572 template <
int entrysize>
6578 template <
int entrysize>
6580 intptr_t hash =
reinterpret_cast<intptr_t
>(key);
6581 return (uint32_t)(hash & 0xFFFFFFFF);
6585 template <
int entrysize>
6588 intptr_t hash =
reinterpret_cast<intptr_t
>(other);
6589 return (uint32_t)(hash & 0xFFFFFFFF);
6593 template <
int entrysize>
6611 ASSERT(array->HasFastSmiOrObjectElements());
6613 const int kArraySizeThatFitsComfortablyInNewSpace = 128;
6614 if (elts->length() < required_size) {
6617 Expand(array, required_size + (required_size >> 3));
6619 }
else if (!array->GetHeap()->new_space()->Contains(*elts) &&
6620 required_size < kArraySizeThatFitsComfortablyInNewSpace) {
6623 Expand(array, required_size);
6635 bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
6646 ASSERT((storage->map() == array->GetHeap()->fixed_double_array_map() &&
6648 ((storage->map() != array->GetHeap()->fixed_double_array_map()) &&
6652 array->set_elements(*storage);
6658 if (
length() == 0)
return this;
6664 if (
length() == 0)
return this;
6670 if (
length() == 0)
return this;
6676 return isolate->
factory()->uninitialized_symbol();
6681 return isolate->
factory()->megamorphic_symbol();
6692 return heap->uninitialized_symbol();
6698 return ICTotalCountField::decode(current);
6704 value = ICTotalCountField::update(value,
6705 ICTotalCountField::decode(count));
6712 return ICsWithTypeInfoCountField::decode(current);
6718 int new_count = ICsWithTypeInfoCountField::decode(value) + delta;
6724 if (new_count >= 0) {
6725 new_count &= ICsWithTypeInfoCountField::kMask;
6726 value = ICsWithTypeInfoCountField::update(value, new_count);
6740 int checksum = OwnTypeChangeChecksum::decode(value);
6741 checksum = (checksum + 1) % (1 << kTypeChangeChecksumBits);
6742 value = OwnTypeChangeChecksum::update(value, checksum);
6752 int mask = (1 << kTypeChangeChecksumBits) - 1;
6753 value = InlinedTypeChangeChecksum::update(value, checksum & mask);
6763 return OwnTypeChangeChecksum::decode(value);
6769 int mask = (1 << kTypeChangeChecksumBits) - 1;
6770 return InlinedTypeChangeChecksum::decode(value) == (checksum & mask);
6775 kFeedbackVectorOffset)
6781 Relocatable::Relocatable(
Isolate* isolate) {
6783 prev_ = isolate->relocatable_top();
6784 isolate->set_relocatable_top(
this);
6788 Relocatable::~Relocatable() {
6789 ASSERT_EQ(isolate_->relocatable_top(),
this);
6790 isolate_->set_relocatable_top(prev_);
6800 v->VisitExternalReference(
6801 reinterpret_cast<Address*>(
FIELD_ADDR(
this, kForeignAddressOffset)));
6805 template<
typename StaticVisitor>
6807 StaticVisitor::VisitExternalReference(
6808 reinterpret_cast<Address*>(
FIELD_ADDR(
this, kForeignAddressOffset)));
6814 v->VisitExternalAsciiString(
6815 reinterpret_cast<Resource**>(
FIELD_ADDR(
this, kResourceOffset)));
6819 template<
typename StaticVisitor>
6822 StaticVisitor::VisitExternalAsciiString(
6823 reinterpret_cast<Resource**>(
FIELD_ADDR(
this, kResourceOffset)));
6829 v->VisitExternalTwoByteString(
6830 reinterpret_cast<Resource**>(
FIELD_ADDR(
this, kResourceOffset)));
6834 template<
typename StaticVisitor>
6837 StaticVisitor::VisitExternalTwoByteString(
6838 reinterpret_cast<Resource**>(
FIELD_ADDR(
this, kResourceOffset)));
6842 template<
int start_offset,
int end_offset,
int size>
6851 template<
int start_offset>
6861 #undef CAST_ACCESSOR
6862 #undef INT_ACCESSORS
6864 #undef ACCESSORS_TO_SMI
6865 #undef SMI_ACCESSORS
6867 #undef BOOL_ACCESSORS
6871 #undef WRITE_BARRIER
6872 #undef CONDITIONAL_WRITE_BARRIER
6873 #undef READ_DOUBLE_FIELD
6874 #undef WRITE_DOUBLE_FIELD
6875 #undef READ_INT_FIELD
6876 #undef WRITE_INT_FIELD
6877 #undef READ_INTPTR_FIELD
6878 #undef WRITE_INTPTR_FIELD
6879 #undef READ_UINT32_FIELD
6880 #undef WRITE_UINT32_FIELD
6881 #undef READ_SHORT_FIELD
6882 #undef WRITE_SHORT_FIELD
6883 #undef READ_BYTE_FIELD
6884 #undef WRITE_BYTE_FIELD
6888 #endif // V8_OBJECTS_INL_H_
String * Operate(String *string, unsigned *, int32_t *, unsigned *)
MUST_USE_RESULT MaybeObject * CopyConstantPoolArray(ConstantPoolArray *src)
static int SizeOf(Map *map, HeapObject *object)
#define WRITE_BYTE_FIELD(p, offset, value)
void FastPropertyAtPut(int index, Object *value)
static const double kPretenureRatio
Object * unchecked_first()
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter NULL
Object * type_feedback_info()
static Handle< Object > GetElementWithReceiver(Isolate *isolate, Handle< Object > object, Handle< Object > receiver, uint32_t index)
#define HAS_FAILURE_TAG(value)
bool prohibits_overwriting()
void TryReenableOptimization()
#define CHECK_NOT_EMPTY_HANDLE(isolate, call)
void SetBackPointer(Object *value, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
static const int kBitFieldOffset
#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode)
STATIC_CHECK((kStringRepresentationMask|kStringEncodingMask)==Internals::kFullStringRepresentationMask)
static bool IsMatch(uint32_t key, Object *other)
Address GetCharsAddress()
void set_prohibits_overwriting(bool value)
static void EnsureCanContainElements(Handle< JSObject > object, Object **elements, uint32_t count, EnsureElementsMode mode)
Code * builtin(Name name)
static const int kTypeOffset
void set_deopt_count(int value)
bool IsDetachedFrom(GlobalObject *global)
#define SLOW_ASSERT(condition)
int allow_osr_at_loop_nesting_level()
const intptr_t kSmiTagMask
void set_deopt_dependent_code(bool deopt)
static ElementType from_double(double value)
static const int kVisitorIdOffset
static const int kExternalAsciiRepresentationTag
static const int kCodeOffset
void AddCharacters(const Char *chars, int len)
static bool is_the_hole_nan(double value)
FixedArray * function_bindings()
static ConstantPoolArray * cast(Object *obj)
MUST_USE_RESULT MaybeObject * get(int index)
static const int kCodeEntryOffset
bool has_instance_prototype()
bool HasElementsTransition()
static V8_INLINE int SmiValue(internal::Object *value)
static const int kEntries
static int EntryToIndex(int entry)
void EvictCandidate(SharedFunctionInfo *shared_info)
static ByteArray * FromDataStartAddress(Address address)
void set_all_can_write(bool value)
static const int kCacheSizeIndex
void set_constant_pool(Object *constant_pool)
static const int kValueOffset
int inobject_properties()
void set_has_deoptimization_support(bool value)
static uint32_t Hash(uint32_t key)
Object ** RawFieldOfElementAt(int index)
Utf8StringKey(Vector< const char > string, uint32_t seed)
MUST_USE_RESULT MaybeObject * CopyFixedDoubleArray(FixedDoubleArray *src)
void set(int index, Object *value)
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths true
int GetInternalFieldOffset(int index)
static ElementType from_int(int value)
static bool get(Smi *smi, int bit_position)
const uint16_t * buffer16_
void PrintF(const char *format,...)
#define ASSERT_TAG_ALIGNED(address)
bool IsOneByteEqualTo(Vector< const uint8_t > str)
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf map
void set(int index, uint16_t value)
void set_all_can_read(bool value)
virtual MaybeObject * AsObject(Heap *heap)
void set_function_with_prototype(bool value)
static double hole_nan_as_double()
bool InNewSpace(Object *object)
kInstanceClassNameOffset kNeedsAccessCheckBit kRemovePrototypeBit is_expression
static const int kTransitionsOrBackPointerOffset
static String * cast(Object *obj)
#define READ_DOUBLE_FIELD(p, offset)
#define READ_INTPTR_FIELD(p, offset)
static const int kAllowOSRAtLoopNestingLevelOffset
void copy(int from, int to)
kInstanceClassNameOffset needs_access_check
MaybeObject * TryFlatten(PretenureFlag pretenure=NOT_TENURED)
const uint32_t kTwoByteStringTag
const int kFailureTypeTagSize
static const uint32_t kExponentMask
void set_access_flags(v8::AccessControl access_control)
int memento_create_count()
bool function_with_prototype()
void set_opt_count(int opt_count)
static uint32_t Hash(Object *key)
static DescriptorArray * cast(Object *obj)
static Failure * InternalError()
virtual MaybeObject * AsObject(Heap *heap)
static int SizeOf(Map *map, HeapObject *object)
static const int kFlagsOffset
int unused_property_fields()
void set_length(Smi *length)
double get_scalar(int index)
void set_javascript_builtin(Builtins::JavaScript id, Object *value)
Object * InObjectPropertyAt(int index)
static const int kStorage2Offset
static Smi * FromInt(int value)
bool HasFastSmiElements()
bool IsFastObjectElementsKind(ElementsKind kind)
void IteratePointer(ObjectVisitor *v, int offset)
void set_number_of_entries(DependencyGroup group, int value)
MUST_USE_RESULT MaybeObject * ToSmi()
int32_t get_scalar(int index)
static Flags ComputeHandlerFlags(Kind handler_kind, StubType type=NORMAL, InlineCacheHolderFlag holder=OWN_MAP)
Map * elements_transition_map()
void set_second(String *second, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
static Object * GetObjectFromEntryAddress(Address location_of_address)
void set_memento_create_count(int count)
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size)
kInstanceClassNameOffset kNeedsAccessCheckBit remove_prototype
int NumberOfOwnDescriptors()
kInstanceClassNameOffset kNeedsAccessCheckBit kRemovePrototypeBit start_position_and_type
static MemoryChunk * FromAddress(Address a)
static const int kDataOffset
void VerifyApiCallResultType()
static HeapObject * cast(Object *obj)
static bool HasLocalElement(Handle< JSReceiver > object, uint32_t index)
MaybeObject * AllocateNewStorageFor(Heap *heap, Representation representation)
#define READ_UINT32_FIELD(p, offset)
static Handle< T > cast(Handle< S > that)
void set_function_bindings(FixedArray *bindings)
static const byte kArgumentMarker
static const int kMaxHashCalcLength
bool is_access_check_needed()
void set_pre_allocated_property_fields(int value)
static const byte kUndefined
static const int kConstructionCountOffset
int opt_count_and_bailout_reason()
String * ContinueOperation(int32_t *type_out, unsigned *length_out)
const int kVariableSizeSentinel
static void IterateBody(HeapObject *obj, int object_size, ObjectVisitor *v)
void Get(int descriptor_number, Descriptor *desc)
static const int kFastPropertiesSoftLimit
void ZapPrototypeTransitions()
PropertyAttributes property_attributes()
static const int kStackSlotsBitCount
HeapObject * UncheckedPrototypeTransitions()
static const int kJSBuiltinsCount
static ExternalTwoByteString * cast(Object *obj)
static const int kFullCodeFlags
SeededNumberDictionary * element_dictionary()
static Map * cast(Object *obj)
CodeFlusher * code_flusher()
void set_has_debug_break_slots(bool value)
Object * object_at(int i)
static void EnsureCanContainHeapObjectElements(Handle< JSObject > obj)
bool has_non_instance_prototype()
kSerializedDataOffset Object
bool prohibits_overwriting()
SubStringKey(Handle< String > string, int from, int length)
static StubType ExtractTypeFromFlags(Flags flags)
static const byte kTheHole
int32_t get_int32_entry(int index)
static const int kExponentBias
bool attached_to_shared_function_info()
static Object * RawUninitializedSentinel(Heap *heap)
void set_context(Object *context)
#define READ_FIELD(p, offset)
static void EnsureSize(Handle< JSArray > array, int minimum_size_of_backing_fixed_array)
void set(int index, double value)
uint32_t get_scalar(int index)
void Set(int descriptor_number, Descriptor *desc, const WhitenessWitness &)
bool SameValue(Object *other)
void Add(Handle< Map > map_to_find, Handle< Object > obj_to_replace)
static SeqOneByteString * cast(Object *obj)
#define MAKE_STRUCT_CAST(NAME, Name, name)
void set_is_crankshafted(bool value)
void set_object_at(int i, Object *object)
WriteBarrierMode GetWriteBarrierMode(const DisallowHeapAllocation &promise)
static Failure * Exception()
static const int kExternalPointerOffset
static MUST_USE_RESULT MaybeObject * AsObject(Heap *heap, Name *key)
static Foreign * cast(Object *obj)
MUST_USE_RESULT MaybeObject * GetElementsTransitionMapSlow(ElementsKind elements_kind)
bool marked_for_deoptimization()
const uint32_t kIsNotInternalizedMask
static bool IsMatch(Object *key, Object *other)
static Handle< Object > GetElementNoExceptionThrown(Isolate *isolate, Handle< Object > object, uint32_t index)
virtual bool IsMatch(Object *string)
static V8_INLINE bool HasHeapObjectTag(internal::Object *value)
#define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset)
static const int kIsAccessCheckNeeded
void set(int index, uint32_t value)
#define TYPED_ARRAY_TYPE_CHECKER(Type, type, TYPE, ctype, size)
bool IsTwoByteEqualTo(Vector< const uc16 > str)
uint16_t SlicedStringGet(int index)
static Smi * FromIntptr(intptr_t value)
#define READ_BYTE_FIELD(p, offset)
bool is_migration_target()
void change_ic_with_type_info_count(int count)
#define ASSERT(condition)
bool TooManyFastProperties(StoreFromKeyed store_mode=MAY_BE_STORE_FROM_KEYED)
void set(int index, int16_t value)
void set_profiler_ticks(int ticks)
#define READ_INT32_FIELD(p, offset)
Object * instance_prototype()
static Handle< Object > GetPropertyWithReceiver(Handle< Object > object, Handle< Object > receiver, Handle< Name > name, PropertyAttributes *attributes)
static Handle< Object > MegamorphicSentinel(Isolate *isolate)
const int kPointerSizeLog2
static const int kKindSpecificFlags2Offset
uint8_t get_scalar(int index)
void set_start_position(int start_position)
#define WRITE_INT_FIELD(p, offset, value)
static const int kInstanceSizeOffset
void set_optimizable(bool value)
#define READ_INT64_FIELD(p, offset)
#define WRITE_UINT32_FIELD(p, offset, value)
static Context * cast(Object *context)
static int OffsetOfFunctionWithId(Builtins::JavaScript id)
static uint32_t HashForObject(Object *key, Object *object)
static const int kSafepointTableOffsetBitCount
bool back_edges_patched_for_osr()
static const int kSourceIndex
static const int kForeignAddressOffset
static V8_INLINE bool IsValidSmi(intptr_t value)
#define WRITE_INTPTR_FIELD(p, offset, value)
const uint32_t kStringRepresentationMask
static const int kProfilerTicksOffset
bool NonFailureIsHeapObject()
int SizeFromMap(Map *map)
void set_compiled_optimizable(bool value)
uint8_t get_scalar(int index)
void IncrementMementoCreateCount()
Object * DataAt(int index)
Handle< Object > NewNumber(double value, PretenureFlag pretenure=NOT_TENURED)
Object ** GetKeySlot(int descriptor_number)
bool IsInternalError() const
ACCESSORS(AccessorInfo, expected_receiver_type, Object, kExpectedReceiverTypeOffset) ACCESSORS(DeclaredAccessorDescriptor
Name * GetSortedKey(int descriptor_number)
bool HasSpecificClassOf(String *name)
Name * GetKey(int transition_number)
int GetInternalFieldCount()
void initialize_elements()
static const int kUnusedPropertyFieldsOffset
const bool FLAG_enable_slow_asserts
int number_of_entries(DependencyGroup group)
const Resource * resource()
void set_first(String *first, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
void ReplaceCode(Code *code)
void SetRepresentation(int descriptor_number, Representation representation)
void set_map_and_elements(Map *map, FixedArrayBase *value, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
bool IsFastElementsKind(ElementsKind kind)
static ExternalAsciiString * cast(Object *obj)
MUST_USE_RESULT MaybeObject * EnsureWritableFastElements()
static MUST_USE_RESULT MaybeObject * NewWith(SimpleTransitionFlag flag, Name *key, Map *target, Object *back_pointer)
bool DigestPretenuringFeedback()
void set_the_hole(int index)
void init_back_pointer(Object *undefined)
int8_t get_scalar(int index)
void set_foreign_address(Address value)
MUST_USE_RESULT MaybeObject * Copy()
static const int kContextOffset
MUST_USE_RESULT MaybeObject * get(int index)
void SeqTwoByteStringSet(int index, uint16_t value)
static PropertyAttributes GetElementAttribute(Handle< JSReceiver > object, uint32_t index)
static Handle< Map > ExpectedTransitionTarget(Handle< Map > map)
static Code * cast(Object *obj)
#define CAST_ACCESSOR(type)
const uint32_t kShortExternalStringMask
#define WRITE_INT64_FIELD(p, offset, value)
Handle< Object > get_as_handle(int index)
bool AsArrayIndex(uint32_t *index)
kInstanceClassNameOffset BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype, kHiddenPrototypeBit) BOOL_ACCESSORS(FunctionTemplateInfo
void set_compilation_state(CompilationState state)
kSerializedDataOffset kPrototypeTemplateOffset kIndexedPropertyHandlerOffset kInstanceCallHandlerOffset internal_field_count
Object * GetValue(int descriptor_number)
static bool HasElement(Handle< JSReceiver > object, uint32_t index)
static const int kPretenureMinimumCreated
BOOL_GETTER(SharedFunctionInfo, compiler_hints, optimization_disabled, kOptimizationDisabled) void SharedFunctionInfo
static Object ** RawField(HeapObject *obj, int offset)
void change_own_type_change_checksum()
TransitionArray * unchecked_transition_array()
ConstantPoolArray * constant_pool()
static Smi * cast(Object *object)
void set_literals(FixedArray *literals)
WhitenessWitness(FixedArray *array)
static void IterateBody(HeapObject *obj, ObjectVisitor *v)
static uint32_t Hash(Object *key)
void ClearCodeCache(Heap *heap)
Object ** GetDescriptorStartSlot(int descriptor_number)
static const int kZeroHash
static const int kHeaderSize
FixedTypedArrayBase * EmptyFixedTypedArrayForMap(Map *map)
Code * javascript_builtin_code(Builtins::JavaScript id)
int GetInObjectPropertyOffset(int index)
kInstanceClassNameOffset flag
#define FIXED_TYPED_ELEMENTS_CHECK(Type, type, TYPE, ctype, size)
Object * GetInternalField(int index)
void set_dictionary_map(bool value)
static void TransitionElementsKind(Handle< JSObject > object, ElementsKind to_kind)
bool has_instance_call_handler()
void set_opt_count_and_bailout_reason(int value)
MUST_USE_RESULT MaybeObject * CopyFixedArrayWithMap(FixedArray *src, Map *map)
void set_has_instance_call_handler()
GlobalObject * global_object()
Object * InObjectPropertyAtPut(int index, Object *value, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
uint16_t ExternalTwoByteStringGet(int index)
Map * GetTransition(int transition_index)
MUST_USE_RESULT MaybeObject * AllocateHeapNumber(double value, PretenureFlag pretenure=NOT_TENURED)
static void SetContent(Handle< JSArray > array, Handle< FixedArrayBase > storage)
static const int kFirstOffset
void IterateNextCodeLink(ObjectVisitor *v, int offset)
ByteArray * unchecked_relocation_info()
static PropertyAttributes GetLocalElementAttribute(Handle< JSReceiver > object, uint32_t index)
void LookupTransition(JSObject *holder, Name *name, LookupResult *result)
bool HasFastSmiOrObjectElements()
MUST_USE_RESULT MaybeObject * get(int index)
static const int kKindOffset
V8_INLINE bool IsNull() const
static void NoWriteBarrierSet(FixedArray *array, int index, Object *value)
#define EXTERNAL_ELEMENTS_CHECK(Type, type, TYPE, ctype, size)
static const int kParentOffset
static Handle< String > ExpectedTransitionKey(Handle< Map > map)
Object ** GetDescriptorEndSlot(int descriptor_number)
void set_back_edges_patched_for_osr(bool value)
static const int kTransitionSize
const uint64_t kHoleNanInt64
void set_the_hole(int index)
#define READ_SHORT_FIELD(p, offset)
#define FIELD_ADDR(p, offset)
void set_opt_reenable_tries(int value)
static const int kStartPositionShift
void SetEntryCounts(int number_of_int64_entries, int number_of_code_ptr_entries, int number_of_heap_ptr_entries, int number_of_int32_entries)
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes number of stack frames inspected by the profiler percentage of ICs that must have type info to allow optimization extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long mode(MIPS only)") DEFINE_string(expose_natives_as
MUST_USE_RESULT MaybeObject * get(int index)
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object size
Object * get_heap_ptr_entry(int index)
static uint32_t ComputeUtf8Hash(Vector< const char > chars, uint32_t seed, int *utf16_length_out)
bool IsUtf8EqualTo(Vector< const char > str, bool allow_prefix_match=false)
static SeededNumberDictionary * cast(Object *obj)
static JSGlobalProxy * cast(Object *obj)
void Append(Descriptor *desc, const WhitenessWitness &)
virtual void Validate(JSObject *obj)=0
void set_ic_total_count(int count)
static const int kDescriptorLengthOffset
Vector< const Char > string_
STATIC_ASSERT(sizeof(CPURegister)==sizeof(Register))
MUST_USE_RESULT MaybeObject * SetValue(uint32_t index, Object *value)
static const int kExponentShift
MUST_USE_RESULT MaybeObject * get(int index)
static Cell * cast(Object *obj)
bool IsStringObjectWithCharacterAt(uint32_t index)
static const int kValueOffset
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes number of stack frames inspected by the profiler percentage of ICs that must have type info to allow optimization extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long expose natives in global object expose freeBuffer extension expose gc extension under the specified name expose externalize string extension number of stack frames to capture disable builtin natives files print name of functions for which code is generated use random jit cookie to mask large constants trace lazy optimization use adaptive optimizations always try to OSR functions trace optimize function deoptimization minimum length for automatic enable preparsing maximum number of optimization attempts before giving up cache prototype transitions trace debugging JSON request response trace out of bounds accesses to external arrays trace_js_array_abuse automatically set the debug break flag when debugger commands are in the queue abort by crashing maximum length of function source code printed in a stack trace max size of the new max size of the old max size of executable always perform global GCs print one trace line following each garbage collection do not print trace line after scavenger collection print statistics of the maximum memory committed for the heap in only print modified registers Don t break for ASM_UNIMPLEMENTED_BREAK macros print stack trace when an illegal exception is thrown randomize hashes to avoid predictable hash 
Fixed seed to use to hash property Print the time it takes to deserialize the snapshot testing_bool_flag testing_int_flag string flag tmp file in which to serialize heap Print the time it takes to lazily compile hydrogen code stubs concurrent_recompilation concurrent_sweeping Print usage including flags
const int kFailureTagSize
const uint32_t kHoleNanUpper32
void ReplaceCode(Code *code)
static InlineCacheHolderFlag ExtractCacheHolderFromFlags(Flags flags)
void ExternalTwoByteStringIterateBody()
void set_undefined(int index)
void set_migration_target(bool value)
static SlicedString * cast(Object *obj)
static const int kHashNotComputedMask
MUST_USE_RESULT MaybeObject * SetPrototypeTransitions(FixedArray *prototype_transitions)
MUST_USE_RESULT MaybeObject * get(int index)
static const int kDataIndex
static const int kDontAdaptArgumentsSentinel
int pre_allocated_property_fields()
static uint32_t SeededHash(uint32_t key, uint32_t seed)
static const int kScopeInfoOffset
void SetNumberOfProtoTransitions(int value)
#define WRITE_BARRIER(heap, object, offset, value)
double get_int64_entry_as_double(int index)
static ExtraICState ExtractExtraICStateFromFlags(Flags flags)
#define HAS_SMI_TAG(value)
static uint32_t update(uint32_tprevious, intvalue)
Context * native_context()
void InitializeBody(int object_size)
#define MAKE_STRUCT_PREDICATE(NAME, Name, name)
int64_t get_int64_entry(int index)
static const int kFirstOffset
static Failure * RetryAfterGC()
void IteratePointers(ObjectVisitor *v, int start, int end)
int SeqTwoByteStringSize(InstanceType instance_type)
Object * GetConstant(int descriptor_number)
static const int kNotFound
static bool IsValid(intptr_t value)
void set_resource(const Resource *buffer)
static Failure * cast(MaybeObject *object)
#define CALL_HEAP_FUNCTION(ISOLATE, FUNCTION_CALL, TYPE)
const uint32_t kIsIndirectStringMask
void set_inlined_type_change_checksum(int checksum)
#define READ_INT_FIELD(p, offset)
static const int kMinValue
bool ToArrayIndex(uint32_t *index)
MUST_USE_RESULT MaybeObject * ResetElements()
int SeqOneByteStringSize(InstanceType instance_type)
ElementsKind GetElementsKind()
byte * instruction_start()
bool HasSloppyArgumentsElements()
static Handle< Map > GetElementsTransitionMap(Handle< JSObject > object, ElementsKind to_kind)
const uint8_t * GetChars()
#define TYPE_CHECKER(type, instancetype)
static Oddball * cast(Object *obj)
static Address & Address_at(Address addr)
int GetInObjectPropertyOffset(int index)
int GetFieldIndex(int descriptor_number)
void VisitTwoByteString(const uint16_t *chars, unsigned length)
static Handle< Map > FindTransitionToField(Handle< Map > map, Handle< Name > key)
bool IsAligned(T value, U alignment)
static PropertyAttributes GetElementAttributeWithHandler(Handle< JSProxy > proxy, Handle< JSReceiver > receiver, uint32_t index)
static DependentCode * cast(Object *object)
void set_inobject_properties(int value)
unsigned safepoint_table_offset()
Object * RawFastPropertyAt(int index)
const uint16_t * ExternalTwoByteStringGetData(unsigned start)
static const int kBackingStoreOffset
void set(int index, float value)
#define WRITE_SHORT_FIELD(p, offset, value)
bool IsMoreGeneralElementsKindTransition(ElementsKind from_kind, ElementsKind to_kind)
int first_code_ptr_index()
AllocationSpace allocation_space() const
bool HasBuiltinFunctionId()
STATIC_ASSERT(NUMBER_OF_KINDS<=16)
virtual uint32_t HashForObject(Object *other)
static const int kMaxRegularHeapObjectSize
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes number of stack frames inspected by the profiler percentage of ICs that must have type info to allow optimization extra verbose compilation tracing generate extra code(assertions) for debugging") DEFINE_bool(code_comments
static MUST_USE_RESULT MaybeObject * Allocate(Heap *heap, int at_least_space_for, PretenureFlag pretenure=NOT_TENURED)
const uint32_t kOneByteDataHintMask
static Handle< Object > GetOrCreateIdentityHash(Handle< JSReceiver > object)
int Search(T *array, Name *name, int valid_entries)
OneByteStringKey(Vector< const uint8_t > str, uint32_t seed)
bool IsTwoByteRepresentationUnderneath()
static FunctionTemplateInfo * cast(Object *obj)
static const int kIsNotArrayIndexMask
static PropertyAttributes GetPropertyAttribute(Handle< JSReceiver > object, Handle< Name > name)
virtual bool IsMatch(Object *string)
int memento_found_count()
bool IsOneByteRepresentationUnderneath()
virtual uint32_t HashForObject(Object *other)
ExternalArray * EmptyExternalArrayForMap(Map *map)
static const int kPropertiesOffset
static const byte kUninitialized
static const int kStorage1Offset
MUST_USE_RESULT MaybeObject * get(int index)
T RoundUp(T x, intptr_t m)
static bool IsMatch(Name *key, Object *other)
PretenureFlag GetPretenureMode()
static PropertyAttributes GetPropertyAttributeWithReceiver(Handle< JSReceiver > object, Handle< JSReceiver > receiver, Handle< Name > name)
bool IsTwoByteRepresentation()
static NameDictionary * cast(Object *obj)
MUST_USE_RESULT MaybeObject * get(int index)
uint16_t ExternalAsciiStringGet(int index)
static Code * GetCodeFromTargetAddress(Address address)
bool is_inline_cache_stub()
static const int kInObjectPropertiesOffset
bool IsFastSmiElementsKind(ElementsKind kind)
void set_length(int value)
virtual MaybeObject * AsObject(Heap *heap)
bool AllowsSetElementsLength()
const uint32_t kShortExternalStringTag
void SetNumberOfOwnDescriptors(int number)
ElementsKind FastSmiToObjectElementsKind(ElementsKind from_kind)
Object * GetIdentityHash()
static String * Operate(String *, unsigned *, int32_t *, unsigned *)
void RemoveOptimizedFunction(JSFunction *function)
MUST_USE_RESULT MaybeObject * FastPropertyAt(Representation representation, int index)
void NotifyLeafMapLayoutChange()
void set(int index, uint8_t value)
int BinarySearch(T *array, Name *name, int low, int high, int valid_entries)
HeapObject * UncheckedPrototypeTransitions()
static int SizeFor(int length)
static const int kElementsOffset
void set_start_position_and_type(int value)
void set_resource(const Resource *buffer)
static Handle< Object > MonomorphicArraySentinel(Isolate *isolate, ElementsKind elements_kind)
PropertyDetails GetDetails(int descriptor_number)
static const int kIrregexpCaptureCountIndex
Object ** GetFirstElementAddress()
kSerializedDataOffset kPrototypeTemplateOffset kIndexedPropertyHandlerOffset kInstanceCallHandlerOffset kInternalFieldCountOffset DependentCode
static uint32_t HashForObject(uint32_t key, Object *object)
void set_type_feedback_info(Object *value, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
unsigned back_edge_table_offset()
BuiltinFunctionId builtin_function_id()
MUST_USE_RESULT MaybeObject * Copy()
const uint32_t kStringTag
byte * relocation_start()
InlineCacheState ic_state()
uint8_t * external_uint8_clamped_pointer()
void Reset(String *string, unsigned offset=0)
V8_INLINE bool IsUndefined() const
Address GetCharsAddress()
void set_construction_count(int value)
double get_scalar(int index)
MUST_USE_RESULT MaybeObject * Copy()
static const int kTypeFeedbackInfoOffset
uint16_t ConsStringGet(int index)
StringCharacterStream(String *string, ConsStringIteratorOp *op, unsigned offset=0)
DescriptorLookupCache * descriptor_lookup_cache()
const uint32_t kInternalizedTag
void set_map_no_write_barrier(Map *value)
Object * GetConstructor()
void initialize_storage()
static const int kRelocationInfoOffset
void DontAdaptArguments()
bool has_function_cache()
MUST_USE_RESULT MaybeObject * GetProperty(Name *key)
void set(int index, ElementType value)
static const int kSimpleTransitionIndex
virtual bool IsMatch(Object *string)
void SetTransition(int transition_index, Map *target)
BailoutReason DisableOptimizationReason()
virtual bool IsMatch(Object *string)
static int SizeFor(int length)
#define T(name, string, precedence)
static MUST_USE_RESULT MaybeObject * Allocate(Isolate *isolate, int number_of_transitions)
static TransitionArray * cast(Object *obj)
static const int kMaxLoopNestingMarker
static bool HasProperty(Handle< JSReceiver > object, Handle< Name > name)
bool IsFastSmiOrObjectElementsKind(ElementsKind kind)
static ElementsAccessor * ForKind(ElementsKind elements_kind)
static SeqTwoByteString * cast(Object *obj)
bool HasTransitionArray()
void SetDataAt(int index, Object *value)
static bool IsMatch(Object *key, Object *other)
static const int kHeaderSize
void set(int index, double value)
void SetElementsKind(ElementsKind kind)
MUST_USE_RESULT MaybeObject * NumberFromDouble(double value, PretenureFlag pretenure=NOT_TENURED)
static InlineCacheState ExtractICStateFromFlags(Flags flags)
CompilationState compilation_state()
bool has_deoptimization_support()
bool AsArrayIndex(uint32_t *index)
static Kind ExtractKindFromFlags(Flags flags)
static const int kMapOffset
bool has_named_interceptor()
void set_memento_found_count(int count)
bool HasPrototypeTransitions()
int32_t DoubleToInt32(double x)
bool is_the_hole(int index)
void set_instance_type(InstanceType value)
const uint32_t kIsNotStringMask
virtual uint32_t HashForObject(Object *other)
static HeapNumber * cast(Object *obj)
bool CanHaveMoreTransitions()
static PropertyAttributes GetElementAttributeWithReceiver(Handle< JSObject > object, Handle< JSReceiver > receiver, uint32_t index, bool continue_search)
NameDictionary * property_dictionary()
static uint32_t HashSequentialString(const schar *chars, int length, uint32_t seed)
void InitializeRepresentations(Representation representation)
void set_value(double value)
bool IsSimpleTransition()
MUST_USE_RESULT MaybeObject * CopyFixedArray(FixedArray *src)
bool should_have_prototype()
const uint32_t kNotInternalizedTag
static const int kLengthOffset
static double nan_value()
bool has_deoptimization_support()
void set_raw_kind_specific_flags1(int value)
static Handle< Object > UninitializedSentinel(Isolate *isolate)
void set_counters(int value)
void set_code_no_write_barrier(Code *code)
uint32_t ComputeIntegerHash(uint32_t key, uint32_t seed)
Handle< T > handle(T *t, Isolate *isolate)
bool is_the_hole(int index)
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function info
AccessorDescriptor * GetCallbacks(int descriptor_number)
static const int kOptimizableOffset
void set_bit_field3(uint32_t bits)
#define OBJECT_POINTER_ALIGN(value)
Map * GetTarget(int transition_number)
const intptr_t kObjectAlignment
bool IsWeakObjectInOptimizedCode(Object *object)
static const int kHasNonInstancePrototype
void SetInternalField(int index, Object *value)
PropertyType GetType(int descriptor_number)
name_should_print_as_anonymous
MUST_USE_RESULT MaybeObject * NumberFromUint32(uint32_t value, PretenureFlag pretenure=NOT_TENURED)
IncrementalMarking * incremental_marking()
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes number of stack frames inspected by the profiler percentage of ICs that must have type info to allow optimization extra verbose compilation tracing generate extra code(assertions) for debugging") DEFINE_bool(code_comments
ConsString * cons_string_
MUST_USE_RESULT MaybeObject * get(int index)
bool has_indexed_interceptor()
ElementsKind GetInitialFastElementsKind()
kInstanceClassNameOffset kNeedsAccessCheckBit kRemovePrototypeBit kIsExpressionBit kAllowLazyCompilation kUsesArguments kFormalParameterCountOffset PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, expected_nof_properties, kExpectedNofPropertiesOffset) PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo
void set_back_edge_table_offset(unsigned offset)
int16_t get_scalar(int index)
void SeqOneByteStringSet(int index, uint16_t value)
void ForeignIterateBody()
void SetNumberOfDescriptors(int number_of_descriptors)
Object * GetBackPointer()
virtual MaybeObject * AsObject(Heap *heap)
static Flags ComputeFlags(Kind kind, InlineCacheState ic_state=UNINITIALIZED, ExtraICState extra_ic_state=kNoExtraICState, StubType type=NORMAL, InlineCacheHolderFlag holder=OWN_MAP)
static const int kBitField3Offset
TwoByteStringKey(Vector< const uc16 > str, uint32_t seed)
bool IncrementMementoFoundCount()
Traits::ElementType ElementType
static const int kEntriesIndex
float get_scalar(int index)
static const uint32_t kSignMask
static ConsString * VisitFlat(Visitor *visitor, String *string, int offset, int length, int32_t type)
void set_bit_field(byte value)
#define WRITE_INT32_FIELD(p, offset, value)
static int SizeFor(int length)
static uint32_t HashForObject(Name *key, Object *object)
kSerializedDataOffset kPrototypeTemplateOffset kIndexedPropertyHandlerOffset instance_call_handler
const uint32_t kOneByteDataHintTag
static JSValue * cast(Object *obj)
static const int kHeaderSize
FunctionTemplateInfo * get_api_func_data()
void set_back_pointer_storage(Object *back_pointer, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
void set_strict_mode(StrictMode strict_mode)
static AllocationSiteMode GetMode(ElementsKind boilerplate_elements_kind)
static void Visit(String *string, unsigned offset, Visitor &visitor, ConsOp &cons_op, int32_t type, unsigned length)
const Resource * resource()
int number_of_descriptors()
static Handle< T > null()
#define WRITE_FIELD(p, offset, value)
static const int kFullStringRepresentationMask
void MemsetPointer(T **dest, U *value, int counter)
bool is_keyed_store_stub()
static int SizeFor(int number_of_int64_entries, int number_of_code_ptr_entries, int number_of_heap_ptr_entries, int number_of_int32_entries)
void set_major_key(int value)
bool IsInOptimizationQueue()
bool NeedsArgumentsAdaption()
bool HasFixedTypedArrayElements()
void Set(int index, uint16_t value)
static void NoIncrementalWriteBarrierSet(FixedArray *array, int index, Object *value)
bool IsMarkedForOptimization()
void set_is_access_check_needed(bool access_check_needed)
static void Expand(Handle< JSArray > array, int minimum_size_of_backing_fixed_array)
#define ASSERT_EQ(v1, v2)
void set_owns_descriptors(bool is_shared)
bool HasFastObjectElements()
static PropertyAttributes GetLocalPropertyAttribute(Handle< JSReceiver > object, Handle< Name > name)
int ic_with_type_info_count()
InstanceType instance_type()
static JSProxy * cast(Object *obj)
static const int kMaxFastProperties
static bool ShouldZapGarbage()
kSerializedDataOffset kPrototypeTemplateOffset kIndexedPropertyHandlerOffset kInstanceCallHandlerOffset kInternalFieldCountOffset ACCESSORS_TO_SMI(AllocationSite, pretenure_create_count, kPretenureCreateCountOffset) ACCESSORS(AllocationSite
static HeapObject * FromAddress(Address address)
static MUST_USE_RESULT MaybeObject * AsObject(Heap *heap, Object *key)
bool HasFastHoleyElements()
bool HasNamedInterceptor()
kInstanceClassNameOffset kNeedsAccessCheckBit kRemovePrototypeBit kIsExpressionBit kAllowLazyCompilation kUsesArguments formal_parameter_count
int NumberOfProtoTransitions()
bool requires_slow_elements()
int count_of_int32_entries()
MUST_USE_RESULT MaybeObject * get(int index)
SequentialStringKey(Vector< const Char > string, uint32_t seed)
const uint32_t kOneByteStringTag
void set(int index, byte value)
void VisitOneByteString(const uint8_t *chars, unsigned length)
PropertyDetails GetLastDescriptorDetails()
int own_type_change_checksum()
static double canonical_not_the_hole_nan_as_double()
void set(int index, Address value)
#define INT_ACCESSORS(holder, name, offset)
int start_position_and_type()
ElementType get_scalar(int index)
bool IsTemplateFor(Object *object)
static FixedArray * cast(Object *obj)
MUST_USE_RESULT MaybeObject * AllocateInternalizedStringFromUtf8(Vector< const char > str, int chars, uint32_t hash_field)
void AppendDescriptor(Descriptor *desc, const DescriptorArray::WhitenessWitness &)
StringHasher(int length, uint32_t seed)
static const int kHeaderSize
static Smi * set(Smi *smi, int bit_position, bool v)
bool IsCompatibleReceiver(Object *receiver)
kSerializedDataOffset kPrototypeTemplateOffset kIndexedPropertyHandlerOffset kInstanceCallHandlerOffset kInternalFieldCountOffset dependent_code
static Flags ComputeMonomorphicFlags(Kind kind, ExtraICState extra_ic_state=kNoExtraICState, InlineCacheHolderFlag holder=OWN_MAP, StubType type=NORMAL)
static Handle< TransitionArray > AddTransition(Handle< Map > map, Handle< Name > key, Handle< Map > target, SimpleTransitionFlag flag)
static HashTable * cast(Object *obj)
void set_is_extensible(bool value)
ElementsKind elements_kind()
void ClearTransitions(Heap *heap, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
Object * back_pointer_storage()
void set_is_shared(bool value)
void set_is_external(bool value)
static const int kKindSpecificFlags1Offset
void set_stub_info(int info)
void set_attached_to_shared_function_info(bool value)
int first_heap_ptr_index()
bool IsOneByteRepresentation()
void set_stack_slots(unsigned slots)
void AddOptimizedFunction(JSFunction *function)
int OffsetOfElementAt(int index)
MUST_USE_RESULT MaybeObject * NumberFromInt32(int32_t value, PretenureFlag pretenure=NOT_TENURED)
const uint32_t kIsIndirectStringTag
void SetEntry(int entry, Object *key, Object *value)
kSerializedDataOffset kPrototypeTemplateOffset indexed_property_handler
static const int kFlagsIndex
Object * GetCallbacksObject(int descriptor_number)
static const uint32_t kHashBitMask
static const int kStringEncodingMask
void set_instance_size(int value)
static const int kPrototypeOffset
void set_compiler_hints(int value)
bool IsFastHoleyElementsKind(ElementsKind kind)
static const int kFingerIndex
Address GetDataStartAddress()
static Handle< Object > GetElement(Isolate *isolate, Handle< Object > object, uint32_t index)
void set_formal_parameter_count(int value)
static uint32_t HashForObject(Object *key, Object *object)
bool HasDictionaryElements()
void set_javascript_builtin_code(Builtins::JavaScript id, Code *value)
bool HasOnlyOneByteChars()
ElementsAccessor * GetElementsAccessor()
bool HasFastDoubleElements()
static const int kAttachedToSharedFunctionInfo
String * TryFlattenGetString(PretenureFlag pretenure=NOT_TENURED)
int count_of_code_ptr_entries()
void set_bit_field2(byte value)
static MUST_USE_RESULT MaybeObject * AsObject(Heap *heap, Object *key)
kInstanceClassNameOffset kNeedsAccessCheckBit kRemovePrototypeBit kIsExpressionBit kAllowLazyCompilation uses_arguments
void set_marked_for_deoptimization(bool flag)
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric literals(0o77, 0b11)") DEFINE_bool(harmony_strings
static const int kHashShift
uint16_t get_scalar(int index)
void set_finger_index(int finger_index)
void set_map_word(MapWord map_word)
static Name * cast(Object *obj)
bool has_debug_break_slots()
static bool decode(uint32_tvalue)
static uint32_t Hash(Name *key)
bool HasIndexedInterceptor()
Object * unchecked_second()
static const byte kNotBooleanMask
MUST_USE_RESULT MaybeObject * get(int index)
int GetSortedKeyIndex(int descriptor_number)
static const int kExternalTwoByteRepresentationTag
Address foreign_address()
static const int kEntrySize
const int kFailureTypeTagMask
void set_compilation_type(CompilationType type)
static Flags RemoveTypeFromFlags(Flags flags)
kInstanceClassNameOffset kNeedsAccessCheckBit kRemovePrototypeBit kIsExpressionBit compiler_hints
void set_visitor_id(int visitor_id)
void set_should_be_freed(bool value)
MUST_USE_RESULT MaybeObject * set_elements_transition_map(Map *transitioned_map)
kSerializedDataOffset prototype_template
kInstanceClassNameOffset kNeedsAccessCheckBit kRemovePrototypeBit kIsExpressionBit allows_lazy_compilation
static int SizeFor(int length)
void set_property_attributes(PropertyAttributes attributes)
void set_code(Code *code)
static ConsString * cast(Object *obj)
void set_pretenure_decision(PretenureDecision decision)
void set_safepoint_table_offset(unsigned offset)
int count_of_heap_ptr_entries()
static FixedArrayBase * cast(Object *object)
bool is_compiled_optimizable()
static V8_INLINE internal::Object * IntToSmi(int value)
bool ContainsOnlySmisOrHoles()
void set_flags(Flags flags)
static bool CanTrack(InstanceType type)
static const int kMaxValue
bool IsMarkedForConcurrentOptimization()
static const int kCodeCacheOffset
static const int kBitField2Offset
static const int kConstantPoolOffset
#define WRITE_DOUBLE_FIELD(p, offset, value)
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes number of stack frames inspected by the profiler percentage of ICs that must have type info to allow optimization extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long expose natives in global object expose freeBuffer extension expose gc extension under the specified name expose externalize string extension number of stack frames to capture disable builtin natives files print name of functions for which code is generated use random jit cookie to mask large constants trace lazy optimization use adaptive optimizations always try to OSR functions trace optimize function deoptimization minimum length for automatic enable preparsing maximum number of optimization attempts before giving up cache prototype transitions trace debugging JSON request response trace out of bounds accesses to external arrays trace_js_array_abuse automatically set the debug break flag when debugger commands are in the queue abort by crashing maximum length of function source code printed in a stack trace max size of the new max size of the old max size of executable always perform global GCs print one trace line following each garbage collection do not print trace line after scavenger collection print statistics of the maximum memory committed for the heap in name
static const int kNotFound
void set_non_instance_prototype(bool value)
void increment_deopt_count()
void set(int index, int8_t value)
uint16_t SeqTwoByteStringGet(int index)
Object ** GetValueSlot(int descriptor_number)
static uint32_t SeededHashForObject(uint32_t key, uint32_t seed, Object *object)
ElementsKind GetHoleyElementsKind(ElementsKind packed_kind)
Vector< const char > string_
int LinearSearch(T *array, Name *name, int len, int valid_entries)
static const int kExponentOffset
static MUST_USE_RESULT MaybeObject * AsObject(Heap *heap, uint32_t key)
Address get_code_ptr_entry(int index)
void SetSortedKey(int pointer, int descriptor_number)
void set(int index, uint8_t value)
void InitializeDescriptors(DescriptorArray *descriptors)
void set_hash_field(uint32_t value)
FixedArray * GetPrototypeTransitions()
void set_allow_osr_at_loop_nesting_level(int level)
ExtraICState extra_ic_state()
static JSObject * cast(Object *obj)
uint32_t RoundUpToPowerOf2(uint32_t x)
bool HasExternalArrayElements()
int64_t get_representation(int index)
bool matches_inlined_type_change_checksum(int checksum)
#define MAKE_STRUCT_CASE(NAME, Name, name)
Object * javascript_builtin(Builtins::JavaScript id)
MarkCompactCollector * mark_compact_collector()
void set_raw_kind_specific_flags2(int value)
static bool IsHashFieldComputed(uint32_t field)
int Lookup(Map *source, Name *name)
static int OffsetOfCodeWithId(Builtins::JavaScript id)
void set(int index, int32_t value)
CompilationInfo * compilation_info_at(int i)
static uint32_t encode(Kindvalue)
PropertyDetails GetTargetDetails(int transition_number)
uint32_t max_number_key()
void set_initial_map(Map *value)
bool IsFastDoubleElementsKind(ElementsKind kind)
void set_has_function_cache(bool flag)
static const int kFirstIndex
void set_unused_property_fields(int value)
bool is_keyed_load_stub()
const uint32_t kStringEncodingMask
Name * GetKey(int descriptor_number)
void LookupDescriptor(JSObject *holder, Name *name, LookupResult *result)
const uint16_t * GetChars()
uint16_t SeqOneByteStringGet(int index)
static const int kIsExtensible
SMI_ACCESSORS(ConstantPoolArray, first_code_ptr_index, kFirstCodePointerIndexOffset) SMI_ACCESSORS(ConstantPoolArray
static const int kInstanceTypeOffset
static int ComputeCapacity(int at_least_space_for)
void ExternalAsciiStringIterateBody()
static const int kPreAllocatedPropertyFieldsOffset
int count_of_int64_entries()
void InitializeBody(Map *map, Object *pre_allocated_value, Object *filler_value)
static const int kStartPositionMask
void set_requires_slow_elements()
void EnterNoMarkingScope()
void set_parent(String *parent, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
static bool HasLocalProperty(Handle< JSReceiver >, Handle< Name > name)