44 char IC::TransitionMarkFromState(IC::State state) {
69 if (IsGrowStoreMode(mode)) return ".GROW";
74 void IC::TraceIC(const char* type,
75                  Handle<Object> name) {
77 Code* new_target = raw_target();
78 State new_state = new_target->ic_state();
79 PrintF("[%s%s in ", new_target->is_keyed_stub() ? "Keyed" : "", type);
80 StackFrameIterator it(isolate());
81 while (it.frame()->fp() != this->fp()) it.Advance();
83 if (raw_frame->is_internal()) {
85 Builtins::kFunctionApply);
86 if (raw_frame->unchecked_code() == apply_builtin) {
89 raw_frame = it.frame();
94 const char* modifier =
95 GetTransitionMarkModifier(
98 TransitionMarkFromState(state()),
99 TransitionMarkFromState(new_state),
106 #define TRACE_GENERIC_IC(isolate, type, reason) \
108 if (FLAG_trace_ic) { \
109 PrintF("[%s patching generic stub in ", type); \
110 JavaScriptFrame::PrintTop(isolate, stdout, false, true); \
111 PrintF(" (%s)]\n", reason); \
116 #define TRACE_GENERIC_IC(isolate, type, reason)
119 #define TRACE_IC(type, name) \
120 ASSERT((TraceIC(type, name), true))
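// IC::IC() constructor: walks up 'depth' frames to the frame that owns this IC,
// caches its frame pointer and return-address slot (and, with
// --enable-ool-constant-pool, the out-of-line constant pool), then records the
// current target code, its ic_state() and extra_ic_state().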
131 if (FLAG_enable_ool_constant_pool) {
142 if (FLAG_enable_ool_constant_pool) {
147 pc_address = reinterpret_cast<Address*>(fp + kCallerPCOffset);
152 for (int i = 0; i < depth + 1; i++) it.Advance();
154 ASSERT(fp == frame->fp() && pc_address == frame->pc_address());
157 if (FLAG_enable_ool_constant_pool) {
158 raw_constant_pool_ = handle(
162 pc_address_ = StackFrame::ResolveReturnAddressLocation(pc_address);
163 target_ = handle(raw_target(), isolate);
164 state_ = target_->ic_state();
165 extra_ic_state_ = target_->extra_ic_state();
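// Debugger support: GetOriginalCode() returns the unpatched code object for the
// current function, fetched from its DebugInfo, rather than the version the
// debugger has patched with break points.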
169 #ifdef ENABLE_DEBUGGER_SUPPORT
175 while (it.frame()->fp() != this->fp()) it.Advance();
180 return function->shared();
184 Code* IC::GetCode() const {
187 Code* code = shared->code();
192 Code* IC::GetOriginalCode() const {
194 Handle<SharedFunctionInfo> shared(GetSharedFunctionInfo(), isolate());
195 ASSERT(Debug::HasDebugInfo(shared));
196 Code* original_code = Debug::GetDebugInfo(shared)->original_code();
197 ASSERT(original_code->IsCode());
198 return original_code;
203 static bool HasInterceptorGetter(JSObject* object) {
204 return !object->GetNamedInterceptor()->getter()->IsUndefined();
208 static bool HasInterceptorSetter(JSObject* object) {
209 return !object->GetNamedInterceptor()->setter()->IsUndefined();
213 static void LookupForRead(Handle<Object> object,
215 LookupResult* lookup) {
219 object->Lookup(*name, lookup);
223 if (!lookup->IsInterceptor() || !lookup->IsCacheable()) {
227 Handle<JSObject> holder(lookup->holder(), lookup->isolate());
228 if (HasInterceptorGetter(*holder)) {
232 holder->LocalLookupRealNamedProperty(*name, lookup);
233 if (lookup->IsFound()) {
234 ASSERT(!lookup->IsInterceptor());
238 Handle<Object> proto(holder->GetPrototype(), lookup->isolate());
239 if (proto->IsNull()) {
240 ASSERT(!lookup->IsFound());
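// Stub invalidation: when the receiver's map, prototype chain, or a global
// property cell no longer matches what a cached stub (or its handlers) was
// compiled for, the stale code is removed from the map's code cache.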
251 if (target()->is_keyed_stub()) {
253 if (!name->IsName()) return false;
255 if (*name != stub_name) return false;
261 switch (cache_holder) {
265 if (!receiver->IsJSObject()) return false;
284 int index = map->IndexInCodeCache(*name, *target());
286 map->RemoveFromCodeCache(*name, *target(), index);
302 if (old_map == *map) return true;
303 if (old_map != NULL) {
306 map->elements_kind())) {
312 if (receiver->IsGlobalObject()) {
313 LookupResult lookup(isolate());
316 if (!lookup.IsFound()) return false;
327 target()->FindHandlers(&handlers);
328 for (int i = 0; i < handlers.length(); i++) {
330 int index = map->IndexInCodeCache(*name, *handler);
332 map->RemoveFromCodeCache(*name, *handler, index);
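// ComputeTypeInfoCountDelta() returns +1 when an IC leaves UNINITIALIZED and -1
// when it falls back to it; PostPatching() applies that delta to the type-info
// counters of the host Code::FUNCTION object, which are consulted when
// FLAG_type_info_threshold is set.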
389 bool was_uninitialized =
391 bool is_uninitialized =
393 return (was_uninitialized && !is_uninitialized) ? 1 :
394 (!was_uninitialized && is_uninitialized) ? -1 : 0;
400 Code* host = isolate->
401 inner_pointer_to_code_cache()->GetCacheEntry(address)->code;
402 if (host->kind() != Code::FUNCTION) return;
404 if (FLAG_type_info_threshold > 0 &&
407 int delta = ComputeTypeInfoCountDelta(old_target->ic_state(),
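// IC::Clear() dispatches on the code kind and resets the IC at 'address' back
// to an initial stub; loads and stores go back to a pre-monomorphic stub found
// via the stub cache.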
436 switch (target->kind()) {
438 return LoadIC::Clear(isolate, address, target, constant_pool);
439 case Code::KEYED_LOAD_IC:
440 return KeyedLoadIC::Clear(isolate, address, target, constant_pool);
442 return StoreIC::Clear(isolate, address, target, constant_pool);
443 case Code::KEYED_STORE_IC:
444 return KeyedStoreIC::Clear(isolate, address, target, constant_pool);
445 case Code::COMPARE_IC:
446 return CompareIC::Clear(isolate, address, target, constant_pool);
447 case Code::COMPARE_NIL_IC:
449 case Code::BINARY_OP_IC:
450 case Code::TO_BOOLEAN_IC:
459 void KeyedLoadIC::Clear(Isolate* isolate,
471 void LoadIC::Clear(Isolate* isolate,
474 ConstantPoolArray* constant_pool) {
476 Code* code = target->GetIsolate()->stub_cache()->FindPreMonomorphicIC(
477 Code::LOAD_IC, target->extra_ic_state());
482 void StoreIC::Clear(Isolate* isolate,
485 ConstantPoolArray* constant_pool) {
487 Code* code = target->GetIsolate()->stub_cache()->FindPreMonomorphicIC(
488 Code::STORE_IC, target->extra_ic_state());
493 void KeyedStoreIC::Clear(Isolate* isolate,
496 ConstantPoolArray* constant_pool) {
505 void CompareIC::Clear(Isolate* isolate,
508 ConstantPoolArray* constant_pool) {
509 ASSERT(target->major_key() == CodeStub::CompareIC);
513 &handler_state, &op);
521 static bool MigrateDeprecated(Handle<Object> object) {
522 if (!object->IsJSObject()) return false;
524 if (!receiver->map()->is_deprecated()) return false;
535 return TypeError("non_object_property_load", object, name);
540 if (object->IsJSFunction() &&
541 name->Equals(isolate()->heap()->prototype_string()) &&
545 stub = pre_monomorphic_stub();
548 stub = function_prototype_stub.GetCode(isolate());
555 if (FLAG_trace_ic) PrintF("[LoadIC : +#prototype /function]\n");
564 if (kind() == Code::KEYED_LOAD_IC && name->AsArrayIndex(&index)) {
573 bool use_ic = MigrateDeprecated(object) ? false : FLAG_use_ic;
576 LookupResult lookup(isolate());
577 LookupForRead(object, name, &lookup);
580 if (!lookup.IsFound()) {
584 LOG(isolate(), SuspectReadEvent(*name, *object));
597 if ((lookup.IsInterceptor() || lookup.IsHandler()) &&
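// Polymorphic bookkeeping: AddOneReceiverMapIfMissing() extends the list of
// receiver maps seen so far, and the update code below rewrites the polymorphic
// stub, preferring to overwrite a handler whose map was deprecated or whose
// type matches the incoming one instead of growing past four valid types.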
606 static bool AddOneReceiverMapIfMissing(MapHandleList* receiver_maps,
609 for (int current = 0; current < receiver_maps->length(); ++current) {
610 if (!receiver_maps->at(current).is_null() &&
611 receiver_maps->at(current).is_identical_to(new_receiver_map)) {
615 receiver_maps->Add(new_receiver_map);
623 if (!code->is_handler()) return false;
627 target()->FindAllTypes(&types);
628 int number_of_types = types.length();
629 int deprecated_types = 0;
630 int handler_to_overwrite = -1;
632 for (int i = 0; i < number_of_types; i++) {
634 if (current_type->IsClass() && current_type->AsClass()->is_deprecated()) {
637 } else if (type->IsCurrently(current_type)) {
641 handler_to_overwrite = i;
642 } else if (handler_to_overwrite == -1 &&
643 current_type->IsClass() &&
647 handler_to_overwrite = i;
651 int number_of_valid_types =
652 number_of_types - deprecated_types - (handler_to_overwrite != -1);
654 if (number_of_valid_types >= 4) return false;
655 if (number_of_types == 0) return false;
656 if (!target()->FindHandlers(&handlers, types.length())) return false;
658 number_of_valid_types++;
659 if (handler_to_overwrite >= 0) {
660 handlers.Set(handler_to_overwrite, code);
661 if (!type->IsCurrently(types.at(handler_to_overwrite))) {
662 types.Set(handler_to_overwrite, type);
677 return object->IsJSGlobalObject()
684 if (type->Is(HeapType::Number()))
685 return isolate->factory()->heap_number_map();
686 if (type->Is(HeapType::Boolean())) return isolate->factory()->oddball_map();
697 typename T::Region* region) {
699 return T::Number(region);
702 return T::Boolean(region);
704 return T::Class(map, region);
720 if (!handler->is_handler()) return set_target(*handler);
730 target()->FindAllTypes(&types);
731 if (!target()->FindHandlers(&handlers, types.length())) return;
732 for (int i = 0; i < types.length(); i++) {
739 if (source_map == NULL) return true;
740 if (target_map == NULL) return false;
742 bool more_general_transition =
745 Map* transitioned_map = more_general_transition
749 return transitioned_map == target_map;
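// Handler selection for loads: simple in-object field loads are answered with a
// (Keyed)LoadFieldStub, while CompileHandler() below builds a specialized
// handler for the other lookup results: fields, constants, global property
// cells, accessor callbacks, API getters, and interceptors.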
764 if (!target()->is_keyed_stub()) {
802 if (kind() == Code::LOAD_IC) {
804 return stub.GetCode(isolate());
806 KeyedLoadFieldStub stub(inobject, offset, representation);
807 return stub.GetCode(isolate());
826 if (!lookup->IsCacheable()) {
829 } else if (!lookup->IsProperty()) {
830 if (kind() == Code::LOAD_IC) {
858 isolate(), *object, cache_holder));
861 name, handle(stub_holder->map()), kind(), cache_holder,
868 ASSERT(code->is_handler());
883 if (object->IsString() && name->Equals(isolate()->heap()->length_string())) {
885 return SimpleFieldLoad(length_index);
888 if (object->IsStringWrapper() &&
889 name->Equals(isolate()->heap()->length_string())) {
890 if (kind() == Code::LOAD_IC) {
892 return string_length_stub.GetCode(isolate());
895 return string_length_stub.GetCode(isolate());
903 switch (lookup->type()) {
906 if (object.is_identical_to(holder)) {
907 return SimpleFieldLoad(field.translate(holder),
909 lookup->representation());
911 return compiler.CompileLoadField(
912 type, holder, name, field, lookup->representation());
918 if (constant->IsConsString()) break;
919 return compiler.CompileLoadConstant(type, holder, name, constant);
922 if (kind() != Code::LOAD_IC) break;
923 if (holder->IsGlobalObject()) {
926 global->GetPropertyCell(lookup), isolate());
928 type, global, cell, name, lookup->IsDontDelete());
931 isolate(), *object, cache_holder));
939 if (!object.is_identical_to(holder)) break;
943 if (object->IsJSObject()) {
948 if (Accessors::IsJSObjectFieldAccessor<HeapType>(
949 type, name, &object_offset)) {
955 if (callback->IsExecutableAccessorInfo()) {
958 if (v8::ToCData<Address>(info->getter()) == 0) break;
959 if (!info->IsCompatibleReceiver(*object)) break;
960 return compiler.CompileLoadCallback(type, holder, name, info);
961 } else if (callback->IsAccessorPair()) {
964 if (!getter->IsJSFunction()) break;
965 if (holder->IsGlobalObject()) break;
966 if (!holder->HasFastProperties()) break;
968 if (!object->IsJSObject() &&
969 !function->IsBuiltin() &&
970 function->shared()->strict_mode() == SLOPPY) {
975 CallOptimization call_optimization(function);
976 if (call_optimization.is_simple_api_call() &&
977 call_optimization.IsCompatibleReceiver(object, holder)) {
978 return compiler.CompileLoadCallback(
979 type, holder, name, call_optimization);
981 return compiler.CompileLoadViaGetter(type, holder, name, function);
984 if (callback->IsDeclaredAccessorInfo()) break;
985 ASSERT(callback->IsForeign());
990 ASSERT(HasInterceptorGetter(*holder));
991 return compiler.CompileLoadInterceptor(type, holder, name);
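// TryConvertKey() normalizes keyed-access keys before caching: a NaN heap
// number becomes the "nan" string, other heap numbers are truncated with
// FastD2I(), and undefined becomes the "undefined" string.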
1003 if (key->IsHeapNumber()) {
1006 key = isolate->factory()->nan_string();
1008 int int_value = FastD2I(value);
1013 } else if (key->IsUndefined()) {
1014 key = isolate->factory()->undefined_string();
1037 if (target().is_identical_to(string_stub())) {
1038 target_receiver_maps.Add(isolate()->factory()->string_map());
1040 target()->FindAllMaps(&target_receiver_maps);
1041 if (target_receiver_maps.length() == 0) {
1055 target_receiver_maps.at(0)->elements_kind(),
1056 receiver->GetElementsKind())) {
1064 if (!AddOneReceiverMapIfMissing(&target_receiver_maps, receiver_map)) {
1079 &target_receiver_maps);
1084 if (MigrateDeprecated(object)) {
1088 MaybeObject* maybe_object = NULL;
1093 key = TryConvertKey(key, isolate());
1095 if (key->IsInternalizedString()) {
1097 if (maybe_object->IsFailure()) return maybe_object;
1098 } else if (FLAG_use_ic && !object->IsAccessCheckNeeded()) {
1101 } else if (object->IsJSObject()) {
1103 if (receiver->elements()->map() ==
1104 isolate()->heap()->sloppy_arguments_elements_map()) {
1105 stub = sloppy_arguments_stub();
1106 } else if (receiver->HasIndexedInterceptor()) {
1107 stub = indexed_interceptor_stub();
1108 } else if (!key->ToSmi()->IsFailure() &&
1109 (!target().is_identical_to(sloppy_arguments_stub()))) {
1123 if (maybe_object != NULL) return maybe_object;
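// LookupForWrite() decides whether a StoreIC may cache this store: it follows
// interceptors to the underlying property, rejects read-only or non-cacheable
// lookups, and for not-yet-present properties requires a map transition whose
// target descriptor can hold the value being stored.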
1131 LookupResult* lookup,
1134 receiver->Lookup(*name, lookup);
1135 if (lookup->IsFound()) {
1136 if (lookup->IsInterceptor() && !HasInterceptorSetter(lookup->holder())) {
1137 receiver->LocalLookupRealNamedProperty(*name, lookup);
1138 if (!lookup->IsFound()) return false;
1141 if (lookup->IsReadOnly() || !lookup->IsCacheable()) return false;
1142 if (lookup->holder() == *receiver) return lookup->CanHoldValue(value);
1143 if (lookup->IsPropertyCallbacks()) return true;
1147 if (receiver->IsJSGlobalProxy()) {
1148 return lookup->holder() == receiver->GetPrototype();
1153 if (lookup->IsInterceptor() || lookup->IsNormal()) return false;
1154 holder = Handle<JSObject>(lookup->holder(), lookup->isolate());
1162 receiver->map()->LookupTransition(*holder, *name, lookup);
1163 if (!lookup->IsTransition()) return false;
1164 PropertyDetails target_details = lookup->GetTransitionDetails();
1165 if (target_details.IsReadOnly()) return false;
1173 ASSERT(!receiver->map()->is_deprecated());
1175 Handle<Map> target(lookup->GetTransitionTarget());
1177 target, target->LastAdded(),
1181 receiver->map()->LookupTransition(*holder, *name, lookup);
1182 if (!lookup->IsTransition()) return false;
1193 if (MigrateDeprecated(object) || object->IsJSProxy()) {
1203 return TypeError("non_object_property_store", object, name);
1208 name->Equals(isolate()->heap()->length_string())) {
1209 return TypeError("strict_read_only_property", object, name);
1214 if (!object->IsJSObject()) return *value;
1220 if (name->AsArrayIndex(&index)) {
1228 if (receiver->map()->is_observed()) {
1235 LookupResult lookup(isolate());
1236 bool can_store = LookupForWrite(receiver, name, value, &lookup, this);
1239 !(lookup.IsProperty() && lookup.IsReadOnly()) &&
1240 object->IsGlobalObject()) {
1249 } else if (can_store) {
1251 } else if (!name->IsCacheable(isolate()) ||
1252 lookup.IsNormal() ||
1253 (lookup.IsField() && lookup.CanHoldValue(value))) {
1297 ASSERT(lookup->IsFound());
1300 ASSERT(!lookup->IsHandler());
1314 if (object->IsAccessCheckNeeded()) return slow_stub();
1322 switch (lookup->type()) {
1324 return compiler.CompileStoreField(receiver, lookup, name);
1328 Handle<Map> transition(lookup->GetTransitionTarget());
1329 PropertyDetails details = transition->GetLastDescriptorDetails();
1331 if (details.type() == CALLBACKS || details.attributes() != NONE) break;
1333 return compiler.CompileStoreTransition(
1334 receiver, lookup, transition, name);
1337 if (kind() == Code::KEYED_STORE_IC) break;
1338 if (receiver->IsJSGlobalProxy() || receiver->IsGlobalObject()) {
1348 union_type->IsConstant(), receiver->IsJSGlobalProxy());
1355 ASSERT(holder.is_identical_to(receiver));
1359 if (callback->IsExecutableAccessorInfo()) {
1362 if (v8::ToCData<Address>(info->setter()) == 0) break;
1363 if (!holder->HasFastProperties()) break;
1364 if (!info->IsCompatibleReceiver(*receiver)) break;
1365 return compiler.CompileStoreCallback(receiver, holder, name, info);
1366 } else if (callback->IsAccessorPair()) {
1369 if (!setter->IsJSFunction()) break;
1370 if (holder->IsGlobalObject()) break;
1371 if (!holder->HasFastProperties()) break;
1373 CallOptimization call_optimization(function);
1374 if (call_optimization.is_simple_api_call() &&
1375 call_optimization.IsCompatibleReceiver(receiver, holder)) {
1376 return compiler.CompileStoreCallback(
1377 receiver, holder, name, call_optimization);
1379 return compiler.CompileStoreViaSetter(
1383 if (callback->IsDeclaredAccessorInfo()) break;
1384 ASSERT(callback->IsForeign());
1389 if (receiver->IsJSArray() &&
1390 name->Equals(isolate()->heap()->length_string()) &&
1392 receiver->HasFastProperties()) {
1393 return compiler.CompileStoreArrayLength(receiver, lookup, name);
1400 if (kind() == Code::KEYED_STORE_IC) break;
1401 ASSERT(HasInterceptorSetter(*holder));
1402 return compiler.CompileStoreInterceptor(receiver, name);
1421 return generic_stub();
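// KeyedStoreIC::StoreElementStub(): picks a monomorphic element-store stub, a
// polymorphic stub covering the receiver maps (and elements-kind transitions)
// seen so far, or falls back to the generic stub once too many maps accumulate
// or external and normal arrays are mixed at the same site.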
1428 Handle<Map> monomorphic_map = ComputeTransitionedMap(receiver, store_mode);
1429 store_mode = GetNonTransitioningStoreMode(store_mode);
1435 target()->FindAllMaps(&target_receiver_maps);
1436 if (target_receiver_maps.length() == 0) {
1440 return generic_stub();
1449 Handle<Map> previous_receiver_map = target_receiver_maps.at(0);
1451 Handle<Map> transitioned_receiver_map = receiver_map;
1452 if (IsTransitionStoreMode(store_mode)) {
1453 transitioned_receiver_map = ComputeTransitionedMap(receiver, store_mode);
1455 if ((receiver_map.is_identical_to(previous_receiver_map) &&
1456 IsTransitionStoreMode(store_mode)) ||
1458 *transitioned_receiver_map)) {
1462 store_mode = GetNonTransitioningStoreMode(store_mode);
1464 transitioned_receiver_map, strict_mode(), store_mode);
1465 } else if (*previous_receiver_map == receiver->map() &&
1481 AddOneReceiverMapIfMissing(&target_receiver_maps, receiver_map);
1483 if (IsTransitionStoreMode(store_mode)) {
1485 ComputeTransitionedMap(receiver, store_mode);
1486 map_added |= AddOneReceiverMapIfMissing(&target_receiver_maps,
1487 transitioned_receiver_map);
1494 return generic_stub();
1501 return generic_stub();
1506 store_mode = GetNonTransitioningStoreMode(store_mode);
1509 store_mode = old_store_mode;
1510 } else if (store_mode != old_store_mode) {
1512 return generic_stub();
1520 int external_arrays = 0;
1521 for (int i = 0; i < target_receiver_maps.length(); ++i) {
1522 if (target_receiver_maps[i]->has_external_array_elements() ||
1523 target_receiver_maps[i]->has_fixed_typed_array_elements()) {
1527 if (external_arrays != 0 &&
1528 external_arrays != target_receiver_maps.length()) {
1530 "unsupported combination of external and normal arrays");
1531 return generic_stub();
1536 &target_receiver_maps, store_mode, strict_mode());
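// GetStoreMode() classifies a keyed store by looking at the key and receiver:
// an out-of-bounds store on a JSArray that would stay fast may grow the backing
// store, and writing a heap number or other heap object into fast Smi or double
// elements forces an elements-kind transition.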
1543 switch (store_mode) {
1563 ASSERT(receiver->map()->has_external_array_elements());
1576 if (receiver->IsJSArray()) {
1580 return index >= receiver->elements()->length();
1587 ASSERT(!key->ToSmi()->IsFailure());
1588 Smi* smi_key = NULL;
1589 key->ToSmi()->To(&smi_key);
1590 int index = smi_key->value();
1594 bool allow_growth = receiver->IsJSArray() && oob_access &&
1595 !receiver->WouldConvertToSlowElements(key);
1598 if (receiver->HasFastSmiElements()) {
1599 if (value->IsHeapNumber()) {
1600 if (receiver->HasFastHoleyElements()) {
1606 if (value->IsHeapObject()) {
1607 if (receiver->HasFastHoleyElements()) {
1613 } else if (receiver->HasFastDoubleElements()) {
1614 if (!value->IsSmi() && !value->IsHeapNumber()) {
1615 if (receiver->HasFastHoleyElements()) {
1625 if (receiver->HasFastSmiElements()) {
1626 if (value->IsHeapNumber()) {
1627 if (receiver->HasFastHoleyElements()) {
1632 } else if (value->IsHeapObject()) {
1633 if (receiver->HasFastHoleyElements()) {
1639 } else if (receiver->HasFastDoubleElements()) {
1640 if (!value->IsSmi() && !value->IsHeapNumber()) {
1641 if (receiver->HasFastHoleyElements()) {
1648 if (!FLAG_trace_external_array_abuse &&
1649 receiver->map()->has_external_array_elements() && oob_access) {
1652 Heap* heap = receiver->GetHeap();
1653 if (receiver->elements()->map() == heap->fixed_cow_array_map()) {
1665 if (MigrateDeprecated(object)) {
1677 key = TryConvertKey(key, isolate());
1679 MaybeObject* maybe_object = NULL;
1682 if (key->IsInternalizedString()) {
1687 if (maybe_object->IsFailure()) return maybe_object;
1689 bool use_ic = FLAG_use_ic &&
1690 !object->IsAccessCheckNeeded() &&
1691 !object->IsJSGlobalProxy() &&
1692 !(object->IsJSObject() &&
1694 if (use_ic && !object->IsSmi()) {
1699 if (heap_object->map()->IsMapInArrayPrototypeChain()) use_ic = false;
1703 ASSERT(!object->IsAccessCheckNeeded());
1705 if (object->IsJSObject()) {
1707 bool key_is_smi_like = key->IsSmi() || !key->ToSmi()->IsFailure();
1708 if (receiver->elements()->map() ==
1709 isolate()->heap()->sloppy_arguments_elements_map()) {
1711 stub = sloppy_arguments_stub();
1713 } else if (key_is_smi_like &&
1714 !(target().is_identical_to(sloppy_arguments_stub()))) {
1719 if (!(receiver->map()->DictionaryElementsInPrototypeChainOnly())) {
1721 GetStoreMode(receiver, key, value);
1730 if (*stub == *generic_stub()) {
1738 if (maybe_object) return maybe_object;
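// Runtime miss handlers: each entry below rebuilds the IC from the calling
// frame, lets it update its state for the receiver and key it actually saw, and
// then performs the load or store, returning the result (or a failure) to the
// generated code that missed.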
1759 ASSERT(args.length() == 2);
1763 ic.UpdateState(receiver, key);
1764 return ic.Load(receiver, key);
1771 ASSERT(args.length() == 2);
1775 ic.UpdateState(receiver, key);
1776 return ic.Load(receiver, key);
1782 ASSERT(args.length() == 2);
1786 ic.UpdateState(receiver, key);
1787 return ic.Load(receiver, key);
1794 ASSERT(args.length() == 3);
1798 ic.UpdateState(receiver, key);
1799 return ic.Store(receiver, key, args.at<Object>(2));
1805 ASSERT(args.length() == 3);
1809 ic.UpdateState(receiver, key);
1810 return ic.Store(receiver, key, args.at<Object>(2));
1817 ASSERT(args.length() == 2);
1826 LookupResult debug_lookup(isolate);
1827 receiver->LocalLookup(isolate->heap()->length_string(), &debug_lookup);
1828 ASSERT(debug_lookup.IsPropertyCallbacks() && !debug_lookup.IsReadOnly());
1841 SealHandleScope shs(isolate);
1842 ASSERT(args.length() == 3);
1850 ASSERT(object->HasFastProperties());
1851 ASSERT(object->map()->unused_property_fields() == 0);
1854 FixedArray* old_storage = object->properties();
1856 int new_size = old_storage->length() + new_unused + 1;
1858 MaybeObject* maybe_result = old_storage->CopySize(new_size);
1859 if (!maybe_result->ToObject(&result)) return maybe_result;
1863 Object* to_store = value;
1867 if (details.representation().IsDouble()) {
1868 MaybeObject* maybe_storage =
1870 if (!maybe_storage->To(&to_store)) return maybe_storage;
1873 new_storage->set(old_storage->length(), to_store);
1876 object->set_properties(new_storage);
1877 object->set_map(transition);
1887 ASSERT(args.length() == 3);
1891 ic.UpdateState(receiver, key);
1892 return ic.Store(receiver, key, args.at<Object>(2));
1898 ASSERT(args.length() == 3);
1902 ic.UpdateState(receiver, key);
1903 return ic.Store(receiver, key, args.at<Object>(2));
1909 ASSERT(args.length() == 3);
1926 ASSERT(args.length() == 3);
1943 ASSERT(args.length() == 4);
1950 if (object->IsJSObject()) {
1952 map->elements_kind());
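// BinaryOpIC::State packs the operator, overwrite mode, the left/right/result
// operand kinds (SMI, INT32, NUMBER, STRING, GENERIC) and an optional fixed
// right argument (a power of two used for x % 2^n) into ExtraICState bit
// fields; the constructor decodes them and GetExtraICState() re-encodes them.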
1963 BinaryOpIC::State::State(ExtraICState extra_ic_state) {
1968 FIRST_TOKEN + OpField::decode(extra_ic_state));
1969 mode_ = OverwriteModeField::decode(extra_ic_state);
1971 HasFixedRightArgField::decode(extra_ic_state),
1972 1 << FixedRightArgValueField::decode(extra_ic_state));
1973 left_kind_ = LeftKindField::decode(extra_ic_state);
1974 if (fixed_right_arg_.has_value) {
1975 right_kind_ = Smi::IsValid(fixed_right_arg_.value) ? SMI : INT32;
1977 right_kind_ = RightKindField::decode(extra_ic_state);
1979 result_kind_ = ResultKindField::decode(extra_ic_state);
1985 ExtraICState BinaryOpIC::State::GetExtraICState() const {
1986 bool sse2 = (Max(result_kind_, Max(left_kind_, right_kind_)) > SMI &&
1989 SSE2Field::encode(sse2) |
1990 OpField::encode(op_ - FIRST_TOKEN) |
1991 OverwriteModeField::encode(mode_) |
1992 LeftKindField::encode(left_kind_) |
1993 ResultKindField::encode(result_kind_) |
1994 HasFixedRightArgField::encode(fixed_right_arg_.has_value);
1995 if (fixed_right_arg_.has_value) {
1996 extra_ic_state = FixedRightArgValueField::update(
1999 extra_ic_state = RightKindField::update(extra_ic_state, right_kind_);
2001 return extra_ic_state;
2006 void BinaryOpIC::State::GenerateAheadOfTime(
2007 Isolate* isolate, void (*Generate)(Isolate*, const State&)) {
2012 #define GENERATE(op, left_kind, right_kind, result_kind, mode) \
2014 State state(op, mode); \
2015 state.left_kind_ = left_kind; \
2016 state.fixed_right_arg_.has_value = false; \
2017 state.right_kind_ = right_kind; \
2018 state.result_kind_ = result_kind; \
2019 Generate(isolate, state); \
2207 #define GENERATE(op, left_kind, fixed_right_arg_value, result_kind, mode) \
2209 State state(op, mode); \
2210 state.left_kind_ = left_kind; \
2211 state.fixed_right_arg_.has_value = true; \
2212 state.fixed_right_arg_.value = fixed_right_arg_value; \
2213 state.right_kind_ = SMI; \
2214 state.result_kind_ = result_kind; \
2215 Generate(isolate, state); \
2228 Type* BinaryOpIC::State::GetResultType(Zone* zone) const {
2229 Kind result_kind = result_kind_;
2230 if (HasSideEffects()) {
2233 return Type::Union(Type::Number(zone), Type::String(zone), zone);
2234 } else if (result_kind == NUMBER && op_ == Token::SHR) {
2235 return Type::Unsigned32(zone);
2238 return KindToType(result_kind, zone);
2246 if (CouldCreateAllocationMementos()) stream->Add("_CreateAllocationMementos");
2247 stream->Add(":%s*", KindToString(left_kind_));
2248 if (fixed_right_arg_.has_value) {
2249 stream->Add("%d", fixed_right_arg_.value);
2251 stream->Add("%s", KindToString(right_kind_));
2253 stream->Add("->%s)", KindToString(result_kind_));
2257 void BinaryOpIC::State::Update(Handle<Object> left,
2258 Handle<Object> right,
2259 Handle<Object> result) {
2262 left_kind_ = UpdateKind(left, left_kind_);
2263 right_kind_ = UpdateKind(right, right_kind_);
2265 int32_t fixed_right_arg_value = 0;
2266 bool has_fixed_right_arg =
2267 op_ == Token::MOD &&
2268 right->ToInt32(&fixed_right_arg_value) &&
2269 fixed_right_arg_value > 0 &&
2271 FixedRightArgValueField::is_valid(WhichPowerOf2(fixed_right_arg_value)) &&
2272 (left_kind_ == SMI || left_kind_ == INT32) &&
2273 (result_kind_ == NONE || !fixed_right_arg_.has_value);
2274 fixed_right_arg_ = Maybe<int32_t>(has_fixed_right_arg,
2275 fixed_right_arg_value);
2277 result_kind_ = UpdateKind(result, result_kind_);
2280 Kind input_kind = Max(left_kind_, right_kind_);
2281 if (result_kind_ < input_kind && input_kind <= NUMBER) {
2282 result_kind_ = input_kind;
2288 if (left_kind_ == STRING && right_kind_ == INT32) {
2291 right_kind_ = NUMBER;
2292 } else if (right_kind_ == STRING && left_kind_ == INT32) {
2295 left_kind_ = NUMBER;
2302 result_kind_ > NUMBER) {
2306 if (old_extra_ic_state == GetExtraICState()) {
2308 if (left->IsUndefined() || left->IsBoolean()) {
2310 } else if (right->IsUndefined() || right->IsBoolean()) {
2316 ASSERT(result_kind_ < NUMBER);
2317 result_kind_ = NUMBER;
2323 BinaryOpIC::State::Kind BinaryOpIC::State::UpdateKind(Handle<Object> object,
2327 if (object->IsBoolean() && is_truncating) {
2330 } else if (object->IsUndefined()) {
2332 new_kind = is_truncating ? INT32 : NUMBER;
2333 } else if (object->IsSmi()) {
2335 } else if (object->IsHeapNumber()) {
2337 new_kind = IsInt32Double(value) ? INT32 : NUMBER;
2338 } else if (object->IsString() && op() == Token::ADD) {
2341 if (new_kind == INT32 && SmiValuesAre32Bits()) {
2345 ((new_kind <= NUMBER && kind > NUMBER) ||
2346 (new_kind > NUMBER && kind <= NUMBER))) {
2349 return Max(kind, new_kind);
2354 const char* BinaryOpIC::State::KindToString(Kind kind) {
2356 case NONE: return "None";
2357 case SMI: return "Smi";
2358 case INT32: return "Int32";
2359 case NUMBER: return "Number";
2360 case STRING: return "String";
2361 case GENERIC: return "Generic";
2369 Type* BinaryOpIC::State::KindToType(Kind kind, Zone* zone) {
2372 case SMI: return Type::SignedSmall(zone);
2373 case INT32: return Type::Signed32(zone);
2374 case NUMBER: return Type::Number(zone);
2375 case STRING: return Type::String(zone);
2376 case GENERIC: return Type::Any(zone);
2392 bool caught_exception;
2394 isolate(), function, left, 1, &right, &caught_exception);
2402 state.Update(left, right, result);
2406 if (!allocation_site.is_null() || state.ShouldCreateAllocationMementos()) {
2408 if (allocation_site.is_null()) {
2413 BinaryOpICWithAllocationSiteStub stub(state);
2414 target = stub.GetCodeCopyFromTemplate(isolate(), allocation_site);
2417 ASSERT_EQ(*allocation_site, target->FindFirstAllocationSite());
2421 target = stub.GetCode(isolate());
2424 ASSERT_EQ(NULL, target->FindFirstAllocationSite());
2428 if (FLAG_trace_ic) {
2431 buffer, static_cast<unsigned>(sizeof(buffer)));
2433 stream.Add("[BinaryOpIC");
2434 old_state.Print(&stream);
2436 state.Print(&stream);
2437 stream.Add(" @ %p <- ", static_cast<void*>(*target));
2440 if (!allocation_site.is_null()) {
2441 PrintF(" using allocation site %p", static_cast<void*>(*allocation_site));
2447 if (!old_state.UseInlinedSmiCode() && state.UseInlinedSmiCode()) {
2449 } else if (old_state.UseInlinedSmiCode() && !state.UseInlinedSmiCode()) {
2471 BinaryOpWithAllocationSiteStub::kAllocationSite);
2473 BinaryOpWithAllocationSiteStub::kLeft);
2475 BinaryOpWithAllocationSiteStub::kRight);
2477 return ic.Transition(allocation_site, left, right);
2484 CHECK(stub.FindCodeInCache(&code, isolate));
2491 return stub.GetCode(isolate);
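// CompareIC keeps a small lattice of states (SMI, NUMBER, STRING, UNIQUE_NAME,
// OBJECT, GENERIC, ...); TargetState() picks the next state from the previous
// one and the two operands of the current miss, and UpdateCaches() patches in
// the matching CompareIC stub.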
2498 case SMI: return "SMI";
2499 case NUMBER: return "NUMBER";
2501 case STRING: return "STRING";
2503 case OBJECT: return "OBJECT";
2505 case GENERIC: return "GENERIC";
2536 Type** overall_type,
2539 State left_state, right_state, handler_state;
2541 &handler_state, NULL);
2544 *overall_type = StateToType(zone, handler_state, map);
2550 switch (old_state) {
2552 if (value->IsSmi()) return SMI;
2553 if (value->IsHeapNumber()) return NUMBER;
2557 if (value->IsJSObject()) return OBJECT;
2560 if (value->IsSmi()) return SMI;
2561 if (value->IsHeapNumber()) return NUMBER;
2564 if (value->IsNumber()) return NUMBER;
2578 if (value->IsJSObject()) return OBJECT;
2593 bool has_inlined_smi_code,
2596 switch (old_state) {
2598 if (x->IsSmi() && y->IsSmi()) return SMI;
2608 if (x->IsInternalizedString() && y->IsInternalizedString()) {
2615 if (x->IsUniqueName() && y->IsUniqueName()) return UNIQUE_NAME;
2616 if (x->IsJSObject() && y->IsJSObject()) {
2617 if (Handle<JSObject>::cast(x)->map() ==
2630 if (x->IsUniqueName() && y->IsUniqueName()) return UNIQUE_NAME;
2636 if (old_left == SMI && x->IsHeapNumber()) return NUMBER;
2637 if (old_right == SMI && y->IsHeapNumber()) return NUMBER;
2641 if (x->IsJSObject() && y->IsJSObject()) return OBJECT;
2656 State previous_left, previous_right, previous_state;
2658 &previous_right, &previous_state, NULL);
2661 State state = TargetState(previous_state, previous_left, previous_right,
2662 HasInlinedSmiCode(address()), x, y);
2671 if (FLAG_trace_ic) {
2672 PrintF("[CompareIC in ");
2674 PrintF(" ((%s+%s=%s)->(%s+%s=%s))#%s @ %p]\n",
2682 static_cast<void*>(*stub.GetCode(isolate())));
2697 ASSERT(args.length() == 3);
2698 CompareIC ic(isolate, static_cast<Token::Value>(args.smi_at(2)));
2699 return ic.UpdateCaches(args.at<Object>(0), args.at<Object>(1));
2721 if (object->IsNull() || object->IsUndefined()) {
2745 ? target()->FindFirstMap()
2749 code = stub.GetCode(isolate());
2760 return ic.CompareNil(object);
2767 return isolate->heap()->undefined_value();
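// TokenToJSBuiltin() maps a binary-operator token to the JS builtin (MOD,
// BIT_OR, BIT_AND, BIT_XOR, SAR, SHR, SHL, ...) that implements it on the
// generic path.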
2788 return Builtins::MOD;
2791 return Builtins::BIT_OR;
2793 case Token::BIT_AND:
2794 return Builtins::BIT_AND;
2796 case Token::BIT_XOR:
2797 return Builtins::BIT_XOR;
2800 return Builtins::SAR;
2803 return Builtins::SHR;
2806 return Builtins::SHL;
2822 ASSERT(args.length() == 1);
2826 return ic.ToBoolean(object);
2830 static const Address IC_utilities[] = {
2831 #define ADDR(name) FUNCTION_ADDR(name),
2839 return IC_utilities[id];