StubCache::StubCache(Isolate* isolate)
    : isolate_(isolate) { }


void StubCache::Initialize() {

  ASSERT(!heap()->InNewSpace(name));
  ASSERT(name->IsUniqueName());

  ASSERT(Code::ExtractTypeFromFlags(flags) == 0);

  int primary_offset = PrimaryOffset(name, flags, map);
  Entry* primary = entry(primary_, primary_offset);

  if (old_code != isolate_->builtins()->builtin(Builtins::kIllegal)) {
    Map* old_map = primary->map;
    int seed = PrimaryOffset(primary->key, old_flags, old_map);
    int secondary_offset = SecondaryOffset(primary->key, old_flags, seed);
    Entry* secondary = entry(secondary_, secondary_offset);
    *secondary = *primary;

  isolate()->counters()->megamorphic_stub_cache_updates()->Increment();
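// The megamorphic stub cache is a two-level hash table: a new entry always
// lands in the primary table, and the entry it displaces is rehashed with a
// secondary offset and demoted into secondary_, so a single collision does
// not outright evict a hot handler. The probe order a lookup follows,
// sketched as hypothetical pseudocode mirroring the offsets computed above
// (the real probe sequence is emitted as generated code):
//
//   Entry* p = entry(primary_, PrimaryOffset(name, flags, map));
//   if (p->key == name && p->map == map) return p->value;
//   Entry* s = entry(secondary_, SecondaryOffset(name, flags, primary_offset));
//   if (s->key == name && s->map == map) return s->value;
//   // otherwise miss: fall back to the runtime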
      kind, extra_state, cache_holder);
  Handle<Object> probe(stub_holder->FindInCodeCache(*name, flags), isolate_);

  Handle<Object> probe(stub_holder->FindInCodeCache(*name, flags), isolate_);


  bool can_be_cached = !type->Is(HeapType::String());

    stub_holder = IC::GetCodeCacheHolder(flag, *type, isolate());
    ic = FindIC(name, stub_holder, kind, extra_ic_state, flag);

  if (kind == Code::LOAD_IC) {
  } else if (kind == Code::KEYED_LOAD_IC) {
  } else if (kind == Code::STORE_IC) {
    ASSERT(kind == Code::KEYED_STORE_IC);
        KeyedStoreIC::GetKeyedAccessStoreMode(extra_ic_state));

  if (can_be_cached) Map::UpdateCodeCache(stub_holder, name, ic);
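// String receivers are the exception here: multiple string maps share one
// prototype, so a handler cached on that prototype cannot be keyed per string
// map for a single name. Hence can_be_cached is false for HeapType::String
// and the IC is rebuilt on each miss instead of stored in a map's code cache.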
  Handle<Map> stub_holder = IC::GetCodeCacheHolder(flag, *type, isolate());

  Handle<Name> cache_name = current_map->is_dictionary_map()

  while (!next->IsNull()) {
    next = handle(current_map->prototype(), isolate());
    if (current_map->is_dictionary_map()) cache_name = name;

      cache_name, stub_holder, Code::LOAD_IC, flag, Code::FAST);

  Map::UpdateCodeCache(stub_holder, cache_name, handler);
      isolate()->factory()->KeyedLoadElementMonomorphic_string();

  Handle<Object> probe(receiver_map->FindInCodeCache(*name, flags), isolate_);

  Map::UpdateCodeCache(receiver_map, name, code);

      KeyedStoreIC::ComputeExtraICState(strict_mode, store_mode);

      Code::KEYED_STORE_IC, extra_state);

      isolate()->factory()->KeyedStoreElementMonomorphic_string();
  Handle<Object> probe(receiver_map->FindInCodeCache(*name, flags), isolate_);

  Map::UpdateCodeCache(receiver_map, name, code);
  ASSERT(KeyedStoreIC::GetKeyedAccessStoreMode(code->extra_ic_state())
#define CALL_LOGGER_TAG(kind, type) (Logger::KEYED_##type)

static void FillCache(Isolate* isolate, Handle<Code> code) {
  Handle<UnseededNumberDictionary> dictionary =
      UnseededNumberDictionary::Set(isolate->factory()->non_monomorphic_cache(),

  isolate->heap()->public_set_non_monomorphic_cache(*dictionary);
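// Non-monomorphic stubs (uninitialized, premonomorphic, megamorphic, generic)
// carry no map-specific payload, so a single copy per Code::Flags value is
// shared isolate-wide through this unseeded number dictionary keyed on flags.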
      isolate()->heap()->non_monomorphic_cache();
  int entry = dictionary->FindEntry(isolate(), flags);

  return reinterpret_cast<Code*>(code);
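// Unchecked cast: this lookup may run during the collector's marking phase,
// when a checked Code::cast would not be safe.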
  Code::Flags flags = Code::ComputeFlags(Code::LOAD_IC, ic_state, extra_state);
      isolate_->factory()->non_monomorphic_cache();
  int entry = cache->FindEntry(isolate_, flags);
  if (entry != -1) return Handle<Code>(Code::cast(cache->ValueAt(entry)));

  StubCompiler compiler(isolate_);
    code = compiler.CompileLoadInitialize(flags);
    code = compiler.CompileLoadPreMonomorphic(flags);
    code = compiler.CompileLoadMegamorphic(flags);
  FillCache(isolate_, code);
  Code::Flags flags = Code::ComputeFlags(Code::STORE_IC, ic_state, extra_state);
      isolate_->factory()->non_monomorphic_cache();
  int entry = cache->FindEntry(isolate_, flags);
  if (entry != -1) return Handle<Code>(Code::cast(cache->ValueAt(entry)));

  StubCompiler compiler(isolate_);
    code = compiler.CompileStoreInitialize(flags);
    code = compiler.CompileStorePreMonomorphic(flags);
  } else if (ic_state == GENERIC) {
    code = compiler.CompileStoreGeneric(flags);
    code = compiler.CompileStoreMegamorphic(flags);
  FillCache(isolate_, code);
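// ComputeLoad and ComputeStore follow the same shape: probe the shared
// non-monomorphic cache by flags, compile the missing stub for the requested
// InlineCacheState on a miss, then publish it via FillCache. A hypothetical
// caller resolving a generic store stub would look roughly like this
// (sketch, not a call site from this file):
//
//   Handle<Code> generic =
//       isolate->stub_cache()->ComputeStore(GENERIC, extra_state);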
  if (!receiver_map->is_shared()) {
    if (!cached_ic.is_null()) return cached_ic;

  pattern.Add(isolate_->factory()->meta_map(), receiver_map);

  if (!receiver_map->is_shared()) {
    Map::UpdateCodeCache(receiver_map, name, ic);
      isolate_->factory()->polymorphic_code_cache();

  for (int i = 0; i < receiver_maps->length(); i++) {
    types.Add(HeapType::Class(receiver_maps->at(i), isolate()));

  compiler.CompileElementHandlers(receiver_maps, &handlers);

  isolate()->counters()->keyed_load_polymorphic_stubs()->Increment();

  PolymorphicCodeCache::Update(cache, receiver_maps, flags, code);
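// Polymorphic element ICs are cached by their full list of receiver maps in
// the isolate's PolymorphicCodeCache, so the same map combination reuses one
// compiled dispatcher instead of being recompiled per call site.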
    int number_of_valid_types,

  Code::StubType type = number_of_valid_types == 1 ? handler->type()
                                                   : Code::NORMAL;
  if (kind == Code::LOAD_IC) {
        types, handlers, name, type, PROPERTY);
    ASSERT(kind == Code::STORE_IC);
        types, handlers, name, type, PROPERTY);
      isolate_->factory()->polymorphic_code_cache();
  ExtraICState extra_state = KeyedStoreIC::ComputeExtraICState(
      strict_mode, store_mode);
      Code::ComputeFlags(Code::KEYED_STORE_IC, POLYMORPHIC, extra_state);

  PolymorphicCodeCache::Update(cache, receiver_maps, flags, code);
void StubCache::Clear() {
  Code* empty = isolate_->builtins()->builtin(Builtins::kIllegal);
  for (int i = 0; i < kPrimaryTableSize; i++) {
    primary_[i].key = heap()->empty_string();
    primary_[i].value = empty;
  for (int j = 0; j < kSecondaryTableSize; j++) {
    secondary_[j].key = heap()->empty_string();
    secondary_[j].value = empty;
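// Clearing does not shrink the tables: every slot is reset to the
// Builtins::kIllegal sentinel that Set() treats as "empty", so stale
// handlers can never be returned after a clear.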
void StubCache::CollectMatchingMaps(SmallMapList* types,
  for (int i = 0; i < kPrimaryTableSize; i++) {
    if (primary_[i].key == *name) {
      Map* map = primary_[i].map;
      if (map == NULL) continue;

      int offset = PrimaryOffset(*name, flags, map);
      if (entry(primary_, offset) == &primary_[i] &&
          !TypeFeedbackOracle::CanRetainOtherContext(map, *native_context)) {

  for (int i = 0; i < kSecondaryTableSize; i++) {
    if (secondary_[i].key == *name) {
      Map* map = secondary_[i].map;
      if (map == NULL) continue;

      int primary_offset = PrimaryOffset(*name, flags, map);

      int offset = SecondaryOffset(*name, flags, primary_offset);
      if (entry(secondary_, offset) == &secondary_[i] &&
          !TypeFeedbackOracle::CanRetainOtherContext(map, *native_context)) {
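// A key match alone is not conclusive: an entry written for different flags
// would live at a different offset, so requiring entry(table, offset) ==
// &table[i] filters out rows whose flags do not match the query.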
  JSObject* receiver = JSObject::cast(args[0]);
  JSObject* holder = JSObject::cast(args[1]);

  Address setter_address = v8::ToCData<Address>(callback->setter());
      FUNCTION_CAST<v8::AccessorSetterCallback>(setter_address);

  if (name->IsSymbol()) return *value;

  LOG(isolate, ApiNamedPropertyAccess("store", receiver, *name));
      custom_args(isolate, callback->data(), receiver, holder);
  ASSERT(args.length() == StubCache::kInterceptorArgsLength);
      args.at<Name>(StubCache::kInterceptorArgsNameIndex);

  if (name_handle->IsSymbol())
    return isolate->heap()->no_interceptor_result_sentinel();

  Address getter_address = v8::ToCData<Address>(interceptor_info->getter());
      FUNCTION_CAST<v8::NamedPropertyGetterCallback>(getter_address);

      args.at<JSObject>(StubCache::kInterceptorArgsThisIndex);
      args.at<JSObject>(StubCache::kInterceptorArgsHolderIndex);
      isolate, interceptor_info->data(), *receiver, *holder);

    result->VerifyApiCallResultType();

  return isolate->heap()->no_interceptor_result_sentinel();
static MaybeObject* ThrowReferenceError(Isolate* isolate, Name* name) {
  LoadIC ic(IC::NO_EXTRA_FRAME, isolate);
    return isolate->heap()->undefined_value();

  Handle<Name> name_handle(name);
  Handle<Object> error =
      isolate->factory()->NewReferenceError("not_defined",
  return isolate->Throw(*error);
static Handle<Object> LoadWithInterceptor(Arguments* args,
  ASSERT(args->length() == StubCache::kInterceptorArgsLength);
  Handle<Name> name_handle =
      args->at<Name>(StubCache::kInterceptorArgsNameIndex);
  Handle<InterceptorInfo> interceptor_info =
      args->at<InterceptorInfo>(StubCache::kInterceptorArgsInfoIndex);
  Handle<JSObject> receiver_handle =
      args->at<JSObject>(StubCache::kInterceptorArgsThisIndex);
  Handle<JSObject> holder_handle =
      args->at<JSObject>(StubCache::kInterceptorArgsHolderIndex);

  Isolate* isolate = receiver_handle->GetIsolate();

  if (name_handle->IsSymbol()) {
    return JSObject::GetPropertyPostInterceptor(
        holder_handle, receiver_handle, name_handle, attrs);

  Address getter_address = v8::ToCData<Address>(interceptor_info->getter());
      FUNCTION_CAST<v8::NamedPropertyGetterCallback>(getter_address);

  PropertyCallbackArguments callback_args(isolate,
                                          interceptor_info->data(),

    HandleScope scope(isolate);

    result->VerifyApiCallResultType();
    return scope.CloseAndEscape(result);

  Handle<Object> result = JSObject::GetPropertyPostInterceptor(
      holder_handle, receiver_handle, name_handle, attrs);
  if (attr != ABSENT) return *result;
  return ThrowReferenceError(isolate, Name::cast(args[0]));
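// A failed interceptor load only throws for contextual (global) lookups;
// ThrowReferenceError returns undefined for ordinary property loads, matching
// regular JavaScript property-miss semantics.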
  ASSERT(args.length() == 3);
  StoreIC ic(IC::NO_EXTRA_FRAME, isolate);

  ASSERT(receiver->HasNamedInterceptor());
      receiver, name, value, attr, ic.strict_mode());

  ASSERT(args.smi_at(1) >= 0);
  uint32_t index = args.smi_at(1);
      JSObject::GetElementWithInterceptor(receiver, receiver, index);
  LoadIC::GenerateInitialize(masm());
  PROFILE(isolate(),
          CodeCreateEvent(Logger::LOAD_INITIALIZE_TAG, *code, 0));
  GDBJIT(AddCode(GDBJITInterface::LOAD_IC, *code));


Handle<Code> StubCompiler::CompileLoadPreMonomorphic(Code::Flags flags) {
  LoadIC::GeneratePreMonomorphic(masm());
  Handle<Code> code = GetCodeWithFlags(flags, "CompileLoadPreMonomorphic");
  PROFILE(isolate(),
          CodeCreateEvent(Logger::LOAD_PREMONOMORPHIC_TAG, *code, 0));
  GDBJIT(AddCode(GDBJITInterface::LOAD_IC, *code));


Handle<Code> StubCompiler::CompileLoadMegamorphic(Code::Flags flags) {
  LoadIC::GenerateMegamorphic(masm());
  Handle<Code> code = GetCodeWithFlags(flags, "CompileLoadMegamorphic");
  PROFILE(isolate(),
          CodeCreateEvent(Logger::LOAD_MEGAMORPHIC_TAG, *code, 0));
  GDBJIT(AddCode(GDBJITInterface::LOAD_IC, *code));


Handle<Code> StubCompiler::CompileStoreInitialize(Code::Flags flags) {
  StoreIC::GenerateInitialize(masm());
  Handle<Code> code = GetCodeWithFlags(flags, "CompileStoreInitialize");
  PROFILE(isolate(),
          CodeCreateEvent(Logger::STORE_INITIALIZE_TAG, *code, 0));
  GDBJIT(AddCode(GDBJITInterface::STORE_IC, *code));


Handle<Code> StubCompiler::CompileStorePreMonomorphic(Code::Flags flags) {
  StoreIC::GeneratePreMonomorphic(masm());
  Handle<Code> code = GetCodeWithFlags(flags, "CompileStorePreMonomorphic");
  PROFILE(isolate(),
          CodeCreateEvent(Logger::STORE_PREMONOMORPHIC_TAG, *code, 0));
  GDBJIT(AddCode(GDBJITInterface::STORE_IC, *code));


Handle<Code> StubCompiler::CompileStoreGeneric(Code::Flags flags) {
  ExtraICState extra_state = Code::ExtractExtraICStateFromFlags(flags);
  StrictMode strict_mode = StoreIC::GetStrictMode(extra_state);
  StoreIC::GenerateRuntimeSetProperty(masm(), strict_mode);
  Handle<Code> code = GetCodeWithFlags(flags, "CompileStoreGeneric");
  PROFILE(isolate(),
          CodeCreateEvent(Logger::STORE_GENERIC_TAG, *code, 0));
  GDBJIT(AddCode(GDBJITInterface::STORE_IC, *code));


Handle<Code> StubCompiler::CompileStoreMegamorphic(Code::Flags flags) {
  StoreIC::GenerateMegamorphic(masm());
  Handle<Code> code = GetCodeWithFlags(flags, "CompileStoreMegamorphic");
  PROFILE(isolate(),
          CodeCreateEvent(Logger::STORE_MEGAMORPHIC_TAG, *code, 0));
  GDBJIT(AddCode(GDBJITInterface::STORE_IC, *code));
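// Each of these helpers follows the same recipe: emit the shared IC entry
// sequence via the matching IC::Generate* routine, wrap the buffer into a
// Code object tagged with the requested flags, then report the result to the
// profiler and the GDB JIT interface.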
#undef CALL_LOGGER_TAG


Handle<Code> StubCompiler::GetCodeWithFlags(Code::Flags flags,
  masm_.GetCode(&desc);
  Handle<Code> code = factory()->NewCode(desc, flags, masm_.CodeObject());
  if (code->has_major_key()) {
    code->set_major_key(CodeStub::NoCache);
#ifdef ENABLE_DISASSEMBLER
  if (FLAG_print_code_stubs) code->Disassemble(name);
Handle<Code> StubCompiler::GetCodeWithFlags(Code::Flags flags,
  return (FLAG_print_code_stubs && !name.is_null() && name->IsString())
      : GetCodeWithFlags(flags, NULL);


void StubCompiler::LookupPostInterceptor(Handle<JSObject> holder,
                                         LookupResult* lookup) {
  holder->LocalLookupRealNamedProperty(*name, lookup);
  if (lookup->IsFound()) return;
  if (holder->GetPrototype()->IsNull()) return;
#define __ ACCESS_MASM(masm())

  int function_index = -1;
  if (type->Is(HeapType::String())) {
    function_index = Context::STRING_FUNCTION_INDEX;
    function_index = Context::SYMBOL_FUNCTION_INDEX;
  } else if (type->Is(HeapType::Number())) {
    function_index = Context::NUMBER_FUNCTION_INDEX;
  } else if (type->Is(HeapType::Boolean())) {
    GenerateBooleanCheck(object_reg, miss);
    function_index = Context::BOOLEAN_FUNCTION_INDEX;

    GenerateDirectLoadGlobalFunctionPrototype(
        masm(), function_index, scratch1(), miss);
    Object* function = isolate()->native_context()->get(function_index);
    Object* prototype = JSFunction::cast(function)->instance_prototype();
    type = IC::CurrentTypeOf(handle(prototype, isolate()), isolate());
    object_reg = scratch1();

  return CheckPrototypes(
      type, object_reg, holder, scratch1(), scratch2(), scratch3(),
      name, miss, check_type);
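// Primitive receivers (strings, symbols, numbers, booleans) have no map
// chain of their own to check, so the frontend redirects the type and the
// object register to the corresponding wrapper function's instance prototype
// and continues the prototype-chain check from there.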
  return CheckPrototypes(type, object_reg, holder, this->name(),


  for (int i = 0; i < types->length(); ++i) {
    if (types->at(i)->Is(HeapType::Number())) return true;


  Register reg = HandlerFrontendHeader(type, object_reg, holder, name, &miss);
  HandlerFrontendFooter(name, &miss);
  last_map = IC::TypeToMap(*type, isolate());
  ASSERT(last_map->prototype() == isolate()->heap()->null_value());

    holder = HandlerFrontendHeader(type, receiver(), last, name, &miss);
    last_map = handle(last->map());

  if (last_map->is_dictionary_map() &&
      !last_map->IsJSGlobalObjectMap() &&
      !last_map->IsJSGlobalProxyMap()) {
    if (!name->IsUniqueName()) {

           last->property_dictionary()->FindEntry(*name) ==
           NameDictionary::kNotFound);
    GenerateDictionaryNegativeLookup(masm(), &miss, holder, name,
                                     scratch2(), scratch3());

  if (last_map->IsJSGlobalObjectMap()) {
    GenerateCheckPropertyCell(masm(), global, name, scratch2(), &miss);

  HandlerFrontendFooter(name, &miss);
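// A "nonexistent" handler proves the property is absent along the whole
// chain: a dictionary-mode terminal map needs an explicit negative dictionary
// lookup, and a terminal global object needs its property cell checked,
// since neither case is guaranteed stable by map checks alone.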
  Register reg = HandlerFrontend(type, receiver(), holder, name);
  GenerateLoadField(reg, holder, field, representation);

  return GetCode(kind(), Code::FAST, name);


  HandlerFrontend(type, receiver(), holder, name);
  GenerateLoadConstant(value);

  return GetCode(kind(), Code::FAST, name);


  Register reg = CallbackHandlerFrontend(
      type, receiver(), holder, name, callback);
  GenerateLoadCallback(reg, callback);

  return GetCode(kind(), Code::FAST, name);


    const CallOptimization& call_optimization) {
  ASSERT(call_optimization.is_simple_api_call());
  CallbackHandlerFrontend(type, receiver(), holder, name, callback);
  Handle<Map> receiver_map = IC::TypeToMap(*type, isolate());
      masm(), call_optimization, receiver_map,
      receiver(), scratch1(), false, 0, NULL);

  return GetCode(kind(), Code::FAST, name);


  LookupResult lookup(isolate());
  LookupPostInterceptor(holder, name, &lookup);

  Register reg = HandlerFrontend(type, receiver(), holder, name);

  GenerateLoadInterceptor(reg, type, holder, &lookup, name);

  return GetCode(kind(), Code::FAST, name);
void LoadStubCompiler::GenerateLoadPostInterceptor(
    LookupResult* lookup) {
  if (lookup->IsField()) {
    if (interceptor_holder.is_identical_to(holder)) {
          interceptor_reg, holder, field, lookup->representation());

        IC::CurrentTypeOf(interceptor_holder, isolate()),
        interceptor_reg, holder, name);
        reg, holder, field, lookup->representation());

        ExecutableAccessorInfo::cast(lookup->GetCallbackObject()));

    Register reg = CallbackHandlerFrontend(
        IC::CurrentTypeOf(interceptor_holder, isolate()),
        interceptor_reg, holder, name, callback);
    GenerateLoadCallback(reg, callback);
  handlers.Add(handler);

  return CompilePolymorphicIC(&types, &handlers, name, stub_type, PROPERTY);
  HandlerFrontend(type, receiver(), holder, name);
  GenerateLoadViaGetter(masm(), type, receiver(), getter);

  return GetCode(kind(), Code::FAST, name);
    LookupResult* lookup,

  __ CheckMapDeprecated(transition, scratch1(), &miss);

  if (object->GetPrototype()->IsJSObject()) {
    if (lookup->holder() != *object) {
    } while (holder->GetPrototype()->IsJSObject());

  Register holder_reg = HandlerFrontendHeader(
      IC::CurrentTypeOf(object, isolate()), receiver(), holder, name, &miss);

  if (lookup->holder() == *object) {
    GenerateNegativeHolderLookup(masm(), holder, holder_reg, name, &miss);

  GenerateStoreTransition(masm(),
                          receiver(), this->name(), value(),
                          scratch1(), scratch2(), scratch3(),

  GenerateRestoreName(masm(), &miss, name);
  TailCallBuiltin(masm(), MissBuiltin(kind()));

  GenerateRestoreName(masm(), &slow, name);
  TailCallBuiltin(masm(), SlowBuiltin(kind()));

  return GetCode(kind(), Code::FAST, name);
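// The miss and slow exits both restore the name register before tail-calling
// the fallback builtin, since the handler may have clobbered it and the
// fallback IC expects its arguments unmodified.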
    LookupResult* lookup,

  HandlerFrontendHeader(IC::CurrentTypeOf(object, isolate()),
                        receiver(), object, name, &miss);

  GenerateStoreField(masm(),
                     receiver(), this->name(), value(), scratch1(), scratch2(),

  TailCallBuiltin(masm(), MissBuiltin(kind()));

  return GetCode(kind(), Code::FAST, name);
    LookupResult* lookup,

  __ JumpIfNotSmi(value(), &miss);

  GenerateStoreArrayLength();

  TailCallBuiltin(masm(), MissBuiltin(kind()));

  return GetCode(kind(), Code::FAST, name);


  HandlerFrontend(type, receiver(), holder, name);
  GenerateStoreViaSetter(masm(), type, receiver(), setter);

  return GetCode(kind(), Code::FAST, name);
    const CallOptimization& call_optimization) {
  HandlerFrontend(IC::CurrentTypeOf(object, isolate()),
                  receiver(), holder, name);

  GenerateFastApiCall(
      masm(), call_optimization, handle(object->map()),
      receiver(), scratch1(), true, 1, values);

  return GetCode(kind(), Code::FAST, name);
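// The trailing arguments mirror the load-callback case above (false, 0,
// NULL): here the fast API call is a store (true) passing one value taken
// from `values`.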
  ElementsKind elements_kind = receiver_map->elements_kind();
  if (receiver_map->has_fast_elements() ||
      receiver_map->has_external_array_elements() ||
      receiver_map->has_fixed_typed_array_elements()) {
        elements_kind).GetCode(isolate());
    __ DispatchMap(receiver(), scratch1(), receiver_map, stub, DO_SMI_CHECK);

    Handle<Code> stub = FLAG_compiled_keyed_dictionary_loads
    __ DispatchMap(receiver(), scratch1(), receiver_map, stub, DO_SMI_CHECK);

  TailCallBuiltin(masm(), Builtins::kKeyedLoadIC_Miss);

  return GetICCode(kind(), Code::NORMAL, factory()->empty_string());
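// DispatchMap emits the monomorphic map check and jumps to the elements stub
// on a match; anything else, including Smi receivers (DO_SMI_CHECK), falls
// through to the KeyedLoadIC miss builtin.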
  ElementsKind elements_kind = receiver_map->elements_kind();
  bool is_jsarray = receiver_map->instance_type() == JS_ARRAY_TYPE;

  if (receiver_map->has_fast_elements() ||
      receiver_map->has_external_array_elements() ||
      receiver_map->has_fixed_typed_array_elements()) {
        store_mode()).GetCode(isolate());
        store_mode()).GetCode(isolate());

  __ DispatchMap(receiver(), scratch1(), receiver_map, stub, DO_SMI_CHECK);

  TailCallBuiltin(masm(), Builtins::kKeyedStoreIC_Miss);

  return GetICCode(kind(), Code::NORMAL, factory()->empty_string());
  GenerateTailCall(masm, code);


#ifdef ENABLE_GDB_JIT_INTERFACE
  GDBJITInterface::CodeTag tag;
  if (kind_ == Code::LOAD_IC) {
    tag = GDBJITInterface::LOAD_IC;
  } else if (kind_ == Code::KEYED_LOAD_IC) {
    tag = GDBJITInterface::KEYED_LOAD_IC;
  } else if (kind_ == Code::STORE_IC) {
    tag = GDBJITInterface::STORE_IC;
  } else {
    tag = GDBJITInterface::KEYED_STORE_IC;
  }
  GDBJIT(AddCode(tag, *name, *code));
void BaseLoadStoreStubCompiler::InitializeRegisters() {
  if (kind_ == Code::LOAD_IC) {
    registers_ = LoadStubCompiler::registers();
  } else if (kind_ == Code::KEYED_LOAD_IC) {
    registers_ = KeyedLoadStubCompiler::registers();
  } else if (kind_ == Code::STORE_IC) {
    registers_ = StoreStubCompiler::registers();
  } else {
    registers_ = KeyedStoreStubCompiler::registers();
  }
}
  Code::Flags flags = Code::ComputeFlags(kind, state, extra_state(), type);
  PROFILE(isolate(), CodeCreateEvent(log_kind(code), *code, *name));
  JitEvent(name, code);


  ASSERT_EQ(kNoExtraICState, extra_state());
  Code::Flags flags = Code::ComputeHandlerFlags(kind, type, cache_holder_);
  PROFILE(isolate(), CodeCreateEvent(log_kind(code), *code, *name));
  JitEvent(name, code);
void KeyedLoadStubCompiler::CompileElementHandlers(
    MapHandleList* receiver_maps,
  for (int i = 0; i < receiver_maps->length(); ++i) {
      cached_stub = isolate()->builtins()->KeyedLoadIC_String();
      cached_stub = isolate()->builtins()->KeyedLoadIC_Slow();
      bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
      ElementsKind elements_kind = receiver_map->elements_kind();

          elements_kind).GetCode(isolate());
      cached_stub = isolate()->builtins()->KeyedLoadIC_SloppyArguments();

    handlers->Add(cached_stub);
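// Each receiver map is resolved to the most specific handler available:
// dedicated builtins for strings, sloppy arguments, and non-receiver types,
// or a KeyedLoadFastElementStub specialized on the map's elements kind.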
  for (int i = 0; i < receiver_maps->length(); ++i) {
        receiver_map->FindTransitionedMap(receiver_maps);

    bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
    ElementsKind elements_kind = receiver_map->elements_kind();
    if (!transitioned_map.is_null()) {
          transitioned_map->elements_kind(),
          store_mode()).GetCode(isolate());
      cached_stub = isolate()->builtins()->KeyedStoreIC_Slow();
      if (receiver_map->has_fast_elements() ||
          receiver_map->has_external_array_elements() ||
          receiver_map->has_fixed_typed_array_elements()) {
            store_mode()).GetCode(isolate());
            store_mode()).GetCode(isolate());

    handlers.Add(cached_stub);
    transitioned_maps.Add(transitioned_map);

  CompileStorePolymorphic(receiver_maps, &handlers, &transitioned_maps);
  isolate()->counters()->keyed_store_polymorphic_stubs()->Increment();
  PROFILE(isolate(),
          CodeCreateEvent(Logger::KEYED_STORE_POLYMORPHIC_IC_TAG, *code, 0));
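// If a map in the polymorphic set has a more general transitioned map that is
// also in the set, its handler performs the elements-kind transition first
// and then stores through the transitioned map's stub.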
void KeyedStoreStubCompiler::GenerateStoreDictionaryElement(
  KeyedStoreIC::GenerateSlow(masm);


CallOptimization::CallOptimization(LookupResult* lookup) {
  if (lookup->IsFound() &&
      lookup->IsCacheable() &&
      lookup->IsConstantFunction()) {


CallOptimization::CallOptimization(Handle<JSFunction> function) {
  Initialize(function);
Handle<JSObject> CallOptimization::LookupHolderOfExpectedType(
    Handle<Map> object_map,
    HolderLookup* holder_lookup) const {
  ASSERT(is_simple_api_call());
  if (!object_map->IsJSObjectMap()) {
    *holder_lookup = kHolderNotFound;
    return Handle<JSObject>::null();
  }
  if (expected_receiver_type_.is_null() ||
      expected_receiver_type_->IsTemplateFor(*object_map)) {
    *holder_lookup = kHolderIsReceiver;
    return Handle<JSObject>::null();
  }
  while (true) {
    if (!object_map->prototype()->IsJSObject()) break;
    Handle<JSObject> prototype(JSObject::cast(object_map->prototype()));
    if (!prototype->map()->is_hidden_prototype()) break;
    object_map = handle(prototype->map());
    if (expected_receiver_type_->IsTemplateFor(*object_map)) {
      *holder_lookup = kHolderFound;
      return prototype;
    }
  }
  *holder_lookup = kHolderNotFound;
  return Handle<JSObject>::null();
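// The walk only follows hidden prototypes (API objects inserted into the
// chain by templates); an ordinary prototype ends the search, so kHolderFound
// can only name an object the embedder placed via a FunctionTemplate.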
bool CallOptimization::IsCompatibleReceiver(Handle<Object> receiver,
                                            Handle<JSObject> holder) const {
  ASSERT(is_simple_api_call());
  if (!receiver->IsJSObject()) return false;
  Handle<Map> map(JSObject::cast(*receiver)->map());
  HolderLookup holder_lookup;
  Handle<JSObject> api_holder =
      LookupHolderOfExpectedType(map, &holder_lookup);
  switch (holder_lookup) {
    case kHolderNotFound:
      return false;
    case kHolderIsReceiver:
      return true;
    case kHolderFound:
      if (api_holder.is_identical_to(holder)) return true;
      // Check if holder is in the prototype chain of api_holder.
      {
        JSObject* object = *api_holder;
        while (true) {
          Object* prototype = object->map()->prototype();
          if (!prototype->IsJSObject()) return false;
          if (prototype == *holder) return true;
          object = JSObject::cast(prototype);
        }
      }
  }
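// kHolderIsReceiver short-circuits: when the receiver itself satisfies the
// expected template, any holder on its chain is acceptable. Only kHolderFound
// requires proving the given holder sits on api_holder's prototype chain.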
void CallOptimization::Initialize(Handle<JSFunction> function) {
  constant_function_ = Handle<JSFunction>::null();
  is_simple_api_call_ = false;
  expected_receiver_type_ = Handle<FunctionTemplateInfo>::null();
  api_call_info_ = Handle<CallHandlerInfo>::null();

  if (function.is_null() || !function->is_compiled()) return;

  constant_function_ = function;
  AnalyzePossibleApiFunction(function);
void CallOptimization::AnalyzePossibleApiFunction(Handle<JSFunction> function) {
  if (!function->shared()->IsApiFunction()) return;
  Handle<FunctionTemplateInfo> info(function->shared()->get_api_func_data());

      Handle<CallHandlerInfo>(CallHandlerInfo::cast(info->call_code()));

  Handle<SignatureInfo> signature =
      Handle<SignatureInfo>(SignatureInfo::cast(info->signature()));
  if (!signature->args()->IsUndefined()) return;
  if (!signature->receiver()->IsUndefined()) {
    expected_receiver_type_ =
        Handle<FunctionTemplateInfo>(
            FunctionTemplateInfo::cast(signature->receiver()));

  is_simple_api_call_ = true;
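// A function qualifies as a "simple API call" only if it is an API function
// with a C++ call handler and a signature that, at most, constrains the
// receiver: any argument signature disqualifies it, because the fast-call
// stubs skip per-argument checks.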