#if V8_TARGET_ARCH_ARM64

#define __ ACCESS_MASM(masm)
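
// Emits a negative lookup for |name| on a dictionary-mode object: bails out
// to |miss_label| if the object has a named interceptor or needs an access
// check, requires hash-table (dictionary) properties, and then probes the
// dictionary to prove that the name is absent.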
void StubCompiler::GenerateDictionaryNegativeLookup(MacroAssembler* masm,
  ASSERT(name->IsUniqueName());
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
  __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);

  const int kInterceptorOrAccessCheckNeededMask =
  Register map = scratch1;
  __ Tst(scratch0, kInterceptorOrAccessCheckNeededMask);

  Register properties = scratch0;
  __ JumpIfNotRoot(map, Heap::kHashTableMapRootIndex, miss_label);

  __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
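
// Probes one table (primary or secondary) of the megamorphic stub cache.
// Each entry consists of a key, a value and a map; the three columns live at
// fixed external addresses, so the map and value slots are reached by their
// distance from the key slot.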
static void ProbeTable(Isolate* isolate,
  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
  ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
  ExternalReference map_offset(isolate->stub_cache()->map_reference(table));

  uintptr_t key_off_addr = reinterpret_cast<uintptr_t>(key_offset.address());
  uintptr_t value_off_addr = reinterpret_cast<uintptr_t>(value_offset.address());
  uintptr_t map_off_addr = reinterpret_cast<uintptr_t>(map_offset.address());

  __ Add(scratch3, offset, Operand(offset, LSL, 1));
  __ Mov(scratch, key_offset);

  __ Cmp(name, scratch2);

  __ Ldr(scratch2, MemOperand(scratch, map_off_addr - key_off_addr));
  __ Cmp(scratch2, scratch3);

  __ Ldr(scratch, MemOperand(scratch, value_off_addr - key_off_addr));
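
// Body of the megamorphic stub cache probe (StubCache::GenerateProbe): hashes
// the receiver's map with the name and flags, probes the primary table,
// re-hashes for the secondary table and probes it as well, and falls through
// to the miss counter if neither probe finds a matching handler.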
  Isolate* isolate = masm->isolate();

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1,

  __ JumpIfSmi(receiver, &miss);

  __ Add(scratch, scratch, extra);
  __ Eor(scratch, scratch, flags);

  ProbeTable(isolate, masm, flags, kPrimary, receiver, name,
             scratch, extra, extra2, extra3);

  __ And(scratch, scratch, kSecondaryTableSize - 1);

  ProbeTable(isolate, masm, flags, kSecondary, receiver, name,
             scratch, extra, extra2, extra3);

  __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1,
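
// Helpers for loading a global function's prototype: the first goes through
// the context, the second compares the context slot against the known
// function and then materializes its cached initial map directly.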
void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                       Register prototype) {

void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm,
  Isolate* isolate = masm->isolate();

  Handle<JSFunction> function(
  Register scratch = prototype;

  __ Cmp(scratch, Operand(function));

  __ Mov(prototype, Operand(Handle<Map>(function->initial_map())));

void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
                                            Representation representation) {
  ASSERT(!representation.IsDouble());

void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
  __ JumpIfSmi(receiver, miss_label);

void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  __ Mov(x0, scratch1);
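
// Checks that the global property cell for |name| still holds the hole, i.e.
// the property has not been introduced on the global object since the stub
// was compiled; jumps to |miss| otherwise.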
void StubCompiler::GenerateCheckPropertyCell(MacroAssembler* masm,
                                             Handle<JSGlobalObject> global,
  ASSERT(cell->value()->IsTheHole());
  __ Mov(scratch, Operand(cell));
  __ JumpIfNotRoot(scratch, Heap::kTheHoleValueRootIndex, miss);
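
// GenerateNegativeHolderLookup: proves that |name| is not an own property of
// |holder|. Global objects are checked through their property cells,
// dictionary-mode holders through a negative dictionary lookup.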
    MacroAssembler* masm,
    Handle<JSObject> holder,
  if (holder->IsJSGlobalObject()) {
    GenerateCheckPropertyCell(
        masm, Handle<JSGlobalObject>::cast(holder), name, scratch1(), miss);
  } else if (!holder->HasFastProperties() && !holder->IsJSGlobalProxy()) {
    GenerateDictionaryNegativeLookup(
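
// GenerateStoreTransition: stores |value_reg| while transitioning the
// receiver to a new map. The value is first checked against the
// representation recorded in the transition's last descriptor (constant, smi,
// heap object or double); if the object has no unused property fields the
// runtime is called to extend the backing store, otherwise the new map is
// installed and the field is written with the appropriate write barrier.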
    Handle<JSObject> object,
    LookupResult* lookup,
    Handle<Map> transition,
    Register receiver_reg,
    Register storage_reg,
                     scratch1, scratch2, scratch3));

  int descriptor = transition->LastAdded();
  DescriptorArray* descriptors = transition->instance_descriptors();
  PropertyDetails details = descriptors->GetDetails(descriptor);
  Representation representation = details.representation();
  ASSERT(!representation.IsNone());

    Handle<Object> constant(descriptors->GetValue(descriptor), masm->isolate());
    __ LoadObject(scratch1, constant);
    __ Cmp(value_reg, scratch1);
    __ B(ne, miss_label);
  } else if (representation.IsSmi()) {
    __ JumpIfNotSmi(value_reg, miss_label);
  } else if (representation.IsHeapObject()) {
    __ JumpIfSmi(value_reg, miss_label);
  } else if (representation.IsDouble()) {
    UseScratchRegisterScope temps(masm);
    Label do_store, heap_number;
    __ AllocateHeapNumber(storage_reg, slow, scratch1, scratch2);

    __ JumpIfSmi(value_reg, &do_store);
    __ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex,

  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  if ((details.type() == FIELD) &&
      (object->map()->unused_property_fields() == 0)) {
    __ Mov(scratch1, Operand(transition));
    __ Push(receiver_reg, scratch1, value_reg);
    __ TailCallExternalReference(
        ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),

  __ Mov(scratch1, Operand(transition));
  __ RecordWriteField(receiver_reg,

  int index = transition->instance_descriptors()->GetFieldIndex(
      transition->LastAdded());

    index -= object->map()->inobject_properties();

  SmiCheck smi_check = representation.IsTagged()
  Register prop_reg = representation.IsDouble() ? storage_reg : value_reg;

    int offset = object->map()->instance_size() + (index * kPointerSize);

    if (!representation.IsSmi()) {
      if (!representation.IsDouble()) {
        __ Mov(storage_reg, value_reg);
      __ RecordWriteField(receiver_reg,

    if (!representation.IsSmi()) {
      if (!representation.IsDouble()) {
        __ Mov(storage_reg, value_reg);
      __ RecordWriteField(scratch1,
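
// GenerateStoreField: stores |value_reg| into an already-existing field. The
// value is representation-checked, double values are written into the field's
// heap number, and tagged stores emit a write barrier for both in-object
// fields and fields in the properties backing store.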
    Handle<JSObject> object,
    LookupResult* lookup,
    Register receiver_reg,
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  int index = lookup->GetFieldIndex().field_index();

    index -= object->map()->inobject_properties();

  Representation representation = lookup->representation();
  ASSERT(!representation.IsNone());
  if (representation.IsSmi()) {
    __ JumpIfNotSmi(value_reg, miss_label);
  } else if (representation.IsHeapObject()) {
    __ JumpIfSmi(value_reg, miss_label);
  } else if (representation.IsDouble()) {
    UseScratchRegisterScope temps(masm);

      int offset = (index * kPointerSize) + object->map()->instance_size();
      int offset = (index * kPointerSize) + FixedArray::kHeaderSize;

    Label do_store, heap_number;
    __ JumpIfSmi(value_reg, &do_store);
    __ CheckMap(value_reg, scratch2, Heap::kHeapNumberMapRootIndex,

  SmiCheck smi_check = representation.IsTagged()

    int offset = object->map()->instance_size() + (index * kPointerSize);

    if (!representation.IsSmi()) {
      __ JumpIfSmi(value_reg, &exit);
      __ Mov(name_reg, value_reg);
      __ RecordWriteField(receiver_reg,

    if (!representation.IsSmi()) {
      __ JumpIfSmi(value_reg, &exit);
      __ Mov(name_reg, value_reg);
      __ RecordWriteField(scratch1,

  if (!label->is_unused()) {
    __ Mov(this->name(), Operand(name));
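
// PushInterceptorArguments pushes the arguments expected by the named
// interceptor runtime entries (name, interceptor info, receiver, holder);
// CompileCallLoadPropertyWithInterceptor then calls the given IC utility
// function with them.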
static void PushInterceptorArguments(MacroAssembler* masm,
                                     Handle<JSObject> holder_obj) {
  Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
  ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor));
  Register scratch = name;
  __ Mov(scratch, Operand(interceptor));
  __ Push(scratch, receiver, holder);

static void CompileCallLoadPropertyWithInterceptor(
    MacroAssembler* masm,
    Handle<JSObject> holder_obj,
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);

  __ CallExternalReference(
      ExternalReference(IC_Utility(id), masm->isolate()),
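
// GenerateFastApiCall: sets up the frame for a fast API call. It pushes the
// receiver and the call arguments, loads the callee, the holder expected by
// the API function and the call data, materializes the C++ callback address,
// and tail-calls CallApiFunctionStub.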
void StubCompiler::GenerateFastApiCall(MacroAssembler* masm,
                                       const CallOptimization& optimization,
                                       Handle<Map> receiver_map,
  MacroAssembler::PushPopQueue queue(masm);
  queue.Queue(receiver);

  for (int i = 0; i < argc; i++) {
    Register arg = values[argc - 1 - i];

  ASSERT(optimization.is_simple_api_call());

  Register callee = x0;
  Register call_data = x4;
  Register holder = x2;
  Register api_function_address = x1;

  CallOptimization::HolderLookup holder_lookup;
  Handle<JSObject> api_holder =
      optimization.LookupHolderOfExpectedType(receiver_map, &holder_lookup);
  switch (holder_lookup) {
    case CallOptimization::kHolderIsReceiver:
      __ Mov(holder, receiver);
    case CallOptimization::kHolderFound:
      __ LoadObject(holder, api_holder);
    case CallOptimization::kHolderNotFound:
  Isolate* isolate = masm->isolate();
  Handle<JSFunction> function = optimization.constant_function();
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  Handle<Object> call_data_obj(api_call_info->data(), isolate);

  __ LoadObject(callee, function);

  bool call_data_undefined = false;
  if (isolate->heap()->InNewSpace(*call_data_obj)) {
    __ LoadObject(call_data, api_call_info);
  } else if (call_data_obj->IsUndefined()) {
    call_data_undefined = true;
    __ LoadRoot(call_data, Heap::kUndefinedValueRootIndex);
    __ LoadObject(call_data, call_data_obj);

  Address function_address = v8::ToCData<Address>(api_call_info->callback());
  ApiFunction fun(function_address);
  ExternalReference ref = ExternalReference(&fun,
                                            ExternalReference::DIRECT_API_CALL,
  __ Mov(api_function_address, ref);

  CallApiFunctionStub stub(is_store, call_data_undefined, argc);
  __ TailCallStub(&stub);

void StubCompiler::GenerateTailCall(MacroAssembler* masm, Handle<Code> code) {
  __ Jump(code, RelocInfo::CODE_TARGET);


#define __ ACCESS_MASM(masm())
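
// CheckPrototypes walks the prototype chain from the receiver's map to
// |holder|, verifying each map on the way. Dictionary-mode prototypes get a
// negative lookup for |name|, global proxies an access check, and global
// objects a property-cell check. Returns the register holding the last
// object in the chain.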
Register StubCompiler::CheckPrototypes(Handle<HeapType> type,
                                       Handle<JSObject> holder,
  Register reg = object_reg;

  if (type->IsConstant()) {

  Handle<Map> current_map = receiver_map;
  Handle<Map> holder_map(holder->map());

  while (!current_map.is_identical_to(holder_map)) {
    ASSERT(current_map->IsJSGlobalProxyMap() ||
           !current_map->is_access_check_needed());

    if (current_map->is_dictionary_map() &&
        !current_map->IsJSGlobalObjectMap() &&
        !current_map->IsJSGlobalProxyMap()) {
      if (!name->IsUniqueName()) {
      ASSERT(current.is_null() ||
             (current->property_dictionary()->FindEntry(*name) ==
      GenerateDictionaryNegativeLookup(masm(), miss, reg, name,

      heap()->InNewSpace(*prototype);
      Register map_reg = NoReg;

    if (current_map->IsJSGlobalProxyMap()) {
      UseScratchRegisterScope temps(masm());
      __ CheckAccessGlobalProxy(reg, scratch2, temps.AcquireX(), miss);
    } else if (current_map->IsJSGlobalObjectMap()) {
      GenerateCheckPropertyCell(
          masm(), Handle<JSGlobalObject>::cast(current), name,

    if (heap()->InNewSpace(*prototype)) {

    __ Mov(reg, Operand(prototype));

    current_map = handle(current->map());

  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));

  ASSERT(current_map->IsJSGlobalProxyMap() ||
         !current_map->is_access_check_needed());
  if (current_map->IsJSGlobalProxyMap()) {
    __ CheckAccessGlobalProxy(reg, scratch1, scratch2, miss);

  if (!miss->is_unused()) {

  if (!miss->is_unused()) {
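
// CallbackHandlerFrontend: after the usual prototype-chain checks, holders
// without fast properties are probed in their property dictionary and the
// stored callback is compared against the expected one before the handler
// proper is emitted.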
    Handle<JSObject> holder,
    Handle<Object> callback) {
  Register scratch2 = this->scratch2();
  Register scratch3 = this->scratch3();
  Register dictionary = this->scratch4();

  if (!holder->HasFastProperties() && !holder->IsJSGlobalObject()) {

    __ Bind(&probe_done);

    const int kValueOffset = kElementsStartOffset + kPointerSize;

    __ Cmp(scratch2, Operand(callback));

    Handle<JSObject> holder,
    Representation representation) {
  if (kind() == Code::LOAD_IC) {
    LoadFieldStub stub(field.is_inobject(holder),
                       field.translate(holder),
    GenerateTailCall(masm(), stub.GetCode(isolate()));
    KeyedLoadFieldStub stub(field.is_inobject(holder),
                            field.translate(holder),
    GenerateTailCall(masm(), stub.GetCode(isolate()));

  __ LoadObject(x0, value);
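
// GenerateLoadCallback: prepares the PropertyCallbackArguments on the stack
// (callback data, undefined return-value slots, isolate address), loads the
// getter's C++ entry point, and tail-calls CallApiGetterStub.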
    Handle<ExecutableAccessorInfo> callback) {
  if (heap()->InNewSpace(callback->data())) {
    __ Mov(scratch3(), Operand(Handle<Object>(callback->data(), isolate())));
  __ LoadRoot(scratch4(), Heap::kUndefinedValueRootIndex);
  __ Mov(scratch2(), Operand(ExternalReference::isolate_address(isolate())));

  __ Add(args_addr, __ StackPointer(), kPointerSize);

  Register getter_address_reg = x2;

  Address getter_address = v8::ToCData<Address>(callback->getter());
  ApiFunction fun(getter_address);
  ExternalReference ref = ExternalReference(&fun, type, isolate());
  __ Mov(getter_address_reg, ref);

  CallApiGetterStub stub;
  __ TailCallStub(&stub);
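
// GenerateLoadInterceptor: loads a property through a named interceptor. If a
// cacheable result (a field, or a compatible API getter) is known to lie
// behind the interceptor, the interceptor is invoked inline and the
// post-interceptor load is compiled as a follow-up; otherwise the whole load
// is delegated to the runtime.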
    Register holder_reg,
    Handle<Object> object,
    Handle<JSObject> interceptor_holder,
    LookupResult* lookup,
    Handle<Name> name) {
  ASSERT(interceptor_holder->HasNamedInterceptor());
  ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());

  bool compile_followup_inline = false;
  if (lookup->IsFound() && lookup->IsCacheable()) {
    if (lookup->IsField()) {
      compile_followup_inline = true;
    } else if (lookup->type() == CALLBACKS &&
               lookup->GetCallbackObject()->IsExecutableAccessorInfo()) {
      ExecutableAccessorInfo* callback =
      compile_followup_inline = callback->getter() != NULL &&
          callback->IsCompatibleReceiver(*object);

  if (compile_followup_inline) {

    bool must_perform_prototype_check = *interceptor_holder != lookup->holder();
    bool must_preserve_receiver_reg = !receiver().Is(holder_reg) &&
        (lookup->type() == CALLBACKS || must_perform_prototype_check);
    if (must_preserve_receiver_reg) {

    __ Push(holder_reg, this->name());

    CompileCallLoadPropertyWithInterceptor(
        masm(), receiver(), holder_reg, this->name(), interceptor_holder,
        IC::kLoadPropertyWithInterceptorOnly);

    Label interceptor_failed;
                  Heap::kNoInterceptorResultSentinelRootIndex,
                  &interceptor_failed);
    frame_scope.GenerateLeaveFrame();

    __ Bind(&interceptor_failed);
    if (must_preserve_receiver_reg) {

    __ Pop(this->name(), holder_reg);

    PushInterceptorArguments(
        masm(), receiver(), holder_reg, this->name(), interceptor_holder);

    ExternalReference ref =
        ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForLoad),

void StubCompiler::GenerateBooleanCheck(Register object, Label* miss) {
  UseScratchRegisterScope temps(masm());

  Register true_root = temps.AcquireX();
  Register false_root = temps.AcquireX();

  __ LoadTrueFalseRoots(true_root, false_root);
  __ Cmp(object, true_root);

    Handle<JSObject> object,
    Handle<JSObject> holder,
    Handle<ExecutableAccessorInfo> callback) {
  ASM_LOCATION("StoreStubCompiler::CompileStoreCallback");

  ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());

  ExternalReference store_callback_property =
      ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
  __ TailCallExternalReference(store_callback_property, 5, 1);


#define __ ACCESS_MASM(masm)

    MacroAssembler* masm,
    Handle<HeapType> type,
    Handle<JSFunction> setter) {
  if (!setter.is_null()) {

    if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {

    __ Push(receiver, value());

    ParameterCount actual(1);
    ParameterCount expected(setter);
    __ InvokeFunction(setter, expected, actual,

  masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());


#define __ ACCESS_MASM(masm())

    Handle<JSObject> object,
    Handle<Name> name) {
  ASM_LOCATION("StoreStubCompiler::CompileStoreInterceptor");

  ExternalReference store_ic_property =
      ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), isolate());
  __ TailCallExternalReference(store_ic_property, 3, 1);

    Handle<JSObject> last,
    Handle<Name> name) {
  __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
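
// Register conventions for the load and store stub compilers on ARM64: the
// leading entries are the receiver and name registers, the remaining ones
// are scratch registers available to the handlers.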
Register* LoadStubCompiler::registers() {
  static Register registers[] = { x0, x2, x3, x1, x4, x5 };

Register* KeyedLoadStubCompiler::registers() {
  static Register registers[] = { x1, x0, x2, x3, x4, x5 };

Register StoreStubCompiler::value() {

Register* StoreStubCompiler::registers() {
  static Register registers[] = { x1, x2, x3, x4, x5 };

Register* KeyedStoreStubCompiler::registers() {
  static Register registers[] = { x2, x1, x3, x4, x5 };


#define __ ACCESS_MASM(masm)

    Handle<HeapType> type,
    Handle<JSFunction> getter) {
  if (!getter.is_null()) {

    if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {

    ParameterCount actual(0);
    ParameterCount expected(getter);
    __ InvokeFunction(getter, expected, actual,

  masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());


#define __ ACCESS_MASM(masm())

    Handle<HeapType> type,
    Handle<GlobalObject> global,
    Handle<PropertyCell> cell,
    bool is_dont_delete) {
  __ Mov(x3, Operand(cell));

  if (!is_dont_delete) {
    __ JumpIfRoot(x4, Heap::kTheHoleValueRootIndex, &miss);

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->named_load_global_stub(), 1, x1, x3);
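
// CompilePolymorphicIC dispatches on the receiver's map: for every handled
// (non-deprecated) map the map register is compared against the expected map
// and control jumps to the corresponding handler, with a shared entry point
// for the heap-number case.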
      (kind() == Code::KEYED_LOAD_IC || kind() == Code::KEYED_STORE_IC)) {
    __ CompareAndBranch(this->name(), Operand(name), ne, &miss);

  int receiver_count = types->length();
  int number_of_handled_maps = 0;
  for (int current = 0; current < receiver_count; ++current) {
    Handle<HeapType> type = types->at(current);
    if (!map->is_deprecated()) {
      number_of_handled_maps++;

      __ Cmp(map_reg, Operand(map));
      __ B(ne, &try_next);
      if (type->Is(HeapType::Number())) {
        ASSERT(!number_case.is_unused());
        __ Bind(&number_case);
      __ Jump(handlers->at(current), RelocInfo::CODE_TARGET);

  ASSERT(number_of_handled_maps != 0);

  ExternalReference ref =
      ExternalReference(IC_Utility(IC::kStoreIC_ArrayLength),
  __ TailCallExternalReference(ref, 2, 1);
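
// CompileStorePolymorphic: keyed-store dispatch over the handled receiver
// maps. When a handler performs an elements-kind transition, the transition
// map is installed before jumping to the handler.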
  ASM_LOCATION("KeyedStoreStubCompiler::CompileStorePolymorphic");

  int receiver_count = receiver_maps->length();

  for (int i = 0; i < receiver_count; i++) {
    __ Cmp(scratch1(), Operand(receiver_maps->at(i)));

    if (!transitioned_maps->at(i).is_null()) {
      __ Mov(transition_map(), Operand(transitioned_maps->at(i)));
    __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET);


#define __ ACCESS_MASM(masm)
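
// Slow-path keyed load for dictionary-mode (slow) elements: the key must be a
// smi, the element is looked up in the number dictionary, and failures fall
// through to the KeyedLoadIC slow and miss builtins.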
    MacroAssembler* masm) {
  Register result = x0;
  Register receiver = x1;

  __ JumpIfNotSmi(key, &miss);

  __ LoadFromNumberDictionary(&slow, x4, key, result, x2, x3, x5, x6);

  __ IncrementCounter(
      masm->isolate()->counters()->keyed_load_external_array_slow(), 1, x2, x3);
  TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Slow);

  TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss);


#endif  // V8_TARGET_ARCH_ARM64