#if V8_TARGET_ARCH_ARM

#define __ ACCESS_MASM(masm)
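// ProbeTable checks a single stub cache table for an entry whose name, map,
// and code flags match the probe. The entry offset is scaled by three (an
// entry holds a name, a code pointer, and a map), the cached code object is
// loaded from the value slot, and its flags, with the bits not used in the
// lookup masked off, are compared against the requested flags.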
static void ProbeTable(Isolate* isolate,
                       Register offset_scratch) {
  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
  ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
  ExternalReference map_offset(isolate->stub_cache()->map_reference(table));

  uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address());
  uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address());
  uint32_t map_off_addr = reinterpret_cast<uint32_t>(map_offset.address());

  ASSERT(value_off_addr > key_off_addr);
  ASSERT((value_off_addr - key_off_addr) % 4 == 0);
  ASSERT((value_off_addr - key_off_addr) < (256 * 4));
  ASSERT(map_off_addr > key_off_addr);
  ASSERT((map_off_addr - key_off_addr) % 4 == 0);
  ASSERT((map_off_addr - key_off_addr) < (256 * 4));

  Register base_addr = scratch;
  __ add(offset_scratch, offset, Operand(offset, LSL, 1));
  __ mov(base_addr, Operand(key_offset));

  Register code = scratch2;
  __ ldr(code, MemOperand(base_addr, value_off_addr - key_off_addr));

  Register flags_reg = base_addr;
  ASSERT(__ ImmediateFitsAddrMode1Instruction(mask));
  __ bic(flags_reg, flags_reg, Operand(mask));
  __ cmp(flags_reg, Operand(flags));
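// GenerateDictionaryNegativeLookup jumps to miss_label if the receiver has a
// named interceptor or needs an access check (or fails the preceding instance
// type check); otherwise it verifies that the properties backing store is a
// hash table and performs a negative dictionary lookup for |name|.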
void StubCompiler::GenerateDictionaryNegativeLookup(MacroAssembler* masm,
  ASSERT(name->IsUniqueName());
  ASSERT(!receiver.is(scratch0));
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
  __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  Register map = scratch1;
  __ tst(scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
  __ b(ne, miss_label);

  __ b(lt, miss_label);

  Register properties = scratch0;

  Register tmp = properties;
  __ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
  __ b(ne, miss_label);

  __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
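// The fragment below is from StubCache::GenerateProbe: it combines the name
// hash with the receiver map, masks the result down to the primary table
// size, probes the primary and then the secondary table, and counts a
// megamorphic stub cache miss if neither probe hits.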
  Isolate* isolate = masm->isolate();

  ASSERT(sizeof(Entry) == 12);

  ASSERT(!scratch.is(receiver));
  ASSERT(!scratch.is(name));
  ASSERT(!extra.is(receiver));
  ASSERT(!extra.is(scratch));
  ASSERT(!extra2.is(receiver));
  ASSERT(!extra2.is(scratch));
  ASSERT(!extra2.is(extra));

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1,
                      extra2, extra3);

  __ JumpIfSmi(receiver, &miss);

  __ add(scratch, scratch, Operand(ip));
  uint32_t mask = kPrimaryTableSize - 1;
  __ and_(scratch, scratch, Operand(mask));

  uint32_t mask2 = kSecondaryTableSize - 1;
  __ and_(scratch, scratch, Operand(mask2));

  __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1,
                      extra2, extra3);
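// The following small helpers load well-known values for IC handlers:
// GenerateLoadGlobalFunctionPrototype and its direct variant fetch a global
// function's prototype (the direct variant embeds the function and its
// initial map as constants), GenerateFastPropertyLoad reads a tagged property
// field, GenerateLoadArrayLength and GenerateLoadFunctionPrototype load the
// receiver's length and prototype after type checks, and
// GenerateCheckPropertyCell verifies that a global property cell still holds
// the hole.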
void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                       Register prototype) {


void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm,
  Isolate* isolate = masm->isolate();
  Handle<JSFunction> function(
  Register scratch = prototype;
  __ Move(ip, function);
  __ Move(prototype, Handle<Map>(function->initial_map()));


void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
                                            Representation representation) {
  ASSERT(!representation.IsDouble());


void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
  __ JumpIfSmi(receiver, miss_label);
  __ b(ne, miss_label);


void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  __ mov(r0, scratch1);


void StubCompiler::GenerateCheckPropertyCell(MacroAssembler* masm,
                                             Handle<JSGlobalObject> global,
  ASSERT(cell->value()->IsTheHole());
  __ mov(scratch, Operand(cell));
  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
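// GenerateNegativeHolderLookup proves that |name| is absent from the holder:
// global objects are checked through their property cell, and slow-mode,
// non-proxy holders through a dictionary negative lookup.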
    MacroAssembler* masm,
    Handle<JSObject> holder,
  if (holder->IsJSGlobalObject()) {
    GenerateCheckPropertyCell(
        masm, Handle<JSGlobalObject>::cast(holder), name, scratch1(), miss);
  } else if (!holder->HasFastProperties() && !holder->IsJSGlobalProxy()) {
    GenerateDictionaryNegativeLookup(
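// GenerateStoreTransition stores a value while transitioning the receiver to
// a new map. The value is validated against the representation recorded in
// the transition's last descriptor (constant, smi, heap object, or double,
// the latter boxed into a freshly allocated heap number). If the map has no
// unused property fields the stub tail-calls the SharedStoreIC_ExtendStorage
// runtime entry; otherwise it installs the transition map and writes the
// field, in-object or in the properties backing store, with write barriers
// as required by the representation.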
    Handle<JSObject> object,
    LookupResult* lookup,
    Handle<Map> transition,
    Register receiver_reg,
    Register storage_reg,
  int descriptor = transition->LastAdded();
  DescriptorArray* descriptors = transition->instance_descriptors();
  PropertyDetails details = descriptors->GetDetails(descriptor);
  Representation representation = details.representation();
  ASSERT(!representation.IsNone());

    Handle<Object> constant(descriptors->GetValue(descriptor), masm->isolate());
    __ Move(scratch1, constant);
    __ cmp(value_reg, scratch1);
    __ b(ne, miss_label);
  } else if (representation.IsSmi()) {
    __ JumpIfNotSmi(value_reg, miss_label);
  } else if (representation.IsHeapObject()) {
    __ JumpIfSmi(value_reg, miss_label);
  } else if (representation.IsDouble()) {
    Label do_store, heap_number;
    __ LoadRoot(scratch3, Heap::kHeapNumberMapRootIndex);
    __ AllocateHeapNumber(storage_reg, scratch1, scratch2, scratch3, slow);

    __ JumpIfNotSmi(value_reg, &heap_number);
    __ SmiUntag(scratch1, value_reg);
    __ vmov(s0, scratch1);

    __ bind(&heap_number);
    __ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex,

  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  if (details.type() == FIELD &&
      object->map()->unused_property_fields() == 0) {
    __ push(receiver_reg);
    __ mov(r2, Operand(transition));
    __ TailCallExternalReference(
        ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),

  __ mov(scratch1, Operand(transition));
  __ RecordWriteField(receiver_reg,

  int index = transition->instance_descriptors()->GetFieldIndex(
      transition->LastAdded());

  index -= object->map()->inobject_properties();

  SmiCheck smi_check = representation.IsTagged()

    int offset = object->map()->instance_size() + (index * kPointerSize);
    if (representation.IsDouble()) {

    if (!representation.IsSmi()) {

      if (!representation.IsDouble()) {
        __ mov(storage_reg, value_reg);

      __ RecordWriteField(receiver_reg,

    if (representation.IsDouble()) {

    if (!representation.IsSmi()) {

      if (!representation.IsDouble()) {
        __ mov(storage_reg, value_reg);

      __ RecordWriteField(scratch1,
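// GenerateStoreField stores into an existing field without changing the map.
// The value is checked against the field's representation (smi, heap object,
// or double, in which case the number is written into the already allocated
// heap number), and the tagged write is followed by a write barrier unless
// the representation guarantees a smi.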
    Handle<JSObject> object,
    LookupResult* lookup,
    Register receiver_reg,
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  int index = lookup->GetFieldIndex().field_index();

  index -= object->map()->inobject_properties();

  Representation representation = lookup->representation();
  ASSERT(!representation.IsNone());
  if (representation.IsSmi()) {
    __ JumpIfNotSmi(value_reg, miss_label);
  } else if (representation.IsHeapObject()) {
    __ JumpIfSmi(value_reg, miss_label);
  } else if (representation.IsDouble()) {
    int offset = object->map()->instance_size() + (index * kPointerSize);

    Label do_store, heap_number;
    __ JumpIfNotSmi(value_reg, &heap_number);
    __ SmiUntag(scratch2, value_reg);
    __ vmov(s0, scratch2);

    __ bind(&heap_number);
    __ CheckMap(value_reg, scratch2, Heap::kHeapNumberMapRootIndex,

  SmiCheck smi_check = representation.IsTagged()

    int offset = object->map()->instance_size() + (index * kPointerSize);

    if (!representation.IsSmi()) {
      __ JumpIfSmi(value_reg, &exit);

      __ mov(name_reg, value_reg);
      __ RecordWriteField(receiver_reg,

    if (!representation.IsSmi()) {
      __ JumpIfSmi(value_reg, &exit);

      __ mov(name_reg, value_reg);
      __ RecordWriteField(scratch1,
  if (!label->is_unused()) {
    __ mov(this->name(), Operand(name));
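// PushInterceptorArguments pushes the arguments expected by the named
// interceptor runtime entries (name, interceptor info, receiver, and holder);
// CompileCallLoadPropertyWithInterceptor pushes them and then calls the given
// IC utility function.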
static void PushInterceptorArguments(MacroAssembler* masm,
                                     Handle<JSObject> holder_obj) {
  Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
  ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor));
  Register scratch = name;
  __ mov(scratch, Operand(interceptor));


static void CompileCallLoadPropertyWithInterceptor(
    MacroAssembler* masm,
    Handle<JSObject> holder_obj,
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
  __ CallExternalReference(
      ExternalReference(IC_Utility(id), masm->isolate()),
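// GenerateFastApiCall sets up a direct call to a simple API function: it
// checks that the receiver and argument registers do not clash with the
// scratch register, resolves the expected holder through the
// CallOptimization, loads the callee, call data, holder, and API function
// address into their fixed registers, and tail-calls CallApiFunctionStub.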
void StubCompiler::GenerateFastApiCall(MacroAssembler* masm,
                                       const CallOptimization& optimization,
                                       Handle<Map> receiver_map,
  ASSERT(!receiver.is(scratch_in));
  for (int i = 0; i < argc; i++) {
    Register arg = values[argc - 1 - i];
    ASSERT(!receiver.is(arg));
    ASSERT(!scratch_in.is(arg));

  ASSERT(optimization.is_simple_api_call());

  Register callee = r0;
  Register call_data = r4;
  Register holder = r2;
  Register api_function_address = r1;

  CallOptimization::HolderLookup holder_lookup;
  Handle<JSObject> api_holder = optimization.LookupHolderOfExpectedType(
  switch (holder_lookup) {
    case CallOptimization::kHolderIsReceiver:
      __ Move(holder, receiver);
    case CallOptimization::kHolderFound:
      __ Move(holder, api_holder);
    case CallOptimization::kHolderNotFound:

  Isolate* isolate = masm->isolate();
  Handle<JSFunction> function = optimization.constant_function();
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  Handle<Object> call_data_obj(api_call_info->data(), isolate);

  __ Move(callee, function);

  bool call_data_undefined = false;
  if (isolate->heap()->InNewSpace(*call_data_obj)) {
    __ Move(call_data, api_call_info);
  } else if (call_data_obj->IsUndefined()) {
    call_data_undefined = true;
    __ LoadRoot(call_data, Heap::kUndefinedValueRootIndex);

    __ Move(call_data, call_data_obj);

  Address function_address = v8::ToCData<Address>(api_call_info->callback());
  ApiFunction fun(function_address);
  ExternalReference ref = ExternalReference(&fun,
  __ mov(api_function_address, Operand(ref));

  CallApiFunctionStub stub(is_store, call_data_undefined, argc);
  __ TailCallStub(&stub);


void StubCompiler::GenerateTailCall(MacroAssembler* masm, Handle<Code> code) {
  __ Jump(code, RelocInfo::CODE_TARGET);
#define __ ACCESS_MASM(masm())
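// CheckPrototypes walks the prototype chain from the receiver's map to the
// holder's map, validating each map on the way: dictionary-mode prototypes
// get a negative lookup for |name|, global proxies an access check, and
// global objects a property-cell check. The register holding the holder is
// returned to the caller.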
Register StubCompiler::CheckPrototypes(Handle<HeapType> type,
                                       Handle<JSObject> holder,
  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
         && !scratch2.is(scratch1));

  Register reg = object_reg;
  if (type->IsConstant()) current = Handle<JSObject>::cast(type->AsConstant());

  Handle<Map> current_map = receiver_map;
  Handle<Map> holder_map(holder->map());

  while (!current_map.is_identical_to(holder_map)) {
    ASSERT(current_map->IsJSGlobalProxyMap() ||
           !current_map->is_access_check_needed());

    if (current_map->is_dictionary_map() &&
        !current_map->IsJSGlobalObjectMap() &&
        !current_map->IsJSGlobalProxyMap()) {
      if (!name->IsUniqueName()) {
      ASSERT(current.is_null() ||
             current->property_dictionary()->FindEntry(*name) ==
             NameDictionary::kNotFound);

      GenerateDictionaryNegativeLookup(masm(), miss, reg, name,

      Register map_reg = scratch1;

      if (current_map->IsJSGlobalProxyMap()) {
        __ CheckAccessGlobalProxy(reg, scratch2, miss);
      } else if (current_map->IsJSGlobalObjectMap()) {
        GenerateCheckPropertyCell(
            masm(), Handle<JSGlobalObject>::cast(current), name,

      if (heap()->InNewSpace(*prototype)) {
        __ mov(reg, Operand(prototype));

    current_map = handle(current->map());

  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));

  ASSERT(current_map->IsJSGlobalProxyMap() ||
         !current_map->is_access_check_needed());
  if (current_map->IsJSGlobalProxyMap()) {
    __ CheckAccessGlobalProxy(reg, scratch1, miss);


  if (!miss->is_unused()) {


  if (!miss->is_unused()) {
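// The two dangling "if (!miss->is_unused())" checks above appear to be the
// load and store HandlerFrontendFooter fragments, which only emit miss
// handling when the miss label was actually bound. CallbackHandlerFrontend,
// below, additionally probes the property dictionary of slow-mode holders and
// checks that the entry found there is the expected callback.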
    Handle<HeapType> type,
    Handle<JSObject> holder,
    Handle<Object> callback) {
  if (!holder->HasFastProperties() && !holder->IsJSGlobalObject()) {
    ASSERT(!reg.is(scratch2()));
    ASSERT(!reg.is(scratch3()));
    ASSERT(!reg.is(scratch4()));

    Register dictionary = scratch4();

    __ bind(&probe_done);

    const int kValueOffset = kElementsStartOffset + kPointerSize;
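// GenerateLoadField moves the holder into the receiver register if necessary
// and tail-calls the LoadFieldStub or KeyedLoadFieldStub that performs the
// actual field read.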
    Handle<JSObject> holder,
    Representation representation) {
  if (!reg.is(receiver())) __ mov(receiver(), reg);
  if (kind() == Code::LOAD_IC) {
    LoadFieldStub stub(field.is_inobject(holder),
                       field.translate(holder),
                       representation);
    GenerateTailCall(masm(), stub.GetCode(isolate()));
  } else {
    KeyedLoadFieldStub stub(field.is_inobject(holder),
                            field.translate(holder),
                            representation);
    GenerateTailCall(masm(), stub.GetCode(isolate()));
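// GenerateLoadCallback builds the PropertyCallbackArguments block on the
// stack (callback data, return-value slots, the isolate, and the holder),
// loads the getter's address into its fixed register, and tail-calls
// CallApiGetterStub.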
    Handle<ExecutableAccessorInfo> callback) {
  ASSERT(!scratch4().is(reg));

  if (heap()->InNewSpace(callback->data())) {

    __ Move(scratch3(), Handle<Object>(callback->data(), isolate()));

    __ LoadRoot(scratch3(), Heap::kUndefinedValueRootIndex);

       Operand(ExternalReference::isolate_address(isolate())));
  __ Push(scratch4(), reg);

  Register getter_address_reg = r2;

  Address getter_address = v8::ToCData<Address>(callback->getter());
  ApiFunction fun(getter_address);
  ExternalReference ref = ExternalReference(&fun, type, isolate());
  __ mov(getter_address_reg, Operand(ref));

  CallApiGetterStub stub;
  __ TailCallStub(&stub);
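// GenerateLoadInterceptor: when the post-interceptor lookup is cacheable (a
// field, or a compatible ExecutableAccessorInfo getter), the interceptor-only
// runtime entry is called inside an internal frame and the inlined follow-up
// load runs if the interceptor returned the no-result sentinel; otherwise the
// interceptor arguments are pushed and the full
// LoadPropertyWithInterceptorForLoad runtime entry is tail-called.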
    Register holder_reg,
    Handle<Object> object,
    Handle<JSObject> interceptor_holder,
    LookupResult* lookup,
    Handle<Name> name) {
  ASSERT(interceptor_holder->HasNamedInterceptor());
  ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());

  bool compile_followup_inline = false;
  if (lookup->IsFound() && lookup->IsCacheable()) {
    if (lookup->IsField()) {
      compile_followup_inline = true;
    } else if (lookup->type() == CALLBACKS &&
               lookup->GetCallbackObject()->IsExecutableAccessorInfo()) {
      ExecutableAccessorInfo* callback =
          ExecutableAccessorInfo::cast(lookup->GetCallbackObject());
      compile_followup_inline = callback->getter() != NULL &&
          callback->IsCompatibleReceiver(*object);

  if (compile_followup_inline) {
    bool must_perform_prototype_check = *interceptor_holder != lookup->holder();
    bool must_preserve_receiver_reg = !receiver().is(holder_reg) &&
        (lookup->type() == CALLBACKS || must_perform_prototype_check);

    if (must_preserve_receiver_reg) {
      __ Push(holder_reg, this->name());

      CompileCallLoadPropertyWithInterceptor(
          masm(), receiver(), holder_reg, this->name(), interceptor_holder,
          IC::kLoadPropertyWithInterceptorOnly);

      Label interceptor_failed;
      __ LoadRoot(scratch1(), Heap::kNoInterceptorResultSentinelRootIndex);
      __ b(eq, &interceptor_failed);
      frame_scope.GenerateLeaveFrame();

      __ bind(&interceptor_failed);

      if (must_preserve_receiver_reg) {

    PushInterceptorArguments(masm(), receiver(), holder_reg,
                             this->name(), interceptor_holder);

    ExternalReference ref =
        ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForLoad),
void StubCompiler::GenerateBooleanCheck(Register object, Label* miss) {
  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
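// CompileStoreCallback pushes the holder, the callback info, the property
// name, and the value, then tail-calls the StoreCallbackProperty runtime
// entry, which takes five arguments.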
    Handle<JSObject> object,
    Handle<JSObject> holder,
    Handle<ExecutableAccessorInfo> callback) {
  ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());

  __ push(holder_reg);
  __ mov(ip, Operand(callback));
  __ mov(ip, Operand(name));
  __ Push(ip, value());

  ExternalReference store_callback_property =
      ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
  __ TailCallExternalReference(store_callback_property, 5, 1);
#define __ ACCESS_MASM(masm)
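// GenerateStoreViaSetter invokes a JavaScript setter: for global-object
// receivers the global receiver is swapped in first, the receiver and value
// are pushed, the setter is invoked with one argument, and a setter-stub
// deopt PC offset is recorded so the frame can be recognized during
// deoptimization.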
    MacroAssembler* masm,
    Handle<HeapType> type,
    Handle<JSFunction> setter) {
  if (!setter.is_null()) {
    if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {

    __ Push(receiver, value());
    ParameterCount actual(1);
    ParameterCount expected(setter);
    __ InvokeFunction(setter, expected, actual,

    masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
#define __ ACCESS_MASM(masm())
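// Below are the tails of CompileStoreInterceptor, which tail-calls the
// StoreInterceptorProperty runtime entry with three arguments, and
// CompileLoadNonexistent, which returns undefined in r0.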
    Handle<JSObject> object,
    Handle<Name> name) {
  ExternalReference store_ic_property =
      ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), isolate());
  __ TailCallExternalReference(store_ic_property, 3, 1);


    Handle<JSObject> last,
    Handle<Name> name) {
  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
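// Register conventions for the ARM load and store stub compilers: the leading
// entries are the receiver and name registers fixed by the corresponding IC
// calling conventions, followed by the scratch registers the handlers may
// use.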
Register* LoadStubCompiler::registers() {
  static Register registers[] = { r0, r2, r3, r1, r4, r5 };


Register* KeyedLoadStubCompiler::registers() {
  static Register registers[] = { r1, r0, r2, r3, r4, r5 };


Register StoreStubCompiler::value() {


Register* StoreStubCompiler::registers() {
  static Register registers[] = { r1, r2, r3, r4, r5 };


Register* KeyedStoreStubCompiler::registers() {
  static Register registers[] = { r2, r1, r3, r4, r5 };
#define __ ACCESS_MASM(masm)
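// GenerateLoadViaGetter mirrors GenerateStoreViaSetter for reads: the getter
// is invoked with zero arguments and a getter-stub deopt PC offset is
// recorded.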
    Handle<HeapType> type,
    Handle<JSFunction> getter) {
  if (!getter.is_null()) {
    if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {

    ParameterCount actual(0);
    ParameterCount expected(getter);
    __ InvokeFunction(getter, expected, actual,

    masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
#define __ ACCESS_MASM(masm())
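// CompileLoadGlobal loads the value of a global property cell; unless the
// property is known to be non-deletable, the value is compared against the
// hole so that a deleted property misses.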
    Handle<HeapType> type,
    Handle<GlobalObject> global,
    Handle<PropertyCell> cell,
    bool is_dont_delete) {
  __ mov(r3, Operand(cell));

  if (!is_dont_delete) {
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->named_load_global_stub(), 1, r1, r3);
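// CompilePolymorphicIC: keyed ICs first compare the name explicitly; the
// receiver map is then compared against each handled (non-deprecated) map
// and control jumps to the matching handler, with number types sharing a
// single heap-number case.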
      (kind() == Code::KEYED_LOAD_IC || kind() == Code::KEYED_STORE_IC)) {
    __ cmp(this->name(), Operand(name));

  int receiver_count = types->length();
  int number_of_handled_maps = 0;

  for (int current = 0; current < receiver_count; ++current) {
    Handle<HeapType> type = types->at(current);
    if (!map->is_deprecated()) {
      number_of_handled_maps++;
      __ mov(ip, Operand(map));
      __ cmp(map_reg, ip);
      if (type->Is(HeapType::Number())) {
        ASSERT(!number_case.is_unused());
        __ bind(&number_case);

      __ Jump(handlers->at(current), RelocInfo::CODE_TARGET, eq);

  ASSERT(number_of_handled_maps != 0);
  ExternalReference ref =
      ExternalReference(IC_Utility(IC::kStoreIC_ArrayLength),
  __ TailCallExternalReference(ref, 2, 1);
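// CompileStorePolymorphic compares the receiver map against each handled
// map: plain entries jump straight to their handler, while entries with a
// transitioned map load it into transition_map() before jumping.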
  int receiver_count = receiver_maps->length();

  for (int i = 0; i < receiver_count; ++i) {
    __ mov(ip, Operand(receiver_maps->at(i)));
    if (transitioned_maps->at(i).is_null()) {
      __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET, eq);

      __ b(ne, &next_map);
      __ mov(transition_map(), Operand(transitioned_maps->at(i)));
      __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET, al);
#define __ ACCESS_MASM(masm)
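// KeyedLoadStubCompiler::GenerateLoadDictionaryElement untags the smi key,
// looks the element up in the receiver's number dictionary, and falls back
// to the KeyedLoadIC slow builtin (or the miss builtin for non-smi keys or
// failed probes).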
    MacroAssembler* masm) {
  Register receiver = r1;

  __ UntagAndJumpIfNotSmi(r2, key, &miss);
  __ LoadFromNumberDictionary(&slow, r4, key, r0, r2, r3, r5);

  __ IncrementCounter(
      masm->isolate()->counters()->keyed_load_external_array_slow(),

  TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Slow);

  TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss);


#endif  // V8_TARGET_ARCH_ARM