#if V8_TARGET_ARCH_MIPS

#define __ ACCESS_MASM(masm)
static void ProbeTable(Isolate* isolate,
                       MacroAssembler* masm,
                       Code::Flags flags,
                       StubCache::Table table,
                       Register receiver,
                       Register name,
                       Register offset,
                       Register scratch,
                       Register scratch2,
                       Register offset_scratch) {
  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
  ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
  ExternalReference map_offset(isolate->stub_cache()->map_reference(table));

  uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address());
  uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address());
  uint32_t map_off_addr = reinterpret_cast<uint32_t>(map_offset.address());

  // Check the relative positions of the address fields.
  ASSERT(value_off_addr > key_off_addr);
  ASSERT((value_off_addr - key_off_addr) % 4 == 0);
  ASSERT((value_off_addr - key_off_addr) < (256 * 4));
  ASSERT(map_off_addr > key_off_addr);
  ASSERT((map_off_addr - key_off_addr) % 4 == 0);
  ASSERT((map_off_addr - key_off_addr) < (256 * 4));

  Label miss;
  Register base_addr = scratch;

  // Multiply the entry offset by 3: each entry holds three words.
  __ sll(offset_scratch, offset, 1);
  __ Addu(offset_scratch, offset_scratch, offset);

  // Compute the base address of the entry.
  __ li(base_addr, Operand(key_offset));
  __ sll(at, offset_scratch, kPointerSizeLog2);
  __ Addu(base_addr, base_addr, at);

  // Check that the key in the entry matches the name.
  __ lw(at, MemOperand(base_addr, 0));
  __ Branch(&miss, ne, name, Operand(at));

  // Check that the map in the entry matches the receiver's map.
  __ lw(at, MemOperand(base_addr, map_off_addr - key_off_addr));
  __ lw(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ Branch(&miss, ne, at, Operand(scratch2));

  // Get the code entry from the cache and check that its flags match what we
  // are looking for.
  Register code = scratch2;
  __ lw(code, MemOperand(base_addr, value_off_addr - key_off_addr));

  Register flags_reg = base_addr;
  __ lw(flags_reg, FieldMemOperand(code, Code::kFlagsOffset));
  __ And(flags_reg, flags_reg, Operand(~Code::kFlagsNotUsedInLookup));
  __ Branch(&miss, ne, flags_reg, Operand(flags));
  // ...
}
void StubCompiler::GenerateDictionaryNegativeLookup(MacroAssembler* masm,
                                                    Label* miss_label,
                                                    Register receiver,
                                                    Handle<Name> name,
                                                    Register scratch0,
                                                    Register scratch1) {
  ASSERT(name->IsUniqueName());
  ASSERT(!receiver.is(scratch0));
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
  __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);

  // Bail out if the receiver has a named interceptor or requires access
  // checks.
  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);
  Register map = scratch1;
  // ...
  __ And(scratch0, scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
  __ Branch(miss_label, ne, scratch0, Operand(zero_reg));

  // Check that the properties array is a dictionary (hash table).
  Register properties = scratch0;
  // ...
  Register tmp = properties;
  __ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
  __ Branch(miss_label, ne, map, Operand(tmp));
  // ...
  __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
}
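// Illustrative sketch, not part of the original file: conceptually, the
// negative lookup that the elided NameDictionaryLookupStub::GenerateNegativeLookup
// call emits walks the name dictionary and succeeds once it hits an empty
// slot without having seen `name`. A hypothetical linear-probing equivalent
// over a plain array (the real stub only inlines a few probes and uses the
// dictionary's own probing sequence):
static bool IsAbsentSketch(uint32_t* keys, uint32_t capacity,
                           uint32_t name, uint32_t name_hash) {
  // capacity is a power of two, so (hash + probe) & (capacity - 1) cycles
  // through every slot; 0 stands in for an empty ("undefined") key.
  for (uint32_t probe = 0; probe < capacity; probe++) {
    uint32_t index = (name_hash + probe) & (capacity - 1);
    if (keys[index] == 0) return true;      // hit an empty slot: not present
    if (keys[index] == name) return false;  // found the name: present
  }
  return true;
}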
void StubCache::GenerateProbe(MacroAssembler* masm, Code::Flags flags,
                              Register receiver, Register name,
                              Register scratch, Register extra,
                              Register extra2, Register extra3) {
  Isolate* isolate = masm->isolate();
  Label miss;

  // The offset arithmetic in ProbeTable relies on 12-byte cache entries.
  ASSERT(sizeof(Entry) == 12);

  // Make sure that there are no register conflicts.
  ASSERT(!scratch.is(receiver));
  ASSERT(!scratch.is(name));
  ASSERT(!extra.is(receiver));
  ASSERT(!extra.is(scratch));
  ASSERT(!extra2.is(receiver));
  ASSERT(!extra2.is(scratch));
  ASSERT(!extra2.is(extra));

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1,
                      extra2, extra3);

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Compute the primary hash from the name's hash field, the receiver's map
  // and the code flags, then probe the primary table.
  // ...
  __ Addu(scratch, scratch, at);
  uint32_t mask = kPrimaryTableSize - 1;
  // ...
  __ And(scratch, scratch, Operand(mask));
  // ...

  // Primary miss: compute the hash for the secondary probe and probe again.
  // ...
  __ Subu(scratch, scratch, at);
  uint32_t mask2 = kSecondaryTableSize - 1;
  __ And(scratch, scratch, Operand(mask2));
  // ...

  // Cache miss: fall through and let the caller handle it in the runtime.
  __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1,
                      extra2, extra3);
}
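// Illustrative sketch, not part of the original file: the two table offsets
// computed by the elided shift/xor/add sequences above correspond roughly to
// the following, where `name_hash` is the name's hash field, `map` and `name`
// stand for the raw tagged pointers, and the shift by kHeapObjectTagSize
// drops the tag bits. Names are invented for the sketch; the authoritative
// hash functions live in stub-cache.h.
static inline uint32_t PrimaryProbeSketch(uint32_t name_hash, uint32_t map,
                                          uint32_t flags,
                                          uint32_t table_size) {
  uint32_t mask = table_size - 1;
  return (((name_hash + map) >> kHeapObjectTagSize) ^
          ((flags >> kHeapObjectTagSize) & mask)) & mask;
}

static inline uint32_t SecondaryProbeSketch(uint32_t primary, uint32_t name,
                                            uint32_t flags,
                                            uint32_t table_size) {
  uint32_t mask = table_size - 1;
  return ((primary - (name >> kHeapObjectTagSize)) +
          ((flags >> kHeapObjectTagSize) & mask)) & mask;
}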
void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                       int index,
                                                       Register prototype) {
  // ...
}


void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm, int index, Register prototype, Label* miss) {
  Isolate* isolate = masm->isolate();
  // Get the global function with the given index and check that we are still
  // in the same context.
  Handle<JSFunction> function(
      JSFunction::cast(isolate->native_context()->get(index)));
  Register scratch = prototype;
  // ...
  __ li(at, function);
  __ Branch(miss, ne, at, Operand(scratch));

  // Load its initial map; the global functions all have initial maps.
  __ li(prototype, Handle<Map>(function->initial_map()));
  // ...
}
void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
                                            /* ... */
                                            Representation representation) {
  ASSERT(!representation.IsDouble());
  // ...
}
void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
                                           Register receiver,
                                           Register scratch,
                                           Label* miss_label) {
  // Check that the receiver isn't a smi and is actually a JS array, then
  // return its length.
  __ JumpIfSmi(receiver, miss_label);
  __ GetObjectType(receiver, scratch, scratch);
  // ...
}


void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
                                                 Register receiver,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, scratch1);
}
void StubCompiler::GenerateCheckPropertyCell(MacroAssembler* masm,
                                             Handle<JSGlobalObject> global,
                                             Handle<Name> name,
                                             Register scratch,
                                             Label* miss) {
  // The property must still be absent, i.e. its cell must hold the hole.
  Handle<PropertyCell> cell = JSGlobalObject::EnsurePropertyCell(global, name);
  ASSERT(cell->value()->IsTheHole());
  __ li(scratch, Operand(cell));
  // ...
  __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
  __ Branch(miss, ne, scratch, Operand(at));
}
void StoreStubCompiler::GenerateNegativeHolderLookup(
    MacroAssembler* masm,
    Handle<JSObject> holder,
    Register holder_reg,
    Handle<Name> name,
    Label* miss) {
  if (holder->IsJSGlobalObject()) {
    GenerateCheckPropertyCell(
        masm, Handle<JSGlobalObject>::cast(holder), name, scratch1(), miss);
  } else if (!holder->HasFastProperties() && !holder->IsJSGlobalProxy()) {
    GenerateDictionaryNegativeLookup(
        masm, miss, holder_reg, name, scratch1(), scratch2());
  }
}
void StubCompiler::GenerateStoreTransition(MacroAssembler* masm,
                                           Handle<JSObject> object,
                                           LookupResult* lookup,
                                           Handle<Map> transition,
                                           Handle<Name> name,
                                           Register receiver_reg,
                                           Register storage_reg,
                                           Register value_reg,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* miss_label,
                                           Label* slow) {
  int descriptor = transition->LastAdded();
  DescriptorArray* descriptors = transition->instance_descriptors();
  PropertyDetails details = descriptors->GetDetails(descriptor);
  Representation representation = details.representation();
  ASSERT(!representation.IsNone());

  if (details.type() == CONSTANT) {
    // The stored value must match the transition's constant.
    Handle<Object> constant(descriptors->GetValue(descriptor), masm->isolate());
    __ li(scratch1, constant);
    __ Branch(miss_label, ne, value_reg, Operand(scratch1));
  } else if (representation.IsSmi()) {
    __ JumpIfNotSmi(value_reg, miss_label);
  } else if (representation.IsHeapObject()) {
    __ JumpIfSmi(value_reg, miss_label);
  } else if (representation.IsDouble()) {
    // Box the value in a freshly allocated heap number.
    Label do_store, heap_number;
    __ LoadRoot(scratch3, Heap::kHeapNumberMapRootIndex);
    __ AllocateHeapNumber(storage_reg, scratch1, scratch2, scratch3, slow);

    __ JumpIfNotSmi(value_reg, &heap_number);
    __ SmiUntag(scratch1, value_reg);
    __ mtc1(scratch1, f6);
    // ...

    __ bind(&heap_number);
    __ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex,
                miss_label, DONT_DO_SMI_CHECK);
    // ...
  }

  // Stubs are never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  // Perform the map transition for the receiver if necessary.
  if (details.type() == FIELD &&
      object->map()->unused_property_fields() == 0) {
    // The properties must be extended before we can store the value, so jump
    // to a runtime call that extends the properties array.
    __ push(receiver_reg);
    __ li(a2, Operand(transition));
    // ...
    __ TailCallExternalReference(
        ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
                          masm->isolate()),
        3, 1);
    return;
  }

  // Update the map of the object and the write barrier for the map field.
  __ li(scratch1, Operand(transition));
  // ...
  __ RecordWriteField(receiver_reg, /* ... */);
  // ...

  int index = transition->instance_descriptors()->GetFieldIndex(
      transition->LastAdded());

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();

  SmiCheck smi_check = representation.IsTagged()
      ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    if (representation.IsDouble()) {
      // ...
    }
    // ...
    if (!representation.IsSmi()) {
      // Update the write barrier for the object.
      if (!representation.IsDouble()) {
        __ mov(storage_reg, value_reg);
      }
      __ RecordWriteField(receiver_reg, /* ... */);
    }
  } else {
    // Write to the properties backing store instead.
    // ...
    if (representation.IsDouble()) {
      // ...
    }
    // ...
    if (!representation.IsSmi()) {
      if (!representation.IsDouble()) {
        __ mov(storage_reg, value_reg);
      }
      __ RecordWriteField(scratch1, /* ... */);
    }
  }

  // Return the value (in v0).
  // ...
}
void StubCompiler::GenerateStoreField(MacroAssembler* masm,
                                      Handle<JSObject> object,
                                      LookupResult* lookup,
                                      Register receiver_reg,
                                      Register name_reg,
                                      Register value_reg,
                                      Register scratch1,
                                      Register scratch2,
                                      Label* miss_label) {
  Label exit;

  // Stubs are never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  int index = lookup->GetFieldIndex().field_index();

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();

  Representation representation = lookup->representation();
  ASSERT(!representation.IsNone());
  if (representation.IsSmi()) {
    __ JumpIfNotSmi(value_reg, miss_label);
  } else if (representation.IsHeapObject()) {
    __ JumpIfSmi(value_reg, miss_label);
  } else if (representation.IsDouble()) {
    // Load the heap number that already holds the field's double storage.
    if (index < 0) {
      int offset = object->map()->instance_size() + (index * kPointerSize);
      __ lw(scratch1, FieldMemOperand(receiver_reg, offset));
    } else {
      // ...
    }

    // Store the value into the storage.
    Label do_store, heap_number;
    __ JumpIfNotSmi(value_reg, &heap_number);
    __ SmiUntag(scratch2, value_reg);
    __ mtc1(scratch2, f6);
    // ...

    __ bind(&heap_number);
    __ CheckMap(value_reg, scratch2, Heap::kHeapNumberMapRootIndex,
                miss_label, DONT_DO_SMI_CHECK);
    // ...
    return;
  }

  SmiCheck smi_check = representation.IsTagged()
      ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    __ sw(value_reg, FieldMemOperand(receiver_reg, offset));

    if (!representation.IsSmi()) {
      // Skip updating the write barrier if we are storing a smi.
      __ JumpIfSmi(value_reg, &exit);
      // It is OK to clobber receiver_reg and name_reg, since we return.
      __ mov(name_reg, value_reg);
      __ RecordWriteField(receiver_reg, /* ... */);
    }
  } else {
    // Write to the properties backing store instead.
    // ...
    if (!representation.IsSmi()) {
      // Skip updating the write barrier if we are storing a smi.
      __ JumpIfSmi(value_reg, &exit);
      __ mov(name_reg, value_reg);
      __ RecordWriteField(scratch1, /* ... */);
    }
  }

  // Return the value (in v0).
  // ...
}
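// Illustrative sketch, not part of the original file: the offset computation
// shared by the two store generators above. After subtracting the number of
// in-object properties, a negative index addresses a slot at the end of the
// object itself, and a non-negative index addresses the external properties
// backing store. The helper name and out-parameter are invented for the
// sketch; the constants are the ones used in the code above.
static inline int FieldOffsetSketch(int field_index, int inobject_properties,
                                    int instance_size, bool* inobject) {
  int index = field_index - inobject_properties;
  *inobject = index < 0;
  return *inobject
      ? instance_size + index * kPointerSize              // inside the object
      : index * kPointerSize + FixedArray::kHeaderSize;   // in the backing store
}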
void StoreStubCompiler::GenerateRestoreName(MacroAssembler* masm,
                                            Label* label,
                                            Handle<Name> name) {
  if (!label->is_unused()) {
    __ bind(label);
    __ li(this->name(), Operand(name));
  }
}
static void PushInterceptorArguments(MacroAssembler* masm,
                                     Register receiver,
                                     Register holder,
                                     Register name,
                                     Handle<JSObject> holder_obj) {
  // ...
  __ push(name);
  Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
  ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor));
  Register scratch = name;
  __ li(scratch, Operand(interceptor));
  __ Push(scratch, receiver, holder);
}


static void CompileCallLoadPropertyWithInterceptor(
    MacroAssembler* masm,
    Register receiver,
    Register holder,
    Register name,
    Handle<JSObject> holder_obj,
    IC::UtilityId id) {
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
  __ CallExternalReference(
      ExternalReference(IC_Utility(id), masm->isolate()),
      StubCache::kInterceptorArgsLength);
}
void StubCompiler::GenerateFastApiCall(MacroAssembler* masm,
                                       const CallOptimization& optimization,
                                       Handle<Map> receiver_map,
                                       Register receiver,
                                       Register scratch_in,
                                       bool is_store,
                                       int argc,
                                       Register* values) {
  ASSERT(!receiver.is(scratch_in));
  // Preparing to push: adjust sp, then store the receiver and the arguments.
  __ Subu(sp, sp, Operand((argc + 1) * kPointerSize));
  // ...
  for (int i = 0; i < argc; i++) {
    Register arg = values[argc - 1 - i];
    ASSERT(!receiver.is(arg));
    ASSERT(!scratch_in.is(arg));
    // ...
  }
  ASSERT(optimization.is_simple_api_call());

  // ABI for CallApiFunctionStub.
  Register callee = a0;
  Register call_data = t0;
  Register holder = a2;
  Register api_function_address = a1;

  // Put the holder in place.
  CallOptimization::HolderLookup holder_lookup;
  Handle<JSObject> api_holder = optimization.LookupHolderOfExpectedType(
      receiver_map,
      &holder_lookup);
  switch (holder_lookup) {
    case CallOptimization::kHolderIsReceiver:
      __ Move(holder, receiver);
      break;
    case CallOptimization::kHolderFound:
      __ li(holder, api_holder);
      break;
    case CallOptimization::kHolderNotFound:
      UNREACHABLE();
      break;
  }

  Isolate* isolate = masm->isolate();
  Handle<JSFunction> function = optimization.constant_function();
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  Handle<Object> call_data_obj(api_call_info->data(), isolate);

  // Put the callee in place.
  __ li(callee, function);

  // Put the call data in place.
  bool call_data_undefined = false;
  if (isolate->heap()->InNewSpace(*call_data_obj)) {
    __ li(call_data, api_call_info);
    __ lw(call_data, FieldMemOperand(call_data, CallHandlerInfo::kDataOffset));
  } else if (call_data_obj->IsUndefined()) {
    call_data_undefined = true;
    __ LoadRoot(call_data, Heap::kUndefinedValueRootIndex);
  } else {
    __ li(call_data, call_data_obj);
  }

  // Put the api function address in place.
  Address function_address = v8::ToCData<Address>(api_call_info->callback());
  ApiFunction fun(function_address);
  ExternalReference::Type type = ExternalReference::DIRECT_API_CALL;
  ExternalReference ref =
      ExternalReference(&fun,
                        type,
                        masm->isolate());
  __ li(api_function_address, Operand(ref));

  // Jump to the stub.
  CallApiFunctionStub stub(is_store, call_data_undefined, argc);
  __ TailCallStub(&stub);
}
void StubCompiler::GenerateTailCall(MacroAssembler* masm, Handle<Code> code) {
  __ Jump(code, RelocInfo::CODE_TARGET);
}


#undef __
#define __ ACCESS_MASM(masm())
Register StubCompiler::CheckPrototypes(Handle<HeapType> type,
                                       Register object_reg,
                                       Handle<JSObject> holder,
                                       Register holder_reg,
                                       Register scratch1,
                                       Register scratch2,
                                       Handle<Name> name,
                                       Label* miss,
                                       PrototypeCheckType check) {
  Handle<Map> receiver_map(IC::TypeToMap(*type, isolate()));

  // Make sure there is no overlap between the holder and object registers.
  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
         && !scratch2.is(scratch1));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 0;

  Handle<JSObject> current = Handle<JSObject>::null();
  if (type->IsConstant()) current = Handle<JSObject>::cast(type->AsConstant());
  Handle<JSObject> prototype = Handle<JSObject>::null();
  Handle<Map> current_map = receiver_map;
  Handle<Map> holder_map(holder->map());
  // Traverse the prototype chain and check the maps in the prototype chain
  // for fast and global objects, or do negative lookup for normal objects.
  while (!current_map.is_identical_to(holder_map)) {
    ++depth;

    // Only global objects and objects that do not require access checks are
    // allowed in stubs.
    ASSERT(current_map->IsJSGlobalProxyMap() ||
           !current_map->is_access_check_needed());

    prototype = handle(JSObject::cast(current_map->prototype()));
    if (current_map->is_dictionary_map() &&
        !current_map->IsJSGlobalObjectMap() &&
        !current_map->IsJSGlobalProxyMap()) {
      if (!name->IsUniqueName()) {
        ASSERT(name->IsString());
        name = factory()->InternalizeString(Handle<String>::cast(name));
      }
      ASSERT(current.is_null() ||
             current->property_dictionary()->FindEntry(*name) ==
             NameDictionary::kNotFound);

      GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
                                       scratch1, scratch2);
      // ...
    } else {
      Register map_reg = scratch1;
      // ...
      // Check access rights to the global object. This has to happen after
      // the map check so that we know the object is actually a global object.
      if (current_map->IsJSGlobalProxyMap()) {
        __ CheckAccessGlobalProxy(reg, scratch2, miss);
      } else if (current_map->IsJSGlobalObjectMap()) {
        GenerateCheckPropertyCell(
            masm(), Handle<JSGlobalObject>::cast(current), name,
            scratch2, miss);
      }

      reg = holder_reg;  // From now on the object will be in holder_reg.

      if (heap()->InNewSpace(*prototype)) {
        // The prototype is in new space; load it from the map.
        __ lw(reg, FieldMemOperand(map_reg, Map::kPrototypeOffset));
      } else {
        __ li(reg, Operand(prototype));
      }
    }

    // Go to the next object in the prototype chain.
    current = prototype;
    current_map = handle(current->map());
  }

  // Log the check depth.
  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));
  // ...

  // Perform a security check for access to the global object.
  ASSERT(current_map->IsJSGlobalProxyMap() ||
         !current_map->is_access_check_needed());
  if (current_map->IsJSGlobalProxyMap()) {
    __ CheckAccessGlobalProxy(reg, scratch1, miss);
  }

  // Return the register containing the holder.
  return reg;
}


void LoadStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    // ...
  }
}


void StoreStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    // ...
  }
}
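// Illustrative sketch, not part of the original file: CheckPrototypes above
// is the compile-time mirror of the following runtime walk, written here over
// invented ObjectSketch/MapSketch types. Keep following prototypes, checking
// the map at each hop, until the holder is reached; in the generated code a
// failed map check branches to the miss label instead of returning false.
struct MapSketch { struct ObjectSketch* prototype; };
struct ObjectSketch { MapSketch* map; };

static bool ReachesHolderSketch(ObjectSketch* receiver, MapSketch* holder_map) {
  ObjectSketch* current = receiver;
  while (current != NULL) {
    if (current->map == holder_map) return true;  // reached the holder
    current = current->map->prototype;            // hop to the prototype
  }
  return false;
}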
Register LoadStubCompiler::CallbackHandlerFrontend(Handle<HeapType> type,
                                                   Register object_reg,
                                                   Handle<JSObject> holder,
                                                   Handle<Name> name,
                                                   Handle<Object> callback) {
  Label miss;
  Register reg = HandlerFrontendHeader(type, object_reg, holder, name, &miss);

  if (!holder->HasFastProperties() && !holder->IsJSGlobalObject()) {
    ASSERT(!reg.is(scratch2()));
    ASSERT(!reg.is(scratch3()));
    ASSERT(!reg.is(scratch4()));

    // Probe the properties dictionary and check that the entry found holds
    // the expected callback.
    Register dictionary = scratch4();
    Label probe_done;
    // ...
    __ bind(&probe_done);
    // ...
    const int kElementsStartOffset = NameDictionary::kHeaderSize +
        NameDictionary::kElementsStartIndex * kPointerSize;
    const int kValueOffset = kElementsStartOffset + kPointerSize;
    // ...
  }

  HandlerFrontendFooter(name, &miss);
  return reg;
}
void LoadStubCompiler::GenerateLoadField(Register reg,
                                         Handle<JSObject> holder,
                                         PropertyIndex field,
                                         Representation representation) {
  if (!reg.is(receiver())) __ mov(receiver(), reg);
  if (kind() == Code::LOAD_IC) {
    LoadFieldStub stub(field.is_inobject(holder),
                       field.translate(holder),
                       representation);
    GenerateTailCall(masm(), stub.GetCode(isolate()));
  } else {
    KeyedLoadFieldStub stub(field.is_inobject(holder),
                            field.translate(holder),
                            representation);
    GenerateTailCall(masm(), stub.GetCode(isolate()));
  }
}
void LoadStubCompiler::GenerateLoadCallback(
    Register reg,
    Handle<ExecutableAccessorInfo> callback) {
  // Build the PropertyCallbackArguments list on the stack and push the
  // property name below the exit frame to make the GC aware of them.
  ASSERT(!scratch2().is(reg));
  ASSERT(!scratch3().is(reg));
  ASSERT(!scratch4().is(reg));
  __ push(receiver());
  if (heap()->InNewSpace(callback->data())) {
    __ li(scratch3(), callback);
    __ lw(scratch3(), FieldMemOperand(scratch3(),
                                      ExecutableAccessorInfo::kDataOffset));
  } else {
    __ li(scratch3(), Handle<Object>(callback->data(), isolate()));
  }
  __ Subu(sp, sp, 6 * kPointerSize);
  __ sw(scratch3(), MemOperand(sp, 5 * kPointerSize));
  __ LoadRoot(scratch3(), Heap::kUndefinedValueRootIndex);
  __ sw(scratch3(), MemOperand(sp, 4 * kPointerSize));
  __ sw(scratch3(), MemOperand(sp, 3 * kPointerSize));
  __ li(scratch4(),
        Operand(ExternalReference::isolate_address(isolate())));
  __ sw(scratch4(), MemOperand(sp, 2 * kPointerSize));
  // ...

  // ABI for CallApiGetterStub: the getter address goes in a2.
  Register getter_address_reg = a2;
  Address getter_address = v8::ToCData<Address>(callback->getter());
  ApiFunction fun(getter_address);
  ExternalReference::Type type = ExternalReference::DIRECT_GETTER_CALL;
  ExternalReference ref = ExternalReference(&fun, type, isolate());
  __ li(getter_address_reg, Operand(ref));

  CallApiGetterStub stub;
  __ TailCallStub(&stub);
}
void LoadStubCompiler::GenerateLoadInterceptor(
    Register holder_reg,
    Handle<Object> object,
    Handle<JSObject> interceptor_holder,
    LookupResult* lookup,
    Handle<Name> name) {
  ASSERT(interceptor_holder->HasNamedInterceptor());
  ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());

  // So far the most popular follow-ups for interceptor loads are FIELD and
  // CALLBACKS, so inline only those; other cases may be added later.
  bool compile_followup_inline = false;
  if (lookup->IsFound() && lookup->IsCacheable()) {
    if (lookup->IsField()) {
      compile_followup_inline = true;
    } else if (lookup->type() == CALLBACKS &&
        lookup->GetCallbackObject()->IsExecutableAccessorInfo()) {
      ExecutableAccessorInfo* callback =
          ExecutableAccessorInfo::cast(lookup->GetCallbackObject());
      compile_followup_inline = callback->getter() != NULL &&
          callback->IsCompatibleReceiver(*object);
    }
  }

  if (compile_followup_inline) {
    // Compile the interceptor call, followed by inline code to load the
    // property from further up the prototype chain if the call fails.
    // ...

    // Preserve the receiver register explicitly whenever it differs from the
    // holder and is needed should the interceptor return without a result:
    // the CALLBACKS case needs the receiver passed into C++ code, and the
    // FIELD case might cause a miss during the prototype check.
    bool must_perform_prototype_check =
        *interceptor_holder != lookup->holder();
    bool must_preserve_receiver_reg = !receiver().is(holder_reg) &&
        (lookup->type() == CALLBACKS || must_perform_prototype_check);

    // Save the necessary data before invoking the interceptor; this requires
    // a frame to make the GC aware of the pushed pointers.
    {
      FrameScope frame_scope(masm(), StackFrame::INTERNAL);
      if (must_preserve_receiver_reg) {
        __ Push(receiver(), holder_reg, this->name());
      } else {
        __ Push(holder_reg, this->name());
      }
      // Invoke the interceptor; the map checks from the receiver to the
      // interceptor's holder have already been compiled by the caller.
      CompileCallLoadPropertyWithInterceptor(
          masm(), receiver(), holder_reg, this->name(), interceptor_holder,
          IC::kLoadPropertyWithInterceptorOnly);

      // If the interceptor provided a value for the property, return it.
      Label interceptor_failed;
      __ LoadRoot(scratch1(), Heap::kNoInterceptorResultSentinelRootIndex);
      __ Branch(&interceptor_failed, eq, v0, Operand(scratch1()));
      frame_scope.GenerateLeaveFrame();
      __ Ret();

      __ bind(&interceptor_failed);
      __ pop(this->name());
      __ pop(holder_reg);
      if (must_preserve_receiver_reg) {
        __ pop(receiver());
      }
      // Leave the internal frame.
    }

    GenerateLoadPostInterceptor(holder_reg, interceptor_holder, name, lookup);
  } else {  // !compile_followup_inline
    // Call the runtime system to load the interceptor.
    PushInterceptorArguments(masm(), receiver(), holder_reg,
                             this->name(), interceptor_holder);

    ExternalReference ref = ExternalReference(
        IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), isolate());
    // ...
  }
}
void StubCompiler::GenerateBooleanCheck(Register object, Label* miss) {
  Label success;
  // Check that the object is a boolean.
  __ LoadRoot(at, Heap::kTrueValueRootIndex);
  __ Branch(&success, eq, object, Operand(at));
  __ LoadRoot(at, Heap::kFalseValueRootIndex);
  __ Branch(miss, ne, object, Operand(at));
  __ bind(&success);
}
Handle<Code> StoreStubCompiler::CompileStoreCallback(
    Handle<JSObject> object,
    Handle<JSObject> holder,
    Handle<Name> name,
    Handle<ExecutableAccessorInfo> callback) {
  // ...
  // Stubs are never generated for non-global objects that require access
  // checks.
  ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());

  // ...
  __ li(at, Operand(callback));
  __ push(at);
  __ li(at, Operand(name));
  __ Push(at, value());

  // Do a tail call to the runtime system.
  ExternalReference store_callback_property =
      ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
  __ TailCallExternalReference(store_callback_property, 5, 1);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}


#undef __
#define __ ACCESS_MASM(masm)
void StoreStubCompiler::GenerateStoreViaSetter(
    MacroAssembler* masm,
    Handle<HeapType> type,
    Register receiver,
    Handle<JSFunction> setter) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // ...
    if (!setter.is_null()) {
      // Call the JavaScript setter with the receiver and value on the stack.
      if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        // ...
      }
      __ Push(receiver, value());
      ParameterCount actual(1);
      ParameterCount expected(setter);
      __ InvokeFunction(setter, expected, actual,
                        CALL_FUNCTION, NullCallWrapper());
    } else {
      // If we generate a global code snippet for deoptimization only,
      // remember the place to continue after deoptimization.
      masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
    }
    // ...
  }
  __ Ret();
}


#undef __
#define __ ACCESS_MASM(masm())
Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
    Handle<JSObject> object,
    Handle<Name> name) {
  // ...
  // Do a tail call to the runtime system.
  ExternalReference store_ic_property =
      ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), isolate());
  __ TailCallExternalReference(store_ic_property, 3, 1);
  // ...
  return GetCode(kind(), Code::FAST, name);
}


Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<HeapType> type,
                                                      Handle<JSObject> last,
                                                      Handle<Name> name) {
  NonexistentHandlerFrontend(type, last, name);

  // Return undefined if the maps of the full prototype chain are still the
  // same and no global property with this name contains a value.
  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  __ Ret();
  // ...
  return GetCode(kind(), Code::FAST, name);
}
Register* LoadStubCompiler::registers() {
  // receiver, name, scratch1, scratch2, scratch3, scratch4.
  static Register registers[] = { a0, a2, a3, a1, t0, t1 };
  return registers;
}


Register* KeyedLoadStubCompiler::registers() {
  static Register registers[] = { a1, a0, a2, a3, t0, t1 };
  return registers;
}


Register StoreStubCompiler::value() {
  return a0;
}


Register* StoreStubCompiler::registers() {
  // receiver, name, scratch1, scratch2, scratch3.
  static Register registers[] = { a1, a2, a3, t0, t1 };
  return registers;
}


Register* KeyedStoreStubCompiler::registers() {
  static Register registers[] = { a2, a1, a3, t0, t1 };
  return registers;
}


#undef __
#define __ ACCESS_MASM(masm)
void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm,
                                             Handle<HeapType> type,
                                             Register receiver,
                                             Handle<JSFunction> getter) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    if (!getter.is_null()) {
      // Call the JavaScript getter with the receiver on the stack.
      if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        // ...
      }
      __ push(receiver);
      ParameterCount actual(0);
      ParameterCount expected(getter);
      __ InvokeFunction(getter, expected, actual,
                        CALL_FUNCTION, NullCallWrapper());
    } else {
      // If we generate a global code snippet for deoptimization only,
      // remember the place to continue after deoptimization.
      masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
    }
    // ...
  }
  __ Ret();
}


#undef __
#define __ ACCESS_MASM(masm())
Handle<Code> LoadStubCompiler::CompileLoadGlobal(
    Handle<HeapType> type,
    Handle<GlobalObject> global,
    Handle<PropertyCell> cell,
    Handle<Name> name,
    bool is_dont_delete) {
  Label miss;

  HandlerFrontendHeader(type, receiver(), global, name, &miss);

  // Get the value from the cell.
  __ li(a3, Operand(cell));
  __ lw(t0, FieldMemOperand(a3, Cell::kValueOffset));

  // Check for a deleted property if the property can actually be deleted.
  if (!is_dont_delete) {
    __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
    __ Branch(&miss, eq, t0, Operand(at));
  }

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->named_load_global_stub(), 1, a1, a3);
  // ...

  HandlerFrontendFooter(name, &miss);

  // Return the generated code.
  return GetCode(kind(), Code::NORMAL, name);
}
Handle<Code> BaseLoadStoreStubCompiler::CompilePolymorphicIC(
    TypeHandleList* types,
    CodeHandleList* handlers,
    Handle<Name> name,
    Code::StubType type,
    IcCheckType check) {
  Label miss;

  if (check == PROPERTY &&
      (kind() == Code::KEYED_LOAD_IC || kind() == Code::KEYED_STORE_IC)) {
    __ Branch(&miss, ne, this->name(), Operand(name));
  }

  Label number_case;
  Register match = scratch1();
  Register map_reg = scratch2();
  // ...

  int receiver_count = types->length();
  int number_of_handled_maps = 0;
  __ lw(map_reg, FieldMemOperand(receiver(), HeapObject::kMapOffset));
  for (int current = 0; current < receiver_count; ++current) {
    Handle<HeapType> type = types->at(current);
    Handle<Map> map = IC::TypeToMap(*type, isolate());
    if (!map->is_deprecated()) {
      number_of_handled_maps++;
      // Check the map and tail call if there is a match.
      __ Subu(match, map_reg, Operand(map));
      if (type->Is(HeapType::Number())) {
        ASSERT(!number_case.is_unused());
        __ bind(&number_case);
      }
      __ Jump(handlers->at(current), RelocInfo::CODE_TARGET,
          eq, match, Operand(zero_reg));
    }
  }
  ASSERT(number_of_handled_maps != 0);

  __ bind(&miss);
  TailCallBuiltin(masm(), MissBuiltin(kind()));
  // ...
}


void StoreStubCompiler::GenerateStoreArrayLength() {
  // Prepare a tail call to the StoreIC_ArrayLength runtime function.
  __ Push(receiver(), value());

  ExternalReference ref =
      ExternalReference(IC_Utility(IC::kStoreIC_ArrayLength),
                        masm()->isolate());
  __ TailCallExternalReference(ref, 2, 1);
}
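// Illustrative sketch, not part of the original file: the code that
// CompilePolymorphicIC emits above is, in effect, a linear scan over the
// known maps with a tail jump to the matching handler. The types and helper
// below are invented for the sketch:
typedef void (*HandlerSketch)(void);

static void DispatchSketch(void* receiver_map,
                           void* const* maps, HandlerSketch const* handlers,
                           int count, HandlerSketch miss) {
  for (int i = 0; i < count; i++) {
    if (maps[i] == receiver_map) {
      handlers[i]();  // __ Jump(handlers->at(current), ..., eq, match, zero)
      return;
    }
  }
  miss();  // fall through to the miss builtin
}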
Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
    MapHandleList* receiver_maps,
    CodeHandleList* handler_stubs,
    MapHandleList* transitioned_maps) {
  Label miss;
  __ JumpIfSmi(receiver(), &miss);

  int receiver_count = receiver_maps->length();
  __ lw(scratch1(), FieldMemOperand(receiver(), HeapObject::kMapOffset));
  for (int i = 0; i < receiver_count; ++i) {
    if (transitioned_maps->at(i).is_null()) {
      __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET, eq,
          scratch1(), Operand(receiver_maps->at(i)));
    } else {
      Label next_map;
      __ Branch(&next_map, ne, scratch1(), Operand(receiver_maps->at(i)));
      __ li(transition_map(), Operand(transitioned_maps->at(i)));
      __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET);
      __ bind(&next_map);
    }
  }

  __ bind(&miss);
  TailCallBuiltin(masm(), MissBuiltin(kind()));
  // ...
}


#undef __
#define __ ACCESS_MASM(masm)
void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
    MacroAssembler* masm) {
  // The key is in a0 and the receiver in a1.
  Label slow, miss;

  Register key = a0;
  Register receiver = a1;

  __ JumpIfNotSmi(key, &miss);
  // ...
  __ LoadFromNumberDictionary(&slow, t0, a0, v0, a2, a3, t1);
  __ Ret();

  // Slow case: the key and receiver are still in a0 and a1.
  __ bind(&slow);
  __ IncrementCounter(
      masm->isolate()->counters()->keyed_load_external_array_slow(),
      1, a2, a3);
  TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Slow);

  // Miss case: call the runtime.
  __ bind(&miss);
  TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss);
}


#undef __

#endif  // V8_TARGET_ARCH_MIPS