#if defined(V8_TARGET_ARCH_MIPS)

#define __ ACCESS_MASM(masm)
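
// The __ shorthand defined above routes every assembler call through
// ACCESS_MASM, so a line such as
//   __ JumpIfSmi(receiver, miss);
// is a call on the MacroAssembler* named |masm| in the enclosing function.

// Branches to |global_object| when the instance type it is handed belongs to
// one of the global object types; used by the receiver checks below.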
static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm,
                                            Label* global_object) {
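
// Checks that the receiver is a heap object that supports dictionary-mode
// (string-keyed) property access: not a smi, of an acceptable non-global
// instance type, with no interceptors or access checks, and with its
// properties stored in a hash table.  Jumps to |miss| otherwise.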
static void GenerateStringDictionaryReceiverCheck(MacroAssembler* masm,
  __ JumpIfSmi(receiver, miss);
  __ GetObjectType(receiver, scratch0, scratch1);
  GenerateGlobalInstanceTypeCheck(masm, scratch1, miss);
  __ Branch(miss, ne, scratch1, Operand(zero_reg));
  __ LoadRoot(scratch0, Heap::kHashTableMapRootIndex);
  __ Branch(miss, ne, scratch1, Operand(scratch0));
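
// Probes the string dictionary for |name| and, when the entry's details word
// has type NORMAL (the masked type bits below must be zero), loads the
// property value into the result register.  Jumps to |miss| otherwise.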
static void GenerateDictionaryLoad(MacroAssembler* masm,
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
         Operand(PropertyDetails::TypeField::kMask << kSmiTagSize));
  __ Branch(miss, ne, at, Operand(zero_reg));
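
// Store counterpart of GenerateDictionaryLoad: the property must exist, have
// type NORMAL and not be READ_ONLY (see kTypeAndReadOnlyMask below) before
// |value| is written into the dictionary's value slot; otherwise jump to
// |miss|.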
static void GenerateDictionaryStore(MacroAssembler* masm,
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
  const int kTypeAndReadOnlyMask =
      (PropertyDetails::TypeField::kMask |
  __ And(at, scratch1, Operand(kTypeAndReadOnlyMask));
  __ Branch(miss, ne, at, Operand(zero_reg));
  const int kValueOffset = kElementsStartOffset + kPointerSize;
  __ mov(scratch1, value);
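
// LoadIC::GenerateArrayLength, GenerateStringLength and
// GenerateFunctionPrototype each delegate to the matching StubCompiler helper
// and fall through to the generic LOAD_IC miss stub when the receiver does
// not have the expected shape.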
  StubCompiler::GenerateLoadArrayLength(masm, a0, a3, &miss);
  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);

  StubCompiler::GenerateLoadStringLength(masm, a0, a1, a3, &miss,
  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);

  StubCompiler::GenerateLoadFunctionPrototype(masm, a0, a1, a3, &miss);
  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
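
// Checks that the receiver of a keyed access is a heap object of a suitable
// instance type whose map requires neither access checks nor an indexed
// interceptor; jumps to |slow| otherwise.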
static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm,
  __ JumpIfSmi(receiver, slow);
  __ Branch(slow, ne, at, Operand(zero_reg));
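
// Loads an element from a fast-elements backing store: optionally verifies
// that the elements array has a FixedArray map (jumping to |not_fast_array|
// when it does not), bounds-checks |key| against the array length, and
// treats a loaded the-hole value as out of range.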
static void GenerateFastArrayLoad(MacroAssembler* masm,
                                  Label* not_fast_array,
                                  Label* out_of_range) {
  if (not_fast_array != NULL) {
    __ LoadRoot(at, Heap::kFixedArrayMapRootIndex);
    __ Branch(not_fast_array, ne, scratch1, Operand(at));
  __ AssertFastElements(elements);
  __ Branch(out_of_range, hs, key, Operand(scratch1));
  __ Addu(scratch1, elements,
  __ addu(at, at, scratch1);
  __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
  __ Branch(out_of_range, eq, scratch2, Operand(at));
  __ mov(result, scratch2);
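
// Classifies |key| for the string-keyed paths: jumps to |index_string| when
// the string's hash field caches an array index, and to |not_symbol| when
// the key is not a symbol.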
static void GenerateKeyStringCheck(MacroAssembler* masm,
  __ GetObjectType(key, map, hash);
  __ Branch(index_string, eq, at, Operand(zero_reg));
  __ Branch(not_symbol, eq, at, Operand(zero_reg));
Object* CallIC_Miss(Arguments args);

  Label number, non_number, non_string, boolean, probe, miss;
  Isolate::Current()->stub_cache()->GenerateProbe(
      masm, flags, a1, a2, a3, t0, t1, t2);
  __ JumpIfSmi(a1, &number, t1);
  __ GetObjectType(a1, a3, a3);
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
  __ bind(&non_number);
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
  __ bind(&non_string);
  __ LoadRoot(t0, Heap::kTrueValueRootIndex);
  __ Branch(&boolean, eq, a1, Operand(t0));
  __ LoadRoot(t1, Heap::kFalseValueRootIndex);
  __ Branch(&miss, ne, a1, Operand(t1));
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
  Isolate::Current()->stub_cache()->GenerateProbe(
      masm, flags, a1, a2, a3, t0, t1, t2);
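
// Checks that a1 holds a JSFunction and tail-calls it with |argc| arguments;
// jumps to |miss| when the callee is a smi or not a function.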
static void GenerateFunctionTailCall(MacroAssembler* masm,
  __ JumpIfSmi(a1, miss);
  __ GetObjectType(a1, scratch, scratch);
  ParameterCount actual(argc);

  GenerateStringDictionaryReceiverCheck(masm, a1, a0, a3, t0, &miss);
  GenerateDictionaryLoad(masm, &miss, a0, a2, a1, a3, t0);
  GenerateFunctionTailCall(masm, argc, &miss, t0);

  Isolate* isolate = masm->isolate();
  if (id == IC::kCallIC_Miss) {
    __ IncrementCounter(isolate->counters()->call_miss(), 1, a3, t0);
    __ IncrementCounter(isolate->counters()->keyed_call_miss(), 1, a3, t0);
  __ PrepareCEntryArgs(2);
  __ PrepareCEntryFunction(ExternalReference(IC_Utility(id), isolate));
  if (id == IC::kCallIC_Miss) {
    Label invoke, global;
    __ JumpIfSmi(a2, &invoke);
    __ GetObjectType(a2, a3, a3);
  ParameterCount actual(argc);
  __ InvokeFunction(a1,
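
// KeyedCallIC::GenerateMegamorphic: smi keys are looked up in the fast
// elements or in a number dictionary, string keys try the property
// dictionary and then the monomorphic stub cache, and strings that cache an
// array index are rerouted to the smi path; everything else ends up in the
// slow call path.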
  Label do_call, slow_call, slow_load, slow_reload_receiver;
  Label check_number_dictionary, check_string, lookup_monomorphic_cache;
  Label index_smi, index_string;
  __ JumpIfNotSmi(a2, &check_string);
  GenerateKeyedLoadReceiverCheck(
  GenerateFastArrayLoad(
      masm, a1, a2, t0, a3, a0, a1, &check_number_dictionary, &slow_load);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->keyed_call_generic_smi_fast(), 1, a0, a3);
  GenerateFunctionTailCall(masm, argc, &slow_call, a0);
  __ bind(&check_number_dictionary);
  __ LoadRoot(at, Heap::kHashTableMapRootIndex);
  __ Branch(&slow_load, ne, a3, Operand(at));
  __ LoadFromNumberDictionary(&slow_load, t0, a2, a1, a0, a3, t1);
  __ IncrementCounter(counters->keyed_call_generic_smi_dict(), 1, a0, a3);
  __ IncrementCounter(counters->keyed_call_generic_slow_load(), 1, a0, a3);
  __ CallRuntime(Runtime::kKeyedGetProperty, 2);
  __ bind(&check_string);
  GenerateKeyStringCheck(masm, a2, a0, a3, &index_string, &slow_call);
  GenerateKeyedLoadReceiverCheck(
  __ LoadRoot(at, Heap::kHashTableMapRootIndex);
  __ Branch(&lookup_monomorphic_cache, ne, a3, Operand(at));
  GenerateDictionaryLoad(masm, &slow_load, a0, a2, a1, a3, t0);
  __ IncrementCounter(counters->keyed_call_generic_lookup_dict(), 1, a0, a3);
  __ bind(&lookup_monomorphic_cache);
  __ IncrementCounter(counters->keyed_call_generic_lookup_cache(), 1, a0, a3);
  __ IncrementCounter(counters->keyed_call_generic_slow(), 1, a0, a3);
  __ bind(&index_string);
  __ IndexFromHash(a3, a2);
  __ JumpIfSmi(a2, &miss);
  __ IsObjectJSStringType(a2, a0, &miss);

Object* LoadIC_Miss(Arguments args);

  Isolate::Current()->stub_cache()->GenerateProbe(
      masm, flags, a0, a2, a3, t0, t1, t2);

  GenerateStringDictionaryReceiverCheck(masm, a0, a1, a3, t0, &miss);
  GenerateDictionaryLoad(masm, &miss, a1, a2, v0, a3, t0);

  Isolate* isolate = masm->isolate();
  __ IncrementCounter(isolate->counters()->keyed_load_miss(), 1, a3, t0);
  ExternalReference ref = ExternalReference(IC_Utility(kLoadIC_Miss), isolate);
  __ TailCallExternalReference(ref, 2, 1);
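
// For a non-strict arguments object, returns a MemOperand for the slot that
// |key| reaches through the parameter map.  Jumps to |unmapped_case| when
// the map entry is the hole (the argument lives in the backing store) and to
// |slow_case| when the receiver or the key fails its checks.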
static MemOperand GenerateMappedArgumentsLookup(MacroAssembler* masm,
                                                Label* unmapped_case,
  __ JumpIfSmi(object, slow_case);
  __ GetObjectType(object, scratch1, scratch2);
  __ And(scratch1, key, Operand(0x80000001));
  __ Branch(slow_case, ne, scratch1, Operand(zero_reg));
  __ CheckMap(scratch1,
              Heap::kNonStrictArgumentsElementsMapRootIndex,
  __ li(scratch3, Operand(kPointerSize >> 1));
  __ Mul(scratch3, key, scratch3);
  __ Addu(scratch3, scratch3, Operand(kOffset));
  __ Addu(scratch2, scratch1, scratch3);
  __ LoadRoot(scratch3, Heap::kTheHoleValueRootIndex);
  __ Branch(unmapped_case, eq, scratch2, Operand(scratch3));
  __ li(scratch3, Operand(kPointerSize >> 1));
  __ Mul(scratch3, scratch2, scratch3);
  __ Addu(scratch2, scratch1, scratch3);
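
// Companion for the unmapped case: checks that the arguments backing store
// is a FixedArray and that |key| is within its bounds, then returns a
// MemOperand addressing the key'th element; jumps to |slow_case| otherwise.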
static MemOperand GenerateUnmappedArgumentsLookup(MacroAssembler* masm,
                                                  Register parameter_map,
  Register backing_store = parameter_map;
  __ CheckMap(backing_store,
              Heap::kFixedArrayMapRootIndex,
  __ li(scratch, Operand(kPointerSize >> 1));
  __ Mul(scratch, key, scratch);
  __ Addu(scratch, backing_store, scratch);
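
// The GenerateNonStrictArguments handlers for keyed load, store and call all
// resolve the element through the parameter map first and fall back to the
// unmapped backing store; the store variants record the write so the
// incremental marker sees it.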
  GenerateMappedArgumentsLookup(masm, a1, a0, a2, a3, t0, &notin, &slow);
  __ lw(v0, mapped_location);
  GenerateUnmappedArgumentsLookup(masm, a0, a2, a3, &slow);
  __ lw(a2, unmapped_location);
  __ LoadRoot(a3, Heap::kTheHoleValueRootIndex);
  __ Branch(&slow, eq, a2, Operand(a3));

  GenerateMappedArgumentsLookup(masm, a2, a1, a3, t0, t1, &notin, &slow);
  __ sw(a0, mapped_location);
  __ RecordWrite(a3, mapped_location.rm(), t5,
  GenerateUnmappedArgumentsLookup(masm, a1, a3, t0, &slow);
  __ sw(a0, unmapped_location);
  ASSERT_EQ(unmapped_location.offset(), 0);
  __ RecordWrite(a3, unmapped_location.rm(), t5,

  GenerateMappedArgumentsLookup(masm, a1, a2, a3, t0, t1, &notin, &slow);
  __ lw(a1, mapped_location);
  GenerateFunctionTailCall(masm, argc, &slow, a3);
  GenerateUnmappedArgumentsLookup(masm, a2, a3, t0, &slow);
  __ lw(a1, unmapped_location);
  __ LoadRoot(a3, Heap::kTheHoleValueRootIndex);
  __ Branch(&slow, eq, a1, Operand(a3));
  GenerateFunctionTailCall(masm, argc, &slow, a3);
Object* KeyedLoadIC_Miss(Arguments args);

  Isolate* isolate = masm->isolate();
  __ IncrementCounter(isolate->counters()->keyed_load_miss(), 1, a3, t0);
  ExternalReference ref = force_generic
      ? ExternalReference(IC_Utility(kKeyedLoadIC_MissForceGeneric), isolate)
      : ExternalReference(IC_Utility(kKeyedLoadIC_Miss), isolate);
  __ TailCallExternalReference(ref, 2, 1);

  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
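
// KeyedLoadIC::GenerateGeneric: smi keys are served from fast elements or a
// number dictionary; string keys first consult the keyed lookup cache (which
// maps a (map, symbol) pair to a field offset, either in-object or in the
// property array) and then the property dictionary; strings caching an array
// index in their hash are rerouted to the smi path.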
  Label slow, check_string, index_smi, index_string, property_array_property;
  Label probe_dictionary, check_number_dictionary;
  Register receiver = a1;
  Isolate* isolate = masm->isolate();
  __ JumpIfNotSmi(key, &check_string);
  GenerateKeyedLoadReceiverCheck(
  __ CheckFastElements(a2, a3, &check_number_dictionary);
  GenerateFastArrayLoad(
      masm, receiver, key, t0, a3, a2, v0, NULL, &slow);
  __ IncrementCounter(isolate->counters()->keyed_load_generic_smi(), 1, a2, a3);
  __ bind(&check_number_dictionary);
  __ LoadRoot(at, Heap::kHashTableMapRootIndex);
  __ Branch(&slow, ne, a3, Operand(at));
  __ LoadFromNumberDictionary(&slow, t0, a0, v0, a2, a3, t1);
  __ IncrementCounter(isolate->counters()->keyed_load_generic_slow(),
  __ bind(&check_string);
  GenerateKeyStringCheck(masm, key, a2, a3, &index_string, &slow);
  GenerateKeyedLoadReceiverCheck(
  __ LoadRoot(at, Heap::kHashTableMapRootIndex);
  __ Branch(&probe_dictionary, eq, t0, Operand(at));
  __ xor_(a3, a3, at);
  __ And(a3, a3, Operand(mask));
  Label load_in_object_property;
  Label hit_on_nth_entry[kEntriesPerBucket];
  ExternalReference cache_keys =
      ExternalReference::keyed_lookup_cache_keys(isolate);
  __ li(t0, Operand(cache_keys));
  __ addu(t0, t0, at);
  for (int i = 0; i < kEntriesPerBucket - 1; i++) {
    Label try_next_entry;
    __ Branch(&try_next_entry, ne, a2, Operand(t1));
    __ Branch(&hit_on_nth_entry[i], eq, a0, Operand(t1));
    __ bind(&try_next_entry);
  __ lw(t1, MemOperand(t0, kPointerSize * (kEntriesPerBucket - 1) * 2));
  __ Branch(&slow, ne, a2, Operand(t1));
  __ lw(t1, MemOperand(t0, kPointerSize * ((kEntriesPerBucket - 1) * 2 + 1)));
  __ Branch(&slow, ne, a0, Operand(t1));
  ExternalReference cache_field_offsets =
      ExternalReference::keyed_lookup_cache_field_offsets(isolate);
  for (int i = kEntriesPerBucket - 1; i >= 0; i--) {
    __ bind(&hit_on_nth_entry[i]);
    __ li(t0, Operand(cache_field_offsets));
    __ addu(at, t0, at);
    __ Subu(t1, t1, t2);
    __ Branch(&property_array_property, ge, t1, Operand(zero_reg));
    __ Branch(&load_in_object_property);
  __ bind(&load_in_object_property);
  __ addu(t2, t2, t1);
  __ Subu(a1, a1, Operand(kHeapObjectTag));
  __ addu(at, a1, at);
  __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(),
  __ bind(&property_array_property);
  __ Addu(t0, t0, a1);
  __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(),
  __ bind(&probe_dictionary);
  GenerateGlobalInstanceTypeCheck(masm, a2, &slow);
  GenerateDictionaryLoad(masm, &slow, a3, a0, v0, a2, t0);
  __ IncrementCounter(isolate->counters()->keyed_load_generic_symbol(),
  __ bind(&index_string);
  __ IndexFromHash(a3, key);
  __ Branch(&index_smi);
  Register receiver = a1;
  Register index = a0;
  Register scratch = a3;
  Register result = v0;
  StringCharAtGenerator char_at_generator(receiver,
  char_at_generator.GenerateFast(masm);
  StubRuntimeCallHelper call_helper;
  char_at_generator.GenerateSlow(masm, call_helper);

  __ Push(a2, a1, a0);
  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
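
// Shared tail of KeyedStoreIC::GenerateGeneric: performs the store into fast
// smi-only, fast object or fast double elements, emits a write barrier for
// heap-object values, and transitions the receiver's elements kind when the
// incoming value does not fit the current kind.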
static void KeyedStoreGenerateGenericHelper(
    MacroAssembler* masm,
    Register receiver_map,
    Register elements_map,
    Register elements) {
  Label transition_smi_elements;
  Label finish_object_store, non_double_value, transition_double_elements;
  Label fast_double_without_map_check;
  __ bind(fast_object);
  Register scratch_value = t0;
  Register address = t1;
  __ Branch(fast_double, ne, elements_map,
            Operand(masm->isolate()->factory()->fixed_array_map()));
  Label non_smi_value;
  __ JumpIfNotSmi(value, &non_smi_value);
  __ Addu(address, address, scratch_value);
  __ bind(&non_smi_value);
  __ CheckFastObjectElements(receiver_map, scratch_value,
                             &transition_smi_elements);
  __ bind(&finish_object_store);
  __ Addu(address, address, scratch_value);
  __ mov(scratch_value, value);
  __ RecordWrite(elements,
  __ bind(fast_double);
  __ LoadRoot(at, Heap::kFixedDoubleArrayMapRootIndex);
  __ Branch(slow, ne, elements_map, Operand(at));
  __ bind(&fast_double_without_map_check);
  __ StoreNumberToDoubleElements(value,
                                 &transition_double_elements);
  __ bind(&transition_smi_elements);
  __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
  __ Branch(&non_double_value, ne, t0, Operand(at));
  ASSERT(receiver_map.is(a3));
  __ jmp(&fast_double_without_map_check);
  __ bind(&non_double_value);
  ASSERT(receiver_map.is(a3));
  __ jmp(&finish_object_store);
  __ bind(&transition_double_elements);
  ASSERT(receiver_map.is(a3));
  __ jmp(&finish_object_store);
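
// KeyedStoreIC::GenerateGeneric: fast path for smi keys on fast-elements
// receivers, with separate grow paths used when the key is just past the end
// of a JSArray; anything else falls through to the slow (runtime) path.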
  Label slow, fast_object, fast_object_grow;
  Label fast_double, fast_double_grow;
  Label array, extra, check_if_double_array;
  Register value = a0;
  Register receiver = a2;
  Register receiver_map = a3;
  Register elements_map = t2;
  Register elements = t3;
  __ JumpIfNotSmi(key, &slow);
  __ JumpIfSmi(receiver, &slow);
  __ Branch(&slow, ne, t0, Operand(zero_reg));
  __ Branch(&fast_object, lo, key, Operand(t0));
  __ Branch(&slow, ne, key, Operand(t0));
  __ Branch(&slow, hs, key, Operand(t0));
            &check_if_double_array, ne, elements_map,
            Heap::kFixedArrayMapRootIndex);
  __ jmp(&fast_object_grow);
  __ bind(&check_if_double_array);
  __ Branch(&slow, ne, elements_map, Heap::kFixedDoubleArrayMapRootIndex);
  __ jmp(&fast_double_grow);
  __ Branch(&extra, hs, key, Operand(t0));
  KeyedStoreGenerateGenericHelper(masm, &fast_object, &fast_double,
                                  value, key, receiver, receiver_map,
                                  elements_map, elements);
  KeyedStoreGenerateGenericHelper(masm, &fast_object_grow, &fast_double_grow,
                                  value, key, receiver, receiver_map,
                                  elements_map, elements);
  __ JumpIfSmi(a1, &slow);
  __ Branch(&slow, ne, t0, Operand(zero_reg));
  __ TailCallExternalReference(ExternalReference(
      IC_Utility(kKeyedLoadPropertyWithInterceptor), masm->isolate()), 2, 1);

  __ Push(a2, a1, a0);
  ExternalReference ref = force_generic
      ? ExternalReference(IC_Utility(kKeyedStoreIC_MissForceGeneric),
      : ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);

  __ Push(a2, a1, a0);
  ExternalReference ref =
      ExternalReference(IC_Utility(kKeyedStoreIC_Slow), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
  if (!FLAG_trace_elements_transitions) {
  __ TailCallRuntime(Runtime::kTransitionElementsSmiToDouble, 1, 1);

    MacroAssembler* masm) {
  if (!FLAG_trace_elements_transitions) {
  __ TailCallRuntime(Runtime::kTransitionElementsDoubleToObject, 1, 1);

  Isolate::Current()->stub_cache()->GenerateProbe(
      masm, flags, a1, a2, a3, t0, t1, t2);

  __ Push(a1, a2, a0);
  ExternalReference ref = ExternalReference(IC_Utility(kStoreIC_Miss),
  __ TailCallExternalReference(ref, 3, 1);
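
// StoreIC::GenerateArrayLength: writing the length of a JSArray stays on this
// fast path only when the receiver really is a JSArray whose elements are not
// a dictionary and the new length is a smi; otherwise the store misses to the
// runtime.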
  Register receiver = a1;
  Register value = a0;
  Register scratch = a3;
  __ JumpIfSmi(receiver, &miss);
  __ GetObjectType(receiver, scratch, scratch);
  __ GetObjectType(scratch, scratch, scratch);
  __ LoadRoot(at, Heap::kHashTableMapRootIndex);
  __ Branch(&miss, eq, scratch, Operand(at));
  __ JumpIfNotSmi(value, &miss);
  __ Push(receiver, value);
  ExternalReference ref = ExternalReference(IC_Utility(kStoreIC_ArrayLength),
  __ TailCallExternalReference(ref, 2, 1);

  GenerateStringDictionaryReceiverCheck(masm, a1, a3, t0, t1, &miss);
  GenerateDictionaryStore(masm, &miss, a3, a2, a0, t0, t1);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->store_normal_hit(), 1, t0, t1);
  __ IncrementCounter(counters->store_normal_miss(), 1, t0, t1);

  __ Push(a1, a2, a0);
  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
  case Token::EQ_STRICT:

  Handle<Code> rewritten;
  State previous_state = GetState();
  State state = TargetState(previous_state, false, x, y);
  rewritten = stub.GetCode();
  ICCompareStub stub(op_, state);
  stub.set_known_map(Handle<Map>(Handle<JSObject>::cast(x)->map()));
  rewritten = stub.GetCode();
  if (FLAG_trace_ic) {
    PrintF("[CompareIC (%s->%s)#%s]\n",
  Address andi_instruction_address =
  if (FLAG_trace_ic) {
    PrintF("[ patching ic at %p, andi=%p, delta=%d\n",
           address, andi_instruction_address, delta);
  Instr branch_instr =
  CodePatcher patcher(patch_address, 2);
  patcher.masm()->andi(at, reg, 0);
  patcher.ChangeBranchCondition(ne);
  patcher.ChangeBranchCondition(eq);

#endif  // V8_TARGET_ARCH_MIPS