#if defined(V8_TARGET_ARCH_ARM)

#define __ ACCESS_MASM(masm)
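
// The IC stubs below are emitted through the MacroAssembler passed in as
// |masm|; ACCESS_MASM lets each generated instruction be written as
// "__ instr(...)" rather than "masm->instr(...)".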

// Generated code falls through if the receiver is a regular, non-global
// JS object.
static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm,
                                            Register type,
                                            Label* global_object) {
  // Jump to |global_object| if |type| is any of the global object
  // instance types.
  __ cmp(type, Operand(JS_GLOBAL_OBJECT_TYPE));
  __ b(eq, global_object);
  __ cmp(type, Operand(JS_BUILTINS_OBJECT_TYPE));
  __ b(eq, global_object);
  __ cmp(type, Operand(JS_GLOBAL_PROXY_TYPE));
  __ b(eq, global_object);
}
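
// Helper for the *IC::GenerateNormal stubs below: verify that the receiver
// is a plain JS object whose named properties live in a string dictionary,
// and leave that dictionary in |elements|. Anything else (smi, global
// object, interceptors, access checks, fast-mode properties) jumps to
// |miss|.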
static void GenerateStringDictionaryReceiverCheck(MacroAssembler* masm,
                                                  Register receiver,
                                                  Register elements,
                                                  Register t0,
                                                  Register t1,
                                                  Label* miss) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss);

  // Global objects need a different (per-cell) lookup; bail out to miss.
  GenerateGlobalInstanceTypeCheck(masm, t1, miss);

  // Check that the properties array is a dictionary (hash table).
  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
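
// Probe the string dictionary for |name| and, on a hit, load the property
// value into |result|. Dictionary entries are (key, value, details)
// triples starting at kElementsStartOffset; the smi-tagged details word
// must describe a plain data property, otherwise we jump to |miss|.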
static void GenerateDictionaryLoad(MacroAssembler* masm,
                                   Label* miss,
                                   Register elements,
                                   Register name,
                                   Register result,
                                   Register scratch1,
                                   Register scratch2) {
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
  // Reject anything but a plain data property (e.g. callbacks).
  __ tst(scratch1, Operand(PropertyDetails::TypeField::kMask << kSmiTagSize));
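
// Counterpart of GenerateDictionaryLoad for stores: the property must be a
// plain, writable data property (the details word is also checked against
// READ_ONLY), and after the store the write barrier is updated for the
// dictionary slot.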
static void GenerateDictionaryStore(MacroAssembler* masm,
                                    Label* miss,
                                    Register elements,
                                    Register name,
                                    Register value,
                                    Register scratch1,
                                    Register scratch2) {
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
  const int kTypeAndReadOnlyMask =
      (PropertyDetails::TypeField::kMask |
       PropertyDetails::AttributesField::encode(READ_ONLY)) << kSmiTagSize;
  __ tst(scratch1, Operand(kTypeAndReadOnlyMask));

  // Store the value at the masked, scaled index and update the write barrier.
  const int kValueOffset = kElementsStartOffset + kPointerSize;
  __ mov(scratch1, value);
  StubCompiler::GenerateLoadArrayLength(masm, r0, r3, &miss);
  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
  StubCompiler::GenerateLoadStringLength(masm, r0, r1, r3, &miss,
                                         support_wrappers);
  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
  StubCompiler::GenerateLoadFunctionPrototype(masm, r0, r1, r3, &miss);
  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
// Checks the receiver for special cases (value type, slow case bits).
// Falls through for regular JS objects and jumps to |slow| otherwise.
static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm,
                                           Register receiver,
                                           Register map,
                                           Register scratch,
                                           int interceptor_bit,
                                           Label* slow) {
  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, slow);
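
// Loads an indexed element from a fast-elements backing store into
// |result|. Unless the caller passes NULL for |not_fast_array| (in which
// case the elements are only asserted to be fast), the elements map is
// checked against the FixedArray map; the key is bounds-checked against
// the array length, and loading the hole is treated as out of range.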
static void GenerateFastArrayLoad(MacroAssembler* masm,
                                  Register receiver,
                                  Register key,
                                  Register elements,
                                  Register scratch1,
                                  Register scratch2,
                                  Register result,
                                  Label* not_fast_array,
                                  Label* out_of_range) {
  if (not_fast_array != NULL) {
    // Check that the object is in fast mode and writable.
    __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
    __ cmp(scratch1, ip);
    __ b(ne, not_fast_array);
  } else {
    __ AssertFastElements(elements);
  }

  // Check that the key (index) is within bounds.
  __ cmp(key, Operand(scratch1));
  __ b(hs, out_of_range);

  // Fast case: do the load. If the loaded value is the hole, fall back to
  // the runtime so the prototype chain is searched.
  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  __ cmp(scratch2, ip);
  __ b(eq, out_of_range);
  __ mov(result, scratch2);
}
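
// Checks whether the key is a string and, if so, what kind: keys with a
// cached array index are redirected to |index_string| so they can be
// handled as smi indices; strings that are not symbols (not internalized)
// go to |not_symbol|, since only symbols can be used for the dictionary
// and stub-cache probes below.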
static void GenerateKeyStringCheck(MacroAssembler* masm,
                                   Register key,
                                   Register map,
                                   Register hash,
                                   Label* index_string,
                                   Label* not_symbol) {
  // Key is not a string at all.
  __ b(ge, not_symbol);

  // Key is a string with a cached array index; treat it as a smi index.
  __ b(eq, index_string);

  // Key is a string but not a symbol.
  __ b(eq, not_symbol);
}
Object* CallIC_Miss(Arguments args);
  Label number, non_number, non_string, boolean, probe, miss;

  // Probe the stub cache for a monomorphic call stub keyed on the receiver
  // map and the name in r2 (scratch register choice assumed here).
  Isolate::Current()->stub_cache()->GenerateProbe(
      masm, flags, r1, r2, r3, r4, r5);
  // If the stub cache probe failed, the receiver might be a value that is
  // wrapped in a JSValue; retry with the map of the corresponding
  // prototype object.
  // Check for number.
  __ JumpIfSmi(r1, &number);
  __ b(ne, &non_number);
  __ bind(&number);
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::NUMBER_FUNCTION_INDEX, r1);

  // Check for string.
  __ bind(&non_number);
  __ b(hs, &non_string);
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::STRING_FUNCTION_INDEX, r1);

  // Check for boolean (true or false value).
  __ bind(&non_string);
  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::BOOLEAN_FUNCTION_INDEX, r1);

  // Probe the stub cache again, now for the JSValue prototype object.
  Isolate::Current()->stub_cache()->GenerateProbe(
      masm, flags, r1, r2, r3, r4, r5);
static void GenerateFunctionTailCall(MacroAssembler* masm,
                                     int argc,
                                     Label* miss,
                                     Register scratch) {
  // r1: function

  // Check that the value isn't a smi.
  __ JumpIfSmi(r1, miss);

  // Invoke the function.
  ParameterCount actual(argc);
  GenerateStringDictionaryReceiverCheck(masm, r1, r0, r3, r4, &miss);
  GenerateDictionaryLoad(masm, &miss, r0, r2, r1, r3, r4);
  GenerateFunctionTailCall(masm, argc, &miss, r4);
  Isolate* isolate = masm->isolate();

  if (id == IC::kCallIC_Miss) {
    __ IncrementCounter(isolate->counters()->call_miss(), 1, r3, r4);
  } else {
    __ IncrementCounter(isolate->counters()->keyed_call_miss(), 1, r3, r4);
  }

  // Call the runtime entry with two arguments.
  __ mov(r0, Operand(2));
  __ mov(r1, Operand(ExternalReference(IC_Utility(id), isolate)));
  // For a regular CallIC (but not KeyedCallIC), a global object receiver is
  // patched to the global proxy before invoking the function.
  if (id == IC::kCallIC_Miss) {
    Label invoke, global;
    __ JumpIfSmi(r2, &invoke);
  }

  // Invoke the function.
  ParameterCount actual(argc);
  __ InvokeFunction(r1,
                    actual,
                    JUMP_FUNCTION,
                    NullCallWrapper(),
                    call_kind);
  Label do_call, slow_call, slow_load, slow_reload_receiver;
  Label check_number_dictionary, check_string, lookup_monomorphic_cache;
  Label index_smi, index_string;

  // Check that the key is a smi.
  __ JumpIfNotSmi(r2, &check_string);
  GenerateKeyedLoadReceiverCheck(
      masm, r1, r0, r3, Map::kHasIndexedInterceptor, &slow_call);

  GenerateFastArrayLoad(
      masm, r1, r2, r4, r3, r0, r1, &check_number_dictionary, &slow_load);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->keyed_call_generic_smi_fast(), 1, r0, r3);

  __ bind(&do_call);
  // r2: key, r1: function loaded from the fast elements.
  GenerateFunctionTailCall(masm, argc, &slow_call, r0);
  __ bind(&check_number_dictionary);
  // Check whether the elements are a number dictionary.
  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
  __ b(ne, &slow_load);
  __ IncrementCounter(counters->keyed_call_generic_smi_dict(), 1, r0, r3);

  __ bind(&slow_load);
  // This branch is taken when calling KeyedCallIC_Miss is neither required
  // nor beneficial.
  __ IncrementCounter(counters->keyed_call_generic_slow_load(), 1, r0, r3);
  __ CallRuntime(Runtime::kKeyedGetProperty, 2);
  __ bind(&check_string);
  GenerateKeyStringCheck(masm, r2, r0, r3, &index_string, &slow_call);

  // The key is known to be a symbol. If the receiver is a regular JS
  // object with slow (dictionary) properties, do a quick inline probe of
  // its dictionary; otherwise do the monomorphic cache probe.
  GenerateKeyedLoadReceiverCheck(
      masm, r1, r0, r3, Map::kHasNamedInterceptor, &lookup_monomorphic_cache);

  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
  __ b(ne, &lookup_monomorphic_cache);

  GenerateDictionaryLoad(masm, &slow_load, r0, r2, r1, r3, r4);
  __ IncrementCounter(counters->keyed_call_generic_lookup_dict(), 1, r0, r3);
  __ bind(&lookup_monomorphic_cache);
  __ IncrementCounter(counters->keyed_call_generic_lookup_cache(), 1, r0, r3);

  __ IncrementCounter(counters->keyed_call_generic_slow(), 1, r0, r3);

  __ bind(&index_string);
  // Check if the name (key) is a string.
  __ JumpIfSmi(r2, &miss);
  __ IsObjectJSStringType(r2, r0, &miss);
Object* LoadIC_Miss(Arguments args);
  // Probe the stub cache for a monomorphic load stub (scratch register
  // choice assumed here).
  Isolate::Current()->stub_cache()->GenerateProbe(
      masm, flags, r0, r2, r3, r4, r5, r6);
  GenerateStringDictionaryReceiverCheck(masm, r0, r1, r3, r4, &miss);
  GenerateDictionaryLoad(masm, &miss, r1, r2, r0, r3, r4);
  Isolate* isolate = masm->isolate();

  __ IncrementCounter(isolate->counters()->load_miss(), 1, r3, r4);

  ExternalReference ref =
      ExternalReference(IC_Utility(kLoadIC_Miss), isolate);
  __ TailCallExternalReference(ref, 2, 1);
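
// Non-strict (classic-mode) arguments objects use a "parameter map" as
// their elements: slot 0 holds the context, slot 1 the backing FixedArray,
// and slot 2+n holds either the context index that formal parameter n is
// aliased to, or the hole if it is not aliased. The two helpers below
// compute a MemOperand for a given key, either into the mapped (context)
// part or into the unmapped backing store.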
static MemOperand GenerateMappedArgumentsLookup(MacroAssembler* masm,
                                                Register object,
                                                Register key,
                                                Register scratch1,
                                                Register scratch2,
                                                Register scratch3,
                                                Label* unmapped_case,
                                                Label* slow_case) {
  Heap* heap = masm->isolate()->heap();

  // Check that the receiver is a JSObject. Because of the map check in the
  // fast path, it is sufficient to reject smis here.
  __ JumpIfSmi(object, slow_case);

  // Check that the key is a positive smi.
  __ tst(key, Operand(0x80000001));

  // The receiver's elements must have the non-strict arguments elements map.
  Handle<Map> arguments_map(heap->non_strict_arguments_elements_map());

  // Check if the element is in the mapped part of the arguments object:
  // keys at or beyond the parameter-map length fall through to the
  // unmapped (backing store) case.
  __ cmp(key, Operand(scratch2));
  __ b(cs, unmapped_case);

  // Load element index and check whether it is the hole.
  const int kOffset =
      FixedArray::kHeaderSize + 2 * kPointerSize - kHeapObjectTag;
  __ mov(scratch3, Operand(kPointerSize >> 1));
  __ mul(scratch3, key, scratch3);
  __ add(scratch3, scratch3, Operand(kOffset));

  __ LoadRoot(scratch3, Heap::kTheHoleValueRootIndex);
  __ cmp(scratch2, scratch3);
  __ b(eq, unmapped_case);

  // Load value from the context slot that the mapped index points at.
  __ mov(scratch3, Operand(kPointerSize >> 1));
  __ mul(scratch3, scratch2, scratch3);
static MemOperand GenerateUnmappedArgumentsLookup(MacroAssembler* masm,
                                                  Register key,
                                                  Register parameter_map,
                                                  Register scratch,
                                                  Label* slow_case) {
  // The element is in the backing store, which is held in the second slot
  // of the parameter map.
  Register backing_store = parameter_map;

  Handle<Map> fixed_array_map(masm->isolate()->heap()->fixed_array_map());
  __ CheckMap(backing_store, scratch, fixed_array_map, slow_case,
              DONT_DO_SMI_CHECK);
  __ cmp(key, Operand(scratch));

  __ mov(scratch, Operand(kPointerSize >> 1));
  __ mul(scratch, key, scratch);
  MemOperand mapped_location =
      GenerateMappedArgumentsLookup(masm, r1, r0, r2, r3, r4, &notin, &slow);
  __ ldr(r0, mapped_location);

  __ bind(&notin);
  // The unmapped lookup expects that the parameter map is in r2.
  MemOperand unmapped_location =
      GenerateUnmappedArgumentsLookup(masm, r0, r2, r3, &slow);
  __ ldr(r2, unmapped_location);
  __ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
  MemOperand mapped_location =
      GenerateMappedArgumentsLookup(masm, r2, r1, r3, r4, r5, &notin, &slow);
  __ str(r0, mapped_location);

  __ bind(&notin);
  // The unmapped lookup expects that the parameter map is in r3.
  MemOperand unmapped_location =
      GenerateUnmappedArgumentsLookup(masm, r1, r3, r4, &slow);
  __ str(r0, unmapped_location);
  MemOperand mapped_location =
      GenerateMappedArgumentsLookup(masm, r1, r2, r3, r4, r5, &notin, &slow);
  __ ldr(r1, mapped_location);
  GenerateFunctionTailCall(masm, argc, &slow, r3);

  __ bind(&notin);
  // The unmapped lookup expects that the parameter map is in r3.
  MemOperand unmapped_location =
      GenerateUnmappedArgumentsLookup(masm, r2, r3, r4, &slow);
  __ ldr(r1, unmapped_location);
  __ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
  GenerateFunctionTailCall(masm, argc, &slow, r3);
Object* KeyedLoadIC_Miss(Arguments args);


  Isolate* isolate = masm->isolate();

  __ IncrementCounter(isolate->counters()->keyed_load_miss(), 1, r3, r4);

  ExternalReference ref = force_generic
      ? ExternalReference(IC_Utility(kKeyedLoadIC_MissForceGeneric), isolate)
      : ExternalReference(IC_Utility(kKeyedLoadIC_Miss), isolate);

  __ TailCallExternalReference(ref, 2, 1);
  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
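
// KeyedLoadIC::GenerateGeneric handles three kinds of keys:
//  - smi keys are served directly from fast elements or a number
//    dictionary;
//  - symbol (internalized string) keys probe the receiver's property
//    dictionary, or the KeyedLookupCache, which maps (receiver map, symbol)
//    pairs to field offsets (kEntriesPerBucket entries per bucket);
//  - other string keys with a cached array index are converted to smis;
//    everything else goes to the runtime.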
  Label slow, check_string, index_smi, index_string, property_array_property;
  Label probe_dictionary, check_number_dictionary;

  Register key = r0;
  Register receiver = r1;

  Isolate* isolate = masm->isolate();

  // Check that the key is a smi.
  __ JumpIfNotSmi(key, &check_string);

  GenerateKeyedLoadReceiverCheck(
      masm, receiver, r2, r3, Map::kHasIndexedInterceptor, &slow);

  // Check the receiver's map to see if it has fast elements.
  __ CheckFastElements(r2, r3, &check_number_dictionary);
  GenerateFastArrayLoad(
      masm, receiver, key, r4, r3, r2, r0, NULL, &slow);
  __ IncrementCounter(isolate->counters()->keyed_load_generic_smi(), 1,
                      r2, r3);
  __ bind(&check_number_dictionary);
  // Check whether the elements are a number dictionary.
  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);

  // Slow case: key and receiver still in r0 and r1.
  __ bind(&slow);
  __ IncrementCounter(isolate->counters()->keyed_load_generic_slow(),
                      1, r2, r3);

  __ bind(&check_string);
  GenerateKeyStringCheck(masm, key, r2, r3, &index_string, &slow);

  GenerateKeyedLoadReceiverCheck(
      masm, receiver, r2, r3, Map::kHasNamedInterceptor, &slow);

  // If the receiver is a fast-case object, check the keyed lookup cache;
  // otherwise probe the dictionary.
  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
  __ b(eq, &probe_dictionary);

  // Compute the keyed lookup cache hash from the map pointer and the
  // string hash.
  __ And(r3, r3, Operand(mask));
  // Load the key (consisting of map and symbol) from the cache and
  // check for a hit.
  Label load_in_object_property;
  static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket;
  Label hit_on_nth_entry[kEntriesPerBucket];
  ExternalReference cache_keys =
      ExternalReference::keyed_lookup_cache_keys(isolate);

  __ mov(r4, Operand(cache_keys));

  for (int i = 0; i < kEntriesPerBucket - 1; i++) {
    Label try_next_entry;
    __ b(ne, &try_next_entry);
    __ b(eq, &hit_on_nth_entry[i]);
    __ bind(&try_next_entry);
  }
  // Get the relative offset of the field from the cache of field offsets.
  ExternalReference cache_field_offsets =
      ExternalReference::keyed_lookup_cache_field_offsets(isolate);

  // Hit on nth entry.
  for (int i = kEntriesPerBucket - 1; i >= 0; i--) {
    __ bind(&hit_on_nth_entry[i]);
    __ mov(r4, Operand(cache_field_offsets));
    __ add(r3, r3, Operand(i));
    __ b(ge, &property_array_property);
    __ jmp(&load_in_object_property);
  }
  // Load in-object property.
  __ bind(&load_in_object_property);
  __ sub(r1, r1, Operand(kHeapObjectTag));  // Index from start of object.
  __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(),
                      1, r2, r3);

  // Load property-array property.
  __ bind(&property_array_property);
  __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(),
                      1, r2, r3);
  // Do a quick inline probe of the receiver's dictionary, if it exists.
  __ bind(&probe_dictionary);
  GenerateGlobalInstanceTypeCheck(masm, r2, &slow);
  // Load the property into r0.
  GenerateDictionaryLoad(masm, &slow, r3, r0, r0, r2, r4);
  __ IncrementCounter(isolate->counters()->keyed_load_generic_symbol(),
                      1, r2, r3);

  __ bind(&index_string);
  __ IndexFromHash(r3, key);
  // Now jump to the place where smi keys are handled.
  Register receiver = r1;
  Register index = r0;
  Register scratch = r3;
  Register result = r0;
  // The three miss labels and the index flag below follow the usual
  // StringCharAtGenerator constructor (argument order assumed).
  StringCharAtGenerator char_at_generator(receiver,
                                          index,
                                          scratch,
                                          result,
                                          &miss,  // When not a string.
                                          &miss,  // When not a number.
                                          &miss,  // When index out of range.
                                          STRING_INDEX_IS_ARRAY_INDEX);
  char_at_generator.GenerateFast(masm);

  StubRuntimeCallHelper call_helper;
  char_at_generator.GenerateSlow(masm, call_helper);
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(r1, &slow);

  // Everything is fine, call the interceptor load runtime function.
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(kKeyedLoadPropertyWithInterceptor),
                        masm->isolate()),
      2,
      1);
  ExternalReference ref = force_generic
      ? ExternalReference(IC_Utility(kKeyedStoreIC_MissForceGeneric),
                          masm->isolate())
      : ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);


  ExternalReference ref =
      ExternalReference(IC_Utility(kKeyedStoreIC_Slow), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
  if (!FLAG_trace_elements_transitions) {
    // Fast path handled by ElementsTransitionGenerator::GenerateSmiToDouble.
  }
  __ TailCallRuntime(Runtime::kTransitionElementsSmiToDouble, 1, 1);


void KeyedStoreIC::GenerateTransitionElementsDoubleToObject(
    MacroAssembler* masm) {
  if (!FLAG_trace_elements_transitions) {
    // Fast path handled by ElementsTransitionGenerator::GenerateDoubleToObject.
  }
  __ TailCallRuntime(Runtime::kTransitionElementsDoubleToObject, 1, 1);


  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
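
// Shared tail of KeyedStoreIC::GenerateGeneric. Depending on the elements
// map it either stores into a FixedArray (with a write barrier for heap
// object values) or into a FixedDoubleArray, and it transitions the
// receiver's elements kind (smi-only -> double or object, double -> object)
// when the incoming value does not fit the current kind.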
static void KeyedStoreGenerateGenericHelper(
    MacroAssembler* masm,
    Label* fast_object,
    Label* fast_double,
    Label* slow,
    KeyedStoreCheckMap check_map,
    KeyedStoreIncrementLength increment_length,
    Register value,
    Register key,
    Register receiver,
    Register receiver_map,
    Register elements_map,
    Register elements) {
  Label transition_smi_elements;
  Label finish_object_store, non_double_value, transition_double_elements;
  Label fast_double_without_map_check;

  // Fast case: the store target is either an object or a double array.
  __ bind(fast_object);
  Register scratch_value = r4;
  Register address = r5;
  __ cmp(elements_map,
         Operand(masm->isolate()->factory()->fixed_array_map()));
  __ b(ne, fast_double);

  // Smi stores don't require further checks.
  Label non_smi_value;
  __ JumpIfNotSmi(value, &non_smi_value);

  __ bind(&non_smi_value);
  // Escape to the elements-kind transition case if necessary.
  __ CheckFastObjectElements(receiver_map, scratch_value,
                             &transition_smi_elements);

  // Fast elements array: store the value to the elements backing store.
  __ bind(&finish_object_store);
  // Update the write barrier for the elements array address.
  __ mov(scratch_value, value);
  __ RecordWrite(elements,
                 address,
                 scratch_value,
                 kLRHasNotBeenSaved,
                 kDontSaveFPRegs,
                 EMIT_REMEMBERED_SET,
                 OMIT_SMI_CHECK);
  __ bind(fast_double);
  __ CompareRoot(elements_map, Heap::kFixedDoubleArrayMapRootIndex);

  __ bind(&fast_double_without_map_check);
  // Scratch register assignment below is assumed.
  __ StoreNumberToDoubleElements(value,
                                 key,
                                 receiver,
                                 elements,
                                 r3,
                                 r4,
                                 r5,
                                 r6,
                                 &transition_double_elements);
  __ bind(&transition_smi_elements);
  // Transition the array appropriately depending on the value type.
  __ CompareRoot(r4, Heap::kHeapNumberMapRootIndex);
  __ b(ne, &non_double_value);

  // Value is a double: transition FAST_SMI_ELEMENTS -> FAST_DOUBLE_ELEMENTS
  // and complete the store.
  __ jmp(&fast_double_without_map_check);

  __ bind(&non_double_value);
  // Value is not a double: transition FAST_SMI_ELEMENTS -> FAST_ELEMENTS.
  __ jmp(&finish_object_store);

  __ bind(&transition_double_elements);
  // Elements are double, but the value is an object that is not a heap
  // number: transition to FAST_ELEMENTS and complete the store.
  __ jmp(&finish_object_store);
}
  Label slow, fast_object, fast_object_grow;
  Label fast_double, fast_double_grow;
  Label array, extra, check_if_double_array;

  // Register usage.
  Register value = r0;
  Register key = r1;
  Register receiver = r2;
  Register receiver_map = r3;
  Register elements_map = r6;
  Register elements = r7;  // Elements array of the receiver.
  // Check that the key is a smi.
  __ JumpIfNotSmi(key, &slow);
  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, &slow);

  // Check array bounds. Both the key and the length of FixedArray are smis.
  __ cmp(key, Operand(ip));
  __ b(lo, &fast_object);

  // Extra capacity case: check if there is extra capacity to perform the
  // store and update the length.
  __ cmp(key, Operand(ip));
  __ cmp(elements_map,
         Operand(masm->isolate()->factory()->fixed_array_map()));
  __ b(ne, &check_if_double_array);
  __ jmp(&fast_object_grow);

  __ bind(&check_if_double_array);
  __ cmp(elements_map,
         Operand(masm->isolate()->factory()->fixed_double_array_map()));
  __ jmp(&fast_double_grow);

  // Array case: check the key against the length in the array.
  __ cmp(key, Operand(ip));
  KeyedStoreGenerateGenericHelper(masm, &fast_object, &fast_double,
                                  &slow, kCheckMap, kDontIncrementLength,
                                  value, key, receiver, receiver_map,
                                  elements_map, elements);
  KeyedStoreGenerateGenericHelper(masm, &fast_object_grow, &fast_double_grow,
                                  &slow, kDontCheckMap, kIncrementLength,
                                  value, key, receiver, receiver_map,
                                  elements_map, elements);
  // Probe the stub cache for a monomorphic store stub (scratch register
  // choice assumed here).
  Isolate::Current()->stub_cache()->GenerateProbe(
      masm, flags, r1, r2, r3, r4, r5);
  ExternalReference ref =
      ExternalReference(IC_Utility(kStoreIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
  Register receiver = r1;
  Register value = r0;
  Register scratch = r3;

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Check that the array has fast properties, otherwise the length
  // property might have been redefined.
  __ CompareRoot(scratch, Heap::kHashTableMapRootIndex);

  // Check that the value is a smi.
  __ JumpIfNotSmi(value, &miss);

  // Prepare tail call to StoreIC_ArrayLength.
  __ Push(receiver, value);

  ExternalReference ref =
      ExternalReference(IC_Utility(kStoreIC_ArrayLength), masm->isolate());
  __ TailCallExternalReference(ref, 2, 1);
  GenerateStringDictionaryReceiverCheck(masm, r1, r3, r4, r5, &miss);

  GenerateDictionaryStore(masm, &miss, r3, r2, r0, r4, r5);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->store_normal_hit(),
                      1, r4, r5);

  __ bind(&miss);
  __ IncrementCounter(counters->store_normal_miss(), 1, r4, r5);
  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);


    case Token::EQ_STRICT:
    case Token::EQ:
      return eq;
  Handle<Code> rewritten;
  State previous_state = GetState();
  State state = TargetState(previous_state, false, x, y);
  if (state == GENERIC) {
    CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS, r1, r0);
    rewritten = stub.GetCode();
  } else {
    ICCompareStub stub(op_, state);
    if (state == KNOWN_OBJECTS) {
      stub.set_known_map(Handle<Map>(Handle<JSObject>::cast(x)->map()));
    }
    rewritten = stub.GetCode();
  }
  set_target(*rewritten);

  if (FLAG_trace_ic) {
    PrintF("[CompareIC (%s->%s)#%s]\n",
           GetStateName(previous_state),
           GetStateName(state),
           Token::Name(op_));
  }
  Address cmp_instruction_address =
      Assembler::return_address_from_call_start(address);

  if (FLAG_trace_ic) {
    PrintF("[ patching ic at %p, cmp=%p, delta=%d\n",
           address, cmp_instruction_address, delta);
  }

  Instr branch_instr =
      Assembler::instr_at(patch_address + Instruction::kInstrSize);

  // Patch the inlined smi check in place (cmp rx, rx <-> tst rx,
  // #kSmiTagMask) and flip the following branch condition accordingly.
  CodePatcher patcher(patch_address, 2);
  patcher.masm()->cmp(reg, reg);

  if (Assembler::GetCondition(branch_instr) == eq) {
    patcher.EmitCondition(ne);
  } else {
    patcher.EmitCondition(eq);
  }
#endif  // V8_TARGET_ARCH_ARM