#if defined(V8_TARGET_ARCH_X64)

#define __ ACCESS_MASM(masm)
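
// Jumps to |global_object| when the instance type in |type| is one of the
// global object types; global receivers need special treatment in ICs.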
static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm,
                                            Register type,
                                            Label* global_object) {
  // ...
}
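
// Falls through only when the receiver is a regular, non-global JS object
// with dictionary-mode (slow) properties and no interceptors.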
static void GenerateStringDictionaryReceiverCheck(MacroAssembler* masm,
                                                  Register receiver,
                                                  Register r0,
                                                  Register r1,
                                                  Label* miss) {
  __ JumpIfSmi(receiver, miss);
  // ...
  GenerateGlobalInstanceTypeCheck(masm, r0, miss);
  // ...
  __ CompareRoot(FieldOperand(r0, HeapObject::kMapOffset),
                 Heap::kHashTableMapRootIndex);
  __ j(not_equal, miss);
}
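
// Probes the string dictionary in |elements| for |name| and, on a hit with
// a normal property, loads its value into |result|; jumps to |miss|
// otherwise.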
static void GenerateDictionaryLoad(MacroAssembler* masm,
                                   Label* miss,
                                   Register elements,
                                   Register name,
                                   Register r0,
                                   Register r1,
                                   Register result) {
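  // A dictionary entry is a (key, value, details) triple laid out after the
  // StringDictionary header, so the value lives one pointer past the key
  // and the details word two pointers past it.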
  const int kElementsStartOffset =
      StringDictionary::kHeaderSize +
      StringDictionary::kElementsStartIndex * kPointerSize;
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
  // ...
  const int kValueOffset = kElementsStartOffset + kPointerSize;
  // ...
}
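
// Probes the string dictionary for |name| and stores |value| when the
// property is a writable normal field; read-only or non-normal properties
// jump to |miss|.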
static void GenerateDictionaryStore(MacroAssembler* masm,
                                    Label* miss,
                                    Register elements,
                                    Register name,
                                    Register value,
                                    Register scratch0,
                                    Register scratch1) {
  const int kElementsStartOffset =
      StringDictionary::kHeaderSize +
      StringDictionary::kElementsStartIndex * kPointerSize;
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
  const int kTypeAndReadOnlyMask =
      (PropertyDetails::TypeField::kMask |
       PropertyDetails::AttributesField::encode(READ_ONLY)) << kSmiTagSize;
  __ Test(Operand(elements, scratch1, times_pointer_size,
                  kDetailsOffset - kHeapObjectTag),
          Smi::FromInt(kTypeAndReadOnlyMask));
  __ j(not_zero, miss);

  const int kValueOffset = kElementsStartOffset + kPointerSize;
  __ lea(scratch1, Operand(elements, scratch1, times_pointer_size,
                           kValueOffset - kHeapObjectTag));
  __ movq(Operand(scratch1, 0), value);

  __ movq(scratch0, value);
  // ... (record-write barrier for the stored value)
}
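
// LoadIC entry points for 'length' and 'prototype' (rax: receiver,
// rcx: name): each defers to a StubCompiler helper and falls back to the
// generic LOAD_IC miss stub.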
  StubCompiler::GenerateLoadArrayLength(masm, rax, rdx, &miss);
  __ bind(&miss);
  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
  StubCompiler::GenerateLoadStringLength(masm, rax, rdx, rbx, &miss,
                                         support_wrappers);
  __ bind(&miss);
  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
  StubCompiler::GenerateLoadFunctionPrototype(masm, rax, rdx, rbx, &miss);
  __ bind(&miss);
  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
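
// Checks that the receiver is a JS object suitable for fast keyed access:
// not a smi, and with neither the access-check bit nor the given
// interceptor bit set in its map's bit field.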
static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm,
                                           Register receiver,
                                           Register map,
                                           int interceptor_bit,
                                           Label* slow) {
  __ JumpIfSmi(receiver, slow);
  // ...
  __ testb(FieldOperand(map, Map::kBitFieldOffset),
           Immediate((1 << Map::kIsAccessCheckNeeded) |
                     (1 << interceptor_bit)));
  __ j(not_zero, slow);
}
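
// Loads an element from a receiver with fast elements; jumps to
// |out_of_range| for out-of-bounds or hole loads and to |not_fast_array|
// (when non-NULL) if the backing store is not a FixedArray.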
static void GenerateFastArrayLoad(MacroAssembler* masm,
                                  Register receiver,
                                  Register key,
                                  Register elements,
                                  Register scratch,
                                  Register result,
                                  Label* not_fast_array,
                                  Label* out_of_range) {
  // ...
  if (not_fast_array != NULL) {
    __ CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                   Heap::kFixedArrayMapRootIndex);
    __ j(not_equal, not_fast_array);
  } else {
    __ AssertFastElements(elements);
  }
  // ...
  __ CompareRoot(scratch, Heap::kTheHoleValueRootIndex);
  __ j(equal, out_of_range);
  if (!result.is(scratch)) {
    __ movq(result, scratch);
  }
}
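
// Classifies a string key: jumps to |index_string| when the string's hash
// field caches an array index, to |not_symbol| for non-symbol strings, and
// falls through for symbols.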
static void GenerateKeyStringCheck(MacroAssembler* masm,
                                   Register key,
                                   Register map,
                                   Register hash,
                                   Label* index_string,
                                   Label* not_symbol) {
  // ...
}
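
// In KeyedLoadIC::GenerateGeneric (rax: key, rdx: receiver): dispatches on
// smi keys (fast elements or number dictionary) and string keys (keyed
// lookup cache or property dictionary).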
  Label slow, check_string, index_smi, index_string, property_array_property;
  Label probe_dictionary, check_number_dictionary;

  __ JumpIfNotSmi(rax, &check_string);
  __ bind(&index_smi);
  GenerateKeyedLoadReceiverCheck(
      masm, rdx, rcx, Map::kHasIndexedInterceptor, &slow);
  __ CheckFastElements(rcx, &check_number_dictionary);
  GenerateFastArrayLoad(masm,
                        rdx,
                        rax,
                        rcx,
                        rbx,
                        rax,
                        NULL,
                        &slow);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->keyed_load_generic_smi(), 1);
  __ ret(0);

  __ bind(&check_number_dictionary);
  __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
                 Heap::kHashTableMapRootIndex);
  __ j(not_equal, &slow);
  // ...
  __ bind(&slow);
  __ IncrementCounter(counters->keyed_load_generic_slow(), 1);
  GenerateRuntimeGetProperty(masm);

  __ bind(&check_string);
  GenerateKeyStringCheck(masm, rax, rcx, rbx, &index_string, &slow);

  GenerateKeyedLoadReceiverCheck(
      masm, rdx, rcx, Map::kHasNamedInterceptor, &slow);
  // ...
  __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
                 Heap::kHashTableMapRootIndex);
  __ j(equal, &probe_dictionary);
  // ...
  int mask = (KeyedLookupCache::kCapacityMask >>
              KeyedLookupCache::kMapHashShift);
  __ and_(rcx, Immediate(mask));
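
  // Probe the keyed lookup cache: each bucket holds kEntriesPerBucket
  // (map, symbol) pairs with a parallel array of field offsets, so a hit
  // yields the property's field index without a dictionary lookup.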
  Label load_in_object_property;
  static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket;
  Label hit_on_nth_entry[kEntriesPerBucket];
  ExternalReference cache_keys
      = ExternalReference::keyed_lookup_cache_keys(masm->isolate());

  for (int i = 0; i < kEntriesPerBucket - 1; i++) {
    Label try_next_entry;
    // ...
    int off = kPointerSize * i * 2;
    // ...
    __ j(equal, &hit_on_nth_entry[i]);
    __ bind(&try_next_entry);
  }

  int off = kPointerSize * (kEntriesPerBucket - 1) * 2;
  ExternalReference cache_field_offsets
      = ExternalReference::keyed_lookup_cache_field_offsets(masm->isolate());

  for (int i = kEntriesPerBucket - 1; i >= 0; i--) {
    __ bind(&hit_on_nth_entry[i]);
    if (i != 0) {
      __ addl(rcx, Immediate(i));
    }
    // ...
    __ jmp(&load_in_object_property);
  }

  __ bind(&load_in_object_property);
  // ...
  __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
  __ ret(0);

  __ bind(&property_array_property);
  // ...
  __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
  __ ret(0);

  __ bind(&probe_dictionary);
  // ...
  GenerateGlobalInstanceTypeCheck(masm, rcx, &slow);
  // ...
  __ IncrementCounter(counters->keyed_load_generic_symbol(), 1);
  __ ret(0);

  __ bind(&index_string);
  // ...
}
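
// In KeyedLoadIC::GenerateString (rdx: receiver, rax: index): loads a
// character via StringCharAtGenerator, using its slow path for the
// uncommon cases.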
  Label miss;

  Register receiver = rdx;
  Register index = rax;
  Register scratch = rcx;
  Register result = rax;
  StringCharAtGenerator char_at_generator(receiver,
                                          index,
                                          scratch,
                                          result,
                                          &miss,  // When not a string.
                                          &miss,  // When not a number.
                                          &miss,  // When index out of range.
                                          STRING_INDEX_IS_ARRAY_INDEX);
  char_at_generator.GenerateFast(masm);
  __ ret(0);

  StubRuntimeCallHelper call_helper;
  char_at_generator.GenerateSlow(masm, call_helper);
  // ...
}
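
// In KeyedLoadIC::GenerateIndexedInterceptor (rdx: receiver, rax: key):
// receivers with an indexed interceptor defer to the runtime once the key
// is known to be a non-negative smi.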
  __ JumpIfSmi(rdx, &slow);
  // ...
  __ JumpUnlessNonNegativeSmi(rax, &slow);
  // ...
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(kKeyedLoadPropertyWithInterceptor),
                        masm->isolate()),
      2,
      1);
  // ...
}
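
// Shared tail of KeyedStoreIC::GenerateGeneric: stores into fast smi,
// object, or double elements, transitioning the elements kind when the
// incoming value requires it.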
static void KeyedStoreGenerateGenericHelper(
    MacroAssembler* masm,
    Label* fast_object,
    Label* fast_double,
    Label* slow,
    KeyedStoreCheckMap check_map,
    KeyedStoreIncrementLength increment_length) {
  Label transition_smi_elements;
  Label finish_object_store, non_double_value, transition_double_elements;
  Label fast_double_without_map_check;

  __ bind(fast_object);
  // ...
  __ CompareRoot(rdi, Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, fast_double);
  // ...
  __ JumpIfNotSmi(rax, &non_smi_value);
  // ...
  __ bind(&non_smi_value);
  __ CheckFastObjectElements(r9, &transition_smi_elements);

  __ bind(&finish_object_store);
  // ...
  __ bind(fast_double);
  // ...
  __ CompareRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex);
  __ j(not_equal, slow);
  // ...
  __ bind(&fast_double_without_map_check);
  __ StoreNumberToDoubleElements(rax, rbx, rcx, xmm0,
                                 &transition_double_elements);
  // ...
  __ bind(&transition_smi_elements);
  // ...
  __ CompareRoot(r9, Heap::kHeapNumberMapRootIndex);
  // ...
  __ jmp(&fast_double_without_map_check);

  __ bind(&non_double_value);
  // ...
  __ jmp(&finish_object_store);

  __ bind(&transition_double_elements);
  // ...
  __ jmp(&finish_object_store);
}
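
// In KeyedStoreIC::GenerateGeneric (rax: value, rcx: key, rdx: receiver):
// dispatches on the receiver's elements kind and bounds, growing the
// backing store when the store is one element past the end of a JSArray.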
  Label slow, slow_with_tagged_index, fast_object, fast_object_grow;
  Label fast_double, fast_double_grow;
  Label array, extra, check_if_double_array;

  __ JumpIfSmi(rdx, &slow_with_tagged_index);
  // ...
  __ JumpIfNotSmi(rcx, &slow_with_tagged_index);
  // ...
  __ bind(&slow_with_tagged_index);
  GenerateRuntimeSetProperty(masm, strict_mode);
  // ...
  __ CompareRoot(rdi, Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &check_if_double_array);
  __ jmp(&fast_object_grow);

  __ bind(&check_if_double_array);
  __ CompareRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex);
  __ j(not_equal, &slow);
  __ jmp(&fast_double_grow);
  // ...
  KeyedStoreGenerateGenericHelper(masm, &fast_object, &fast_double,
                                  &slow, kCheckMap, kDontIncrementLength);
  KeyedStoreGenerateGenericHelper(masm, &fast_object_grow, &fast_double_grow,
                                  &slow, kDontCheckMap, kIncrementLength);
}
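
// In CallICBase::GenerateMonomorphicCacheProbe: probes the megamorphic stub
// cache, then retries for primitive receivers (number, string, boolean)
// using the corresponding global function's prototype as the probe object.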
  Label number, non_number, non_string, boolean, probe, miss;
  // ...
  Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, rdx, rcx, rbx,
                                                  rax);
  // ...
  __ JumpIfSmi(rdx, &number);
  // ...
  __ bind(&number);
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::NUMBER_FUNCTION_INDEX, rdx);
  __ jmp(&probe);

  __ bind(&non_number);
  // ...
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::STRING_FUNCTION_INDEX, rdx);
  __ jmp(&probe);

  __ bind(&non_string);
  __ CompareRoot(rdx, Heap::kTrueValueRootIndex);
  __ j(equal, &boolean);
  __ CompareRoot(rdx, Heap::kFalseValueRootIndex);
  __ j(not_equal, &miss);
  __ bind(&boolean);
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::BOOLEAN_FUNCTION_INDEX, rdx);
  __ bind(&probe);
  Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, rdx, rcx, rbx,
                                                  no_reg);
  __ bind(&miss);
}
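
// Tail-calls the function in rdi after checking that it really is a
// JSFunction; jumps to |miss| otherwise.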
static void GenerateFunctionTailCall(MacroAssembler* masm,
                                     int argc,
                                     Label* miss) {
  __ JumpIfSmi(rdi, miss);
  // ...
  ParameterCount actual(argc);
  __ InvokeFunction(rdi, actual, JUMP_FUNCTION,
                    NullCallWrapper(), CALL_AS_METHOD);
}
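
// In CallICBase::GenerateNormal: looks the callee up in the receiver's
// property dictionary and tail-calls it.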
  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));

  GenerateStringDictionaryReceiverCheck(masm, rdx, rax, rbx, &miss);
  // ...
  GenerateFunctionTailCall(masm, argc, &miss);
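
// In CallICBase::GenerateMiss: counts the miss, calls the IC utility to
// resolve the callee, patches global receivers, and invokes the result.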
  Counters* counters = masm->isolate()->counters();
  if (id == IC::kCallIC_Miss) {
    __ IncrementCounter(counters->call_miss(), 1);
  } else {
    __ IncrementCounter(counters->keyed_call_miss(), 1);
  }
  // ...
  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
  __ LoadAddress(rbx, ExternalReference(IC_Utility(id), masm->isolate()));
  if (id == IC::kCallIC_Miss) {
    Label invoke, global;
    __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
    __ JumpIfSmi(rdx, &invoke);
    // ...
    __ bind(&global);
    // ...
    __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
    __ bind(&invoke);
  }

  CallKind call_kind = CallICBase::Contextual::decode(extra_state)
      ? CALL_AS_FUNCTION
      : CALL_AS_METHOD;
  ParameterCount actual(argc);
  __ InvokeFunction(rdi,
                    actual,
                    JUMP_FUNCTION,
                    NullCallWrapper(),
                    call_kind);
}
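
// In CallIC::GenerateMegamorphic (rcx: name): loads the receiver from the
// stack and probes the monomorphic stub cache.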
  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
  // ...
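
// In KeyedCallIC::GenerateGeneric (rcx: key): resolves the callee with
// keyed-load style dispatch, then tail-calls it.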
  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));

  Label do_call, slow_call, slow_load;
  Label check_number_dictionary, check_string, lookup_monomorphic_cache;
  Label index_smi, index_string;

  __ JumpIfNotSmi(rcx, &check_string);
  __ bind(&index_smi);
  GenerateKeyedLoadReceiverCheck(
      masm, rdx, rax, Map::kHasIndexedInterceptor, &slow_call);

  GenerateFastArrayLoad(
      masm, rdx, rcx, rax, rbx, rdi, &check_number_dictionary, &slow_load);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->keyed_call_generic_smi_fast(), 1);

  __ bind(&do_call);
  GenerateFunctionTailCall(masm, argc, &slow_call);
  __ bind(&check_number_dictionary);
  // ...
  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
                 Heap::kHashTableMapRootIndex);
  __ j(not_equal, &slow_load);
  // ...
  __ IncrementCounter(counters->keyed_call_generic_smi_dict(), 1);
  __ jmp(&do_call);

  __ bind(&slow_load);
  // ...
  __ IncrementCounter(counters->keyed_call_generic_slow_load(), 1);
  // ...
  __ CallRuntime(Runtime::kKeyedGetProperty, 2);
  __ bind(&check_string);
  GenerateKeyStringCheck(masm, rcx, rax, rbx, &index_string, &slow_call);
  // ...
  GenerateKeyedLoadReceiverCheck(
      masm, rdx, rax, Map::kHasNamedInterceptor, &lookup_monomorphic_cache);
  // ...
  __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
                 Heap::kHashTableMapRootIndex);
  __ j(not_equal, &lookup_monomorphic_cache);
  // ...
  __ IncrementCounter(counters->keyed_call_generic_lookup_dict(), 1);
  __ jmp(&do_call);
  __ bind(&lookup_monomorphic_cache);
  __ IncrementCounter(counters->keyed_call_generic_lookup_cache(), 1);
  GenerateMonomorphicCacheProbe(masm,
                                argc,
                                Code::KEYED_CALL_IC,
                                Code::kNoExtraICState);

  __ bind(&slow_call);
  // ...
  __ IncrementCounter(counters->keyed_call_generic_slow(), 1);
  // ...
  __ bind(&index_string);
  // ...
}
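
// In KeyedCallIC::GenerateNormal: dictionary probing requires a symbol key,
// so smi keys go straight to the miss handler.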
  __ JumpIfSmi(rcx, &miss);
  // ...
}
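
// Computes the location of an element of a non-strict 'arguments' object:
// keys inside the mapped range resolve through the parameter map to a
// context slot unless the slot holds the hole, in which case (and for all
// unmapped keys) the element lives in the arguments backing store.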
static Operand GenerateMappedArgumentsLookup(MacroAssembler* masm,
                                             Register object,
                                             Register key,
                                             Register scratch1,
                                             Register scratch2,
                                             Register scratch3,
                                             Label* unmapped_case,
                                             Label* slow_case) {
  Heap* heap = masm->isolate()->heap();

  __ JumpIfSmi(object, slow_case);
  // ...
  Handle<Map> arguments_map(heap->non_strict_arguments_elements_map());
  // ...
  __ cmpq(key, scratch2);
  __ j(greater_equal, unmapped_case);
  // ...
  __ SmiToInteger64(scratch3, key);
  // ...
  __ CompareRoot(scratch2, Heap::kTheHoleValueRootIndex);
  __ j(equal, unmapped_case);
  // ...
  __ SmiToInteger64(scratch3, scratch2);
  // ...
}
static Operand GenerateUnmappedArgumentsLookup(MacroAssembler* masm,
                                               Register key,
                                               Register parameter_map,
                                               Register scratch,
                                               Label* slow_case) {
  const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize;
  Register backing_store = parameter_map;
  __ movq(backing_store, FieldOperand(parameter_map, kBackingStoreOffset));
  Handle<Map> fixed_array_map(masm->isolate()->heap()->fixed_array_map());
  // ...
  __ cmpq(key, scratch);
  __ j(greater_equal, slow_case);
  __ SmiToInteger64(scratch, key);
  return FieldOperand(backing_store, scratch, times_pointer_size,
                      FixedArray::kHeaderSize);
}
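
// In KeyedLoadIC::GenerateNonStrictArguments (rax: key, rdx: receiver):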
  Operand mapped_location =
      GenerateMappedArgumentsLookup(
          masm, rdx, rax, rbx, rcx, rdi, &notin, &slow);
  __ movq(rax, mapped_location);
  __ Ret();
  __ bind(&notin);
  Operand unmapped_location =
      GenerateUnmappedArgumentsLookup(masm, rax, rbx, rcx, &slow);
  __ CompareRoot(unmapped_location, Heap::kTheHoleValueRootIndex);
  __ j(equal, &slow);
  __ movq(rax, unmapped_location);
  __ Ret();
  // ...
}
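
// In KeyedStoreIC::GenerateNonStrictArguments (rax: value, rcx: key,
// rdx: receiver):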
  Operand mapped_location = GenerateMappedArgumentsLookup(
      masm, rdx, rcx, rbx, rdi, r8, &notin, &slow);
  __ movq(mapped_location, rax);
  __ lea(r9, mapped_location);
  // ... (write barrier for the mapped slot)
  __ bind(&notin);
  Operand unmapped_location =
      GenerateUnmappedArgumentsLookup(masm, rcx, rbx, rdi, &slow);
  __ movq(unmapped_location, rax);
  __ lea(r9, unmapped_location);
  // ... (write barrier for the backing-store slot)
}
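
// In KeyedCallIC::GenerateNonStrictArguments (rcx: key):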
  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
  Operand mapped_location = GenerateMappedArgumentsLookup(
      masm, rdx, rcx, rbx, rax, r8, &notin, &slow);
  __ movq(rdi, mapped_location);
  GenerateFunctionTailCall(masm, argc, &slow);
  __ bind(&notin);
  Operand unmapped_location =
      GenerateUnmappedArgumentsLookup(masm, rcx, rbx, rax, &slow);
  __ CompareRoot(unmapped_location, Heap::kTheHoleValueRootIndex);
  __ j(equal, &slow);
  __ movq(rdi, unmapped_location);
  GenerateFunctionTailCall(masm, argc, &slow);
  // ...
}
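
// In LoadIC::GenerateMegamorphic (rax: receiver, rcx: name): probes the
// stub cache and falls through to the LOAD_IC miss stub.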
  Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, rax, rcx, rbx,
                                                  rdx);
  // ...
  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
  GenerateStringDictionaryReceiverCheck(masm, rax, rdx, rbx, &miss);
  // ...
}
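
// In LoadIC::GenerateMiss: counts the miss and tail-calls the kLoadIC_Miss
// runtime entry with the receiver and name.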
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->load_miss(), 1);
  // ...
  ExternalReference ref =
      ExternalReference(IC_Utility(kLoadIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 2, 1);
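
// In KeyedLoadIC::GenerateMiss / GenerateRuntimeGetProperty: the
// force_generic flavor tells the runtime to install the generic stub
// instead of attempting another specialization.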
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->keyed_load_miss(), 1);
  // ...
  ExternalReference ref = force_generic
      ? ExternalReference(IC_Utility(kKeyedLoadIC_MissForceGeneric),
                          masm->isolate())
      : ExternalReference(IC_Utility(kKeyedLoadIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 2, 1);
  // ...
  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
}
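
// In StoreIC::GenerateMegamorphic (rax: value, rcx: name, rdx: receiver):
// probes the stub cache for a store stub specialized to the receiver's map.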
  Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, rdx, rcx, rbx,
                                                  no_reg);
  // ...
  ExternalReference ref =
      ExternalReference(IC_Utility(kStoreIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}
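
// In StoreIC::GenerateArrayLength: stores to a JSArray's 'length' go
// through the runtime, after cheap checks that the receiver is an array
// without dictionary elements and the new length is a smi.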
  Register receiver = rdx;
  Register value = rax;
  Register scratch = rbx;

  __ JumpIfSmi(receiver, &miss);
  // ...
  __ CompareRoot(FieldOperand(scratch, HeapObject::kMapOffset),
                 Heap::kHashTableMapRootIndex);
  __ j(equal, &miss);
  // ...
  __ JumpIfNotSmi(value, &miss);
  // ...
  ExternalReference ref =
      ExternalReference(IC_Utility(kStoreIC_ArrayLength), masm->isolate());
  __ TailCallExternalReference(ref, 2, 1);
}
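
// In StoreIC::GenerateNormal: stores through the receiver's property
// dictionary when the property already exists there.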
  GenerateStringDictionaryReceiverCheck(masm, rdx, rbx, rdi, &miss);

  GenerateDictionaryStore(masm, &miss, rbx, rcx, rax, r8, r9);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->store_normal_hit(), 1);
  __ ret(0);

  __ bind(&miss);
  __ IncrementCounter(counters->store_normal_miss(), 1);
  GenerateMiss(masm);
}
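
// Store stubs that defer to the runtime (StoreIC::GenerateGlobalProxy,
// KeyedStoreIC::GenerateRuntimeSetProperty, GenerateSlow, GenerateMiss):
// each pushes its arguments and tail-calls the matching entry.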
  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
  // ...
  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
  // ...
  ExternalReference ref(IC_Utility(kKeyedStoreIC_Slow), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
  // ...
  ExternalReference ref = force_generic
      ? ExternalReference(IC_Utility(kKeyedStoreIC_MissForceGeneric),
                          masm->isolate())
      : ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}
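
// Elements-transition stubs: when --trace-elements-transitions is off, the
// transition is performed inline by ElementsTransitionGenerator; otherwise
// it goes to the runtime so it can be logged.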
  if (!FLAG_trace_elements_transitions) {
    // ... (inline transition via ElementsTransitionGenerator::GenerateSmiToDouble)
  }
  // ...
  __ TailCallRuntime(Runtime::kTransitionElementsSmiToDouble, 1, 1);
}


void KeyedStoreIC::GenerateTransitionElementsDoubleToObject(
    MacroAssembler* masm) {
  if (!FLAG_trace_elements_transitions) {
    // ... (inline transition via ElementsTransitionGenerator::GenerateDoubleToObject)
  }
  // ...
  __ TailCallRuntime(Runtime::kTransitionElementsDoubleToObject, 1, 1);
}
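
// In CompareIC::ComputeCondition: maps the comparison token to a processor
// condition; EQ and EQ_STRICT both compare for equality.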
    case Token::EQ_STRICT:
    case Token::EQ:
      return equal;
    // ...
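
// The inlined smi check: a one-byte 'test al' instruction immediately after
// the IC call encodes the distance to the patchable jump of the inlined
// map/smi check.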
static bool HasInlinedSmiCode(Address address) {
  Address test_instruction_address =
      address + Assembler::kCallTargetAddressOffset;
  return *test_instruction_address == Assembler::kTestAlByte;
}
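
// CompareIC::UpdateCaches picks a new compare stub from the observed
// operand types and installs it as the call target.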
  Handle<Code> rewritten;
  State previous_state = GetState();

  State state = TargetState(previous_state, HasInlinedSmiCode(address()), x, y);
  if (state == GENERIC) {
    // ...
    rewritten = stub.GetCode();
  } else {
    ICCompareStub stub(op_, state);
    if (state == KNOWN_OBJECTS) {
      stub.set_known_map(Handle<Map>(Handle<JSObject>::cast(x)->map()));
    }
    rewritten = stub.GetCode();
  }
  set_target(*rewritten);
  if (FLAG_trace_ic) {
    PrintF("[CompareIC (%s->%s)#%s]\n",
           GetStateName(previous_state),
           GetStateName(state),
           Token::Name(op_));
  }
  // ...
}
  Address test_instruction_address =
      address + Assembler::kCallTargetAddressOffset;
  if (*test_instruction_address != Assembler::kTestAlByte) {
    ASSERT(*test_instruction_address == Assembler::kNopByte);
    return;
  }

  Address delta_address = test_instruction_address + 1;
  int8_t delta = *reinterpret_cast<int8_t*>(delta_address);
  if (FLAG_trace_ic) {
    PrintF("[ patching ic at %p, test=%p, delta=%d\n",
           address, test_instruction_address, delta);
  }
  // ...
  Address jmp_address = test_instruction_address - delta;
  // ...
}
#endif  // V8_TARGET_ARCH_X64