28 #ifndef V8_X64_CODE_STUBS_X64_H_
29 #define V8_X64_CODE_STUBS_X64_H_
// TranscendentalCacheStub (fragment): stub that computes a transcendental
// function (selected by TranscendentalCache::Type) with a cache lookup.
// NOTE(review): this extract is missing source lines (fused line numbers
// jump 40 -> 49 -> 58); the constructor signature and field declarations
// are not visible here.
40 class TranscendentalCacheStub:
public CodeStub {
// Member-initializer list: stores the cache entry type and how the
// argument is passed to the stub.
49 : type_(type), argument_type_(argument_type) {}
// Minor key is the bitwise OR of the two enums; assumes their bit
// ranges do not overlap -- TODO confirm against the enum definitions.
58 int MinorKey() {
return type_ | argument_type_; }
// StoreBufferOverflowStub (fragment): stub invoked when the GC store
// buffer overflows. NOTE(review): extract is missing source lines; the
// constructor signature and the save_doubles_ declaration are not visible.
63 class StoreBufferOverflowStub:
public CodeStub {
// Records whether caller-saved FP registers must be preserved.
66 : save_doubles_(save_fp) { }
77 Major MajorKey() {
return StoreBufferOverflow; }
// Minor key: 1 when FP registers are saved, 0 otherwise, so the two
// variants get distinct cached code objects.
78 int MinorKey() {
return (save_doubles_ ==
kSaveFPRegs) ? 1 : 0; }
// UnaryOpStub (fragment): IC stub for unary operations (the generator
// names below show sub and bit-not variants), specialized by the operand
// type observed so far (smi / heap number / generic). NOTE(review): this
// extract is missing source lines; constructor signature, field
// declarations and several closing braces are not visible.
89 class UnaryOpStub:
public CodeStub {
96 operand_type_(operand_type) {
// Bit layout of the minor key: mode in bit 0, op token in bits 1-7,
// operand type info in bits 8-10.
108 class ModeBits:
public BitField<UnaryOverwriteMode, 0, 1> {};
109 class OpBits:
public BitField<Token::Value, 1, 7> {};
110 class OperandTypeInfoBits:
public BitField<UnaryOpIC::TypeInfo, 8, 3> {};
112 Major MajorKey() {
return UnaryOp; }
// Minor key packs mode, op and operand type info via the BitFields above.
114 return ModeBits::encode(mode_)
115 | OpBits::encode(op_)
116 | OperandTypeInfoBits::encode(operand_type_);
121 void Generate(MacroAssembler* masm);
// Emits code that transitions this IC to a more general state.
123 void GenerateTypeTransition(MacroAssembler* masm);
// Smi-specialized code paths.
125 void GenerateSmiStub(MacroAssembler* masm);
126 void GenerateSmiStubSub(MacroAssembler* masm);
127 void GenerateSmiStubBitNot(MacroAssembler* masm);
128 void GenerateSmiCodeSub(MacroAssembler* masm,
// Label::Distance defaults let callers use short jumps when targets
// are known to be near.
131 Label::Distance non_smi_near = Label::kFar,
132 Label::Distance slow_near = Label::kFar);
133 void GenerateSmiCodeBitNot(MacroAssembler* masm,
135 Label::Distance non_smi_near);
// HeapNumber-specialized code paths; *Code* variants take a slow-path label.
137 void GenerateHeapNumberStub(MacroAssembler* masm);
138 void GenerateHeapNumberStubSub(MacroAssembler* masm);
139 void GenerateHeapNumberStubBitNot(MacroAssembler* masm);
140 void GenerateHeapNumberCodeSub(MacroAssembler* masm, Label* slow);
141 void GenerateHeapNumberCodeBitNot(MacroAssembler* masm, Label* slow);
// Fully generic fallback paths.
143 void GenerateGenericStub(MacroAssembler* masm);
144 void GenerateGenericStubSub(MacroAssembler* masm);
145 void GenerateGenericStubBitNot(MacroAssembler* masm);
146 void GenerateGenericCodeFallback(MacroAssembler* masm);
148 virtual int GetCodeKind() {
return Code::UNARY_OP_IC; }
// After compilation, record the operand type on the code object so the
// IC system can read the stub's specialization back.
154 virtual void FinishCode(Handle<Code>
code) {
155 code->set_unary_op_type(operand_type_);
// BinaryOpStub (fragment): IC stub for binary operations, specialized by
// the observed operand types (smi / int32 / heap number / oddball /
// string / generic). NOTE(review): this extract is missing source lines;
// some constructor lines, field declarations and closing braces are not
// visible.
160 class BinaryOpStub:
public CodeStub {
// This constructor rebuilds op_ and mode_ from a previously encoded
// minor key, then takes the operand/result type info directly.
174 : op_(OpBits::decode(key)),
175 mode_(ModeBits::decode(key)),
176 operands_type_(operands_type),
177 result_type_(result_type) { }
// Controls whether the smi fast path may allocate a HeapNumber result.
180 enum SmiCodeGenerateHeapNumberResults {
181 ALLOW_HEAPNUMBER_RESULTS,
182 NO_HEAPNUMBER_RESULTS
192 virtual void PrintName(StringStream* stream);
// Bit layout of the minor key: mode in bits 0-1, op token in bits 2-8,
// operand type info in bits 9-11, result type info in bits 12-14.
195 class ModeBits:
public BitField<OverwriteMode, 0, 2> {};
196 class OpBits:
public BitField<Token::Value, 2, 7> {};
197 class OperandTypeInfoBits:
public BitField<BinaryOpIC::TypeInfo, 9, 3> {};
198 class ResultTypeInfoBits:
public BitField<BinaryOpIC::TypeInfo, 12, 3> {};
200 Major MajorKey() {
return BinaryOp; }
// Minor key packs op, mode and both type infos via the BitFields above.
202 return OpBits::encode(op_)
203 | ModeBits::encode(mode_)
204 | OperandTypeInfoBits::encode(operands_type_)
205 | ResultTypeInfoBits::encode(result_type_);
208 void Generate(MacroAssembler* masm);
209 void GenerateGeneric(MacroAssembler* masm);
210 void GenerateSmiCode(MacroAssembler* masm,
212 SmiCodeGenerateHeapNumberResults heapnumber_results);
// Floating-point path; jumps to the labels on allocation failure or
// when an operand is not numeric.
213 void GenerateFloatingPointCode(MacroAssembler* masm,
214 Label* allocation_failure,
215 Label* non_numeric_failure);
216 void GenerateStringAddCode(MacroAssembler* masm);
217 void GenerateCallRuntimeCode(MacroAssembler* masm);
218 void GenerateLoadArguments(MacroAssembler* masm);
219 void GenerateReturn(MacroAssembler* masm);
// One generator per IC specialization state.
220 void GenerateUninitializedStub(MacroAssembler* masm);
221 void GenerateSmiStub(MacroAssembler* masm);
222 void GenerateInt32Stub(MacroAssembler* masm);
223 void GenerateHeapNumberStub(MacroAssembler* masm);
224 void GenerateOddballStub(MacroAssembler* masm);
225 void GenerateStringStub(MacroAssembler* masm);
226 void GenerateBothStringStub(MacroAssembler* masm);
227 void GenerateGenericStub(MacroAssembler* masm);
229 void GenerateHeapResultAllocation(MacroAssembler* masm, Label* alloc_failure);
230 void GenerateRegisterArgsPush(MacroAssembler* masm);
231 void GenerateTypeTransition(MacroAssembler* masm);
232 void GenerateTypeTransitionWithSavedArgs(MacroAssembler* masm);
234 virtual int GetCodeKind() {
return Code::BINARY_OP_IC; }
// After compilation, record both type infos on the code object so the
// IC system can read the stub's specialization back.
240 virtual void FinishCode(Handle<Code>
code) {
241 code->set_binary_op_type(operands_type_);
242 code->set_binary_op_result_type(result_type_);
// StringHelper (fragment): static-only collection of string code
// generators (copy/compare/hash helpers per the trailing member index).
// NOTE(review): the extract omits the member declarations (lines 250-297
// of the original are missing).
249 class StringHelper :
public AllStatic {
// Not instantiable: all members are static.
298 DISALLOW_IMPLICIT_CONSTRUCTORS(StringHelper);
// StringAddStub (fragment): stub implementing string concatenation.
// NOTE(review): extract is missing source lines; the flags_ declaration
// and constructor are not visible here.
315 class StringAddStub:
public CodeStub {
320 Major MajorKey() {
return StringAdd; }
// Minor key is the raw StringAddFlags value.
321 int MinorKey() {
return flags_; }
323 void Generate(MacroAssembler* masm);
// Converts a non-string argument before concatenation; rest of the
// parameter list is missing from this extract.
325 void GenerateConvertArgument(MacroAssembler* masm,
// SubStringStub (fragment): stub implementing String.prototype.substring.
// NOTE(review): extract is missing source lines, including MajorKey().
337 class SubStringStub:
public CodeStub {
// Single variant: no minor-key dimensions.
343 int MinorKey() {
return 0; }
345 void Generate(MacroAssembler* masm);
// StringCompareStub (fragment): stub comparing two strings.
// NOTE(review): extract is missing source lines (350-370 of the original),
// including the public static helpers listed in the trailing index.
349 class StringCompareStub:
public CodeStub {
371 virtual Major MajorKey() {
return StringCompare; }
// Single variant: no minor-key dimensions.
372 virtual int MinorKey() {
return 0; }
373 virtual void Generate(MacroAssembler* masm);
// Shared inner loop for comparing ASCII character sequences; jumps to
// chars_not_equal on the first mismatch (distance defaults to far).
375 static void GenerateAsciiCharsCompareLoop(
376 MacroAssembler* masm,
381 Label* chars_not_equal,
382 Label::Distance near_jump = Label::kFar);
// NumberToStringStub (fragment): stub converting a number to a string,
// presumably via the number-string cache (see the
// GenerateLookupNumberStringCache entry in the trailing index) -- confirm
// against the full source. NOTE(review): extract is missing source lines.
386 class NumberToStringStub:
public CodeStub {
408 Major MajorKey() {
return NumberToString; }
// Single variant: no minor-key dimensions.
409 int MinorKey() {
return 0; }
411 void Generate(MacroAssembler* masm);
// StringDictionaryLookupStub (fragment): probes a string dictionary for a
// name, using the registers supplied to the constructor. NOTE(review):
// extract is missing source lines; the constructor signature, LookupMode
// enum and remaining field declarations are not visible here.
415 class StringDictionaryLookupStub:
public CodeStub {
423 : dictionary_(dictionary), result_(result), index_(index), mode_(mode) { }
// Probing strategy: a few probes are inlined at the call site, the rest
// run inside this stub.
445 static const int kInlinedProbes = 4;
446 static const int kTotalProbes = 20;
// Offsets into the dictionary; initializers are missing from this extract.
448 static const int kCapacityOffset =
452 static const int kElementsStartOffset =
456 Major MajorKey() {
return StringDictionaryLookup; }
// Minor key packs the three register codes plus the lookup mode, so each
// register assignment gets its own cached stub.
459 return DictionaryBits::encode(dictionary_.
code()) |
460 ResultBits::encode(result_.
code()) |
461 IndexBits::encode(index_.
code()) |
462 LookupModeBits::encode(mode_);
// Bit layout: dictionary reg in bits 0-3, result reg in 4-7, index reg
// in 8-11, lookup mode in bit 12.
465 class DictionaryBits:
public BitField<int, 0, 4> {};
466 class ResultBits:
public BitField<int, 4, 4> {};
467 class IndexBits:
public BitField<int, 8, 4> {};
468 class LookupModeBits:
public BitField<LookupMode, 12, 1> {};
470 Register dictionary_;
// RecordWriteStub (fragment): write-barrier stub recording a pointer store
// (object/address/value registers) for the GC, with variants per
// remembered-set action and FP-save mode. NOTE(review): this extract is
// missing many source lines (constructor signature, Mode enum, several
// closing braces); comments below describe only what is visible.
477 class RecordWriteStub:
public CodeStub {
487 remembered_set_action_(remembered_set_action),
488 save_fp_regs_mode_(fp_mode),
// Inner helper that picks working registers for the stub body. rcx is
// avoided as a working register throughout (see GetRegThatIsNotRcxOr
// and the explicit rcx checks below) -- presumably because the emitted
// code clobbers rcx; confirm against the full source.
555 class RegisterAllocation {
560 : object_orig_(object),
561 address_orig_(address),
562 scratch0_orig_(scratch0),
565 scratch0_(scratch0) {
// Pick a second scratch distinct from the three incoming registers
// and from rcx.
567 scratch1_ = GetRegThatIsNotRcxOr(object_, address_, scratch0_);
// If any incoming register is rcx, substitute a safe replacement.
568 if (scratch0.
is(
rcx)) {
569 scratch0_ = GetRegThatIsNotRcxOr(object_, address_, scratch1_);
571 if (
object.is(
rcx)) {
572 object_ = GetRegThatIsNotRcxOr(address_, scratch0_, scratch1_);
574 if (address.
is(
rcx)) {
575 address_ = GetRegThatIsNotRcxOr(object_, scratch0_, scratch1_);
// Spill any substituted registers and move the original values into
// the working registers. Mirrored by Restore() below.
580 void Save(MacroAssembler* masm) {
581 ASSERT(!address_orig_.is(object_));
// At most one of object/address was remapped away from rcx.
582 ASSERT(object_.is(object_orig_) || address_.is(address_orig_));
589 if (!scratch0_.is(scratch0_orig_)) masm->push(scratch0_);
// Push rcx only if it is not already one of the saved originals.
590 if (!
rcx.
is(scratch0_orig_) &&
591 !
rcx.
is(object_orig_) &&
592 !
rcx.
is(address_orig_)) {
595 masm->push(scratch1_);
596 if (!address_.is(address_orig_)) {
597 masm->push(address_);
598 masm->movq(address_, address_orig_);
600 if (!object_.is(object_orig_)) {
602 masm->movq(object_, object_orig_);
// Inverse of Save(): copy results back to the original registers and
// pop spilled registers in reverse order.
606 void Restore(MacroAssembler* masm) {
610 if (!object_.is(object_orig_)) {
611 masm->movq(object_orig_, object_);
614 if (!address_.is(address_orig_)) {
615 masm->movq(address_orig_, address_);
618 masm->pop(scratch1_);
619 if (!
rcx.
is(scratch0_orig_) &&
620 !
rcx.
is(object_orig_) &&
621 !
rcx.
is(address_orig_)) {
624 if (!scratch0_.is(scratch0_orig_)) masm->pop(scratch0_);
// Save/restore caller-saved registers around external calls, excluding
// the stub's own working registers (scratch0_, scratch1_, rcx).
633 void SaveCallerSaveRegisters(MacroAssembler* masm,
SaveFPRegsMode mode) {
634 masm->PushCallerSaved(mode, scratch0_, scratch1_,
rcx);
637 inline void RestoreCallerSaveRegisters(MacroAssembler*masm,
639 masm->PopCallerSaved(mode, scratch0_, scratch1_,
rcx);
// Accessors for the (possibly remapped) working registers.
642 inline Register object() {
return object_; }
643 inline Register address() {
return address_; }
644 inline Register scratch0() {
return scratch0_; }
645 inline Register scratch1() {
return scratch1_; }
648 Register object_orig_;
649 Register address_orig_;
650 Register scratch0_orig_;
// Scans candidate registers, skipping rcx and the three exclusions;
// the loop header and return are missing from this extract.
657 Register GetRegThatIsNotRcxOr(Register
r1,
662 if (candidate.is(
rcx))
continue;
663 if (candidate.is(r1))
continue;
664 if (candidate.is(r2))
continue;
665 if (candidate.is(r3))
continue;
// What to emit when the incremental marker need not be informed:
// just return, or fall through to the remembered-set update.
674 enum OnNoNeedToInformIncrementalMarker {
675 kReturnOnNoNeedToInformIncrementalMarker,
676 kUpdateRememberedSetOnNoNeedToInformIncrementalMarker
679 void Generate(MacroAssembler* masm);
680 void GenerateIncremental(MacroAssembler* masm,
Mode mode);
681 void CheckNeedsToInformIncrementalMarker(
682 MacroAssembler* masm,
683 OnNoNeedToInformIncrementalMarker on_no_need,
685 void InformIncrementalMarker(MacroAssembler* masm,
Mode mode);
687 Major MajorKey() {
return RecordWrite; }
// Minor key packs three register codes plus the action and FP-save mode:
// object in bits 0-3, value in 4-7, address in 8-11, remembered-set
// action in bit 12, FP-save mode in bit 13 (BitFields below).
690 return ObjectBits::encode(object_.
code()) |
691 ValueBits::encode(value_.
code()) |
692 AddressBits::encode(address_.
code()) |
693 RememberedSetActionBits::encode(remembered_set_action_) |
694 SaveFPRegsModeBits::encode(save_fp_regs_mode_);
// Registers this stub with incremental marking so it can be patched
// between modes.
697 void Activate(Code* code) {
698 code->GetHeap()->incremental_marking()->ActivateGeneratedStub(code);
701 class ObjectBits:
public BitField<int, 0, 4> {};
702 class ValueBits:
public BitField<int, 4, 4> {};
703 class AddressBits:
public BitField<int, 8, 4> {};
704 class RememberedSetActionBits:
public BitField<RememberedSetAction, 12, 1> {};
705 class SaveFPRegsModeBits:
public BitField<SaveFPRegsMode, 13, 1> {};
713 RegisterAllocation regs_;
719 #endif // V8_X64_CODE_STUBS_X64_H_
static Mode GetMode(Code *stub)
static void Patch(Code *stub, Mode mode)
static const int kElementsStartIndex
RecordWriteStub(Register object, Register value, Register address, RememberedSetAction remembered_set_action, SaveFPRegsMode fp_mode)
StringDictionaryLookupStub(Register dictionary, Register result, Register index, LookupMode mode)
friend class CodeGenerator
virtual bool SometimesSetsUpAFrame()
static void GenerateFixedRegStubsAheadOfTime()
static void GenerateCopyCharacters(MacroAssembler *masm, Register dest, Register src, Register count, Register scratch, bool ascii)
UnaryOpStub(Token::Value op, UnaryOverwriteMode mode, UnaryOpIC::TypeInfo operand_type=UnaryOpIC::UNINITIALIZED)
static const byte kTwoByteNopInstruction
void Generate(MacroAssembler *masm)
static void GenerateHashGetHash(MacroAssembler *masm, Register hash)
virtual bool SometimesSetsUpAFrame()
bool AreAliased(Register r1, Register r2, Register r3, Register r4)
#define ASSERT(condition)
static void GenerateCompareFlatAsciiStrings(MacroAssembler *masm, Register left, Register right, Register scratch1, Register scratch2, Register scratch3, Register scratch4)
static void GenerateOperation(MacroAssembler *masm, TranscendentalCache::Type type)
BinaryOpStub(Token::Value op, OverwriteMode mode)
static State ToState(TypeInfo type_info)
static void GenerateNegativeLookup(MacroAssembler *masm, Label *miss, Label *done, Register receiver, Register properties, Handle< String > name, Register scratch0)
Handle< String > SubString(Handle< String > str, int start, int end, PretenureFlag pretenure)
static void GenerateCopyCharactersREP(MacroAssembler *masm, Register dest, Register src, Register count, Register scratch, bool ascii)
static void GenerateFlatAsciiStringEquals(MacroAssembler *masm, Register left, Register right, Register scratch1, Register scratch2, Register scratch3)
byte * instruction_start()
virtual bool IsPregenerated()
static Register FromAllocationIndex(int index)
static const byte kTwoByteJumpInstruction
static const byte kFiveByteNopInstruction
static State ToState(TypeInfo type_info)
static void GenerateHashAddCharacter(MacroAssembler *masm, Register hash, Register character)
static const int kHeaderSize
bool is(Register reg) const
static void GenerateFixedRegStubsAheadOfTime()
void Generate(MacroAssembler *masm)
static void GeneratePositiveLookup(MacroAssembler *masm, Label *miss, Label *done, Register elements, Register name, Register r0, Register r1)
BinaryOpStub(int key, BinaryOpIC::TypeInfo operands_type, BinaryOpIC::TypeInfo result_type=BinaryOpIC::UNINITIALIZED)
static const int kCapacityIndex
static const int kNumAllocatableRegisters
TranscendentalCacheStub(TranscendentalCache::Type type, ArgumentType argument_type)
static void GenerateLookupNumberStringCache(MacroAssembler *masm, Register object, Register result, Register scratch1, Register scratch2, Register scratch3, bool object_is_smi, Label *not_found)
static void GenerateTwoCharacterSymbolTableProbe(MacroAssembler *masm, Register c1, Register c2, Register scratch1, Register scratch2, Register scratch3, Register scratch4, Register scratch5, Label *not_found)
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra code(assertions) for debugging") DEFINE_bool(code_comments
static void FlushICache(void *start, size_t size)
static const byte kFiveByteJumpInstruction
static const int kTranscendentalTypeBits
virtual bool SometimesSetsUpAFrame()
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 enable use of VFP2 instructions if available enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of MIPS FPU instructions if expose natives in global object expose gc extension number of stack frames to capture disable builtin 
natives files print a stack trace if an assertion failure occurs use random jit cookie to mask large constants trace lazy optimization use adaptive optimizations prepare for turning on always opt minimum length for automatic enable preparsing maximum number of optimization attempts before giving up cache prototype transitions automatically set the debug break flag when debugger commands are in the queue always cause a debug break before aborting maximum length of function source code printed in a stack trace max size of the new max size of the old max size of executable always perform global GCs print one trace line following each garbage collection do not print trace line after scavenger collection print more details following each garbage collection print amount of external allocated memory after each time it is adjusted flush code that we expect not to use again before full gc do incremental marking steps track object counts and memory usage use caching Perform compaction on every full GC Never perform compaction on full GC testing only Compact code space on full incremental collections Default seed for initializing random allows verbose printing trace parsing and preparsing Check icache flushes in ARM and MIPS simulator Stack alingment in bytes in print stack trace when throwing exceptions randomize hashes to avoid predictable hash Fixed seed to use to hash property activate a timer that switches between V8 threads testing_bool_flag float flag Seed used for threading test randomness A filename with extra code to be included in the Print usage including flags
StringAddStub(StringAddFlags flags)
StoreBufferOverflowStub(SaveFPRegsMode save_fp)
virtual bool IsPregenerated()
static void GenerateHashInit(MacroAssembler *masm, Register hash, Register character)
void Generate(MacroAssembler *masm)