#ifndef V8_X64_REGEXP_MACRO_ASSEMBLER_X64_H_
#define V8_X64_REGEXP_MACRO_ASSEMBLER_X64_H_
namespace v8 {
namespace internal {

#ifndef V8_INTERPRETED_REGEXP

class RegExpMacroAssemblerX64: public NativeRegExpMacroAssembler {
 public:
  RegExpMacroAssemblerX64(Mode mode, int registers_to_save, Zone* zone);
  virtual ~RegExpMacroAssemblerX64();
  virtual int stack_limit_slack();
  virtual void AdvanceCurrentPosition(int by);
  virtual void AdvanceRegister(int reg, int by);
  virtual void Bind(Label* label);
  virtual void CheckAtStart(Label* on_at_start);
  virtual void CheckCharacter(uint32_t c, Label* on_equal);
  virtual void CheckCharacterAfterAnd(uint32_t c,
                                      uint32_t mask,
                                      Label* on_equal);
  virtual void CheckCharacterGT(uc16 limit, Label* on_greater);
  virtual void CheckCharacterLT(uc16 limit, Label* on_less);
  virtual void CheckGreedyLoop(Label* on_tos_equals_current_position);
  virtual void CheckNotAtStart(Label* on_not_at_start);
  virtual void CheckNotBackReference(int start_reg, Label* on_no_match);
  virtual void CheckNotBackReferenceIgnoreCase(int start_reg,
                                               Label* on_no_match);
  virtual void CheckNotCharacter(uint32_t c, Label* on_not_equal);
  virtual void CheckNotCharacterAfterAnd(uint32_t c,
                                         uint32_t mask,
                                         Label* on_not_equal);
  virtual void CheckNotCharacterAfterMinusAnd(uc16 c,
                                              uc16 minus,
                                              uc16 mask,
                                              Label* on_not_equal);
  virtual void CheckCharacterInRange(uc16 from,
                                     uc16 to,
                                     Label* on_in_range);
  virtual void CheckCharacterNotInRange(uc16 from,
                                        uc16 to,
                                        Label* on_not_in_range);
  virtual void CheckBitInTable(Handle<ByteArray> table, Label* on_bit_set);

  // Checks whether the given offset from the current position is before
  // the end of the string.
  virtual void CheckPosition(int cp_offset, Label* on_outside_input);
  virtual bool CheckSpecialCharacterClass(uc16 type, Label* on_no_match);
  virtual Handle<HeapObject> GetCode(Handle<String> source);
  virtual void GoTo(Label* label);
  virtual void IfRegisterGE(int reg, int comparand, Label* if_ge);
  virtual void IfRegisterLT(int reg, int comparand, Label* if_lt);
  virtual void IfRegisterEqPos(int reg, Label* if_eq);
  virtual IrregexpImplementation Implementation();
  virtual void LoadCurrentCharacter(int cp_offset,
                                    Label* on_end_of_input,
                                    bool check_bounds = true,
                                    int characters = 1);
  virtual void PopCurrentPosition();
  virtual void PopRegister(int register_index);
  virtual void PushBacktrack(Label* label);
  virtual void PushCurrentPosition();
  virtual void PushRegister(int register_index,
                            StackCheckFlag check_stack_limit);
  virtual void ReadCurrentPositionFromRegister(int reg);
  virtual void ReadStackPointerFromRegister(int reg);
  virtual void SetCurrentPositionFromEnd(int by);
  virtual void SetRegister(int register_index, int to);
  virtual void WriteCurrentPositionToRegister(int reg, int cp_offset);
  virtual void ClearRegisters(int reg_from, int reg_to);
  virtual void WriteStackPointerToRegister(int reg);

  static Result Match(Handle<Code> regexp,
                      Handle<String> subject,
                      int* offsets_vector,
                      int offsets_vector_length,
                      int previous_index,
                      Isolate* isolate);

  static Result Execute(Code* code,
                        String* input,
                        int start_offset,
                        const byte* input_start,
                        const byte* input_end,
                        int* output,
                        bool at_start);

  // Called from RegExp if the stack-guard is triggered.
  // If the code object is relocated, the return address is fixed before
  // returning.
  static int CheckStackGuardState(Address* return_address,
                                  Code* re_code,
                                  Address re_frame);
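
  // Illustrative usage sketch (added; not part of the original header,
  // and the variable names are hypothetical). A compiled Irregexp code
  // object is typically driven through Match(), which flattens the
  // subject string, computes the raw character addresses, and forwards
  // to Execute():
  //
  //   int offsets[2];  // room for one (start, end) capture pair
  //   NativeRegExpMacroAssembler::Result result =
  //       RegExpMacroAssemblerX64::Match(code, subject, offsets,
  //                                      2,   // offsets_vector_length
  //                                      0,   // previous_index
  //                                      isolate);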

 private:
  // Offsets from rbp of function parameters and stored registers.
  static const int kFramePointer = 0;
  // Above the frame pointer - function parameters and return address.
  static const int kReturn_eip = kFramePointer + kPointerSize;
  static const int kFrameAlign = kReturn_eip + kPointerSize;
#ifdef _WIN64
  // Parameters (first four passed in registers, but with room on the
  // caller's stack to spill them).
  static const int kInputString = kFrameAlign;
  static const int kStartIndex = kInputString + kPointerSize;
  static const int kInputStart = kStartIndex + kPointerSize;
  static const int kInputEnd = kInputStart + kPointerSize;
  static const int kRegisterOutput = kInputEnd + kPointerSize;
  static const int kNumOutputRegisters = kRegisterOutput + kPointerSize;
  static const int kStackHighEnd = kNumOutputRegisters + kPointerSize;
  static const int kDirectCall = kStackHighEnd + kPointerSize;
#else
  // In the AMD64 ABI, the first six integer parameters are passed in
  // registers, so the generated code pushes them below the frame pointer.
  static const int kInputString = kFramePointer - kPointerSize;
  static const int kStartIndex = kInputString - kPointerSize;
  static const int kInputStart = kStartIndex - kPointerSize;
  static const int kInputEnd = kInputStart - kPointerSize;
  static const int kRegisterOutput = kInputEnd - kPointerSize;
  static const int kNumOutputRegisters = kRegisterOutput - kPointerSize;
  // The remaining parameters arrive on the caller's stack, above the
  // return address.
  static const int kStackHighEnd = kFrameAlign;
  static const int kDirectCall = kStackHighEnd + kPointerSize;
#endif
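
  // Added illustration: with kPointerSize == 8, the AMD64-ABI constants
  // above resolve to the following rbp-relative layout (derived
  // arithmetic, shown for orientation only):
  //
  //   rbp + 24 : kDirectCall
  //   rbp + 16 : kStackHighEnd (== kFrameAlign)
  //   rbp +  8 : return address (kReturn_eip)
  //   rbp +  0 : saved rbp      (kFramePointer)
  //   rbp -  8 : kInputString
  //   rbp - 16 : kStartIndex
  //   rbp - 24 : kInputStart
  //   rbp - 32 : kInputEnd
  //   rbp - 40 : kRegisterOutput
  //   rbp - 48 : kNumOutputRegisters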

#ifdef _WIN64
  // The Microsoft calling convention has three callee-saved registers
  // (that this code uses), pushed just below the frame pointer.
  static const int kBackup_rsi = kFramePointer - kPointerSize;
  static const int kBackup_rdi = kBackup_rsi - kPointerSize;
  static const int kBackup_rbx = kBackup_rdi - kPointerSize;
  static const int kLastCalleeSaveRegister = kBackup_rbx;
#else
  // The AMD64 ABI has only one callee-saved register that this code
  // uses, pushed below the spilled parameters.
  static const int kBackup_rbx = kNumOutputRegisters - kPointerSize;
  static const int kLastCalleeSaveRegister = kBackup_rbx;
#endif

  static const int kSuccessfulCaptures = kLastCalleeSaveRegister -
      kPointerSize;
  // When adding local variables remember to push space for them in
  // the frame in GetCode.
  static const int kInputStartMinusOne = kSuccessfulCaptures - kPointerSize;

  // First register address. Following registers are below it on the stack.
  static const int kRegisterZero = kInputStartMinusOne - kPointerSize;

  // Initial size of code buffer.
  static const size_t kRegExpCodeSize = 1024;

  // Load a number of characters at the given offset from the current
  // position into the current-character register, without bounds checks.
  void LoadCurrentCharacterUnchecked(int cp_offset, int character_count);

  // Check whether preemption has been requested.
  void CheckPreemption();

  // Check whether we are exceeding the stack limit on the backtrack stack.
  void CheckStackLimit();

  // Generate a call to CheckStackGuardState.
  void CallCheckStackGuardState();
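
  // Added note: these checks sit on the generated code's hot paths; when
  // a limit is hit, the slow path calls out through
  // CallCheckStackGuardState() into the static CheckStackGuardState()
  // above, which handles pending interrupts and fixes up the return
  // address if the code object was moved by GC.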

  // The rbp-relative location of a regexp register.
  Operand register_location(int register_index);
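
  // Added note: given the constants above, register `i` is expected to
  // live at Operand(rbp, kRegisterZero - i * kPointerSize). This sketch
  // of the mapping is derived from the layout, not quoted from the
  // implementation.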

  // The register containing the current character after
  // LoadCurrentCharacter.
  inline Register current_character() { return rdx; }

  // The register containing the backtrack stack top. Provides a meaningful
  // name to the register.
  inline Register backtrack_stackpointer() { return rcx; }

  // The register containing a self-pointer to this code's Code object.
  inline Register code_object_pointer() { return r8; }

  // Byte size of chars in the string to match (decided by the Mode
  // argument).
  inline int char_size() { return static_cast<int>(mode_); }
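
  // Added note: NativeRegExpMacroAssembler::Mode assigns each character
  // width its byte count as the enum value (ASCII == 1, UC16 == 2), which
  // is why the cast above yields bytes per character directly.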

  // Equivalent to a conditional branch to the label, unless the label
  // is NULL, in which case it is a conditional Backtrack.
  void BranchOrBacktrack(Condition condition, Label* to);

  void MarkPositionForCodeRelativeFixup() {
    // Record the assembler's current pc offset for later patching
    // (reconstructed body; assumed to match the truncated original).
    code_relative_fixup_positions_.Add(masm_.pc_offset(), zone());
  }

  void FixupCodeRelativePositions();

  // Call and return internally in the generated code in a way that
  // is GC-safe (i.e., doesn't leave absolute code addresses on the stack).
  inline void SafeCall(Label* to);
  inline void SafeCallTarget(Label* label);
  inline void SafeReturn();
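
  // Added sketch of the idea (assumed, not quoted from the .cc file):
  // SafeCallTarget() converts the absolute return address on the stack
  // into an offset relative to the Code object, and SafeReturn() adds the
  // code object pointer back before returning, so a moving GC never sees
  // an absolute code address on the stack.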

  // Pushes a value on the backtrack stack. Decrements the stack pointer
  // (rcx) by a word size and stores the value there.
  inline void Push(Immediate value);

  // Pushes the Code-object-relative offset of a label on the backtrack
  // stack (i.e., a backtrack target).
  inline void Push(Label* label);
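
  // Added note: the backtrack stack grows through rcx (see
  // backtrack_stackpointer() above) and is separate from the machine
  // stack; storing label targets as Code-object-relative offsets keeps
  // them valid if the code object is moved by GC.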

  Isolate* isolate() const { return masm_.isolate(); }

  MacroAssembler masm_;
  MacroAssembler::NoRootArrayScope no_root_array_scope_;

  // Positions recorded by MarkPositionForCodeRelativeFixup().
  ZoneList<int> code_relative_fixup_positions_;

  // Which mode to generate code for (ASCII or UC16).
  Mode mode_;

  // Number of registers to output at the end (the saved registers
  // are always 0..num_saved_registers_-1).
  int num_saved_registers_;

  // Labels used internally.
  Label success_label_;
  Label backtrack_label_;
  Label check_preempt_label_;
  Label stack_overflow_label_;
};

#endif  // V8_INTERPRETED_REGEXP

} }  // namespace v8::internal

#endif  // V8_X64_REGEXP_MACRO_ASSEMBLER_X64_H_