RegExpMacroAssembler::RegExpMacroAssembler(Zone* zone)
    : slow_safe_compiler_(false),
      global_mode_(NOT_GLOBAL),
      zone_(zone) {
}


bool RegExpMacroAssembler::CanReadUnaligned() {
#ifdef V8_HOST_CAN_READ_UNALIGNED
  return true;
#else
  return false;
#endif
}


#ifndef V8_INTERPRETED_REGEXP  // Avoid unused code, e.g., on ARM.

bool NativeRegExpMacroAssembler::CanReadUnaligned() {
  return FLAG_enable_unaligned_accesses && !slow_safe();
}
const byte* NativeRegExpMacroAssembler::StringCharacterPosition(
    String* subject,
    int start_index) {
  // Not just flat, but ultra flat: the string must not come from a cons tree.
  ASSERT(subject->IsExternalString() || subject->IsSeqString());
  ASSERT(start_index >= 0);
  ASSERT(start_index <= subject->length());
  if (subject->IsAsciiRepresentation()) {
    const byte* address;
    if (StringShape(subject).IsExternal()) {
      const char* data = ExternalAsciiString::cast(subject)->GetChars();
      address = reinterpret_cast<const byte*>(data);
    } else {
      ASSERT(subject->IsSeqAsciiString());
      char* data = SeqAsciiString::cast(subject)->GetChars();
      address = reinterpret_cast<const byte*>(data);
    }
    return address + start_index;
  }
  const uc16* data;
  if (StringShape(subject).IsExternal()) {
    data = ExternalTwoByteString::cast(subject)->GetChars();
  } else {
    ASSERT(subject->IsSeqTwoByteString());
    data = SeqTwoByteString::cast(subject)->GetChars();
  }
  return reinterpret_cast<const byte*>(data + start_index);
}
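// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the original file: the address returned by
// StringCharacterPosition() is simply the base of the character data plus
// start_index scaled by the character width (1 byte for ASCII strings, 2 bytes
// for two-byte strings). The helper below is hypothetical and only models that
// arithmetic.
#include <cstddef>
#include <cstdint>

static inline const uint8_t* SketchCharPosition(const uint8_t* char_data,
                                                bool is_one_byte,
                                                size_t start_index) {
  const size_t char_size_shift = is_one_byte ? 0 : 1;  // log2(bytes per char).
  return char_data + (start_index << char_size_shift);
}
// ---------------------------------------------------------------------------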
NativeRegExpMacroAssembler::Result NativeRegExpMacroAssembler::Match(
    Handle<Code> regexp_code,
    Handle<String> subject,
    int* offsets_vector,
    int offsets_vector_length,
    int previous_index,
    Isolate* isolate) {
  ASSERT(subject->IsFlat());
  ASSERT(previous_index >= 0);
  ASSERT(previous_index <= subject->length());

  String* subject_ptr = *subject;
  // Character offsets into string.
  int start_offset = previous_index;
  int char_length = subject_ptr->length() - start_offset;
  int slice_offset = 0;

  // The string has been flattened, so if it is a cons string it contains the
  // full string in the first part.
  if (StringShape(subject_ptr).IsCons()) {
    ASSERT_EQ(0, ConsString::cast(subject_ptr)->second()->length());
    subject_ptr = ConsString::cast(subject_ptr)->first();
  } else if (StringShape(subject_ptr).IsSliced()) {
    SlicedString* slice = SlicedString::cast(subject_ptr);
    subject_ptr = slice->parent();
    slice_offset = slice->offset();
  }
  // String is now either Sequential or External.
  bool is_ascii = subject_ptr->IsAsciiRepresentation();
  ASSERT(subject_ptr->IsExternalString() || subject_ptr->IsSeqString());
  int char_size_shift = is_ascii ? 0 : 1;

  const byte* input_start =
      StringCharacterPosition(subject_ptr, start_offset + slice_offset);
  int byte_length = char_length << char_size_shift;
  const byte* input_end = input_start + byte_length;
  Result res = Execute(*regexp_code,
                       subject_ptr,
                       start_offset,
                       input_start,
                       input_end,
                       offsets_vector,
                       offsets_vector_length,
                       isolate);
  return res;
}
NativeRegExpMacroAssembler::Result NativeRegExpMacroAssembler::Execute(
    Code* code,
    String* input,  // This needs to be the unpacked (sliced, cons) string.
    int start_offset,
    const byte* input_start,
    const byte* input_end,
    int* output,
    int output_size,
    Isolate* isolate) {
  ASSERT(isolate == Isolate::Current());
  // Ensure that the minimum stack has been allocated, then call into the
  // generated code.
  RegExpStackScope stack_scope(isolate);
  Address stack_base = stack_scope.stack()->stack_base();
  int direct_call = 0;
  int result = CALL_GENERATED_REGEXP_CODE(code->entry(), input, start_offset,
                                          input_start, input_end, output,
                                          output_size, stack_base, direct_call,
                                          isolate);
  ASSERT(result >= RETRY);
  if (result == EXCEPTION && !isolate->has_pending_exception()) {
    // A stack overflow was detected in RegExp code, but the exception has not
    // been created yet.
    isolate->StackOverflow();
  }
  return static_cast<Result>(result);
}
const byte NativeRegExpMacroAssembler::word_character_map[] = {
    0x00u, 0x00u, 0x00u, 0x00u, 0x00u, 0x00u, 0x00u, 0x00u,
    0x00u, 0x00u, 0x00u, 0x00u, 0x00u, 0x00u, 0x00u, 0x00u,
    0x00u, 0x00u, 0x00u, 0x00u, 0x00u, 0x00u, 0x00u, 0x00u,
    0x00u, 0x00u, 0x00u, 0x00u, 0x00u, 0x00u, 0x00u, 0x00u,

    0x00u, 0x00u, 0x00u, 0x00u, 0x00u, 0x00u, 0x00u, 0x00u,
    0x00u, 0x00u, 0x00u, 0x00u, 0x00u, 0x00u, 0x00u, 0x00u,
    0xffu, 0xffu, 0xffu, 0xffu, 0xffu, 0xffu, 0xffu, 0xffu,  // '0' - '7'
    0xffu, 0xffu, 0x00u, 0x00u, 0x00u, 0x00u, 0x00u, 0x00u,  // '8' - '9'

    0x00u, 0xffu, 0xffu, 0xffu, 0xffu, 0xffu, 0xffu, 0xffu,  // 'A' - 'G'
    0xffu, 0xffu, 0xffu, 0xffu, 0xffu, 0xffu, 0xffu, 0xffu,  // 'H' - 'O'
    0xffu, 0xffu, 0xffu, 0xffu, 0xffu, 0xffu, 0xffu, 0xffu,  // 'P' - 'W'
    0xffu, 0xffu, 0xffu, 0x00u, 0x00u, 0x00u, 0x00u, 0xffu,  // 'X' - 'Z', '_'

    0x00u, 0xffu, 0xffu, 0xffu, 0xffu, 0xffu, 0xffu, 0xffu,  // 'a' - 'g'
    0xffu, 0xffu, 0xffu, 0xffu, 0xffu, 0xffu, 0xffu, 0xffu,  // 'h' - 'o'
    0xffu, 0xffu, 0xffu, 0xffu, 0xffu, 0xffu, 0xffu, 0xffu,  // 'p' - 'w'
    0xffu, 0xffu, 0xffu, 0x00u, 0x00u, 0x00u, 0x00u, 0x00u,  // 'x' - 'z'
};
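// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the original file: word_character_map maps
// an ASCII code c in [0, 127] to 0xff if c is a regexp word character
// ([0-9A-Za-z_]) and to 0x00 otherwise, so generated code can test \w and \W
// membership with a single byte load instead of several range checks. The
// helpers below are hypothetical and only verify that reading of the table.
#include <cassert>
#include <cstdint>

static inline bool SketchIsWordChar(const uint8_t* map, uint16_t c) {
  return c < 128 && map[c] != 0;
}

static void SketchCheckWordCharacterMap(const uint8_t* map) {
  for (int c = 0; c < 128; c++) {
    const bool expected = (c >= '0' && c <= '9') ||
                          (c >= 'A' && c <= 'Z') ||
                          (c >= 'a' && c <= 'z') ||
                          c == '_';
    assert(SketchIsWordChar(map, static_cast<uint16_t>(c)) == expected);
  }
}
// ---------------------------------------------------------------------------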
int NativeRegExpMacroAssembler::CaseInsensitiveCompareUC16(
    Address byte_offset1,
    Address byte_offset2,
    size_t byte_length,
    Isolate* isolate) {
  ASSERT(isolate == Isolate::Current());
  unibrow::Mapping<unibrow::Ecma262Canonicalize>* canonicalize =
      isolate->regexp_macro_assembler_canonicalize();
  // This function is not allowed to cause a garbage collection: a GC might
  // move the calling generated code and invalidate the return address on the
  // stack.
  ASSERT(byte_length % 2 == 0);
  uc16* substring1 = reinterpret_cast<uc16*>(byte_offset1);
  uc16* substring2 = reinterpret_cast<uc16*>(byte_offset2);
  size_t length = byte_length >> 1;

  for (size_t i = 0; i < length; i++) {
    unibrow::uchar c1 = substring1[i];
    unibrow::uchar c2 = substring2[i];
    if (c1 != c2) {
      unibrow::uchar s1[1] = { c1 };
      canonicalize->get(c1, '\0', s1);
      if (s1[0] != c2) {
        unibrow::uchar s2[1] = { c2 };
        canonicalize->get(c2, '\0', s2);
        if (s1[0] != s2[0]) {
          return 0;
        }
      }
    }
  }
  return 1;
}
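// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the original file: the comparison above
// treats two UC16 substrings as equal if every pair of code units is either
// identical or maps to the same character under ECMA-262 canonicalization.
// A simplified standalone version, with the canonicalization passed in as a
// callable (all names here are hypothetical):
#include <cstddef>
#include <cstdint>

template <typename Canonicalize>
static int SketchCaseInsensitiveEqual(const uint16_t* s1, const uint16_t* s2,
                                      size_t length, Canonicalize canon) {
  for (size_t i = 0; i < length; i++) {
    const uint16_t c1 = s1[i];
    const uint16_t c2 = s2[i];
    if (c1 != c2 && canon(c1) != canon(c2)) return 0;  // Mismatch.
  }
  return 1;  // Every code unit matched, possibly after canonicalization.
}
// ---------------------------------------------------------------------------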
Address NativeRegExpMacroAssembler::GrowStack(Address stack_pointer,
                                              Address* stack_base,
                                              Isolate* isolate) {
  ASSERT(isolate == Isolate::Current());
  RegExpStack* regexp_stack = isolate->regexp_stack();
  size_t size = regexp_stack->stack_capacity();
  Address old_stack_base = regexp_stack->stack_base();
  ASSERT(old_stack_base == *stack_base);
  ASSERT(stack_pointer <= old_stack_base);
  ASSERT(static_cast<size_t>(old_stack_base - stack_pointer) <= size);
  Address new_stack_base = regexp_stack->EnsureCapacity(size * 2);
  if (new_stack_base == NULL) {
    return NULL;
  }
  *stack_base = new_stack_base;
  intptr_t stack_content_size = old_stack_base - stack_pointer;
  return new_stack_base - stack_content_size;
}

#endif  // V8_INTERPRETED_REGEXP
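// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the original file: GrowStack() doubles the
// backtrack stack (which grows downwards from stack_base) and returns the
// address in the new allocation that corresponds to the old stack pointer,
// i.e. the new base minus the number of bytes currently in use. A simplified
// standalone model of that relocation (all names hypothetical; assumes a
// non-empty initial allocation):
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <vector>

struct SketchBacktrackStack {
  std::vector<uint8_t> storage;                                // Backing buffer.
  uint8_t* base() { return storage.data() + storage.size(); }  // High end.
};

// Doubles the stack capacity and returns the relocated stack pointer.
static uint8_t* SketchGrowStack(SketchBacktrackStack* stack,
                                uint8_t* stack_pointer) {
  uint8_t* old_base = stack->base();
  const ptrdiff_t content_size = old_base - stack_pointer;  // Bytes in use.
  std::vector<uint8_t> larger(stack->storage.size() * 2);
  // Live content sits at the top of the old buffer; keep it at the top of the
  // larger buffer so offsets relative to the base are preserved.
  std::memcpy(larger.data() + larger.size() - content_size,
              stack_pointer, content_size);
  stack->storage.swap(larger);
  return stack->base() - content_size;
}
// ---------------------------------------------------------------------------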
// Referenced declarations (signatures from the V8 headers):
//   Failure* StackOverflow()
//   virtual bool CanReadUnaligned()
//   #define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6, p7, p8)
//   virtual ~NativeRegExpMacroAssembler()
//   Address EnsureCapacity(size_t size)
//   static Result Execute(Code* code, String* input, int start_offset,
//                         const byte* input_start, const byte* input_end,
//                         int* output, int output_size, Isolate* isolate)
//   bool IsAsciiRepresentation()
//   static ExternalTwoByteString* cast(Object* obj)
//   virtual ~RegExpMacroAssembler()
//   RegExpStack* regexp_stack()
//   #define ASSERT(condition)
//   RegExpMacroAssembler(Zone* zone)
//   RegExpStack* stack() const
//   static ExternalAsciiString* cast(Object* obj)
//   int get(uchar c, uchar n, uchar* result)
//   virtual bool CanReadUnaligned()
//   static SlicedString* cast(Object* obj)
//   unibrow::Mapping<unibrow::Ecma262Canonicalize>* regexp_macro_assembler_canonicalize()
//   static int CaseInsensitiveCompareUC16(Address byte_offset1, Address byte_offset2,
//                                         size_t byte_length, Isolate* isolate)
//   static SeqAsciiString* cast(Object* obj)
//   static const byte word_character_map[128]
//   bool has_pending_exception()
//   static SeqTwoByteString* cast(Object* obj)
//   static const byte* StringCharacterPosition(String* subject, int start_index)
//   static Result Match(Handle<Code> regexp, Handle<String> subject, int* offsets_vector,
//                       int offsets_vector_length, int previous_index, Isolate* isolate)
//   #define ASSERT_EQ(v1, v2)
//   static Address GrowStack(Address stack_pointer, Address* stack_top, Isolate* isolate)
//   NativeRegExpMacroAssembler(Zone* zone)
//   static ConsString* cast(Object* obj)
//   const uint16_t* GetChars()