#if V8_TARGET_ARCH_ARM64

#ifndef V8_INTERPRETED_REGEXP

#define __ ACCESS_MASM(masm_)

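// Conventions assumed throughout this file (inferred from the code below,
// see GetCode() and GetCachedRegister()): the first kNumCachedRegisters
// RegExp registers are cached in machine registers x0-x7, two 32-bit values
// packed per X register (the LSW holds the even-numbered register, the MSW
// the odd one); the remaining registers live in w-sized slots on the stack
// frame. current_input_offset() holds a negative byte offset from
// input_end(), so the current position in the subject string is always
// input_end() + current_input_offset().
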
RegExpMacroAssemblerARM64::RegExpMacroAssemblerARM64(Mode mode,
                                                     int registers_to_save,
                                                     Zone* zone)
    : NativeRegExpMacroAssembler(zone),
      masm_(new MacroAssembler(zone->isolate(), NULL, kRegExpCodeSize)),
      mode_(mode),
      num_registers_(registers_to_save),
      num_saved_registers_(registers_to_save) {
  __ SetStackPointer(csp);
  // The entry code is generated last, in GetCode(); jump over it for now.
  __ B(&entry_label_);
  __ Bind(&start_label_);
}

RegExpMacroAssemblerARM64::~RegExpMacroAssemblerARM64() {
  delete masm_;
  // Unuse labels in case we throw away the assembler without calling GetCode.
  entry_label_.Unuse();
  start_label_.Unuse();
  success_label_.Unuse();
  backtrack_label_.Unuse();
  exit_label_.Unuse();
  check_preempt_label_.Unuse();
  stack_overflow_label_.Unuse();
}

int RegExpMacroAssemblerARM64::stack_limit_slack() {
  return RegExpStack::kStackLimitSlack;
}

void RegExpMacroAssemblerARM64::AdvanceCurrentPosition(int by) {
  if (by != 0) {
    __ Add(current_input_offset(),
           current_input_offset(), by * char_size());
  }
}

void RegExpMacroAssemblerARM64::AdvanceRegister(int reg, int by) {
  ASSERT((reg >= 0) && (reg < num_registers_));
  if (by != 0) {
    Register to_advance;
    RegisterState register_state = GetRegisterState(reg);
    switch (register_state) {
      case STACKED:
        __ Ldr(w10, register_location(reg));
        __ Add(w10, w10, by);
        __ Str(w10, register_location(reg));
        break;
      case CACHED_LSW:
        to_advance = GetCachedRegister(reg);
        __ Add(to_advance, to_advance, by);
        break;
      case CACHED_MSW:
        to_advance = GetCachedRegister(reg);
        // The register to advance is in the high word of the cached pair.
        __ Add(to_advance, to_advance,
               static_cast<int64_t>(by) << kWRegSizeInBits);
        break;
    }
  }
}

void RegExpMacroAssemblerARM64::Backtrack() {
  CheckPreemption();
  // Pop code offset from backtrack stack, add code pointer and jump to
  // location.
  Pop(w10);
  __ Add(x10, code_pointer(), Operand(w10, UXTW));
  __ Br(x10);
}

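// Backtrack protocol, in outline (pieced together from Backtrack() above and
// PushBacktrack() further down): backtrack targets are stored as 32-bit
// offsets from the start of the code object, so stack entries stay compact
// and remain valid if the code object is relocated. In effect:
//   PushBacktrack(label):  push(label_position_relative_to_code_pointer)
//   Backtrack():           w10 = pop(); branch_to(code_pointer() + w10)
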
void RegExpMacroAssemblerARM64::Bind(Label* label) {
  __ Bind(label);
}

void RegExpMacroAssemblerARM64::CheckCharacter(uint32_t c, Label* on_equal) {
  CompareAndBranchOrBacktrack(current_character(), c, eq, on_equal);
}


void RegExpMacroAssemblerARM64::CheckCharacterGT(uc16 limit,
                                                 Label* on_greater) {
  CompareAndBranchOrBacktrack(current_character(), limit, hi, on_greater);
}

void RegExpMacroAssemblerARM64::CheckAtStart(Label* on_at_start) {
  Label not_at_start;
  // Did we start the match at the start of the input string?
  CompareAndBranchOrBacktrack(start_offset(), 0, ne, &not_at_start);
  // If we did, are we still at the start of the input string?
  __ Add(x10, input_end(), Operand(current_input_offset(), SXTW));
  __ Cmp(x10, input_start());
  BranchOrBacktrack(eq, on_at_start);
  __ Bind(&not_at_start);
}

void RegExpMacroAssemblerARM64::CheckNotAtStart(Label* on_not_at_start) {
  // Did we start the match at the start of the input string?
  CompareAndBranchOrBacktrack(start_offset(), 0, ne, on_not_at_start);
  // If we did, are we still at the start of the input string?
  __ Add(x10, input_end(), Operand(current_input_offset(), SXTW));
  __ Cmp(x10, input_start());
  BranchOrBacktrack(ne, on_not_at_start);
}

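// Note on the two checks above: since current_input_offset() is a negative
// byte offset from input_end(), input_end() + current_input_offset() is the
// absolute address of the current position, and comparing it against
// input_start() tells us whether we are at the very start of the subject.
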
void RegExpMacroAssemblerARM64::CheckCharacterLT(uc16 limit, Label* on_less) {
  CompareAndBranchOrBacktrack(current_character(), limit, lo, on_less);
}

void RegExpMacroAssemblerARM64::CheckCharacters(Vector<const uc16> str,
                                                int cp_offset,
                                                Label* on_failure,
                                                bool check_end_of_string) {
  if (check_end_of_string) {
    // Is last character of required match inside string.
    CheckPosition(cp_offset + str.length() - 1, on_failure);
  }

  Register characters_address = x11;
  __ Add(characters_address,
         input_end(),
         Operand(current_input_offset(), SXTW));
  if (cp_offset != 0) {
    __ Add(characters_address, characters_address, cp_offset * char_size());
  }

  for (int i = 0; i < str.length(); i++) {
    if (mode_ == ASCII) {
      __ Ldrb(w10, MemOperand(characters_address, 1, PostIndex));
      ASSERT(str[i] <= String::kMaxOneByteCharCode);
    } else {
      __ Ldrh(w10, MemOperand(characters_address, 2, PostIndex));
    }
    CompareAndBranchOrBacktrack(w10, str[i], ne, on_failure);
  }
}

void RegExpMacroAssemblerARM64::CheckGreedyLoop(Label* on_equal) {
  __ Ldr(w10, MemOperand(backtrack_stackpointer()));
  __ Cmp(current_input_offset(), w10);
  __ Cset(x11, eq);
  __ Add(backtrack_stackpointer(),
         backtrack_stackpointer(), Operand(x11, LSL, kWRegSizeLog2));
  BranchOrBacktrack(eq, on_equal);
}

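// The Cset/Add pair above is a branchless conditional pop: x11 is 1 when the
// top of the backtrack stack equals the current position (i.e. the last loop
// iteration matched nothing) and 0 otherwise, so the stack pointer advances
// by one w-sized entry only in the former case before we leave the loop.
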
void RegExpMacroAssemblerARM64::CheckNotBackReferenceIgnoreCase(
    int start_reg,
    Label* on_no_match) {
  Label fallthrough;

  Register capture_start_offset = w10;
  // w11 can be used as scratch register here. capture_length lives in w19 so
  // the loop below does not clobber the cached registers.
  Register capture_length = w19;

  // Find length of back-referenced capture.
  ASSERT((start_reg % 2) == 0);
  if (start_reg < kNumCachedRegisters) {
    __ Mov(capture_start_offset.X(), GetCachedRegister(start_reg));
    __ Lsr(x11, GetCachedRegister(start_reg), kWRegSizeInBits);
  } else {
    __ Ldp(w11, capture_start_offset, capture_location(start_reg, x10));
  }
  __ Sub(capture_length, w11, capture_start_offset);  // Length to check.
  // Succeed on empty capture (including no capture).
  __ Cbz(capture_length, &fallthrough);

  // Check that there are enough characters left in the input.
  __ Cmn(capture_length, current_input_offset());
  BranchOrBacktrack(gt, on_no_match);

  if (mode_ == ASCII) {
    Label success, fail, loop, loop_check;

    Register capture_start_address = x12;
    Register capture_end_address = x13;
    Register current_position_address = x14;

    __ Add(capture_start_address,
           input_end(),
           Operand(capture_start_offset, SXTW));
    __ Add(capture_end_address,
           capture_start_address,
           Operand(capture_length, SXTW));
    __ Add(current_position_address,
           input_end(),
           Operand(current_input_offset(), SXTW));

    __ Bind(&loop);
    __ Ldrb(w10, MemOperand(capture_start_address, 1, PostIndex));
    __ Ldrb(w11, MemOperand(current_position_address, 1, PostIndex));
    __ Cmp(w10, w11);
    __ B(eq, &loop_check);

    // Mismatch, try case-insensitive match (converting letters to lower-case).
    __ Orr(w10, w10, 0x20);  // Convert capture character to lower-case.
    __ Orr(w11, w11, 0x20);  // Also convert input character.
    __ Cmp(w11, w10);
    __ B(ne, &fail);
    __ Sub(w10, w10, 'a');
    __ Cmp(w10, 'z' - 'a');  // Is w10 a lowercase letter?
    __ B(ls, &loop_check);  // In range 'a'-'z'.
    // Latin-1: Check for values in range [224,254] but not 247.
    __ Sub(w10, w10, 224 - 'a');
    __ Cmp(w10, 254 - 224);
    __ Ccmp(w10, 247 - 224, ZFlag, ls);  // Check for 247.
    __ B(ne, &fail);  // Weren't Latin-1 letters.

    __ Bind(&loop_check);
    __ Cmp(capture_start_address, capture_end_address);
    __ B(lt, &loop);
    __ B(&success);

    __ Bind(&fail);
    BranchOrBacktrack(al, on_no_match);

    __ Bind(&success);
    // Compute new value of character position after the matched part.
    __ Sub(current_input_offset().X(), current_position_address, input_end());
    if (masm_->emit_debug_code()) {
      __ Cmp(current_input_offset().X(),
             Operand(current_input_offset(), SXTW));
      __ Ccmp(current_input_offset(), 0, NoFlag, eq);
      // The current input offset should be <= 0, and fit in a w register.
      __ Check(le, kOffsetOutOfRange);
    }
  } else {
    ASSERT(mode_ == UC16);
    int argument_count = 4;

    // The cached registers need to be retained across the C call.
    CPURegList cached_registers(CPURegister::kRegister, kXRegSizeInBits, 0, 7);
    ASSERT((cached_registers.Count() * 2) == kNumCachedRegisters);
    __ PushCPURegList(cached_registers);

    // Address of start of capture.
    __ Add(x0, input_end(), Operand(capture_start_offset, SXTW));
    // Length of capture.
    __ Mov(w2, capture_length);
    // Address of current input position.
    __ Add(x1, input_end(), Operand(current_input_offset(), SXTW));
    // Isolate.
    __ Mov(x3, ExternalReference::isolate_address(isolate()));

    {
      AllowExternalCallThatCantCauseGC scope(masm_);
      ExternalReference function =
          ExternalReference::re_case_insensitive_compare_uc16(isolate());
      __ CallCFunction(function, argument_count);
    }

    // Check if function returned non-zero for success or zero for failure.
    CompareAndBranchOrBacktrack(x0, 0, eq, on_no_match);
    // On success, increment position by length of capture.
    __ Add(current_input_offset(), current_input_offset(), capture_length);
    // Reset the cached registers.
    __ PopCPURegList(cached_registers);
  }

  __ Bind(&fallthrough);
}

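// Calling convention assumed for re_case_insensitive_compare_uc16 above,
// matching the argument setup in this file: x0 = byte address of the captured
// substring, x1 = byte address of the current position, w2 = capture length
// in bytes (all offsets in this file are byte offsets), x3 = the isolate.
// A non-zero return value means the two ranges matched case-insensitively.
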
void RegExpMacroAssemblerARM64::CheckNotBackReference(
    int start_reg,
    Label* on_no_match) {
  Label fallthrough;

  Register capture_start_address = x12;
  Register capture_end_address = x13;
  Register current_position_address = x14;
  Register capture_length = w15;

  // Find length of back-referenced capture.
  ASSERT((start_reg % 2) == 0);
  if (start_reg < kNumCachedRegisters) {
    __ Mov(x10, GetCachedRegister(start_reg));
    __ Lsr(x11, GetCachedRegister(start_reg), kWRegSizeInBits);
  } else {
    __ Ldp(w11, w10, capture_location(start_reg, x10));
  }
  __ Sub(capture_length, w11, w10);  // Length to check.
  // Succeed on empty capture (including no capture).
  __ Cbz(capture_length, &fallthrough);

  // Check that there are enough characters left in the input.
  __ Cmn(capture_length, current_input_offset());
  BranchOrBacktrack(gt, on_no_match);

  // Compute pointers to match string and capture string.
  __ Add(capture_start_address, input_end(), Operand(w10, SXTW));
  __ Add(capture_end_address,
         capture_start_address,
         Operand(capture_length, SXTW));
  __ Add(current_position_address,
         input_end(),
         Operand(current_input_offset(), SXTW));

  Label loop;
  __ Bind(&loop);
  if (mode_ == ASCII) {
    __ Ldrb(w10, MemOperand(capture_start_address, 1, PostIndex));
    __ Ldrb(w11, MemOperand(current_position_address, 1, PostIndex));
  } else {
    ASSERT(mode_ == UC16);
    __ Ldrh(w10, MemOperand(capture_start_address, 2, PostIndex));
    __ Ldrh(w11, MemOperand(current_position_address, 2, PostIndex));
  }
  __ Cmp(w10, w11);
  BranchOrBacktrack(ne, on_no_match);
  __ Cmp(capture_start_address, capture_end_address);
  __ B(lt, &loop);

  // Move current character position to position after match.
  __ Sub(current_input_offset().X(), current_position_address, input_end());
  if (masm_->emit_debug_code()) {
    __ Cmp(current_input_offset().X(),
           Operand(current_input_offset(), SXTW));
    __ Ccmp(current_input_offset(), 0, NoFlag, eq);
    // The current input offset should be <= 0, and fit in a w register.
    __ Check(le, kOffsetOutOfRange);
  }
  __ Bind(&fallthrough);
}

void RegExpMacroAssemblerARM64::CheckNotCharacter(unsigned c,
                                                  Label* on_not_equal) {
  CompareAndBranchOrBacktrack(current_character(), c, ne, on_not_equal);
}


void RegExpMacroAssemblerARM64::CheckCharacterAfterAnd(uint32_t c,
                                                       uint32_t mask,
                                                       Label* on_equal) {
  __ And(w10, current_character(), mask);
  CompareAndBranchOrBacktrack(w10, c, eq, on_equal);
}


void RegExpMacroAssemblerARM64::CheckNotCharacterAfterAnd(unsigned c,
                                                          unsigned mask,
                                                          Label* on_not_equal) {
  __ And(w10, current_character(), mask);
  CompareAndBranchOrBacktrack(w10, c, ne, on_not_equal);
}


void RegExpMacroAssemblerARM64::CheckNotCharacterAfterMinusAnd(
    uc16 c,
    uc16 minus,
    uc16 mask,
    Label* on_not_equal) {
  ASSERT(minus < String::kMaxUtf16CodeUnit);
  __ Sub(w10, current_character(), minus);
  __ And(w10, w10, mask);
  CompareAndBranchOrBacktrack(w10, c, ne, on_not_equal);
}

void RegExpMacroAssemblerARM64::CheckCharacterInRange(uc16 from,
                                                      uc16 to,
                                                      Label* on_in_range) {
  __ Sub(w10, current_character(), from);
  // Unsigned lower-or-same condition.
  CompareAndBranchOrBacktrack(w10, to - from, ls, on_in_range);
}


void RegExpMacroAssemblerARM64::CheckCharacterNotInRange(
    uc16 from,
    uc16 to,
    Label* on_not_in_range) {
  __ Sub(w10, current_character(), from);
  // Unsigned higher condition.
  CompareAndBranchOrBacktrack(w10, to - from, hi, on_not_in_range);
}

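// The Sub/compare pairs above use the standard unsigned range-check trick:
// after w10 = c - from, any c < from wraps around to a large unsigned value,
// so a single unsigned comparison against (to - from) tests from <= c <= to.
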
void RegExpMacroAssemblerARM64::CheckBitInTable(Handle<ByteArray> table,
                                                Label* on_bit_set) {
  __ Mov(x11, Operand(table));
  if ((mode_ != ASCII) || (kTableMask != String::kMaxOneByteCharCode)) {
    __ And(w10, current_character(), kTableMask);
    __ Add(w10, w10, ByteArray::kHeaderSize - kHeapObjectTag);
  } else {
    __ Add(w10, current_character(), ByteArray::kHeaderSize - kHeapObjectTag);
  }
  __ Ldrb(w11, MemOperand(x11, w10, UXTW));
  CompareAndBranchOrBacktrack(w11, 0, ne, on_bit_set);
}

bool RegExpMacroAssemblerARM64::CheckSpecialCharacterClass(uc16 type,
                                                           Label* on_no_match) {
  switch (type) {
  case 's':
    // Match space-characters.
    if (mode_ == ASCII) {
      // One byte space characters are '\t'..'\r', ' ' and \u00a0.
      Label success;
      // Check for ' ' or 0x00a0.
      __ Cmp(current_character(), ' ');
      __ Ccmp(current_character(), 0x00a0, ZFlag, ne);
      __ B(eq, &success);
      // Check range 0x09..0x0d.
      __ Sub(w10, current_character(), '\t');
      CompareAndBranchOrBacktrack(w10, '\r' - '\t', hi, on_no_match);
      __ Bind(&success);
      return true;
    }
    return false;
  case 'S':
    // The emitted code for generic character classes is good enough.
    return false;
  case 'd':
    // Match ASCII digits ('0'..'9').
    __ Sub(w10, current_character(), '0');
    CompareAndBranchOrBacktrack(w10, '9' - '0', hi, on_no_match);
    return true;
  case 'D':
    // Match ASCII non-digits.
    __ Sub(w10, current_character(), '0');
    CompareAndBranchOrBacktrack(w10, '9' - '0', ls, on_no_match);
    return true;
  case '.':
    // Match non-newlines (not 0x0a('\n'), 0x0d('\r'), 0x2028 and 0x2029).
    __ Cmp(current_character(), 0x0a);
    __ Ccmp(current_character(), 0x0d, ZFlag, ne);
    if (mode_ == UC16) {
      __ Sub(w10, current_character(), 0x2028);
      // If the Z flag was set we clear the flags to force a fail.
      __ Ccmp(w10, 0x2029 - 0x2028, NoFlag, ne);
      // ls -> the character is a newline.
      BranchOrBacktrack(ls, on_no_match);
    } else {
      BranchOrBacktrack(eq, on_no_match);
    }
    return true;
  case 'n':
    // Match newlines (0x0a('\n'), 0x0d('\r'), 0x2028 and 0x2029).
    __ Cmp(current_character(), 0x0a);
    __ Ccmp(current_character(), 0x0d, ZFlag, ne);
    if (mode_ == UC16) {
      __ Sub(w10, current_character(), 0x2028);
      // If the Z flag was set we clear the flags to force a success.
      __ Ccmp(w10, 0x2029 - 0x2028, NoFlag, ne);
      // hi -> the character is not a newline.
      BranchOrBacktrack(hi, on_no_match);
    } else {
      BranchOrBacktrack(ne, on_no_match);
    }
    return true;
  case 'w': {
    if (mode_ != ASCII) {
      // Table is 128 entries, so all ASCII characters can be tested.
      CompareAndBranchOrBacktrack(current_character(), 'z', hi, on_no_match);
    }
    ExternalReference map = ExternalReference::re_word_character_map();
    __ Mov(x10, map);
    __ Ldrb(w10, MemOperand(x10, current_character(), UXTW));
    CompareAndBranchOrBacktrack(w10, 0, eq, on_no_match);
    return true;
  }
  case 'W': {
    Label done;
    if (mode_ != ASCII) {
      // Table is 128 entries, so all ASCII characters can be tested.
      __ Cmp(current_character(), 'z');
      __ B(hi, &done);
    }
    ExternalReference map = ExternalReference::re_word_character_map();
    __ Mov(x10, map);
    __ Ldrb(w10, MemOperand(x10, current_character(), UXTW));
    CompareAndBranchOrBacktrack(w10, 0, ne, on_no_match);
    __ Bind(&done);
    return true;
  }
  case '*':
    // Match any character.
    return true;
  default:
    return false;
  }
}

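// Pattern used repeatedly above: a Cmp followed by a conditional Ccmp chains
// two equality tests into one flag result. For example, in the 's' case,
//   __ Cmp(current_character(), ' ');
//   __ Ccmp(current_character(), 0x00a0, ZFlag, ne);
// performs the second compare only when the first one failed (ne) and forces
// the Z flag otherwise, so a single eq test afterwards means
// "character is ' ' or 0x00a0".
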
Handle<HeapObject> RegExpMacroAssemblerARM64::GetCode(Handle<String> source) {
  Label return_w0;
  // Finalize code - write the entry point code now we know how many registers
  // we need.
  __ Bind(&entry_label_);

  // The ABI does not require us to set up a stack frame; we do it anyway to
  // make it easier to walk the stack.
  FrameScope scope(masm_, StackFrame::MANUAL);

  // Push registers on the stack; only push the argument registers we need.
  CPURegList argument_registers(x0, x5, x6, x7);
  CPURegList registers_to_retain = kCalleeSaved;
  registers_to_retain.Combine(lr);

  __ PushCPURegList(registers_to_retain);
  __ PushCPURegList(argument_registers);

  // Set frame pointer just above the arguments.
  __ Add(frame_pointer(), csp, argument_registers.Count() * kPointerSize);

  // Initialize callee-saved registers from the incoming arguments.
  __ Mov(start_offset(), w1);   // w1: start offset.
  __ Mov(input_start(), x2);    // x2: input start.
  __ Mov(input_end(), x3);      // x3: input end.
  __ Mov(output_array(), x4);   // x4: output array.

  // Make sure the stack alignment will be respected.
  int num_wreg_to_allocate = num_registers_ - kNumCachedRegisters;
  // Do not allocate local variables on the stack if the number of registers
  // is negative.
  if (num_wreg_to_allocate < 0) { num_wreg_to_allocate = 0; }
  // Make room for the success counter.
  num_wreg_to_allocate += 2;

  int alignment = masm_->ActivationFrameAlignment();
  ASSERT_EQ(alignment % 16, 0);
  int align_mask = (alignment / kWRegSize) - 1;
  num_wreg_to_allocate = (num_wreg_to_allocate + align_mask) & ~align_mask;

  // Check if we have space on the stack.
  Label stack_limit_hit, stack_ok;
  ExternalReference stack_limit =
      ExternalReference::address_of_stack_limit(isolate());
  __ Mov(x10, stack_limit);
  __ Ldr(x10, MemOperand(x10));
  __ Subs(x10, csp, x10);
  __ B(ls, &stack_limit_hit);

  // Check if there is room for the variable number of registers above
  // the stack limit.
  __ Cmp(x10, num_wreg_to_allocate * kWRegSize);
  __ B(hs, &stack_ok);

  // Exit with OutOfMemory exception. There is not enough space on the stack
  // for our working registers.
  __ Mov(w0, EXCEPTION);
  __ B(&return_w0);

  __ Bind(&stack_limit_hit);
  CallCheckStackGuardState(x10);
  // If returned value is non-zero, we exit with the returned value as result.
  __ Cbnz(w0, &return_w0);

  __ Bind(&stack_ok);
  // Allocate space on stack.
  __ Claim(num_wreg_to_allocate, kWRegSize);

  // Initialize success counter with 0.
  __ Str(wzr, MemOperand(frame_pointer(), kSuccessCounter));

  // Find negative length (offset of start relative to end).
  __ Sub(x10, input_start(), input_end());
  if (masm_->emit_debug_code()) {
    // Check that the input string length is < 2^30.
    __ Neg(x11, x10);
    __ Cmp(x11, (1<<30) - 1);
    __ Check(ls, kInputStringTooLong);
  }
  __ Mov(current_input_offset(), w10);

  // The non-position value is used as a clearing value for the capture
  // registers; it corresponds to the position of the first character minus
  // one.
  __ Sub(non_position_value(), current_input_offset(), char_size());
  __ Sub(non_position_value(), non_position_value(),
         Operand(start_offset(), LSL, (mode_ == UC16) ? 1 : 0));
  // Two copies of the non-position value packed in an X register, used to
  // clear two on-stack capture registers at once.
  __ Orr(twice_non_position_value(),
         non_position_value().X(),
         Operand(non_position_value().X(), LSL, kWRegSizeInBits));

  // Initialize code pointer register.
  __ Mov(code_pointer(), Operand(masm_->CodeObject()));

  Label load_char_start_regexp, start_regexp;
  // Load newline if index is at start, previous character otherwise.
  __ Cbnz(start_offset(), &load_char_start_regexp);
  __ Mov(current_character(), '\n');
  __ B(&start_regexp);

  // Global regexp restarts matching here.
  __ Bind(&load_char_start_regexp);
  // Load previous character as initial value of current-character register.
  LoadCurrentCharacterUnchecked(-1, 1);
  __ Bind(&start_regexp);

  // Initialize on-stack registers.
  if (num_saved_registers_ > 0) {
    ClearRegisters(0, num_saved_registers_ - 1);
  }
  // Initialize backtrack stack pointer.
  __ Ldr(backtrack_stackpointer(), MemOperand(frame_pointer(), kStackBase));

  // Execute regexp code.
  __ B(&start_label_);

  // Backtrack code (branch target for conditional backtracks).
  if (backtrack_label_.is_linked()) {
    __ Bind(&backtrack_label_);
    Backtrack();
  }

  if (success_label_.is_linked()) {
    Register first_capture_start = w15;

    // Save captures when successful.
    __ Bind(&success_label_);

    if (num_saved_registers_ > 0) {
      // V8 expects the output to be an int32_t array.
      Register capture_start = w12;
      Register capture_end = w13;
      Register input_length = w14;

      // Copy captures to output.

      // Get the length of the whole input string.
      __ Sub(x10, input_end(), input_start());
      if (masm_->emit_debug_code()) {
        // Check that the input string length is < 2^30.
        __ Cmp(x10, (1<<30) - 1);
        __ Check(ls, kInputStringTooLong);
      }
      // input_start has a start_offset offset on entry. We need to include it
      // when computing the length of the whole string.
      if (mode_ == UC16) {
        __ Add(input_length, start_offset(), Operand(w10, LSR, 1));
      } else {
        __ Add(input_length, start_offset(), w10);
      }

      // Copy the results to the output array from the cached registers first.
      for (int i = 0;
           (i < num_saved_registers_) && (i < kNumCachedRegisters);
           i += 2) {
        __ Mov(capture_start.X(), GetCachedRegister(i));
        __ Lsr(capture_end.X(), capture_start.X(), kWRegSizeInBits);
        if ((i == 0) && global_with_zero_length_check()) {
          // Keep capture start for the zero-length match check below.
          __ Mov(first_capture_start, capture_start);
        }
        // Offsets need to be relative to the start of the string.
        if (mode_ == UC16) {
          __ Add(capture_start, input_length, Operand(capture_start, ASR, 1));
          __ Add(capture_end, input_length, Operand(capture_end, ASR, 1));
        } else {
          __ Add(capture_start, input_length, capture_start);
          __ Add(capture_end, input_length, capture_end);
        }
        // The output pointer advances for a possible global match.
        __ Stp(capture_start, capture_end,
               MemOperand(output_array(), kPointerSize, PostIndex));
      }

      // Only carry on if there are more than kNumCachedRegisters capture
      // registers.
      int num_registers_left_on_stack =
          num_saved_registers_ - kNumCachedRegisters;
      if (num_registers_left_on_stack > 0) {
        Register base = x10;
        // There are always an even number of capture registers. A couple of
        // registers determine one match with two offsets.
        ASSERT_EQ(0, num_registers_left_on_stack % 2);
        __ Add(base, frame_pointer(), kFirstCaptureOnStack);

        // We can unroll the loop here if the number of registers is small
        // enough.
        if (num_registers_left_on_stack <= kNumRegistersToUnroll) {
          for (int i = 0; i < num_registers_left_on_stack / 2; i++) {
            __ Ldp(capture_end, capture_start,
                   MemOperand(base, -kPointerSize, PostIndex));
            if ((i == 0) && global_with_zero_length_check()) {
              // Keep capture start for the zero-length match check below.
              __ Mov(first_capture_start, capture_start);
            }
            // Offsets need to be relative to the start of the string.
            if (mode_ == UC16) {
              __ Add(capture_start,
                     input_length,
                     Operand(capture_start, ASR, 1));
              __ Add(capture_end, input_length, Operand(capture_end, ASR, 1));
            } else {
              __ Add(capture_start, input_length, capture_start);
              __ Add(capture_end, input_length, capture_end);
            }
            // The output pointer advances for a possible global match.
            __ Stp(capture_start, capture_end,
                   MemOperand(output_array(), kPointerSize, PostIndex));
          }
        } else {
          Label loop;
          __ Mov(x11, num_registers_left_on_stack);

          __ Bind(&loop);
          __ Ldp(capture_end, capture_start,
                 MemOperand(base, -kPointerSize, PostIndex));
          if (global_with_zero_length_check()) {
            __ Mov(first_capture_start, capture_start);
          }
          // Offsets need to be relative to the start of the string.
          if (mode_ == UC16) {
            __ Add(capture_start,
                   input_length,
                   Operand(capture_start, ASR, 1));
            __ Add(capture_end, input_length, Operand(capture_end, ASR, 1));
          } else {
            __ Add(capture_start, input_length, capture_start);
            __ Add(capture_end, input_length, capture_end);
          }
          // The output pointer advances for a possible global match.
          __ Stp(capture_start, capture_end,
                 MemOperand(output_array(), kPointerSize, PostIndex));
          __ Sub(x11, x11, 2);
          __ Cbnz(x11, &loop);
        }
      }
    }

    if (global()) {
      Register success_counter = w0;
      Register output_size = x10;
      // Restart matching if the regular expression is flagged as global.

      // Increment success counter.
      __ Ldr(success_counter, MemOperand(frame_pointer(), kSuccessCounter));
      __ Add(success_counter, success_counter, 1);
      __ Str(success_counter, MemOperand(frame_pointer(), kSuccessCounter));

      // Capture results have been stored, so the number of remaining global
      // output registers is reduced by the number of stored captures.
      __ Ldr(output_size, MemOperand(frame_pointer(), kOutputSize));
      __ Sub(output_size, output_size, num_saved_registers_);
      // Check whether we have enough room for another set of capture results.
      __ Cmp(output_size, num_saved_registers_);
      __ B(lt, &return_w0);

      // The output pointer is already set to the next field in the output
      // array; update the output size on the frame before restarting.
      __ Str(output_size, MemOperand(frame_pointer(), kOutputSize));

      if (global_with_zero_length_check()) {
        // Special case for the global regexps where we need to check if the
        // match is of zero-length before restarting.
        __ Cmp(current_input_offset(), first_capture_start);
        // Not a zero-length match, restart.
        __ B(ne, &load_char_start_regexp);
        // Offset from the end is zero if we already reached the end.
        __ Cbz(current_input_offset(), &return_w0);
        // Advance current position after a zero-length match.
        __ Add(current_input_offset(),
               current_input_offset(),
               Operand((mode_ == UC16) ? 2 : 1));
      }

      __ B(&load_char_start_regexp);
    } else {
      __ Mov(w0, SUCCESS);
    }
  }

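  // Global regexp loop, in outline: each successful match stores its captures
  // and returns here; matching restarts at load_char_start_regexp as long as
  // the output array has room for another full set of captures. The
  // zero-length-match check prevents an infinite loop on patterns that can
  // match the empty string: if the new match starts exactly where the
  // previous one did, the position is bumped by one character first.
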
  if (exit_label_.is_linked()) {
    // Exit and return w0.
    __ Bind(&exit_label_);
    if (global()) {
      // Return the number of successful captures.
      __ Ldr(w0, MemOperand(frame_pointer(), kSuccessCounter));
    }
  }

  __ Bind(&return_w0);
  // Set stack pointer back to first register to retain.
  __ Mov(csp, fp);
  // Restore registers.
  __ PopCPURegList(registers_to_retain);
  __ Ret();

  Label exit_with_exception;

  // Registers x0 to x7 are used to store the first captures; they need to be
  // retained over calls to C++ code.
  CPURegList cached_registers(CPURegister::kRegister, kXRegSizeInBits, 0, 7);
  ASSERT((cached_registers.Count() * 2) == kNumCachedRegisters);

  if (check_preempt_label_.is_linked()) {
    __ Bind(&check_preempt_label_);
    SaveLinkRegister();
    __ PushCPURegList(cached_registers);
    CallCheckStackGuardState(x10);
    // If returned value is non-zero, we exit with the returned value as
    // result.
    __ Cbnz(w0, &return_w0);
    // Reset the cached registers.
    __ PopCPURegList(cached_registers);
    RestoreLinkRegister();
    __ Ret();
  }

  if (stack_overflow_label_.is_linked()) {
    __ Bind(&stack_overflow_label_);
    SaveLinkRegister();
    __ PushCPURegList(cached_registers);
    // Call GrowStack(backtrack_stackpointer(), &stack_base, isolate).
    __ Mov(x2, ExternalReference::isolate_address(isolate()));
    __ Add(x1, frame_pointer(), kStackBase);
    __ Mov(x0, backtrack_stackpointer());
    ExternalReference grow_stack =
        ExternalReference::re_grow_stack(isolate());
    __ CallCFunction(grow_stack, 3);
    // If NULL is returned, we have failed to grow the stack, and must exit
    // with a stack-overflow exception.
    __ Cbz(w0, &exit_with_exception);
    // Otherwise use return value as new stack pointer.
    __ Mov(backtrack_stackpointer(), x0);
    // Reset the cached registers.
    __ PopCPURegList(cached_registers);
    RestoreLinkRegister();
    __ Ret();
  }

  if (exit_with_exception.is_linked()) {
    __ Bind(&exit_with_exception);
    __ Mov(w0, EXCEPTION);
    __ B(&return_w0);
  }

  CodeDesc code_desc;
  masm_->GetCode(&code_desc);
  Handle<Code> code = isolate()->factory()->NewCode(
      code_desc, Code::ComputeFlags(Code::REGEXP), masm_->CodeObject());
  PROFILE(masm_->isolate(), RegExpCodeCreateEvent(*code, *source));
  return Handle<HeapObject>::cast(code);
}

void RegExpMacroAssemblerARM64::GoTo(Label* to) {
  BranchOrBacktrack(al, to);
}

void RegExpMacroAssemblerARM64::IfRegisterGE(int reg,
                                             int comparand,
                                             Label* if_ge) {
  Register to_compare = GetRegister(reg, w10);
  CompareAndBranchOrBacktrack(to_compare, comparand, ge, if_ge);
}


void RegExpMacroAssemblerARM64::IfRegisterLT(int reg,
                                             int comparand,
                                             Label* if_lt) {
  Register to_compare = GetRegister(reg, w10);
  CompareAndBranchOrBacktrack(to_compare, comparand, lt, if_lt);
}


void RegExpMacroAssemblerARM64::IfRegisterEqPos(int reg, Label* if_eq) {
  Register to_compare = GetRegister(reg, w10);
  __ Cmp(to_compare, current_input_offset());
  BranchOrBacktrack(eq, if_eq);
}

RegExpMacroAssembler::IrregexpImplementation
    RegExpMacroAssemblerARM64::Implementation() {
  return kARM64Implementation;
}

void RegExpMacroAssemblerARM64::LoadCurrentCharacter(int cp_offset,
                                                     Label* on_end_of_input,
                                                     bool check_bounds,
                                                     int characters) {
  ASSERT(cp_offset >= -1);  // ^ and \b can look behind one character.
  // Ensure that an int32_t can be used to index the string.
  ASSERT(cp_offset < (1<<30));
  if (check_bounds) {
    CheckPosition(cp_offset + characters - 1, on_end_of_input);
  }
  LoadCurrentCharacterUnchecked(cp_offset, characters);
}

void RegExpMacroAssemblerARM64::PopCurrentPosition() {
  Pop(current_input_offset());
}


void RegExpMacroAssemblerARM64::PopRegister(int register_index) {
  Pop(w10);
  StoreRegister(register_index, w10);
}

void RegExpMacroAssemblerARM64::PushBacktrack(Label* label) {
  if (label->is_bound()) {
    int target = label->pos();
    __ Mov(w10, target + Code::kHeaderSize - kHeapObjectTag);
  } else {
    __ Adr(x10, label, MacroAssembler::kAdrFar);
    __ Sub(x10, x10, code_pointer());
    if (masm_->emit_debug_code()) {
      __ Cmp(x10, kWRegMask);
      // The code offset has to fit in a W register.
      __ Check(ls, kOffsetOutOfRange);
    }
  }
  Push(w10);
  CheckStackLimit();
}

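// Backtrack targets pushed above are code offsets, not absolute addresses:
// for an already-bound label the offset is known statically; for an unbound
// one, Adr materializes the eventual address and the code pointer is
// subtracted. Either way the backtrack stack entry stays 32 bits wide and
// position-independent, matching the Pop/Add/Br sequence in Backtrack().
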
void RegExpMacroAssemblerARM64::PushCurrentPosition() {
  Push(current_input_offset());
}


void RegExpMacroAssemblerARM64::PushRegister(int register_index,
                                             StackCheckFlag check_stack_limit) {
  Register to_push = GetRegister(register_index, w10);
  Push(to_push);
  if (check_stack_limit) CheckStackLimit();
}

void RegExpMacroAssemblerARM64::ReadCurrentPositionFromRegister(int reg) {
  Register cached_register;
  RegisterState register_state = GetRegisterState(reg);
  switch (register_state) {
    case STACKED:
      __ Ldr(current_input_offset(), register_location(reg));
      break;
    case CACHED_LSW:
      cached_register = GetCachedRegister(reg);
      __ Mov(current_input_offset(), cached_register.W());
      break;
    case CACHED_MSW:
      cached_register = GetCachedRegister(reg);
      __ Lsr(current_input_offset().X(), cached_register, kWRegSizeInBits);
      break;
  }
}

void RegExpMacroAssemblerARM64::ReadStackPointerFromRegister(int reg) {
  Register read_from = GetRegister(reg, w10);
  __ Ldr(x11, MemOperand(frame_pointer(), kStackBase));
  __ Add(backtrack_stackpointer(), x11, Operand(read_from, SXTW));
}

void RegExpMacroAssemblerARM64::SetCurrentPositionFromEnd(int by) {
  Label after_position;
  __ Cmp(current_input_offset(), -by * char_size());
  __ B(ge, &after_position);
  __ Mov(current_input_offset(), -by * char_size());
  // On RegExp code entry (where this operation is used), the character before
  // the current position is expected to be already loaded. We have advanced
  // the position, so it's safe to read backwards.
  LoadCurrentCharacterUnchecked(-1, 1);
  __ Bind(&after_position);
}

void RegExpMacroAssemblerARM64::SetRegister(int register_index, int to) {
  ASSERT(register_index >= num_saved_registers_);  // Reserved for positions!
  Register set_to = wzr;
  if (to != 0) {
    set_to = w10;
    __ Mov(w10, to);
  }
  StoreRegister(register_index, set_to);
}

bool RegExpMacroAssemblerARM64::Succeed() {
  __ B(&success_label_);
  return global();
}

void RegExpMacroAssemblerARM64::WriteCurrentPositionToRegister(int reg,
                                                               int cp_offset) {
  Register position = current_input_offset();
  if (cp_offset != 0) {
    position = w10;
    __ Add(position, current_input_offset(), cp_offset * char_size());
  }
  StoreRegister(reg, position);
}

void RegExpMacroAssemblerARM64::ClearRegisters(int reg_from, int reg_to) {
  ASSERT(reg_from <= reg_to);
  int num_registers = reg_to - reg_from + 1;

  // If the first capture register is cached in a hardware register but not
  // aligned on a 64-bit boundary, we need to clear it specifically.
  if ((reg_from < kNumCachedRegisters) && ((reg_from % 2) != 0)) {
    StoreRegister(reg_from, non_position_value());
    num_registers--;
    reg_from++;
  }

  // Clear cached registers in pairs as far as possible.
  while ((num_registers >= 2) && (reg_from < kNumCachedRegisters)) {
    ASSERT(GetRegisterState(reg_from) == CACHED_LSW);
    __ Mov(GetCachedRegister(reg_from), twice_non_position_value());
    reg_from += 2;
    num_registers -= 2;
  }

  if ((num_registers % 2) == 1) {
    StoreRegister(reg_from, non_position_value());
    num_registers--;
    reg_from++;
  }

  if (num_registers > 0) {
    // If there are some remaining registers, they are stored on the stack.
    ASSERT(reg_from >= kNumCachedRegisters);

    // Move down the indexes of the registers on stack.
    reg_from -= kNumCachedRegisters;
    reg_to -= kNumCachedRegisters;

    // We use pairs of 64-bit stores; the offset points at the lower-addressed
    // (odd-index) slot of the first pair.
    int base_offset =
        kFirstRegisterOnStack - kWRegSize - (kWRegSize * reg_from);
    if (num_registers > kNumRegistersToUnroll) {
      Register base = x10;
      __ Add(base, frame_pointer(), base_offset);

      Label loop;
      __ Mov(x11, num_registers);
      __ Bind(&loop);
      __ Str(twice_non_position_value(),
             MemOperand(base, -kPointerSize, PostIndex));
      __ Sub(x11, x11, 2);
      __ Cbnz(x11, &loop);
    } else {
      for (int i = reg_from; i <= reg_to; i += 2) {
        __ Str(twice_non_position_value(),
               MemOperand(frame_pointer(),
                          base_offset - (i - reg_from) * kWRegSize));
      }
    }
  }
}

void RegExpMacroAssemblerARM64::WriteStackPointerToRegister(int reg) {
  __ Ldr(x10, MemOperand(frame_pointer(), kStackBase));
  __ Sub(x10, backtrack_stackpointer(), x10);
  if (masm_->emit_debug_code()) {
    __ Cmp(x10, Operand(w10, SXTW));
    // The stack offset needs to fit in a W register.
    __ Check(eq, kOffsetOutOfRange);
  }
  StoreRegister(reg, w10);
}

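// The backtrack stack pointer is spilled to a RegExp register as a 32-bit
// offset from the stack base rather than as an absolute address: RegExp
// registers are w-sized, and the debug check above verifies that the offset
// actually fits in a W register.
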
template <typename T>
static T& frame_entry(Address re_frame, int frame_offset) {
  return *reinterpret_cast<T*>(re_frame + frame_offset);
}

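// Helper used by CheckStackGuardState() below to read and write the saved
// machine frame from C++. For example, frame_entry<String*>(re_frame, kInput)
// aliases the input-string slot of the RegExp frame, so assigning through it
// updates the value the generated code will see on return.
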
int RegExpMacroAssemblerARM64::CheckStackGuardState(Address* return_address,
                                                    Code* re_code,
                                                    Address re_frame,
                                                    int start_offset,
                                                    const byte** input_start,
                                                    const byte** input_end) {
  Isolate* isolate = frame_entry<Isolate*>(re_frame, kIsolate);
  if (isolate->stack_guard()->IsStackOverflow()) {
    isolate->StackOverflow();
    return EXCEPTION;
  }

  // If not a real stack overflow, the stack guard was used to interrupt
  // execution for another purpose. If this is a direct call from JavaScript,
  // retry the RegExp forcing the call through the runtime system; the direct
  // call cannot handle a GC.
  if (frame_entry<int>(re_frame, kDirectCall) == 1) {
    return RETRY;
  }

  // Prepare for possible GC.
  HandleScope handles(isolate);
  Handle<Code> code_handle(re_code);
  Handle<String> subject(frame_entry<String*>(re_frame, kInput));

  // Current string.
  bool is_ascii = subject->IsOneByteRepresentationUnderneath();

  ASSERT(re_code->instruction_start() <= *return_address);
  ASSERT(*return_address <=
         re_code->instruction_start() + re_code->instruction_size());

  MaybeObject* result = Execution::HandleStackGuardInterrupt(isolate);

  if (*code_handle != re_code) {  // Return address no longer valid.
    int delta = code_handle->address() - re_code->address();
    // Overwrite the return address on the stack.
    *return_address += delta;
  }

  if (result->IsException()) {
    return EXCEPTION;
  }

  Handle<String> subject_tmp = subject;
  int slice_offset = 0;

  // Extract the underlying string and the slice offset.
  if (StringShape(*subject_tmp).IsCons()) {
    subject_tmp = Handle<String>(ConsString::cast(*subject_tmp)->first());
  } else if (StringShape(*subject_tmp).IsSliced()) {
    SlicedString* slice = SlicedString::cast(*subject_tmp);
    subject_tmp = Handle<String>(slice->parent());
    slice_offset = slice->offset();
  }

  // String might have changed.
  if (subject_tmp->IsOneByteRepresentation() != is_ascii) {
    // If we changed between an ASCII and an UC16 string, the specialized
    // code cannot be used, and we need to restart regexp matching from
    // scratch (including, potentially, compiling a new version of the code).
    return RETRY;
  }

  // Otherwise, the content of the string might have moved. It must still
  // be a sequential or external string with the same content.
  ASSERT(StringShape(*subject_tmp).IsSequential() ||
         StringShape(*subject_tmp).IsExternal());

  // The original start address of the characters to match.
  const byte* start_address = *input_start;

  // Find the current start address of the same character at the current
  // string position.
  const byte* new_address =
      StringCharacterPosition(*subject_tmp, start_offset + slice_offset);

  if (start_address != new_address) {
    // If there is a difference, update the object pointer and start and end
    // addresses in the stack frame to match the new value.
    const byte* end_address = *input_end;
    int byte_length = static_cast<int>(end_address - start_address);
    frame_entry<const String*>(re_frame, kInput) = *subject;
    *input_start = new_address;
    *input_end = new_address + byte_length;
  } else if (frame_entry<const String*>(re_frame, kInput) != *subject) {
    // Subject string might have been a ConsString that underwent
    // short-circuiting during GC. That will not change start_address but
    // will change the pointer inside the subject handle.
    frame_entry<const String*>(re_frame, kInput) = *subject;
  }

  return 0;
}

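// Why all the pointer fix-ups above: HandleStackGuardInterrupt() may trigger
// a GC, which can move both the code object and the subject string. The
// return address, the String* slot in the frame, and the raw input_start /
// input_end character pointers are therefore all recomputed before returning
// to generated code; only a change of string representation (one-byte vs.
// two-byte) forces a full RETRY.
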
void RegExpMacroAssemblerARM64::CheckPosition(int cp_offset,
                                              Label* on_outside_input) {
  CompareAndBranchOrBacktrack(current_input_offset(),
                              -cp_offset * char_size(),
                              ge, on_outside_input);
}


bool RegExpMacroAssemblerARM64::CanReadUnaligned() {
  return !slow_safe();
}

void RegExpMacroAssemblerARM64::CallCheckStackGuardState(Register scratch) {
  // Allocate aligned space on the stack for the return address and the input
  // string pointers; CheckStackGuardState may rewrite all three.
  int alignment = masm_->ActivationFrameAlignment();
  ASSERT_EQ(alignment % 16, 0);
  int align_mask = (alignment / kXRegSize) - 1;
  int xreg_to_claim = (3 + align_mask) & ~align_mask;
  __ Claim(xreg_to_claim);

  // Stash the input string pointers and pass their addresses (x5, x4).
  __ Poke(input_end(), 2 * kPointerSize);
  __ Add(x5, csp, 2 * kPointerSize);
  __ Poke(input_start(), kPointerSize);
  __ Add(x4, csp, kPointerSize);
  __ Mov(w3, start_offset());
  __ Mov(x2, frame_pointer());  // RegExp code frame pointer.
  __ Mov(x1, Operand(masm_->CodeObject()));  // Code* of self.
  // x0 is a pointer to the return address, which DirectCEntryStub places at
  // the top of the stack before the call.
  __ Mov(x0, csp);

  ExternalReference check_stack_guard_state =
      ExternalReference::re_check_stack_guard_state(isolate());
  __ Mov(scratch, check_stack_guard_state);
  DirectCEntryStub stub;
  stub.GenerateCall(masm_, scratch);

  // The input string may have been moved in memory, reload the pointers.
  __ Peek(input_start(), kPointerSize);
  __ Peek(input_end(), 2 * kPointerSize);
  __ Drop(xreg_to_claim);

  // Reload the Code pointer.
  __ Mov(code_pointer(), Operand(masm_->CodeObject()));
}

void RegExpMacroAssemblerARM64::BranchOrBacktrack(Condition condition,
                                                  Label* to) {
  if (condition == al) {  // Unconditional.
    if (to == NULL) {
      Backtrack();
      return;
    }
    __ B(to);
    return;
  }
  if (to == NULL) {
    to = &backtrack_label_;
  }
  Condition inverted_condition = InvertCondition(condition);
  Label no_branch;
  __ B(inverted_condition, &no_branch);
  __ B(to);
  __ Bind(&no_branch);
}


void RegExpMacroAssemblerARM64::CompareAndBranchOrBacktrack(Register reg,
                                                            int immediate,
                                                            Condition condition,
                                                            Label* to) {
  if ((immediate == 0) && ((condition == eq) || (condition == ne))) {
    if (to == NULL) {
      to = &backtrack_label_;
    }
    Label no_branch;
    if (condition == eq) {
      __ Cbnz(reg, &no_branch);
    } else {
      __ Cbz(reg, &no_branch);
    }
    __ B(to);
    __ Bind(&no_branch);
  } else {
    __ Cmp(reg, immediate);
    BranchOrBacktrack(condition, to);
  }
}

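// Both helpers above fall back to the shared backtrack_label_ when no target
// is given, and they branch over an unconditional B rather than using B.cond
// or CBZ directly on the target: conditional branches have a limited range,
// and the backtrack code may end up too far away in large regexps. Comparing
// against zero additionally gets the cheaper CBZ/CBNZ form.
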
void RegExpMacroAssemblerARM64::CheckPreemption() {
  // Check for preemption.
  ExternalReference stack_limit =
      ExternalReference::address_of_stack_limit(isolate());
  __ Mov(x10, stack_limit);
  __ Ldr(x10, MemOperand(x10));
  __ Cmp(csp, x10);
  CallIf(&check_preempt_label_, ls);
}


void RegExpMacroAssemblerARM64::CheckStackLimit() {
  ExternalReference stack_limit =
      ExternalReference::address_of_regexp_stack_limit(isolate());
  __ Mov(x10, stack_limit);
  __ Ldr(x10, MemOperand(x10));
  __ Cmp(backtrack_stackpointer(), x10);
  CallIf(&stack_overflow_label_, ls);
}

void RegExpMacroAssemblerARM64::Push(Register source) {
  ASSERT(source.Is32Bits());
  ASSERT(!source.is(backtrack_stackpointer()));
  // The backtrack stack grows downward, in w-sized entries.
  __ Str(source,
         MemOperand(backtrack_stackpointer(),
                    -static_cast<int>(kWRegSize),
                    PreIndex));
}


void RegExpMacroAssemblerARM64::Pop(Register target) {
  ASSERT(target.Is32Bits());
  ASSERT(!target.is(backtrack_stackpointer()));
  __ Ldr(target,
         MemOperand(backtrack_stackpointer(), kWRegSize, PostIndex));
}

Register RegExpMacroAssemblerARM64::GetCachedRegister(int register_index) {
  ASSERT(register_index < kNumCachedRegisters);
  // Two RegExp registers are packed per X register.
  return Register::Create(register_index / 2, kXRegSizeInBits);
}

Register RegExpMacroAssemblerARM64::GetRegister(int register_index,
                                                Register maybe_result) {
  ASSERT(maybe_result.Is32Bits());
  ASSERT(register_index >= 0);
  if (num_registers_ <= register_index) {
    num_registers_ = register_index + 1;
  }
  Register result;
  RegisterState register_state = GetRegisterState(register_index);
  switch (register_state) {
    case STACKED:
      __ Ldr(maybe_result, register_location(register_index));
      result = maybe_result;
      break;
    case CACHED_LSW:
      result = GetCachedRegister(register_index).W();
      break;
    case CACHED_MSW:
      __ Lsr(maybe_result.X(), GetCachedRegister(register_index),
             kWRegSizeInBits);
      result = maybe_result;
      break;
  }
  ASSERT(result.Is32Bits());
  return result;
}

void RegExpMacroAssemblerARM64::StoreRegister(int register_index,
                                              Register source) {
  ASSERT(source.Is32Bits());
  ASSERT(register_index >= 0);
  if (num_registers_ <= register_index) {
    num_registers_ = register_index + 1;
  }

  Register cached_register;
  RegisterState register_state = GetRegisterState(register_index);
  switch (register_state) {
    case STACKED:
      __ Str(source, register_location(register_index));
      break;
    case CACHED_LSW:
      cached_register = GetCachedRegister(register_index);
      if (!source.Is(cached_register.W())) {
        __ Bfi(cached_register, source.X(), 0, kWRegSizeInBits);
      }
      break;
    case CACHED_MSW:
      cached_register = GetCachedRegister(register_index);
      __ Bfi(cached_register, source.X(), kWRegSizeInBits, kWRegSizeInBits);
      break;
  }
}

void RegExpMacroAssemblerARM64::CallIf(Label* to, Condition condition) {
  Label skip_call;
  if (condition != al) __ B(&skip_call, InvertCondition(condition));
  __ Bl(to);
  __ Bind(&skip_call);
}

void RegExpMacroAssemblerARM64::RestoreLinkRegister() {
  __ Pop(lr, xzr);
  __ Add(lr, lr, Operand(masm_->CodeObject()));
}


void RegExpMacroAssemblerARM64::SaveLinkRegister() {
  // The link register is saved as an offset from the code object so it stays
  // valid if the code is relocated; the xzr padding keeps csp 16-byte
  // aligned.
  __ Sub(lr, lr, Operand(masm_->CodeObject()));
  __ Push(xzr, lr);
}

MemOperand RegExpMacroAssemblerARM64::register_location(int register_index) {
  ASSERT(register_index < (1<<30));
  ASSERT(register_index >= kNumCachedRegisters);
  if (num_registers_ <= register_index) {
    num_registers_ = register_index + 1;
  }
  register_index -= kNumCachedRegisters;
  int offset = kFirstRegisterOnStack - register_index * kWRegSize;
  return MemOperand(frame_pointer(), offset);
}

MemOperand RegExpMacroAssemblerARM64::capture_location(int register_index,
                                                       Register scratch) {
  ASSERT(register_index < (1<<30));
  ASSERT(register_index < num_saved_registers_);
  ASSERT(register_index >= kNumCachedRegisters);
  ASSERT_EQ(register_index % 2, 0);
  register_index -= kNumCachedRegisters;
  int offset = kFirstCaptureOnStack - register_index * kWRegSize;
  // capture_location is used with Ldp instructions to load a pair of
  // registers, so the immediate offset must be in range.
  if (is_int7(offset)) {
    return MemOperand(frame_pointer(), offset);
  } else {
    __ Add(scratch, frame_pointer(), offset);
    return MemOperand(scratch);
  }
}

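// The is_int7 test above mirrors the encoding of LDP/STP: their immediate is
// a signed 7-bit field (scaled by the access size), so offsets outside that
// window have to be materialized in a scratch register first.
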
void RegExpMacroAssemblerARM64::LoadCurrentCharacterUnchecked(int cp_offset,
                                                              int characters) {
  Register offset = current_input_offset();

  // Ldr/Ldrh can do unaligned accesses if the CPU and OS allow it; otherwise
  // only a single character may be loaded at a time.
  if (!CanReadUnaligned()) {
    ASSERT(characters == 1);
  }

  if (cp_offset != 0) {
    if (masm_->emit_debug_code()) {
      // w10 is the W view of x10: compute the sum in 64 bits and check that
      // it fits in a W register.
      __ Mov(x10, cp_offset * char_size());
      __ Add(x10, x10, Operand(current_input_offset(), SXTW));
      __ Cmp(x10, Operand(w10, SXTW));
      __ Check(eq, kOffsetOutOfRange);
    } else {
      __ Add(w10, current_input_offset(), cp_offset * char_size());
    }
    offset = w10;
  }

  if (mode_ == ASCII) {
    if (characters == 4) {
      __ Ldr(current_character(), MemOperand(input_end(), offset, SXTW));
    } else if (characters == 2) {
      __ Ldrh(current_character(), MemOperand(input_end(), offset, SXTW));
    } else {
      ASSERT(characters == 1);
      __ Ldrb(current_character(), MemOperand(input_end(), offset, SXTW));
    }
  } else {
    ASSERT(mode_ == UC16);
    if (characters == 2) {
      __ Ldr(current_character(), MemOperand(input_end(), offset, SXTW));
    } else {
      ASSERT(characters == 1);
      __ Ldrh(current_character(), MemOperand(input_end(), offset, SXTW));
    }
  }
}

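// When unaligned accesses are allowed, up to four one-byte characters (or two
// UC16 characters) are fetched with a single load into current_character(),
// letting the generated matcher test several pattern characters per dispatch.
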
#endif  // V8_INTERPRETED_REGEXP

#endif  // V8_TARGET_ARCH_ARM64