30 #if defined(V8_TARGET_ARCH_X64)
43 #ifndef V8_INTERPRETED_REGEXP
116 #define __ ACCESS_MASM((&masm_))
// Constructor fragment (the signature line and several initializers are
// missing from this extraction — original line numbers jump).
// Sets up the embedded MacroAssembler with a fresh code buffer and records
// how many registers the generated code must save, then emits the standard
// preamble: jump to the entry trampoline and bind the start label.
120 int registers_to_save,
122 : NativeRegExpMacroAssembler(zone),
// NOTE(review): masm_ is bound to the current isolate with a NULL buffer,
// i.e. the assembler allocates its own kRegExpCodeSize buffer.
123 masm_(Isolate::Current(),
NULL, kRegExpCodeSize),
124 no_root_array_scope_(&masm_),
// Fixup list starts with capacity 4; positions of code-relative constants
// pushed on the backtrack stack are recorded here (see Push(Label*)).
125 code_relative_fixup_positions_(4, zone),
127 num_registers_(registers_to_save),
128 num_saved_registers_(registers_to_save),
// Code layout: jump over the body to entry_label_ (bound in GetCode),
// then mark where the regexp body itself begins.
135 __ jmp(&entry_label_);
136 __ bind(&start_label_);
// Destructor: unuse all internal labels in case the code was not generated
// (an unused-but-linked Label asserts in its own destructor otherwise).
140 RegExpMacroAssemblerX64::~RegExpMacroAssemblerX64() {
142 entry_label_.Unuse();
143 start_label_.Unuse();
144 success_label_.Unuse();
145 backtrack_label_.Unuse();
147 check_preempt_label_.Unuse();
148 stack_overflow_label_.Unuse();
// How much the stack pointer is allowed to dip below the stack limit before
// an explicit stack-overflow check is required (delegates to RegExpStack).
152 int RegExpMacroAssemblerX64::stack_limit_slack() {
153 return RegExpStack::kStackLimitSlack;
// Move the current input position (rdi, a negative byte offset from the end
// of input) forward by |by| characters. char_size() is 1 (ASCII) or 2 (UC16).
157 void RegExpMacroAssemblerX64::AdvanceCurrentPosition(
int by) {
159 __ addq(
rdi, Immediate(by * char_size()));
// Add |by| to the value stored in capture/scratch register |reg|
// (a memory slot on the native frame, see register_location()).
164 void RegExpMacroAssemblerX64::AdvanceRegister(
int reg,
int by) {
166 ASSERT(reg < num_registers_);
168 __ addq(register_location(reg), Immediate(by));
// Pop a code-relative offset from the backtrack stack, rebase it onto the
// code object pointer, and jump there.
173 void RegExpMacroAssemblerX64::Backtrack() {
177 __ addq(
rbx, code_object_pointer());
// Bind |label| at the current emission point (body dropped by extraction).
182 void RegExpMacroAssemblerX64::Bind(Label* label) {
// Branch to |on_equal| (or backtrack if it is NULL) when the current
// character equals |c|.
187 void RegExpMacroAssemblerX64::CheckCharacter(uint32_t c, Label* on_equal) {
188 __ cmpl(current_character(), Immediate(c));
189 BranchOrBacktrack(
equal, on_equal);
// Branch to |on_greater| when the current character is greater than |limit|.
193 void RegExpMacroAssemblerX64::CheckCharacterGT(
uc16 limit, Label* on_greater) {
194 __ cmpl(current_character(), Immediate(limit));
195 BranchOrBacktrack(
greater, on_greater);
199 void RegExpMacroAssemblerX64::CheckAtStart(Label* on_at_start) {
202 __ cmpl(Operand(
rbp, kStartIndex), Immediate(0));
203 BranchOrBacktrack(
not_equal, ¬_at_start);
206 __ cmpq(
rax, Operand(
rbp, kInputStart));
207 BranchOrBacktrack(
equal, on_at_start);
208 __ bind(¬_at_start);
// Branch to |on_not_at_start| when the current position is NOT the start of
// the input (either the match started at a nonzero index, or we have since
// advanced).
212 void RegExpMacroAssemblerX64::CheckNotAtStart(Label* on_not_at_start) {
// Did we start the match at the start of the string at all?
214 __ cmpl(Operand(
rbp, kStartIndex), Immediate(0));
215 BranchOrBacktrack(
not_equal, on_not_at_start);
// If we did, are we still at the start of the input?
// NOTE(review): the load of rax (address of current character) was dropped
// by the extraction — original lines 216-217 are missing.
218 __ cmpq(
rax, Operand(
rbp, kInputStart));
219 BranchOrBacktrack(
not_equal, on_not_at_start);
// Branch to |on_less| when the current character is less than |limit|.
223 void RegExpMacroAssemblerX64::CheckCharacterLT(
uc16 limit, Label* on_less) {
224 __ cmpl(current_character(), Immediate(limit));
225 BranchOrBacktrack(
less, on_less);
229 void RegExpMacroAssemblerX64::CheckCharacters(Vector<const uc16> str,
232 bool check_end_of_string) {
236 if (mode_ == ASCII) {
237 ASSERT(String::IsAscii(str.start(), str.length()));
240 int byte_length = str.length() * char_size();
241 int byte_offset = cp_offset * char_size();
242 if (check_end_of_string) {
244 __ cmpl(
rdi, Immediate(-(byte_offset + byte_length)));
245 BranchOrBacktrack(
greater, on_failure);
248 if (on_failure ==
NULL) {
250 on_failure = &backtrack_label_;
258 if (mode_ == ASCII) {
260 Immediate(static_cast<int8_t>(str[0])));
266 __ cmpl(
rax, Immediate(static_cast<int32_t>(str[0])));
268 BranchOrBacktrack(
not_equal, on_failure);
271 for (
int i = 1, n = str.length(); i < n; ) {
272 if (mode_ == ASCII) {
274 uint64_t combined_chars =
275 (
static_cast<uint64_t
>(str[i + 0]) << 0) ||
276 (
static_cast<uint64_t
>(str[i + 1]) << 8) ||
277 (
static_cast<uint64_t
>(str[i + 2]) << 16) ||
278 (
static_cast<uint64_t
>(str[i + 3]) << 24) ||
279 (
static_cast<uint64_t
>(str[i + 4]) << 32) ||
280 (
static_cast<uint64_t
>(str[i + 5]) << 40) ||
281 (
static_cast<uint64_t
>(str[i + 6]) << 48) ||
282 (
static_cast<uint64_t
>(str[i + 7]) << 56);
284 __ cmpq(
rax, Operand(
rbx, byte_offset + i));
286 }
else if (i + 4 <= n) {
287 uint32_t combined_chars =
288 (
static_cast<uint32_t
>(str[i + 0]) << 0) ||
289 (
static_cast<uint32_t
>(str[i + 1]) << 8) ||
290 (
static_cast<uint32_t
>(str[i + 2]) << 16) ||
291 (
static_cast<uint32_t
>(str[i + 3]) << 24);
292 __ cmpl(Operand(
rbx, byte_offset + i), Immediate(combined_chars));
295 __ cmpb(Operand(
rbx, byte_offset + i),
296 Immediate(static_cast<int8_t>(str[i])));
302 uint64_t combined_chars = *
reinterpret_cast<const uint64_t*
>(&str[i]);
307 }
else if (i + 2 <= n) {
308 uint32_t combined_chars = *
reinterpret_cast<const uint32_t*
>(&str[i]);
310 Immediate(combined_chars));
315 __ cmpl(
rax, Immediate(str[i]));
319 BranchOrBacktrack(
not_equal, on_failure);
// Optimization for greedy loops: if the position on top of the backtrack
// stack equals the current position, pop it and branch to |on_equal| instead
// of looping without progress.
324 void RegExpMacroAssemblerX64::CheckGreedyLoop(Label* on_equal) {
326 __ cmpl(
rdi, Operand(backtrack_stackpointer(), 0));
330 __ bind(&fallthrough);
// Case-insensitive back-reference check: compare the capture in registers
// [start_reg, start_reg+1] against the input at the current position,
// branching to |on_no_match| on mismatch.  ASCII uses an inline
// lowercase-and-compare loop; UC16 calls out to a C helper.
// NOTE(review): many interleaved source lines are missing from this
// extraction; comments describe only what is visible.
334 void RegExpMacroAssemblerX64::CheckNotBackReferenceIgnoreCase(
336 Label* on_no_match) {
// rdx = capture start offset, rbx = capture end offset.
338 __ movq(
rdx, register_location(start_reg));
339 __ movq(
rbx, register_location(start_reg + 1));
342 BranchOrBacktrack(
greater, on_no_match);
364 if (mode_ == ASCII) {
365 Label loop_increment;
366 if (on_no_match ==
NULL) {
367 on_no_match = &backtrack_label_;
// Inline loop: load a character from each string, lowercase both by
// setting bit 0x20, and accept only if the result is in ['a','z'].
380 __ movzxbl(
rdx, Operand(
r9, 0));
390 __ or_(
rax, Immediate(0x20));
391 __ or_(
rdx, Immediate(0x20));
394 __ subb(
rax, Immediate(
'a'));
395 __ cmpb(
rax, Immediate(
'z' -
'a'));
398 __ bind(&loop_increment);
// Advance both string pointers by one byte.
400 __ addq(
r11, Immediate(1));
401 __ addq(
r9, Immediate(1));
// UC16 path: call the runtime case-insensitive comparison.  The backtrack
// stackpointer is caller-saved across the C call, so preserve it.
417 __ push(backtrack_stackpointer());
419 static const int num_arguments = 4;
420 __ PrepareCallCFunction(num_arguments);
// Windows x64 passes the 4th argument in r9; System V AMD64 uses rcx.
435 __ LoadAddress(
r9, ExternalReference::isolate_address());
436 #else // AMD64 calling convention
446 __ LoadAddress(
rcx, ExternalReference::isolate_address());
451 AllowExternalCallThatCantCauseGC scope(&masm_);
452 ExternalReference compare =
453 ExternalReference::re_case_insensitive_compare_uc16(masm_.isolate());
454 __ CallCFunction(compare, num_arguments);
// Restore the code object pointer and backtrack stack after the call.
458 __ Move(code_object_pointer(), masm_.CodeObject());
459 __ pop(backtrack_stackpointer());
// A zero return from the helper means the strings did not match.
467 BranchOrBacktrack(
zero, on_no_match);
472 __ bind(&fallthrough);
// Case-sensitive back-reference check: compare the capture in registers
// [start_reg, start_reg+1] against the input at the current position,
// branching to |on_no_match| on mismatch.
// NOTE(review): interleaved lines missing from this extraction.
476 void RegExpMacroAssemblerX64::CheckNotBackReference(
478 Label* on_no_match) {
// rdx = capture start, rax = capture end (offsets from input end).
482 __ movq(
rdx, register_location(start_reg));
483 __ movq(
rax, register_location(start_reg + 1));
501 BranchOrBacktrack(
greater, on_no_match);
515 if (mode_ == ASCII) {
523 BranchOrBacktrack(
not_equal, on_no_match);
// Advance both compare pointers by one character.
525 __ addq(
rbx, Immediate(char_size()));
526 __ addq(
rdx, Immediate(char_size()));
536 __ bind(&fallthrough);
// Branch to |on_not_equal| when the current character differs from |c|.
540 void RegExpMacroAssemblerX64::CheckNotCharacter(uint32_t c,
541 Label* on_not_equal) {
542 __ cmpl(current_character(), Immediate(c));
543 BranchOrBacktrack(
not_equal, on_not_equal);
// Branch to |on_equal| when (current_character() & mask) == c.
// A testl fast path (visible at 551) handles c == 0; otherwise mask into
// rax and compare.
547 void RegExpMacroAssemblerX64::CheckCharacterAfterAnd(uint32_t c,
551 __ testl(current_character(), Immediate(mask));
553 __ movl(
rax, Immediate(mask));
554 __ and_(
rax, current_character());
555 __ cmpl(
rax, Immediate(c));
557 BranchOrBacktrack(
equal, on_equal);
// Branch to |on_not_equal| when (current_character() & mask) != c.
561 void RegExpMacroAssemblerX64::CheckNotCharacterAfterAnd(uint32_t c,
563 Label* on_not_equal) {
565 __ testl(current_character(), Immediate(mask));
567 __ movl(
rax, Immediate(mask));
568 __ and_(
rax, current_character());
569 __ cmpl(
rax, Immediate(c));
571 BranchOrBacktrack(
not_equal, on_not_equal);
// Branch to |on_not_equal| when ((current_character() - minus) & mask) != c.
575 void RegExpMacroAssemblerX64::CheckNotCharacterAfterMinusAnd(
579 Label* on_not_equal) {
580 ASSERT(minus < String::kMaxUtf16CodeUnit);
581 __ lea(
rax, Operand(current_character(), -minus));
582 __ and_(
rax, Immediate(mask));
583 __ cmpl(
rax, Immediate(c));
584 BranchOrBacktrack(
not_equal, on_not_equal);
// Branch to |on_in_range| when from <= current_character() <= to.
// Uses the classic unsigned-subtract trick: (c - from) <= (to - from).
588 void RegExpMacroAssemblerX64::CheckCharacterInRange(
591 Label* on_in_range) {
592 __ leal(
rax, Operand(current_character(), -from));
593 __ cmpl(
rax, Immediate(to - from));
// Branch to |on_not_in_range| when the current character is outside
// [from, to] — same trick, inverted (above = unsigned greater).
598 void RegExpMacroAssemblerX64::CheckCharacterNotInRange(
601 Label* on_not_in_range) {
602 __ leal(
rax, Operand(current_character(), -from));
603 __ cmpl(
rax, Immediate(to - from));
604 BranchOrBacktrack(
above, on_not_in_range);
// Branch to |on_bit_set| when the byte in |table| indexed by the current
// character (masked to the table size) is non-zero.
608 void RegExpMacroAssemblerX64::CheckBitInTable(
609 Handle<ByteArray> table,
// For ASCII with a character range below the table size, the character can
// index directly; otherwise mask it into rbx first.
612 Register index = current_character();
614 __ movq(
rbx, current_character());
615 __ and_(
rbx, Immediate(kTableMask));
620 BranchOrBacktrack(
not_equal, on_bit_set);
// Emit specialized checks for common character classes (\s, \S, \d, \D,
// '.', \w, \W, ...), branching to |on_no_match| on failure.  Returns true
// if |type| was handled inline; false means the caller must fall back to a
// generic character-class check.  The dispatch switch on |type| is largely
// missing from this extraction; comments annotate the visible cases.
624 bool RegExpMacroAssemblerX64::CheckSpecialCharacterClass(
uc16 type,
625 Label* on_no_match) {
// \s (whitespace): accept ' ' directly, then the range '\t'..'\r'.
633 if (mode_ == ASCII) {
636 __ cmpl(current_character(), Immediate(
' '));
639 __ lea(
rax, Operand(current_character(), -
'\t'));
640 __ cmpl(
rax, Immediate(
'\r' -
'\t'));
641 BranchOrBacktrack(
above, on_no_match);
// \S (non-whitespace): reject ' ' and the '\t'..'\r' range.
648 if (mode_ == ASCII) {
650 __ cmpl(current_character(), Immediate(
' '));
651 BranchOrBacktrack(
equal, on_no_match);
652 __ lea(
rax, Operand(current_character(), -
'\t'));
653 __ cmpl(
rax, Immediate(
'\r' -
'\t'));
// \d (digit): range check '0'..'9' via unsigned subtract.
660 __ lea(
rax, Operand(current_character(), -
'0'));
661 __ cmpl(
rax, Immediate(
'9' -
'0'));
662 BranchOrBacktrack(
above, on_no_match);
// \D (non-digit): same range check, inverted branch condition.
666 __ lea(
rax, Operand(current_character(), -
'0'));
667 __ cmpl(
rax, Immediate(
'9' -
'0'));
// '.' (any but line terminators \n \r \u2028 \u2029): XOR with 0x01 folds
// \n (0x0a) and \r (0x0d) into 0x0b and 0x0c, then one range check.
672 __ movl(
rax, current_character());
673 __ xor_(
rax, Immediate(0x01));
675 __ subl(
rax, Immediate(0x0b));
676 __ cmpl(
rax, Immediate(0x0c - 0x0b));
// UC16 also excludes \u2028/\u2029; rax already has 0x0b subtracted.
682 __ subl(
rax, Immediate(0x2028 - 0x0b));
683 __ cmpl(
rax, Immediate(0x2029 - 0x2028));
// '\n'-class variant (matching line terminators): same folding trick.
690 __ movl(
rax, current_character());
691 __ xor_(
rax, Immediate(0x01));
693 __ subl(
rax, Immediate(0x0b));
694 __ cmpl(
rax, Immediate(0x0c - 0x0b));
695 if (mode_ == ASCII) {
696 BranchOrBacktrack(
above, on_no_match);
703 __ subl(
rax, Immediate(0x2028 - 0x0b));
704 __ cmpl(
rax, Immediate(0x2029 - 0x2028));
705 BranchOrBacktrack(
above, on_no_match);
// \w (word character): characters above 'z' can't match; otherwise index
// the precomputed word-character bitmap.
711 if (mode_ != ASCII) {
713 __ cmpl(current_character(), Immediate(
'z'));
714 BranchOrBacktrack(
above, on_no_match);
716 __ movq(
rbx, ExternalReference::re_word_character_map());
719 current_character());
720 BranchOrBacktrack(
zero, on_no_match);
// \W (non-word character): same table lookup, inverted condition.
725 if (mode_ != ASCII) {
727 __ cmpl(current_character(), Immediate(
'z'));
730 __ movq(
rbx, ExternalReference::re_word_character_map());
733 current_character());
734 BranchOrBacktrack(
not_zero, on_no_match);
735 if (mode_ != ASCII) {
// --- Fail() fragment (its definition line was dropped by the extraction):
// set the FAILURE result code in rax and jump to the common exit.
754 __ Set(
rax, FAILURE);
756 __ jmp(&exit_label_);
// Finalize code generation: emit the native entry prologue/epilogue around
// the already-emitted regexp body, apply code-relative fixups, and wrap the
// buffer in a Code object.  Large parts of this function (prologue register
// saves, stack-limit check, exit paths) are missing from this extraction;
// comments annotate only the visible lines.
760 Handle<HeapObject> RegExpMacroAssemblerX64::GetCode(Handle<String> source) {
// entry_label_ is the target of the jump emitted in the constructor.
765 __ bind(&entry_label_);
// Frame is built by hand, so tell the assembler not to expect one.
769 FrameScope scope(&masm_, StackFrame::MANUAL);
// Spill the (calling-convention-dependent) incoming arguments into named
// frame slots: input string, start index, input start/end pointers.
779 __ movq(Operand(
rbp, kInputString),
rcx);
780 __ movq(Operand(
rbp, kStartIndex),
rdx);
781 __ movq(Operand(
rbp, kInputStart),
r8);
782 __ movq(Operand(
rbp, kInputEnd),
r9);
// Initialize success-counter and string-start-minus-one frame slots.
806 __ push(Immediate(0));
807 __ push(Immediate(0));
// Check the C stack limit before running generated code.
810 Label stack_limit_hit;
813 ExternalReference stack_limit =
814 ExternalReference::address_of_stack_limit(masm_.isolate());
// Not enough headroom and no interrupt pending: bail with EXCEPTION.
826 __ Set(
rax, EXCEPTION);
829 __ bind(&stack_limit_hit);
830 __ Move(code_object_pointer(), masm_.CodeObject());
831 CallCheckStackGuardState();
// Load input pointers: rsi = end of input, rdi = start (then converted to a
// negative offset from the end).
841 __ movq(
rsi, Operand(
rbp, kInputEnd));
843 __ movq(
rdi, Operand(
rbp, kInputStart));
848 __ movq(
rbx, Operand(
rbp, kStartIndex));
// Store the address of the character one before the string start; used as
// the "no capture" sentinel value for registers.
857 __ movq(Operand(
rbp, kInputStartMinusOne),
rax);
// Touch each register-page of the frame so the OS commits the stack pages
// before we write registers out of order.
862 const int kPageSize = 4096;
864 for (
int i = num_saved_registers_ + kRegistersPerPage - 1;
866 i += kRegistersPerPage) {
867 __ movq(register_location(i),
rax);
872 __ Move(code_object_pointer(), masm_.CodeObject());
// Load the previous character: '\n' if at the start of input (so ^ and \b
// behave as if preceded by a newline), otherwise the real predecessor.
874 Label load_char_start_regexp, start_regexp;
876 __ cmpl(Operand(
rbp, kStartIndex), Immediate(0));
877 __ j(
not_equal, &load_char_start_regexp, Label::kNear);
878 __ Set(current_character(),
'\n');
879 __ jmp(&start_regexp, Label::kNear);
882 __ bind(&load_char_start_regexp);
884 LoadCurrentCharacterUnchecked(-1, 1);
885 __ bind(&start_regexp);
// Initialize on-frame capture registers to the non-match sentinel.  For
// more than 8 registers use a store loop; otherwise unrolled stores.
888 if (num_saved_registers_ > 0) {
892 if (num_saved_registers_ > 8) {
893 __ Set(
rcx, kRegisterZero);
899 Immediate(kRegisterZero - num_saved_registers_ *
kPointerSize));
902 for (
int i = 0; i < num_saved_registers_; i++) {
903 __ movq(register_location(i),
rax);
// Initialize the backtrack stack pointer and enter the regexp body.
909 __ movq(backtrack_stackpointer(), Operand(
rbp, kStackHighEnd));
911 __ jmp(&start_label_);
// Exit code on success: copy captures to the output buffer.
914 if (success_label_.is_linked()) {
916 __ bind(&success_label_);
917 if (num_saved_registers_ > 0) {
919 __ movq(
rdx, Operand(
rbp, kStartIndex));
920 __ movq(
rbx, Operand(
rbp, kRegisterOutput));
921 __ movq(
rcx, Operand(
rbp, kInputEnd));
922 __ subq(
rcx, Operand(
rbp, kInputStart));
928 for (
int i = 0; i < num_saved_registers_; i++) {
929 __ movq(
rax, register_location(i));
930 if (i == 0 && global_with_zero_length_check()) {
// UC16 capture offsets are in bytes; halve to get character indices.
936 __ sar(
rax, Immediate(1));
// Global mode: count the match, then decide whether there is room in the
// output buffer for another set of captures; if so, restart the match.
945 __ incq(Operand(
rbp, kSuccessfulCaptures));
948 __ movsxlq(
rcx, Operand(
rbp, kNumOutputRegisters));
949 __ subq(
rcx, Immediate(num_saved_registers_));
// Is there still room for another full capture set?
951 __ cmpq(
rcx, Immediate(num_saved_registers_));
954 __ movq(Operand(
rbp, kNumOutputRegisters),
rcx);
956 __ addq(Operand(
rbp, kRegisterOutput),
957 Immediate(num_saved_registers_ *
kIntSize));
// Reload the sentinel for the next round of register initialization.
960 __ movq(
rax, Operand(
rbp, kInputStartMinusOne));
// Special-case a zero-length match: advance by one character (two bytes in
// UC16) to guarantee progress, exiting if already at the end of input.
962 if (global_with_zero_length_check()) {
970 __ j(
zero, &exit_label_, Label::kNear);
973 __ addq(
rdi, Immediate(2));
979 __ jmp(&load_char_start_regexp);
981 __ movq(
rax, Immediate(SUCCESS));
// Common exit: in global mode return the number of successful captures.
985 __ bind(&exit_label_);
988 __ movq(
rax, Operand(
rbp, kSuccessfulCaptures));
991 __ bind(&return_rax);
// Epilogue: restore callee-saved registers and tear down the frame.
994 __ lea(
rsp, Operand(
rbp, kLastCalleeSaveRegister));
1001 __ movq(
rbx, Operand(
rbp, kBackup_rbx));
// Out-of-line backtrack target (shared by BranchOrBacktrack with NULL).
1010 if (backtrack_label_.is_linked()) {
1011 __ bind(&backtrack_label_);
1015 Label exit_with_exception;
// Out-of-line preemption check: call the stack guard, preserving the
// backtrack stack pointer across the C call.
1018 if (check_preempt_label_.is_linked()) {
1019 SafeCallTarget(&check_preempt_label_);
1021 __ push(backtrack_stackpointer());
1024 CallCheckStackGuardState();
1031 __ Move(code_object_pointer(), masm_.CodeObject());
1033 __ pop(backtrack_stackpointer());
// The string may have moved during GC; reload the end-of-input pointer.
1035 __ movq(
rsi, Operand(
rbp, kInputEnd));
// Out-of-line backtrack-stack overflow handler: call GrowStack.
1040 if (stack_overflow_label_.is_linked()) {
1041 SafeCallTarget(&stack_overflow_label_);
1053 static const int num_arguments = 3;
1054 __ PrepareCallCFunction(num_arguments);
// Argument registers differ per calling convention (Win64 vs System V).
1058 __ lea(
rdx, Operand(
rbp, kStackHighEnd));
1059 __ LoadAddress(
r8, ExternalReference::isolate_address());
1062 __ movq(
rdi, backtrack_stackpointer());
1063 __ lea(
rsi, Operand(
rbp, kStackHighEnd));
1064 __ LoadAddress(
rdx, ExternalReference::isolate_address());
1066 ExternalReference grow_stack =
1067 ExternalReference::re_grow_stack(masm_.isolate());
1068 __ CallCFunction(grow_stack, num_arguments);
// A NULL return means the stack could not grow: exit with exception.
1072 __ j(
equal, &exit_with_exception);
// Otherwise use the (possibly relocated) returned stack pointer.
1074 __ movq(backtrack_stackpointer(),
rax);
1076 __ Move(code_object_pointer(), masm_.CodeObject());
1084 if (exit_with_exception.is_linked()) {
1086 __ bind(&exit_with_exception);
1088 __ Set(
rax, EXCEPTION);
1089 __ jmp(&return_rax);
// Patch recorded code-relative constants now that code layout is final,
// then materialize the Code object and report it to the profiler.
1092 FixupCodeRelativePositions();
1095 masm_.GetCode(&code_desc);
1097 Handle<Code>
code = isolate->factory()->NewCode(
1098 code_desc, Code::ComputeFlags(Code::REGEXP),
1099 masm_.CodeObject());
1100 PROFILE(isolate, RegExpCodeCreateEvent(*code, *source));
1101 return Handle<HeapObject>::cast(code);
// Unconditional jump to |to| (body dropped by extraction).
1105 void RegExpMacroAssemblerX64::GoTo(Label* to) {
// Branch to |if_ge| when register |reg| >= |comparand|.
1110 void RegExpMacroAssemblerX64::IfRegisterGE(
int reg,
1113 __ cmpq(register_location(reg), Immediate(comparand));
// Branch to |if_lt| when register |reg| < |comparand|.
1118 void RegExpMacroAssemblerX64::IfRegisterLT(
int reg,
1121 __ cmpq(register_location(reg), Immediate(comparand));
1122 BranchOrBacktrack(
less, if_lt);
// Branch to |if_eq| when register |reg| equals the current position (rdi).
1126 void RegExpMacroAssemblerX64::IfRegisterEqPos(
int reg,
1128 __ cmpq(
rdi, register_location(reg));
1129 BranchOrBacktrack(
equal, if_eq);
// Identify this backend to the regexp engine.
1133 RegExpMacroAssembler::IrregexpImplementation
1134 RegExpMacroAssemblerX64::Implementation() {
1135 return kX64Implementation;
// Load |characters| characters starting at cp_offset into
// current_character(), first bounds-checking against the end of input
// (branching to |on_end_of_input| when out of range).
1139 void RegExpMacroAssemblerX64::LoadCurrentCharacter(
int cp_offset,
1140 Label* on_end_of_input,
1144 ASSERT(cp_offset < (1<<30));
// Check the furthest character that will be read.
1146 CheckPosition(cp_offset + characters - 1, on_end_of_input);
1148 LoadCurrentCharacterUnchecked(cp_offset, characters);
// Pop the current position from the backtrack stack (body dropped).
1152 void RegExpMacroAssemblerX64::PopCurrentPosition() {
// Pop a value from the backtrack stack into register |register_index|.
1157 void RegExpMacroAssemblerX64::PopRegister(
int register_index) {
1159 __ movq(register_location(register_index),
rax);
// Push a code-relative backtrack target onto the backtrack stack (body
// dropped by extraction; see Push(Label*)).
1163 void RegExpMacroAssemblerX64::PushBacktrack(Label* label) {
// Push the current position onto the backtrack stack (body dropped).
1169 void RegExpMacroAssemblerX64::PushCurrentPosition() {
// Push register |register_index| onto the backtrack stack, optionally
// checking the regexp stack limit afterwards.
1174 void RegExpMacroAssemblerX64::PushRegister(
int register_index,
1175 StackCheckFlag check_stack_limit) {
1176 __ movq(
rax, register_location(register_index));
1178 if (check_stack_limit) CheckStackLimit();
// Set the current position (rdi) from register |reg|.
1182 void RegExpMacroAssemblerX64::ReadCurrentPositionFromRegister(
int reg) {
1183 __ movq(
rdi, register_location(reg));
// Restore the backtrack stack pointer from register |reg|; the register
// holds an offset relative to the stack high end, so rebase it.
1187 void RegExpMacroAssemblerX64::ReadStackPointerFromRegister(
int reg) {
1188 __ movq(backtrack_stackpointer(), register_location(reg));
1189 __ addq(backtrack_stackpointer(), Operand(
rbp, kStackHighEnd));
// Set the current position to |by| characters from the end of input, unless
// that would rewind past the current position (then leave it alone).
1193 void RegExpMacroAssemblerX64::SetCurrentPositionFromEnd(
int by) {
1194 Label after_position;
1195 __ cmpq(
rdi, Immediate(-by * char_size()));
1197 __ movq(
rdi, Immediate(-by * char_size()));
// On a position change the cached previous character must be reloaded.
1201 LoadCurrentCharacterUnchecked(-1, 1);
1202 __ bind(&after_position);
// Store the constant |to| into register |register_index|.  Only scratch
// (non-capture) registers may be set this way.
1206 void RegExpMacroAssemblerX64::SetRegister(
int register_index,
int to) {
1207 ASSERT(register_index >= num_saved_registers_);
1208 __ movq(register_location(register_index), Immediate(to));
// Jump to the success exit; the return value (whether the engine should
// restart for global regexps) comes from the dropped trailing statement.
1212 bool RegExpMacroAssemblerX64::Succeed() {
1213 __ jmp(&success_label_);
// Store the current position (plus cp_offset characters) into register
// |reg|.  cp_offset == 0 avoids the lea.
1218 void RegExpMacroAssemblerX64::WriteCurrentPositionToRegister(
int reg,
1220 if (cp_offset == 0) {
1221 __ movq(register_location(reg),
rdi);
1223 __ lea(
rax, Operand(
rdi, cp_offset * char_size()));
1224 __ movq(register_location(reg),
rax);
// Reset registers reg_from..reg_to (inclusive) to the non-match sentinel
// (kInputStartMinusOne).
1229 void RegExpMacroAssemblerX64::ClearRegisters(
int reg_from,
int reg_to) {
1230 ASSERT(reg_from <= reg_to);
1231 __ movq(
rax, Operand(
rbp, kInputStartMinusOne));
1232 for (
int reg = reg_from; reg <= reg_to; reg++) {
1233 __ movq(register_location(reg),
rax);
// Save the backtrack stack pointer into register |reg| as an offset
// relative to the stack high end (so it survives stack relocation).
1238 void RegExpMacroAssemblerX64::WriteStackPointerToRegister(
int reg) {
1239 __ movq(
rax, backtrack_stackpointer());
1240 __ subq(
rax, Operand(
rbp, kStackHighEnd));
1241 __ movq(register_location(reg),
rax);
// Emit a call to the C function CheckStackGuardState, passing the return
// address slot, the code object, and the frame pointer (argument registers
// differ between the Win64 and System V conventions).
1247 void RegExpMacroAssemblerX64::CallCheckStackGuardState() {
1250 static const int num_arguments = 3;
1251 __ PrepareCallCFunction(num_arguments);
// Win64: third argument in rdx.
1254 __ movq(
rdx, code_object_pointer());
// System V AMD64: second argument in rsi.
1264 __ movq(
rsi, code_object_pointer());
1269 ExternalReference stack_check =
1270 ExternalReference::re_check_stack_guard_state(masm_.isolate());
1271 __ CallCFunction(stack_check, num_arguments);
// Helper: typed l-value access to a 32-bit-aligned slot in the native
// regexp frame at |re_frame| + |frame_offset|.
1276 template <
typename T>
1277 static T& frame_entry(
Address re_frame,
int frame_offset) {
1278 return reinterpret_cast<T&
>(Memory::int32_at(re_frame + frame_offset));
// Called from generated code when the C stack guard is triggered.  Handles
// interrupts/GC, and if the subject string or code object moved, patches
// the saved frame (input pointers, return address) so execution can resume.
// Return value conventions (0 = continue, nonzero = retry/exception) come
// from lines dropped by this extraction.
1282 int RegExpMacroAssemblerX64::CheckStackGuardState(
Address* return_address,
1285 Isolate* isolate = frame_entry<Isolate*>(re_frame, kIsolate);
1286 ASSERT(isolate == Isolate::Current());
1287 if (isolate->stack_guard()->IsStackOverflow()) {
1288 isolate->StackOverflow();
// Direct calls from JS have no handler frame; the caller must retry.
1297 if (frame_entry<int>(re_frame, kDirectCall) == 1) {
1302 HandleScope handles(isolate);
1303 Handle<Code> code_handle(re_code);
1305 Handle<String> subject(frame_entry<String*>(re_frame, kInputString));
// Remember the representation before the interrupt may cause a GC.
1308 bool is_ascii = subject->IsAsciiRepresentationUnderneath();
1310 ASSERT(re_code->instruction_start() <= *return_address);
1311 ASSERT(*return_address <=
1312 re_code->instruction_start() + re_code->instruction_size());
1314 MaybeObject* result = Execution::HandleStackGuardInterrupt(isolate);
// If the code object moved during GC, relocate the saved return address.
1316 if (*code_handle != re_code) {
1317 intptr_t delta = code_handle->address() - re_code->address();
1319 *return_address += delta;
1322 if (result->IsException()) {
// Dig out the flat underlying string the generated code reads from.
1326 Handle<String> subject_tmp = subject;
1327 int slice_offset = 0;
1330 if (StringShape(*subject_tmp).IsCons()) {
1331 subject_tmp = Handle<String>(ConsString::cast(*subject_tmp)->first());
1332 }
else if (StringShape(*subject_tmp).IsSliced()) {
1333 SlicedString* slice = SlicedString::cast(*subject_tmp);
1334 subject_tmp = Handle<String>(slice->parent());
1335 slice_offset = slice->offset();
// A representation change (e.g. external string finalized) means the
// generated code can no longer read the characters: bail out.
1339 if (subject_tmp->IsAsciiRepresentation() != is_ascii) {
1350 ASSERT(StringShape(*subject_tmp).IsSequential() ||
1351 StringShape(*subject_tmp).IsExternal());
1354 const byte* start_address = frame_entry<const byte*>(re_frame, kInputStart);
// If GC moved the character payload, rewrite the frame's input pointers.
1358 int start_index = frame_entry<int>(re_frame, kStartIndex);
1359 const byte* new_address = StringCharacterPosition(*subject_tmp,
1360 start_index + slice_offset);
1362 if (start_address != new_address) {
1365 const byte* end_address = frame_entry<const byte* >(re_frame, kInputEnd);
1366 int byte_length =
static_cast<int>(end_address - start_address);
1367 frame_entry<const String*>(re_frame, kInputString) = *subject;
1368 frame_entry<const byte*>(re_frame, kInputStart) = new_address;
1369 frame_entry<const byte*>(re_frame, kInputEnd) = new_address + byte_length;
1370 }
// Payload unmoved but the string handle itself changed (e.g. the subject
// was a cons that got flattened): refresh the stored String*.
else if (frame_entry<const String*>(re_frame, kInputString) != *subject) {
1374 frame_entry<const String*>(re_frame, kInputString) = *subject;
// Frame-slot operand for register |register_index|; grows the tracked
// register count so GetCode reserves enough frame space.
1381 Operand RegExpMacroAssemblerX64::register_location(
int register_index) {
1382 ASSERT(register_index < (1<<30));
1383 if (num_registers_ <= register_index) {
1384 num_registers_ = register_index + 1;
// Branch to |on_outside_input| when reading cp_offset characters ahead
// would run past the end of input (rdi is a negative offset from the end).
1390 void RegExpMacroAssemblerX64::CheckPosition(
int cp_offset,
1391 Label* on_outside_input) {
1392 __ cmpl(
rdi, Immediate(-cp_offset * char_size()));
// Conditional branch helper: |condition| < 0 means unconditional; a NULL
// target means "backtrack" (jump to the shared backtrack label).
1397 void RegExpMacroAssemblerX64::BranchOrBacktrack(
Condition condition,
1399 if (condition < 0) {
1408 __ j(condition, &backtrack_label_);
1411 __ j(condition, to);
// Call an out-of-line helper (body dropped); paired with SafeCallTarget /
// SafeReturn, which keep the on-stack return address code-relative so a GC
// that moves the code object cannot invalidate it.
1415 void RegExpMacroAssemblerX64::SafeCall(Label* to) {
// At the callee entry: convert the absolute return address on the machine
// stack into a code-relative offset.
1420 void RegExpMacroAssemblerX64::SafeCallTarget(Label* label) {
1422 __ subq(Operand(
rsp, 0), code_object_pointer());
// Before returning: convert the code-relative offset back to an absolute
// return address.
1426 void RegExpMacroAssemblerX64::SafeReturn() {
1427 __ addq(Operand(
rsp, 0), code_object_pointer());
// Push a register onto the backtrack stack.  Backtrack stack entries are
// 32-bit (kIntSize) values.
1432 void RegExpMacroAssemblerX64::Push(Register source) {
1433 ASSERT(!source.is(backtrack_stackpointer()));
1435 __ subq(backtrack_stackpointer(), Immediate(
kIntSize));
1436 __ movl(Operand(backtrack_stackpointer(), 0), source);
// Push an immediate onto the backtrack stack.
1440 void RegExpMacroAssemblerX64::Push(Immediate value) {
1442 __ subq(backtrack_stackpointer(), Immediate(
kIntSize));
1443 __ movl(Operand(backtrack_stackpointer(), 0), value);
// Rewrite each recorded label-constant in the emitted code from an absolute
// position into a code-relative value, now that code layout is final.
1447 void RegExpMacroAssemblerX64::FixupCodeRelativePositions() {
1448 for (
int i = 0, n = code_relative_fixup_positions_.length(); i < n; i++) {
1449 int position = code_relative_fixup_positions_[i];
// The 32-bit value to patch sits immediately before the recorded position.
1453 int patch_position = position -
kIntSize;
1454 int offset = masm_.long_at(patch_position);
1455 masm_.long_at_put(patch_position,
1461 code_relative_fixup_positions_.Clear();
// Push a backtrack target label onto the backtrack stack as a 32-bit
// code-relative value, recording the position for later fixup.
1465 void RegExpMacroAssemblerX64::Push(Label* backtrack_target) {
1466 __ subq(backtrack_stackpointer(), Immediate(kIntSize));
1467 __ movl(Operand(backtrack_stackpointer(), 0), backtrack_target);
1468 MarkPositionForCodeRelativeFixup();
// Pop a 32-bit entry (sign-extended to 64 bits) from the backtrack stack.
1472 void RegExpMacroAssemblerX64::Pop(Register target) {
1473 ASSERT(!target.is(backtrack_stackpointer()));
1474 __ movsxlq(target, Operand(backtrack_stackpointer(), 0));
1476 __ addq(backtrack_stackpointer(), Immediate(kIntSize));
// Discard the top backtrack stack entry.
1480 void RegExpMacroAssemblerX64::Drop() {
1481 __ addq(backtrack_stackpointer(), Immediate(kIntSize));
// Poll the isolate's C stack limit; if rsp has dipped below it, call the
// out-of-line preemption handler (which runs the stack guard).
1485 void RegExpMacroAssemblerX64::CheckPreemption() {
1488 ExternalReference stack_limit =
1489 ExternalReference::address_of_stack_limit(masm_.isolate());
1490 __ load_rax(stack_limit);
1494 SafeCall(&check_preempt_label_);
1496 __ bind(&no_preempt);
// Poll the regexp (backtrack) stack limit; if exceeded, call the
// out-of-line handler that grows the backtrack stack.
1500 void RegExpMacroAssemblerX64::CheckStackLimit() {
1501 Label no_stack_overflow;
1502 ExternalReference stack_limit =
1503 ExternalReference::address_of_regexp_stack_limit(masm_.isolate());
1504 __ load_rax(stack_limit);
1505 __ cmpq(backtrack_stackpointer(),
rax);
1506 __ j(
above, &no_stack_overflow);
1508 SafeCall(&stack_overflow_label_);
1510 __ bind(&no_stack_overflow);
// Load |characters| characters at cp_offset into current_character()
// without bounds checking.  Addressing is (rsi = input end) + (rdi =
// negative offset) + cp_offset scaled by character size.
1514 void RegExpMacroAssemblerX64::LoadCurrentCharacterUnchecked(
int cp_offset,
1516 if (mode_ == ASCII) {
// 4 ASCII chars -> 32-bit load (dropped line); 2 -> zero-extended 16-bit;
// 1 -> zero-extended 8-bit.
1517 if (characters == 4) {
1519 }
else if (characters == 2) {
1520 __ movzxwl(current_character(), Operand(
rsi,
rdi,
times_1, cp_offset));
1523 __ movzxbl(current_character(), Operand(
rsi,
rdi,
times_1, cp_offset));
// UC16: 2 chars -> 32-bit load; 1 char -> zero-extended 16-bit load.
1527 if (characters == 2) {
1528 __ movl(current_character(),
1532 __ movzxwl(current_character(),
1540 #endif // V8_INTERPRETED_REGEXP
1544 #endif // V8_TARGET_ARCH_X64
v8::Handle< v8::Value > Fail(const v8::Arguments &args)
#define ASSERT(condition)
#define PROFILE(isolate, Call)
RegExpMacroAssemblerX64(Mode mode, int registers_to_save, Zone *zone)
Operand FieldOperand(Register object, int offset)
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra code(assertions) for debugging") DEFINE_bool(code_comments
#define T(name, string, precedence)
const Register kScratchRegister
#define ASSERT_EQ(v1, v2)
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 enable use of VFP2 instructions if available enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of MIPS FPU instructions if NULL
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 enable use of VFP2 instructions if available enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of MIPS FPU instructions if NULL
#define STATIC_ASSERT(test)
const uc32 kMaxAsciiCharCode