#if V8_TARGET_ARCH_X64

#ifndef V8_INTERPRETED_REGEXP

#define __ ACCESS_MASM((&masm_))
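// Register and helper conventions, as inferred from the code below (the
// file's original header comment is not part of this excerpt):
// - current_character(): the most recently loaded character(s), set by
//   LoadCurrentCharacterUnchecked and tested by the Check* methods.
// - rdi: current position in the input, kept as a negative byte offset from
//   the end of the string, so Operand(rsi, rdi, times_1, 0) addresses the
//   current character.
// - rsi: end of the input (one past the last character).
// - rbp: frame pointer; arguments and locals live in Operand(rbp, k...) slots.
// - backtrack_stackpointer(): tip of a separate backtrack stack whose entries
//   are 32-bit values (see Push/Pop/Drop near the end of the file).
// - code_object_pointer(): address of the Code object, used to convert
//   between absolute addresses and code-relative offsets (see SafeCall,
//   SafeReturn and FixupCodeRelativePositions).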
RegExpMacroAssemblerX64::RegExpMacroAssemblerX64(Mode mode,
                                                 int registers_to_save,
                                                 Zone* zone)
    : NativeRegExpMacroAssembler(zone),
      masm_(zone->isolate(), NULL, kRegExpCodeSize),
      no_root_array_scope_(&masm_),
      code_relative_fixup_positions_(4, zone),
      mode_(mode),
      num_registers_(registers_to_save),
      num_saved_registers_(registers_to_save) {
  __ jmp(&entry_label_);   // We'll write the entry code when we know more.
  __ bind(&start_label_);  // And then continue from here.
}
RegExpMacroAssemblerX64::~RegExpMacroAssemblerX64() {
  // Unuse labels (if they're not used, that's fine).
  entry_label_.Unuse();
  start_label_.Unuse();
  success_label_.Unuse();
  backtrack_label_.Unuse();
  exit_label_.Unuse();
  check_preempt_label_.Unuse();
  stack_overflow_label_.Unuse();
}
int RegExpMacroAssemblerX64::stack_limit_slack() {
  return RegExpStack::kStackLimitSlack;
}
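// The current position is a byte offset (held in rdi), so advancing by |by|
// characters adds by * char_size(): one byte per character in ASCII mode,
// two in UC16 mode.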
void RegExpMacroAssemblerX64::AdvanceCurrentPosition(int by) {
  __ addq(rdi, Immediate(by * char_size()));
}
void RegExpMacroAssemblerX64::AdvanceRegister(int reg, int by) {
  ASSERT(reg < num_registers_);
  __ addp(register_location(reg), Immediate(by));
}
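// Backtracking pops a code-relative offset from the backtrack stack, rebases
// it on the code object pointer and jumps there. Storing offsets rather than
// absolute addresses keeps the entries valid if the GC moves the code object
// (see FixupCodeRelativePositions below).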
void RegExpMacroAssemblerX64::Backtrack() {
  CheckPreemption();
  // Pop Code* offset from backtrack stack, add Code* and jump to location.
  Pop(rbx);
  __ addp(rbx, code_object_pointer());
  __ jmp(rbx);
}
void RegExpMacroAssemblerX64::Bind(Label* label) {
  __ bind(label);
}
void RegExpMacroAssemblerX64::CheckCharacter(uint32_t c, Label* on_equal) {
  __ cmpl(current_character(), Immediate(c));
  BranchOrBacktrack(equal, on_equal);
}
void RegExpMacroAssemblerX64::CheckCharacterGT(uc16 limit, Label* on_greater) {
  __ cmpl(current_character(), Immediate(limit));
  BranchOrBacktrack(greater, on_greater);
}
void RegExpMacroAssemblerX64::CheckAtStart(Label* on_at_start) {
  Label not_at_start;
  // Did we start the match at the start of the string at all?
  __ cmpl(Operand(rbp, kStartIndex), Immediate(0));
  BranchOrBacktrack(not_equal, &not_at_start);
  // If we did, are we still at the start of the input?
  __ leap(rax, Operand(rsi, rdi, times_1, 0));
  __ cmpp(rax, Operand(rbp, kInputStart));
  BranchOrBacktrack(equal, on_at_start);
  __ bind(&not_at_start);
}
void RegExpMacroAssemblerX64::CheckNotAtStart(Label* on_not_at_start) {
  // Did we start the match at the start of the string at all?
  __ cmpl(Operand(rbp, kStartIndex), Immediate(0));
  BranchOrBacktrack(not_equal, on_not_at_start);
  // If we did, are we still at the start of the input?
  __ leap(rax, Operand(rsi, rdi, times_1, 0));
  __ cmpp(rax, Operand(rbp, kInputStart));
  BranchOrBacktrack(not_equal, on_not_at_start);
}
void RegExpMacroAssemblerX64::CheckCharacterLT(uc16 limit, Label* on_less) {
  __ cmpl(current_character(), Immediate(limit));
  BranchOrBacktrack(less, on_less);
}
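// CheckGreedyLoop detects a greedy loop that failed to make progress: if the
// position on top of the backtrack stack equals the current position, the
// entry is dropped and control branches to on_equal instead of re-entering
// the loop, which would otherwise never terminate.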
void RegExpMacroAssemblerX64::CheckGreedyLoop(Label* on_equal) {
  Label fallthrough;
  __ cmpl(rdi, Operand(backtrack_stackpointer(), 0));
  __ j(not_equal, &fallthrough);
  Drop();
  BranchOrBacktrack(no_condition, on_equal);
  __ bind(&fallthrough);
}
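// Case-insensitive back-reference check. In ASCII mode the comparison is done
// inline: both characters are OR'ed with 0x20 (which lower-cases letters) and
// then checked to actually be letters: 'a'..'z', or the Latin-1 range
// 224..254 excluding 247 (the division sign). In UC16 mode the work is
// delegated to the C helper re_case_insensitive_compare_uc16 instead.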
void RegExpMacroAssemblerX64::CheckNotBackReferenceIgnoreCase(
    int start_reg, Label* on_no_match) {
  Label fallthrough;
  __ movq(rdx, register_location(start_reg));      // Offset of capture start.
  __ movq(rbx, register_location(start_reg + 1));  // Offset of capture end.
  BranchOrBacktrack(greater, on_no_match);

  if (mode_ == ASCII) {
    Label loop_increment;
    if (on_no_match == NULL) {
      on_no_match = &backtrack_label_;
    }
    __ movzxbl(rdx, Operand(r9, 0));
    // Try case-insensitive match (converting letters to lower-case).
    __ orp(rax, Immediate(0x20));  // Convert match character to lower-case.
    __ orp(rdx, Immediate(0x20));
    // Is the lower-cased match character really a letter?
    __ subb(rax, Immediate('a'));
    __ cmpb(rax, Immediate('z' - 'a'));
    // Latin-1: check for values in range [224,254] but not 247.
    __ subb(rax, Immediate(224 - 'a'));
    __ cmpb(rax, Immediate(254 - 224));
    __ cmpb(rax, Immediate(247 - 224));  // Check for 247.
    __ bind(&loop_increment);
    // Increment pointers into match and capture strings.
    __ addp(r11, Immediate(1));
    __ addp(r9, Immediate(1));
  } else {
    // Save important/volatile registers before calling C function.
    __ pushq(backtrack_stackpointer());

    static const int num_arguments = 4;
    __ PrepareCallCFunction(num_arguments);
#ifdef _WIN64
    __ LoadAddress(r9, ExternalReference::isolate_address(isolate()));
#else  // AMD64 calling convention
    __ LoadAddress(rcx, ExternalReference::isolate_address(isolate()));
#endif

    {
      AllowExternalCallThatCantCauseGC scope(&masm_);
      ExternalReference compare =
          ExternalReference::re_case_insensitive_compare_uc16(isolate());
      __ CallCFunction(compare, num_arguments);
    }

    // Restore original values before reacting on result value.
    __ Move(code_object_pointer(), masm_.CodeObject());
    __ popq(backtrack_stackpointer());

    // The C function returns non-zero for success, zero for failure.
    BranchOrBacktrack(zero, on_no_match);
  }
  __ bind(&fallthrough);
}
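// Case-sensitive back-reference check: compares the captured substring with
// the input at the current position one character at a time, branching to
// on_no_match at the first difference.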
void RegExpMacroAssemblerX64::CheckNotBackReference(
    int start_reg, Label* on_no_match) {
  Label fallthrough;
  // Find length of back-referenced capture.
  __ movq(rdx, register_location(start_reg));
  __ movq(rax, register_location(start_reg + 1));
  BranchOrBacktrack(greater, on_no_match);

  // Compare capture and input, one character at a time.
  if (mode_ == ASCII) {
    __ movzxbl(rax, Operand(rdx, 0));
    __ cmpb(rax, Operand(rbx, 0));
  }
  BranchOrBacktrack(not_equal, on_no_match);
  // Increment pointers into capture and match strings.
  __ addp(rbx, Immediate(char_size()));
  __ addp(rdx, Immediate(char_size()));
  __ bind(&fallthrough);
}
void RegExpMacroAssemblerX64::CheckNotCharacter(uint32_t c,
                                                Label* on_not_equal) {
  __ cmpl(current_character(), Immediate(c));
  BranchOrBacktrack(not_equal, on_not_equal);
}
void RegExpMacroAssemblerX64::CheckCharacterAfterAnd(uint32_t c,
                                                     uint32_t mask,
                                                     Label* on_equal) {
  if (c == 0) {
    __ testl(current_character(), Immediate(mask));
  } else {
    __ movl(rax, Immediate(mask));
    __ andp(rax, current_character());
    __ cmpl(rax, Immediate(c));
  }
  BranchOrBacktrack(equal, on_equal);
}
void RegExpMacroAssemblerX64::CheckNotCharacterAfterAnd(uint32_t c,
                                                        uint32_t mask,
                                                        Label* on_not_equal) {
  if (c == 0) {
    __ testl(current_character(), Immediate(mask));
  } else {
    __ movl(rax, Immediate(mask));
    __ andp(rax, current_character());
    __ cmpl(rax, Immediate(c));
  }
  BranchOrBacktrack(not_equal, on_not_equal);
}
void RegExpMacroAssemblerX64::CheckNotCharacterAfterMinusAnd(
    uc16 c, uc16 minus, uc16 mask, Label* on_not_equal) {
  ASSERT(minus < String::kMaxUtf16CodeUnit);
  __ leap(rax, Operand(current_character(), -minus));
  __ andp(rax, Immediate(mask));
  __ cmpl(rax, Immediate(c));
  BranchOrBacktrack(not_equal, on_not_equal);
}
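// The range checks below use the standard unsigned-compare trick: c lies in
// [from, to] exactly when the unsigned value (c - from) is <= (to - from),
// folding the two-sided test into a single lea/cmp pair. The scalar
// equivalent would be:
//
//   bool in_range = static_cast<uint32_t>(c - from) <= (to - from);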
void RegExpMacroAssemblerX64::CheckCharacterInRange(
    uc16 from, uc16 to, Label* on_in_range) {
  __ leal(rax, Operand(current_character(), -from));
  __ cmpl(rax, Immediate(to - from));
  BranchOrBacktrack(below_equal, on_in_range);
}
void RegExpMacroAssemblerX64::CheckCharacterNotInRange(
    uc16 from, uc16 to, Label* on_not_in_range) {
  __ leal(rax, Operand(current_character(), -from));
  __ cmpl(rax, Immediate(to - from));
  BranchOrBacktrack(above, on_not_in_range);
}
void RegExpMacroAssemblerX64::CheckBitInTable(Handle<ByteArray> table,
                                              Label* on_bit_set) {
  __ Move(rax, table);
  Register index = current_character();
  if (mode_ != ASCII || kTableMask != String::kMaxOneByteCharCode) {
    __ movp(rbx, current_character());
    __ andp(rbx, Immediate(kTableMask));
    index = rbx;
  }
  __ cmpb(FieldOperand(rax, index, times_1, ByteArray::kHeaderSize),
          Immediate(0));
  BranchOrBacktrack(not_equal, on_bit_set);
}
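// CheckSpecialCharacterClass emits specialized code for the common escape
// classes and returns false when the generic character-class code should be
// used instead. The '.' and 'n' cases rely on an XOR trick: '\n' (0x0a) and
// '\r' (0x0d) both land in the contiguous range 0x0b..0x0c under x ^ 0x01,
// so one subtract plus one unsigned compare tests for both at once.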
bool RegExpMacroAssemblerX64::CheckSpecialCharacterClass(uc16 type,
                                                         Label* on_no_match) {
  // Range checks (c in min..max) are done by loading c, subtracting min and
  // comparing against max-min.
  switch (type) {
  case 's':
    // Match space-characters.
    if (mode_ == ASCII) {
      // One byte space characters are '\t'..'\r', ' ' and \u00a0.
      Label success;
      __ cmpl(current_character(), Immediate(' '));
      __ j(equal, &success, Label::kNear);
      // Check range 0x09..0x0d.
      __ leap(rax, Operand(current_character(), -'\t'));
      __ cmpl(rax, Immediate('\r' - '\t'));
      __ j(below_equal, &success, Label::kNear);
      // \u00a0 (NBSP).
      __ cmpl(rax, Immediate(0x00a0 - '\t'));
      BranchOrBacktrack(not_equal, on_no_match);
      __ bind(&success);
      return true;
    }
    return false;
  case 'S':
    // The emitted code for generic character classes is good enough.
    return false;
  case 'd':
    // Match ASCII digits ('0'..'9').
    __ leap(rax, Operand(current_character(), -'0'));
    __ cmpl(rax, Immediate('9' - '0'));
    BranchOrBacktrack(above, on_no_match);
    return true;
  case 'D':
    // Match non ASCII-digits.
    __ leap(rax, Operand(current_character(), -'0'));
    __ cmpl(rax, Immediate('9' - '0'));
    BranchOrBacktrack(below_equal, on_no_match);
    return true;
  case '.': {
    // Match non-newlines (not 0x0a('\n'), 0x0d('\r'), 0x2028 and 0x2029).
    __ movl(rax, current_character());
    __ xorp(rax, Immediate(0x01));
    // See if current character is '\n'^1 or '\r'^1, i.e., 0x0b or 0x0c.
    __ subl(rax, Immediate(0x0b));
    __ cmpl(rax, Immediate(0x0c - 0x0b));
    BranchOrBacktrack(below_equal, on_no_match);
    if (mode_ == UC16) {
      // Compare original value to 0x2028 and 0x2029, using the already
      // computed (current_char ^ 0x01 - 0x0b).
      __ subl(rax, Immediate(0x2028 - 0x0b));
      __ cmpl(rax, Immediate(0x2029 - 0x2028));
      BranchOrBacktrack(below_equal, on_no_match);
    }
    return true;
  }
  case 'n': {
    // Match newlines (0x0a('\n'), 0x0d('\r'), 0x2028 and 0x2029).
    __ movl(rax, current_character());
    __ xorp(rax, Immediate(0x01));
    // See if current character is '\n'^1 or '\r'^1, i.e., 0x0b or 0x0c.
    __ subl(rax, Immediate(0x0b));
    __ cmpl(rax, Immediate(0x0c - 0x0b));
    if (mode_ == ASCII) {
      BranchOrBacktrack(above, on_no_match);
    } else {
      Label done;
      BranchOrBacktrack(below_equal, &done);
      // Compare original value to 0x2028 and 0x2029.
      __ subl(rax, Immediate(0x2028 - 0x0b));
      __ cmpl(rax, Immediate(0x2029 - 0x2028));
      BranchOrBacktrack(above, on_no_match);
      __ bind(&done);
    }
    return true;
  }
  case 'w': {
    if (mode_ != ASCII) {
      // Table is 128 entries, so all ASCII characters can be tested.
      __ cmpl(current_character(), Immediate('z'));
      BranchOrBacktrack(above, on_no_match);
    }
    __ Move(rbx, ExternalReference::re_word_character_map());
    __ testb(Operand(rbx, current_character(), times_1, 0),
             current_character());
    BranchOrBacktrack(zero, on_no_match);
    return true;
  }
  case 'W': {
    Label done;
    if (mode_ != ASCII) {
      // Table is 128 entries, so all ASCII characters can be tested.
      __ cmpl(current_character(), Immediate('z'));
      __ j(above, &done);
    }
    __ Move(rbx, ExternalReference::re_word_character_map());
    __ testb(Operand(rbx, current_character(), times_1, 0),
             current_character());
    BranchOrBacktrack(not_zero, on_no_match);
    if (mode_ != ASCII) {
      __ bind(&done);
    }
    return true;
  }
  case '*':
    // Match any character.
    return true;
  default:
    // No custom implementation.
    return false;
  }
}
void RegExpMacroAssemblerX64::Fail() {
  STATIC_ASSERT(FAILURE == 0);  // Return value for failure is zero.
  if (!global()) {
    __ Set(rax, FAILURE);
  }
  __ jmp(&exit_label_);
}
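// GetCode assembles the final code object: it writes the entry code (frame
// setup, C-stack-limit check, register initialization) at entry_label_,
// binds the success, exit, backtrack, preemption and stack-overflow targets
// referenced by the methods above, and packages the buffer into a Code
// object.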
Handle<HeapObject> RegExpMacroAssemblerX64::GetCode(Handle<String> source) {
  Label return_rax;
  // Finalize code - write the entry point code now we know how many
  // registers we need.
  __ bind(&entry_label_);

  // Tell the system that we have a stack frame. Because the type is MANUAL,
  // no frame is generated.
  FrameScope scope(&masm_, StackFrame::MANUAL);
  // Store register parameters in pre-allocated stack slots.
  __ movq(Operand(rbp, kInputString), rcx);
  __ movq(Operand(rbp, kStartIndex), rdx);
  __ movq(Operand(rbp, kInputStart), r8);
  __ movq(Operand(rbp, kInputEnd), r9);
  __ Push(Immediate(0));  // Number of successful matches in a global regexp.
  __ Push(Immediate(0));  // Make room for "input start - 1" constant.
  // Check if we have space on the stack for registers.
  Label stack_limit_hit;
  ExternalReference stack_limit =
      ExternalReference::address_of_stack_limit(isolate());
  // Exit with an exception: there is not enough space on the stack for our
  // working registers.
  __ Set(rax, EXCEPTION);
  __ jmp(&return_rax);

  __ bind(&stack_limit_hit);
  __ Move(code_object_pointer(), masm_.CodeObject());
  CallCheckStackGuardState();  // Preserves no registers beside rbp and rsp.
  // Load string end.
  __ movp(rsi, Operand(rbp, kInputEnd));
  // Load input start.
  __ movp(rdi, Operand(rbp, kInputStart));
  // Load start index.
  __ movp(rbx, Operand(rbp, kStartIndex));
  // Store the address of the character one before the string start
  // (string position -1), for use when clearing position registers.
  __ movp(Operand(rbp, kInputStartMinusOne), rax);
  // Ensure that we have written to each stack page, in order. Skipping a
  // page on Windows can cause segmentation faults. Assuming page size is 4k.
  const int kPageSize = 4096;
  const int kRegistersPerPage = kPageSize / kPointerSize;
  for (int i = num_saved_registers_ + kRegistersPerPage - 1;
       i < num_registers_;
       i += kRegistersPerPage) {
    __ movp(register_location(i), rax);  // One write every page.
  }
  // Initialize code object pointer.
  __ Move(code_object_pointer(), masm_.CodeObject());

  Label load_char_start_regexp, start_regexp;
  // Load newline if index is at start, previous character otherwise.
  __ cmpl(Operand(rbp, kStartIndex), Immediate(0));
  __ j(not_equal, &load_char_start_regexp, Label::kNear);
  __ Set(current_character(), '\n');
  __ jmp(&start_regexp, Label::kNear);

  // Global regexp restarts matching here.
  __ bind(&load_char_start_regexp);
  // Load previous char as initial value of current character register.
  LoadCurrentCharacterUnchecked(-1, 1);
  __ bind(&start_regexp);
  // Initialize on-stack registers.
  if (num_saved_registers_ > 0) {
    // Fill saved registers with initial value = start offset - 1.
    // Fill in stack push order, to avoid accessing across an unwritten
    // page (a problem on Windows).
    if (num_saved_registers_ > 8) {
      __ Set(rcx, kRegisterZero);
      Label init_loop;
      __ bind(&init_loop);
      __ movp(Operand(rbp, rcx, times_1, 0), rax);
      __ subq(rcx, Immediate(kPointerSize));
      __ cmpq(rcx,
              Immediate(kRegisterZero - num_saved_registers_ * kPointerSize));
      __ j(greater, &init_loop);
    } else {  // Unroll the loop.
      for (int i = 0; i < num_saved_registers_; i++) {
        __ movp(register_location(i), rax);
      }
    }
  }

  // Initialize backtrack stack pointer.
  __ movp(backtrack_stackpointer(), Operand(rbp, kStackHighEnd));

  __ jmp(&start_label_);
  // Exit code:
  if (success_label_.is_linked()) {
    // Save captures when successful.
    __ bind(&success_label_);
    if (num_saved_registers_ > 0) {
      // Copy captures to output.
      __ movp(rdx, Operand(rbp, kStartIndex));
      __ movp(rbx, Operand(rbp, kRegisterOutput));
      __ movp(rcx, Operand(rbp, kInputEnd));
      __ subp(rcx, Operand(rbp, kInputStart));
      for (int i = 0; i < num_saved_registers_; i++) {
        __ movq(rax, register_location(i));
        if (i == 0 && global_with_zero_length_check()) {
          // Keep capture start in rdx for the zero-length check later.
          __ movp(rdx, rax);
        }
        __ addp(rax, rcx);  // Convert to index from start, not end.
        if (mode_ == UC16) {
          __ sar(rax, Immediate(1));  // Convert byte index to character index.
        }
        __ movl(Operand(rbx, i * kIntSize), rax);
      }
    }

    if (global()) {
      // Restart matching if the regular expression is flagged as global.
      // Increment success counter.
      __ incp(Operand(rbp, kSuccessfulCaptures));
      // Capture results have been stored, so the number of remaining global
      // output registers is reduced by the number of stored captures.
      __ movsxlq(rcx, Operand(rbp, kNumOutputRegisters));
      __ subp(rcx, Immediate(num_saved_registers_));
      // Check whether we have enough room for another set of capture results.
      __ cmpp(rcx, Immediate(num_saved_registers_));
      __ j(less, &exit_label_);

      __ movp(Operand(rbp, kNumOutputRegisters), rcx);
      // Advance the location for output.
      __ addp(Operand(rbp, kRegisterOutput),
              Immediate(num_saved_registers_ * kIntSize));

      // Prepare rax to initialize registers with its value in the next run.
      __ movp(rax, Operand(rbp, kInputStartMinusOne));

      if (global_with_zero_length_check()) {
        // Special case for zero-length matches.
        // rdx: capture start index.
        __ cmpp(rdi, rdx);
        // Not a zero-length match, restart.
        __ j(not_equal, &load_char_start_regexp);
        // rdi (offset from the end) is zero if we already reached the end.
        __ testp(rdi, rdi);
        __ j(zero, &exit_label_, Label::kNear);
        // Advance current position after a zero-length match.
        if (mode_ == UC16) {
          __ addq(rdi, Immediate(2));
        } else {
          __ incq(rdi);
        }
      }

      __ jmp(&load_char_start_regexp);
    } else {
      __ movp(rax, Immediate(SUCCESS));
    }
  }
  __ bind(&exit_label_);
  if (global()) {
    // Return the number of successful captures.
    __ movp(rax, Operand(rbp, kSuccessfulCaptures));
  }

  __ bind(&return_rax);
  // Restore callee save registers.
  __ leap(rsp, Operand(rbp, kLastCalleeSaveRegister));
  __ movp(rbx, Operand(rbp, kBackup_rbx));
  // Exit function frame, restore previous one.
  __ popq(rbp);
  __ ret(0);
  // Backtrack code (branch target for conditional backtracks).
  if (backtrack_label_.is_linked()) {
    __ bind(&backtrack_label_);
    Backtrack();
  }
  Label exit_with_exception;
  // Preempt-code.
  if (check_preempt_label_.is_linked()) {
    SafeCallTarget(&check_preempt_label_);

    __ pushq(backtrack_stackpointer());
    __ pushq(rdi);

    CallCheckStackGuardState();
    // If returning non-zero, we should end the match with an exception.
    __ testp(rax, rax);
    __ j(not_zero, &return_rax);

    // Restore registers.
    __ Move(code_object_pointer(), masm_.CodeObject());
    __ popq(rdi);
    __ popq(backtrack_stackpointer());
    // String might have moved: reload rsi from frame.
    __ movp(rsi, Operand(rbp, kInputEnd));
    SafeReturn();
  }
  // Backtrack stack overflow code.
  if (stack_overflow_label_.is_linked()) {
    SafeCallTarget(&stack_overflow_label_);
    // Reached if the backtrack-stack limit has been hit.

    // Call GrowStack(backtrack_stackpointer()).
    static const int num_arguments = 3;
    __ PrepareCallCFunction(num_arguments);
#ifdef _WIN64
    // Microsoft passes parameters in rcx, rdx, r8.
    __ leap(rdx, Operand(rbp, kStackHighEnd));  // Second argument.
    __ LoadAddress(r8, ExternalReference::isolate_address(isolate()));
#else  // AMD64 ABI passes parameters in rdi, rsi, rdx.
    __ movp(rdi, backtrack_stackpointer());  // First argument.
    __ leap(rsi, Operand(rbp, kStackHighEnd));  // Second argument.
    __ LoadAddress(rdx, ExternalReference::isolate_address(isolate()));
#endif
    ExternalReference grow_stack =
        ExternalReference::re_grow_stack(isolate());
    __ CallCFunction(grow_stack, num_arguments);
    // If it returns NULL, we have failed to grow the stack, and
    // must exit with a stack-overflow exception.
    __ testp(rax, rax);
    __ j(equal, &exit_with_exception);
    // Otherwise use return value as new stack pointer.
    __ movp(backtrack_stackpointer(), rax);
    // Restore saved registers and continue.
    __ Move(code_object_pointer(), masm_.CodeObject());
    SafeReturn();
  }
  if (exit_with_exception.is_linked()) {
    // If any of the code above needed to exit with an exception.
    __ bind(&exit_with_exception);
    // Exit with Result EXCEPTION(-1) to signal thrown exception.
    __ Set(rax, EXCEPTION);
    __ jmp(&return_rax);
  }
  FixupCodeRelativePositions();

  CodeDesc code_desc;
  masm_.GetCode(&code_desc);
  Isolate* isolate = this->isolate();
  Handle<Code> code = isolate->factory()->NewCode(
      code_desc, Code::ComputeFlags(Code::REGEXP), masm_.CodeObject());
  PROFILE(isolate, RegExpCodeCreateEvent(*code, *source));
  return Handle<HeapObject>::cast(code);
}
void RegExpMacroAssemblerX64::GoTo(Label* to) {
  BranchOrBacktrack(no_condition, to);
}
void RegExpMacroAssemblerX64::IfRegisterGE(int reg,
                                           int comparand,
                                           Label* if_ge) {
  __ cmpp(register_location(reg), Immediate(comparand));
  BranchOrBacktrack(greater_equal, if_ge);
}
void RegExpMacroAssemblerX64::IfRegisterLT(int reg,
                                           int comparand,
                                           Label* if_lt) {
  __ cmpp(register_location(reg), Immediate(comparand));
  BranchOrBacktrack(less, if_lt);
}
void RegExpMacroAssemblerX64::IfRegisterEqPos(int reg, Label* if_eq) {
  __ cmpp(rdi, register_location(reg));
  BranchOrBacktrack(equal, if_eq);
}
RegExpMacroAssembler::IrregexpImplementation
    RegExpMacroAssemblerX64::Implementation() {
  return kX64Implementation;
}
void RegExpMacroAssemblerX64::LoadCurrentCharacter(int cp_offset,
                                                   Label* on_end_of_input,
                                                   bool check_bounds,
                                                   int characters) {
  ASSERT(cp_offset >= -1);     // ^ and \b can look behind one character.
  ASSERT(cp_offset < (1<<30));  // Be sane! (And ensure negation works.)
  if (check_bounds) {
    CheckPosition(cp_offset + characters - 1, on_end_of_input);
  }
  LoadCurrentCharacterUnchecked(cp_offset, characters);
}
void RegExpMacroAssemblerX64::PopCurrentPosition() {
  Pop(rdi);
}
void RegExpMacroAssemblerX64::PopRegister(int register_index) {
  Pop(rax);
  __ movp(register_location(register_index), rax);
}
void RegExpMacroAssemblerX64::PushBacktrack(Label* label) {
  Push(label);
  CheckStackLimit();
}
void RegExpMacroAssemblerX64::PushCurrentPosition() {
  Push(rdi);
}
void RegExpMacroAssemblerX64::PushRegister(int register_index,
                                           StackCheckFlag check_stack_limit) {
  __ movp(rax, register_location(register_index));
  Push(rax);
  if (check_stack_limit) CheckStackLimit();
}
void RegExpMacroAssemblerX64::ReadCurrentPositionFromRegister(int reg) {
  __ movq(rdi, register_location(reg));
}
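// The backtrack stack pointer is saved to and restored from RegExp registers
// as an offset relative to the high end of the stack (kStackHighEnd), so a
// saved value stays meaningful even if the backtrack stack is reallocated by
// the grow-stack handler in the meantime.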
void RegExpMacroAssemblerX64::ReadStackPointerFromRegister(int reg) {
  __ movq(backtrack_stackpointer(), register_location(reg));
  __ addp(backtrack_stackpointer(), Operand(rbp, kStackHighEnd));
}
void RegExpMacroAssemblerX64::SetCurrentPositionFromEnd(int by) {
  Label after_position;
  __ cmpp(rdi, Immediate(-by * char_size()));
  __ j(greater_equal, &after_position, Label::kNear);
  __ movq(rdi, Immediate(-by * char_size()));
  // On RegExp code entry (where this operation is used), the character before
  // the current position is expected to be already loaded.
  // We have advanced the position, so it's safe to read backwards.
  LoadCurrentCharacterUnchecked(-1, 1);
  __ bind(&after_position);
}
void RegExpMacroAssemblerX64::SetRegister(int register_index, int to) {
  ASSERT(register_index >= num_saved_registers_);
  __ movp(register_location(register_index), Immediate(to));
}
bool RegExpMacroAssemblerX64::Succeed() {
  __ jmp(&success_label_);
  return global();
}
void RegExpMacroAssemblerX64::WriteCurrentPositionToRegister(int reg,
                                                             int cp_offset) {
  if (cp_offset == 0) {
    __ movp(register_location(reg), rdi);
  } else {
    __ leap(rax, Operand(rdi, cp_offset * char_size()));
    __ movp(register_location(reg), rax);
  }
}
void RegExpMacroAssemblerX64::ClearRegisters(int reg_from, int reg_to) {
  ASSERT(reg_from <= reg_to);
  __ movp(rax, Operand(rbp, kInputStartMinusOne));
  for (int reg = reg_from; reg <= reg_to; reg++) {
    __ movp(register_location(reg), rax);
  }
}
void RegExpMacroAssemblerX64::WriteStackPointerToRegister(int reg) {
  __ movp(rax, backtrack_stackpointer());
  __ subp(rax, Operand(rbp, kStackHighEnd));
  __ movp(register_location(reg), rax);
}
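// Calls the C function CheckStackGuardState (below), passing the address of
// the return-address slot, the Code object and the frame pointer, so the
// helper can relocate the return address if a GC moves the code object.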
void RegExpMacroAssemblerX64::CallCheckStackGuardState() {
  // This function call preserves no register values. Callers should store
  // anything volatile before the call.
  static const int num_arguments = 3;
  __ PrepareCallCFunction(num_arguments);
#ifdef _WIN64
  // Second argument: Code* of self.
  __ movp(rdx, code_object_pointer());
#else  // AMD64 calling convention.
  // Second argument: Code* of self.
  __ movp(rsi, code_object_pointer());
#endif
  ExternalReference stack_check =
      ExternalReference::re_check_stack_guard_state(isolate());
  __ CallCFunction(stack_check, num_arguments);
}
// Helper function for reading a value out of a stack frame.
template <typename T>
static T& frame_entry(Address re_frame, int frame_offset) {
  return reinterpret_cast<T&>(Memory::int32_at(re_frame + frame_offset));
}
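// frame_entry gives typed access to a slot in the RegExp stack frame, e.g.:
//
//   Isolate* isolate = frame_entry<Isolate*>(re_frame, kIsolate);
//
// CheckStackGuardState uses it both to read frame slots and, as an lvalue,
// to rewrite the input pointers after a GC has moved the subject string.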
int RegExpMacroAssemblerX64::CheckStackGuardState(Address* return_address,
                                                  Code* re_code,
                                                  Address re_frame) {
  Isolate* isolate = frame_entry<Isolate*>(re_frame, kIsolate);
  if (isolate->stack_guard()->IsStackOverflow()) {
    isolate->StackOverflow();
    return EXCEPTION;
  }

  // If this is a direct call from JavaScript, retry the RegExp forcing the
  // call through the runtime system, since the direct call cannot handle a GC.
  if (frame_entry<int>(re_frame, kDirectCall) == 1) {
    return RETRY;
  }

  // Prepare for possible GC.
  HandleScope handles(isolate);
  Handle<Code> code_handle(re_code);
  Handle<String> subject(frame_entry<String*>(re_frame, kInputString));

  // Current string.
  bool is_ascii = subject->IsOneByteRepresentationUnderneath();

  ASSERT(re_code->instruction_start() <= *return_address);
  ASSERT(*return_address <=
         re_code->instruction_start() + re_code->instruction_size());

  MaybeObject* result = Execution::HandleStackGuardInterrupt(isolate);

  if (*code_handle != re_code) {  // Return address no longer valid.
    intptr_t delta = code_handle->address() - re_code->address();
    // Overwrite the return address on the stack.
    *return_address += delta;
  }

  if (result->IsException()) {
    return EXCEPTION;
  }

  Handle<String> subject_tmp = subject;
  int slice_offset = 0;

  // Extract the underlying string and the slice offset.
  if (StringShape(*subject_tmp).IsCons()) {
    subject_tmp = Handle<String>(ConsString::cast(*subject_tmp)->first());
  } else if (StringShape(*subject_tmp).IsSliced()) {
    SlicedString* slice = SlicedString::cast(*subject_tmp);
    subject_tmp = Handle<String>(slice->parent());
    slice_offset = slice->offset();
  }

  // If the string switched between ASCII and UC16 representation, the
  // specialized code cannot be used and matching must restart from scratch.
  if (subject_tmp->IsOneByteRepresentation() != is_ascii) {
    return RETRY;
  }

  // Otherwise, the content of the string might have moved. It must still
  // be a sequential or external string with the same content.
  ASSERT(StringShape(*subject_tmp).IsSequential() ||
         StringShape(*subject_tmp).IsExternal());

  // The original start address of the characters to match.
  const byte* start_address = frame_entry<const byte*>(re_frame, kInputStart);

  // Find the current address of the same character position.
  int start_index = frame_entry<int>(re_frame, kStartIndex);
  const byte* new_address =
      StringCharacterPosition(*subject_tmp, start_index + slice_offset);

  if (start_address != new_address) {
    // Update the object pointer and start/end addresses in the stack frame.
    const byte* end_address = frame_entry<const byte*>(re_frame, kInputEnd);
    int byte_length = static_cast<int>(end_address - start_address);
    frame_entry<const String*>(re_frame, kInputString) = *subject;
    frame_entry<const byte*>(re_frame, kInputStart) = new_address;
    frame_entry<const byte*>(re_frame, kInputEnd) = new_address + byte_length;
  } else if (frame_entry<const String*>(re_frame, kInputString) != *subject) {
    // The subject may have been a ConsString that was short-circuited by GC;
    // that does not change start_address but does change the handle contents.
    frame_entry<const String*>(re_frame, kInputString) = *subject;
  }

  return 0;
}
Operand RegExpMacroAssemblerX64::register_location(int register_index) {
  ASSERT(register_index < (1<<30));
  if (num_registers_ <= register_index) {
    num_registers_ = register_index + 1;
  }
  return Operand(rbp, kRegisterZero - register_index * kPointerSize);
}
void RegExpMacroAssemblerX64::CheckPosition(int cp_offset,
                                            Label* on_outside_input) {
  __ cmpl(rdi, Immediate(-cp_offset * char_size()));
  BranchOrBacktrack(greater_equal, on_outside_input);
}
void RegExpMacroAssemblerX64::BranchOrBacktrack(Condition condition,
                                                Label* to) {
  if (condition < 0) {  // No condition.
    if (to == NULL) {
      Backtrack();
      return;
    }
    __ jmp(to);
    return;
  }
  if (to == NULL) {
    __ j(condition, &backtrack_label_);
    return;
  }
  __ j(condition, to);
}
void RegExpMacroAssemblerX64::SafeCall(Label* to) {
  __ call(to);
}
void RegExpMacroAssemblerX64::SafeCallTarget(Label* label) {
  __ bind(label);
  // Convert the return address on the C stack into a code-relative offset.
  __ subp(Operand(rsp, 0), code_object_pointer());
}
void RegExpMacroAssemblerX64::SafeReturn() {
  // Rebase the stored return offset on the (possibly moved) code object.
  __ addp(Operand(rsp, 0), code_object_pointer());
  __ ret(0);
}
void RegExpMacroAssemblerX64::Push(Register source) {
  ASSERT(!source.is(backtrack_stackpointer()));
  __ subp(backtrack_stackpointer(), Immediate(kIntSize));
  __ movl(Operand(backtrack_stackpointer(), 0), source);
}
void RegExpMacroAssemblerX64::Push(Immediate value) {
  __ subp(backtrack_stackpointer(), Immediate(kIntSize));
  __ movl(Operand(backtrack_stackpointer(), 0), value);
}
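// Labels pushed as backtrack targets (see Push(Label*) below) are emitted as
// buffer-relative offsets. FixupCodeRelativePositions patches each recorded
// position so the stored value becomes relative to the Code object pointer,
// which is what Backtrack adds back on when popping.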
void RegExpMacroAssemblerX64::FixupCodeRelativePositions() {
  for (int i = 0, n = code_relative_fixup_positions_.length(); i < n; i++) {
    int position = code_relative_fixup_positions_[i];
    // The position succeeds a relative label offset from position.
    // Patch the relative offset to be relative to the Code object pointer
    // instead.
    int patch_position = position - kIntSize;
    int offset = masm_.long_at(patch_position);
    masm_.long_at_put(patch_position,
                      offset + position + Code::kHeaderSize - kHeapObjectTag);
  }
  code_relative_fixup_positions_.Clear();
}
void RegExpMacroAssemblerX64::Push(Label* backtrack_target) {
  __ subp(backtrack_stackpointer(), Immediate(kIntSize));
  __ movl(Operand(backtrack_stackpointer(), 0), backtrack_target);
  MarkPositionForCodeRelativeFixup();
}
void RegExpMacroAssemblerX64::Pop(Register target) {
  ASSERT(!target.is(backtrack_stackpointer()));
  __ movsxlq(target, Operand(backtrack_stackpointer(), 0));
  __ addp(backtrack_stackpointer(), Immediate(kIntSize));
}
void RegExpMacroAssemblerX64::Drop() {
  __ addp(backtrack_stackpointer(), Immediate(kIntSize));
}
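// CheckPreemption and CheckStackLimit compare against limits published by the
// isolate; when a limit is hit they SafeCall the out-of-line handlers bound
// in GetCode. SafeCall/SafeCallTarget/SafeReturn convert the return address
// to a code-relative offset on the C stack for the same GC-safety reason as
// backtrack entries.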
void RegExpMacroAssemblerX64::CheckPreemption() {
  // Check for preemption.
  Label no_preempt;
  ExternalReference stack_limit =
      ExternalReference::address_of_stack_limit(isolate());
  __ load_rax(stack_limit);
  __ cmpp(rsp, rax);
  __ j(above, &no_preempt);

  SafeCall(&check_preempt_label_);

  __ bind(&no_preempt);
}
void RegExpMacroAssemblerX64::CheckStackLimit() {
  Label no_stack_overflow;
  ExternalReference stack_limit =
      ExternalReference::address_of_regexp_stack_limit(isolate());
  __ load_rax(stack_limit);
  __ cmpp(backtrack_stackpointer(), rax);
  __ j(above, &no_stack_overflow);

  SafeCall(&stack_overflow_label_);

  __ bind(&no_stack_overflow);
}
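// Loads one, two or four ASCII characters (or one or two UC16 characters) at
// a time into current_character(); reading several characters per load lets
// the generated matcher test short literal sequences with a single compare.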
void RegExpMacroAssemblerX64::LoadCurrentCharacterUnchecked(int cp_offset,
                                                            int characters) {
  if (mode_ == ASCII) {
    if (characters == 4) {
      __ movl(current_character(), Operand(rsi, rdi, times_1, cp_offset));
    } else if (characters == 2) {
      __ movzxwl(current_character(), Operand(rsi, rdi, times_1, cp_offset));
    } else {
      ASSERT(characters == 1);
      __ movzxbl(current_character(), Operand(rsi, rdi, times_1, cp_offset));
    }
  } else {
    ASSERT(mode_ == UC16);
    if (characters == 2) {
      __ movl(current_character(),
              Operand(rsi, rdi, times_1, cp_offset * sizeof(uc16)));
    } else {
      ASSERT(characters == 1);
      __ movzxwl(current_character(),
                 Operand(rsi, rdi, times_1, cp_offset * sizeof(uc16)));
    }
  }
}
#endif  // V8_INTERPRETED_REGEXP

#endif  // V8_TARGET_ARCH_X64