v8 3.25.30 (node 0.11.13)
regexp-macro-assembler-ia32.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #if V8_TARGET_ARCH_IA32
31 
32 #include "cpu-profiler.h"
33 #include "unicode.h"
34 #include "log.h"
35 #include "regexp-stack.h"
36 #include "macro-assembler.h"
37 #include "regexp-macro-assembler.h"
38 #include "ia32/regexp-macro-assembler-ia32.h"
39 
40 namespace v8 {
41 namespace internal {
42 
43 #ifndef V8_INTERPRETED_REGEXP
44 /*
45  * This assembler uses the following register assignment convention
46  * - edx : Current character. Must be loaded using LoadCurrentCharacter
47  * before using any of the dispatch methods. Temporarily stores the
48  * index of capture start after a matching pass for a global regexp.
49  * - edi : Current position in input, as negative offset from end of string.
50  * Please notice that this is the byte offset, not the character offset!
51  * - esi : end of input (points to byte after last character in input).
52  * - ebp : Frame pointer. Used to access arguments, local variables and
53  * RegExp registers.
54  * - esp : Points to tip of C stack.
55  * - ecx : Points to tip of backtrack stack
56  *
57  * The registers eax and ebx are free to use for computations.
58  *
59  * Each call to a public method should retain this convention.
60  * The stack will have the following structure:
61  * - Isolate* isolate (address of the current isolate)
62  * - direct_call (if 1, direct call from JavaScript code, if 0
63  * call through the runtime system)
64  * - stack_area_base (high end of the memory area to use as
65  * backtracking stack)
66  * - capture array size (may fit multiple sets of matches)
67  * - int* capture_array (int[num_saved_registers_], for output).
68  * - end of input (address of end of string)
69  * - start of input (address of first character in string)
70  * - start index (character index of start)
71  * - String* input_string (location of a handle containing the string)
72  * --- frame alignment (if applicable) ---
73  * - return address
74  * ebp-> - old ebp
75  * - backup of caller esi
76  * - backup of caller edi
77  * - backup of caller ebx
78  * - success counter (only for global regexps to count matches).
79  * - Offset of location before start of input (effectively character
80  * position -1). Used to initialize capture registers to a non-position.
81  * - register 0 ebp[-4] (only positions must be stored in the first
82  * - register 1 ebp[-8] num_saved_registers_ registers)
83  * - ...
84  *
85  * The first num_saved_registers_ registers are initialized to point to
86  * "character -1" in the string (i.e., char_size() bytes before the first
87  * character of the string). The remaining registers start out as garbage.
88  *
89  * The data up to the return address must be placed there by the calling
90  * code, by calling the code entry as cast to a function with the signature:
91  * int (*match)(String* input_string,
92  * int start_index,
93  * Address start,
94  * Address end,
95  * int* capture_output_array,
96  * bool at_start,
97  * byte* stack_area_base,
98  * bool direct_call)
99  */
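// A sketch of the addressing implied by the convention above: with esi
// pointing one past the last input byte and edi holding a negative byte
// offset from that end, the character cp_offset positions away from the
// current position is found at
//
//   esi + edi + cp_offset * char_size()
//
// which is the Operand(esi, edi, times_1, ...) form used by
// LoadCurrentCharacterUnchecked() below; edi == 0 means "at the end of the
// input".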
100 
101 #define __ ACCESS_MASM(masm_)
102 
103 RegExpMacroAssemblerIA32::RegExpMacroAssemblerIA32(
104  Mode mode,
105  int registers_to_save,
106  Zone* zone)
107  : NativeRegExpMacroAssembler(zone),
108  masm_(new MacroAssembler(zone->isolate(), NULL, kRegExpCodeSize)),
109  mode_(mode),
110  num_registers_(registers_to_save),
111  num_saved_registers_(registers_to_save),
112  entry_label_(),
113  start_label_(),
114  success_label_(),
115  backtrack_label_(),
116  exit_label_() {
117  ASSERT_EQ(0, registers_to_save % 2);
118  __ jmp(&entry_label_); // We'll write the entry code later.
119  __ bind(&start_label_); // And then continue from here.
120 }
121 
122 
123 RegExpMacroAssemblerIA32::~RegExpMacroAssemblerIA32() {
124  delete masm_;
125  // Unuse labels in case we throw away the assembler without calling GetCode.
126  entry_label_.Unuse();
127  start_label_.Unuse();
128  success_label_.Unuse();
129  backtrack_label_.Unuse();
130  exit_label_.Unuse();
131  check_preempt_label_.Unuse();
132  stack_overflow_label_.Unuse();
133 }
134 
135 
136 int RegExpMacroAssemblerIA32::stack_limit_slack() {
137  return RegExpStack::kStackLimitSlack;
138 }
139 
140 
141 void RegExpMacroAssemblerIA32::AdvanceCurrentPosition(int by) {
142  if (by != 0) {
143  __ add(edi, Immediate(by * char_size()));
144  }
145 }
146 
147 
148 void RegExpMacroAssemblerIA32::AdvanceRegister(int reg, int by) {
149  ASSERT(reg >= 0);
150  ASSERT(reg < num_registers_);
151  if (by != 0) {
152  __ add(register_location(reg), Immediate(by));
153  }
154 }
155 
156 
157 void RegExpMacroAssemblerIA32::Backtrack() {
158  CheckPreemption();
159  // Pop Code* offset from backtrack stack, add Code* and jump to location.
160  Pop(ebx);
161  __ add(ebx, Immediate(masm_->CodeObject()));
162  __ jmp(ebx);
163 }
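// Note on the scheme above: the backtrack stack holds offsets relative to the
// code object rather than absolute addresses (PushBacktrack() pushes
// Immediate::CodeRelativeOffset(label)). Conceptually, as a sketch:
//
//   push:  *--backtrack_sp = label_pos - code_start;
//   pop :  jmp code_start + *backtrack_sp++;
//
// presumably so the stored entries remain valid if the Code object is moved.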
164 
165 
166 void RegExpMacroAssemblerIA32::Bind(Label* label) {
167  __ bind(label);
168 }
169 
170 
171 void RegExpMacroAssemblerIA32::CheckCharacter(uint32_t c, Label* on_equal) {
172  __ cmp(current_character(), c);
173  BranchOrBacktrack(equal, on_equal);
174 }
175 
176 
177 void RegExpMacroAssemblerIA32::CheckCharacterGT(uc16 limit, Label* on_greater) {
178  __ cmp(current_character(), limit);
179  BranchOrBacktrack(greater, on_greater);
180 }
181 
182 
183 void RegExpMacroAssemblerIA32::CheckAtStart(Label* on_at_start) {
184  Label not_at_start;
185  // Did we start the match at the start of the string at all?
186  __ cmp(Operand(ebp, kStartIndex), Immediate(0));
187  BranchOrBacktrack(not_equal, &not_at_start);
188  // If we did, are we still at the start of the input?
189  __ lea(eax, Operand(esi, edi, times_1, 0));
190  __ cmp(eax, Operand(ebp, kInputStart));
191  BranchOrBacktrack(equal, on_at_start);
192  __ bind(&not_at_start);
193 }
194 
195 
196 void RegExpMacroAssemblerIA32::CheckNotAtStart(Label* on_not_at_start) {
197  // Did we start the match at the start of the string at all?
198  __ cmp(Operand(ebp, kStartIndex), Immediate(0));
199  BranchOrBacktrack(not_equal, on_not_at_start);
200  // If we did, are we still at the start of the input?
201  __ lea(eax, Operand(esi, edi, times_1, 0));
202  __ cmp(eax, Operand(ebp, kInputStart));
203  BranchOrBacktrack(not_equal, on_not_at_start);
204 }
205 
206 
207 void RegExpMacroAssemblerIA32::CheckCharacterLT(uc16 limit, Label* on_less) {
208  __ cmp(current_character(), limit);
209  BranchOrBacktrack(less, on_less);
210 }
211 
212 
213 void RegExpMacroAssemblerIA32::CheckGreedyLoop(Label* on_equal) {
214  Label fallthrough;
215  __ cmp(edi, Operand(backtrack_stackpointer(), 0));
216  __ j(not_equal, &fallthrough);
217  __ add(backtrack_stackpointer(), Immediate(kPointerSize)); // Pop.
218  BranchOrBacktrack(no_condition, on_equal);
219  __ bind(&fallthrough);
220 }
221 
222 
223 void RegExpMacroAssemblerIA32::CheckNotBackReferenceIgnoreCase(
224  int start_reg,
225  Label* on_no_match) {
226  Label fallthrough;
227  __ mov(edx, register_location(start_reg)); // Index of start of capture
228  __ mov(ebx, register_location(start_reg + 1)); // Index of end of capture
229  __ sub(ebx, edx); // Length of capture.
230 
231  // The length of a capture should not be negative. This can only happen
232  // if the end of the capture is unrecorded, or at a point earlier than
233  // the start of the capture.
234  BranchOrBacktrack(less, on_no_match);
235 
236  // If length is zero, either the capture is empty or it is completely
237  // uncaptured. In either case succeed immediately.
238  __ j(equal, &fallthrough);
239 
240  // Check that there are sufficient characters left in the input.
241  __ mov(eax, edi);
242  __ add(eax, ebx);
243  BranchOrBacktrack(greater, on_no_match);
244 
245  if (mode_ == ASCII) {
246  Label success;
247  Label fail;
248  Label loop_increment;
249  // Save register contents to make the registers available below.
250  __ push(edi);
251  __ push(backtrack_stackpointer());
252  // After this, the eax, ecx, and edi registers are available.
253 
254  __ add(edx, esi); // Start of capture
255  __ add(edi, esi); // Start of text to match against capture.
256  __ add(ebx, edi); // End of text to match against capture.
257 
258  Label loop;
259  __ bind(&loop);
260  __ movzx_b(eax, Operand(edi, 0));
261  __ cmpb_al(Operand(edx, 0));
262  __ j(equal, &loop_increment);
263 
264  // Mismatch, try case-insensitive match (converting letters to lower-case).
265  __ or_(eax, 0x20); // Convert match character to lower-case.
266  __ lea(ecx, Operand(eax, -'a'));
267  __ cmp(ecx, static_cast<int32_t>('z' - 'a')); // Is eax a lowercase letter?
268  Label convert_capture;
269  __ j(below_equal, &convert_capture); // In range 'a'-'z'.
270  // Latin-1: Check for values in range [224,254] but not 247.
271  __ sub(ecx, Immediate(224 - 'a'));
272  __ cmp(ecx, Immediate(254 - 224));
273  __ j(above, &fail); // Weren't Latin-1 letters.
274  __ cmp(ecx, Immediate(247 - 224)); // Check for 247.
275  __ j(equal, &fail);
276  __ bind(&convert_capture);
277  // Also convert capture character.
278  __ movzx_b(ecx, Operand(edx, 0));
279  __ or_(ecx, 0x20);
280 
281  __ cmp(eax, ecx);
282  __ j(not_equal, &fail);
283 
284  __ bind(&loop_increment);
285  // Increment pointers into match and capture strings.
286  __ add(edx, Immediate(1));
287  __ add(edi, Immediate(1));
288  // Compare to end of match, and loop if not done.
289  __ cmp(edi, ebx);
290  __ j(below, &loop);
291  __ jmp(&success);
292 
293  __ bind(&fail);
294  // Restore original values before failing.
295  __ pop(backtrack_stackpointer());
296  __ pop(edi);
297  BranchOrBacktrack(no_condition, on_no_match);
298 
299  __ bind(&success);
300  // Restore original value before continuing.
301  __ pop(backtrack_stackpointer());
302  // Drop original value of character position.
303  __ add(esp, Immediate(kPointerSize));
304  // Compute new value of character position after the matched part.
305  __ sub(edi, esi);
306  } else {
307  ASSERT(mode_ == UC16);
308  // Save registers before calling C function.
309  __ push(esi);
310  __ push(edi);
311  __ push(backtrack_stackpointer());
312  __ push(ebx);
313 
314  static const int argument_count = 4;
315  __ PrepareCallCFunction(argument_count, ecx);
316  // Put arguments into allocated stack area, last argument highest on stack.
317  // Parameters are
318  // Address byte_offset1 - Address captured substring's start.
319  // Address byte_offset2 - Address of current character position.
320  // size_t byte_length - length of capture in bytes(!)
321  // Isolate* isolate
322 
323  // Set isolate.
324  __ mov(Operand(esp, 3 * kPointerSize),
325  Immediate(ExternalReference::isolate_address(isolate())));
326  // Set byte_length.
327  __ mov(Operand(esp, 2 * kPointerSize), ebx);
328  // Set byte_offset2.
329  // Found by adding negative string-end offset of current position (edi)
330  // to end of string.
331  __ add(edi, esi);
332  __ mov(Operand(esp, 1 * kPointerSize), edi);
333  // Set byte_offset1.
334  // Start of capture, where edx already holds string-end negative offset.
335  __ add(edx, esi);
336  __ mov(Operand(esp, 0 * kPointerSize), edx);
337 
338  {
339  AllowExternalCallThatCantCauseGC scope(masm_);
340  ExternalReference compare =
341  ExternalReference::re_case_insensitive_compare_uc16(isolate());
342  __ CallCFunction(compare, argument_count);
343  }
344  // Pop original values before reacting on result value.
345  __ pop(ebx);
346  __ pop(backtrack_stackpointer());
347  __ pop(edi);
348  __ pop(esi);
349 
350  // Check if function returned non-zero for success or zero for failure.
351  __ or_(eax, eax);
352  BranchOrBacktrack(zero, on_no_match);
353  // On success, increment position by length of capture.
354  __ add(edi, ebx);
355  }
356  __ bind(&fallthrough);
357 }
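// A rough C++ rendering of the per-character test performed by the one-byte
// loop above (helper name hypothetical, illustration only):
//
//   static bool BytesMatchIgnoreCase(uint8_t text_char, uint8_t capture_char) {
//     if (text_char == capture_char) return true;
//     uint8_t c = text_char | 0x20;                         // fold to lower case
//     bool is_letter = (c >= 'a' && c <= 'z') ||            // ASCII letters
//                      (c >= 224 && c <= 254 && c != 247);  // Latin-1 letters, not 0xF7
//     return is_letter && c == (capture_char | 0x20);
//   }
//
// The UC16 branch instead calls out to the C++ helper
// re_case_insensitive_compare_uc16 for the full case-insensitive comparison.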
358 
359 
360 void RegExpMacroAssemblerIA32::CheckNotBackReference(
361  int start_reg,
362  Label* on_no_match) {
363  Label fallthrough;
364  Label success;
365  Label fail;
366 
367  // Find length of back-referenced capture.
368  __ mov(edx, register_location(start_reg));
369  __ mov(eax, register_location(start_reg + 1));
370  __ sub(eax, edx); // Length to check.
371  // Fail on partial or illegal capture (start of capture after end of capture).
372  BranchOrBacktrack(less, on_no_match);
373  // Succeed on empty capture (including no capture)
374  __ j(equal, &fallthrough);
375 
376  // Check that there are sufficient characters left in the input.
377  __ mov(ebx, edi);
378  __ add(ebx, eax);
379  BranchOrBacktrack(greater, on_no_match);
380 
381  // Save register to make it available below.
382  __ push(backtrack_stackpointer());
383 
384  // Compute pointers to match string and capture string
385  __ lea(ebx, Operand(esi, edi, times_1, 0)); // Start of match.
386  __ add(edx, esi); // Start of capture.
387  __ lea(ecx, Operand(eax, ebx, times_1, 0)); // End of match
388 
389  Label loop;
390  __ bind(&loop);
391  if (mode_ == ASCII) {
392  __ movzx_b(eax, Operand(edx, 0));
393  __ cmpb_al(Operand(ebx, 0));
394  } else {
395  ASSERT(mode_ == UC16);
396  __ movzx_w(eax, Operand(edx, 0));
397  __ cmpw_ax(Operand(ebx, 0));
398  }
399  __ j(not_equal, &fail);
400  // Increment pointers into capture and match string.
401  __ add(edx, Immediate(char_size()));
402  __ add(ebx, Immediate(char_size()));
403  // Check if we have reached end of match area.
404  __ cmp(ebx, ecx);
405  __ j(below, &loop);
406  __ jmp(&success);
407 
408  __ bind(&fail);
409  // Restore backtrack stackpointer.
410  __ pop(backtrack_stackpointer());
411  BranchOrBacktrack(no_condition, on_no_match);
412 
413  __ bind(&success);
414  // Move current character position to position after match.
415  __ mov(edi, ecx);
416  __ sub(edi, esi);
417  // Restore backtrack stackpointer.
418  __ pop(backtrack_stackpointer());
419 
420  __ bind(&fallthrough);
421 }
422 
423 
424 void RegExpMacroAssemblerIA32::CheckNotCharacter(uint32_t c,
425  Label* on_not_equal) {
426  __ cmp(current_character(), c);
427  BranchOrBacktrack(not_equal, on_not_equal);
428 }
429 
430 
431 void RegExpMacroAssemblerIA32::CheckCharacterAfterAnd(uint32_t c,
432  uint32_t mask,
433  Label* on_equal) {
434  if (c == 0) {
435  __ test(current_character(), Immediate(mask));
436  } else {
437  __ mov(eax, mask);
438  __ and_(eax, current_character());
439  __ cmp(eax, c);
440  }
441  BranchOrBacktrack(equal, on_equal);
442 }
443 
444 
445 void RegExpMacroAssemblerIA32::CheckNotCharacterAfterAnd(uint32_t c,
446  uint32_t mask,
447  Label* on_not_equal) {
448  if (c == 0) {
449  __ test(current_character(), Immediate(mask));
450  } else {
451  __ mov(eax, mask);
452  __ and_(eax, current_character());
453  __ cmp(eax, c);
454  }
455  BranchOrBacktrack(not_equal, on_not_equal);
456 }
457 
458 
459 void RegExpMacroAssemblerIA32::CheckNotCharacterAfterMinusAnd(
460  uc16 c,
461  uc16 minus,
462  uc16 mask,
463  Label* on_not_equal) {
464  ASSERT(minus < String::kMaxUtf16CodeUnit);
465  __ lea(eax, Operand(current_character(), -minus));
466  if (c == 0) {
467  __ test(eax, Immediate(mask));
468  } else {
469  __ and_(eax, mask);
470  __ cmp(eax, c);
471  }
472  BranchOrBacktrack(not_equal, on_not_equal);
473 }
474 
475 
476 void RegExpMacroAssemblerIA32::CheckCharacterInRange(
477  uc16 from,
478  uc16 to,
479  Label* on_in_range) {
480  __ lea(eax, Operand(current_character(), -from));
481  __ cmp(eax, to - from);
482  BranchOrBacktrack(below_equal, on_in_range);
483 }
484 
485 
486 void RegExpMacroAssemblerIA32::CheckCharacterNotInRange(
487  uc16 from,
488  uc16 to,
489  Label* on_not_in_range) {
490  __ lea(eax, Operand(current_character(), -from));
491  __ cmp(eax, to - from);
492  BranchOrBacktrack(above, on_not_in_range);
493 }
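// Both range checks above use the single unsigned-compare trick: after
// subtracting 'from', every value in [from, to] maps to [0, to - from] while
// everything else wraps around to a large unsigned value, so one unsigned
// comparison covers both bounds. As a stand-alone sketch (helper name
// hypothetical):
//
//   static inline bool InRange(uint32_t c, uint32_t from, uint32_t to) {
//     return (c - from) <= (to - from);  // unsigned wrap-around does the rest
//   }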
494 
495 
496 void RegExpMacroAssemblerIA32::CheckBitInTable(
497  Handle<ByteArray> table,
498  Label* on_bit_set) {
499  __ mov(eax, Immediate(table));
500  Register index = current_character();
501  if (mode_ != ASCII || kTableMask != String::kMaxOneByteCharCode) {
502  __ mov(ebx, kTableSize - 1);
503  __ and_(ebx, current_character());
504  index = ebx;
505  }
506  __ cmpb(FieldOperand(eax, index, times_1, ByteArray::kHeaderSize), 0);
507  BranchOrBacktrack(not_equal, on_bit_set);
508 }
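// What the generated code above computes, roughly (sketch; assumes
// kTableMask == kTableSize - 1, matching the masking done here):
//
//   bool bit_set = table[current_char & kTableMask] != 0;
//
// In one-byte mode with a table covering every one-byte character the mask is
// skipped, since the character can never index past the end of the table.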
509 
510 
511 bool RegExpMacroAssemblerIA32::CheckSpecialCharacterClass(uc16 type,
512  Label* on_no_match) {
513  // Range checks (c in min..max) are generally implemented by an unsigned
514  // (c - min) <= (max - min) check
515  switch (type) {
516  case 's':
517  // Match space-characters
518  if (mode_ == ASCII) {
519  // One byte space characters are '\t'..'\r', ' ' and \u00a0.
520  Label success;
521  __ cmp(current_character(), ' ');
522  __ j(equal, &success, Label::kNear);
523  // Check range 0x09..0x0d
524  __ lea(eax, Operand(current_character(), -'\t'));
525  __ cmp(eax, '\r' - '\t');
526  __ j(below_equal, &success, Label::kNear);
527  // \u00a0 (NBSP).
528  __ cmp(eax, 0x00a0 - '\t');
529  BranchOrBacktrack(not_equal, on_no_match);
530  __ bind(&success);
531  return true;
532  }
533  return false;
534  case 'S':
535  // The emitted code for generic character classes is good enough.
536  return false;
537  case 'd':
538  // Match ASCII digits ('0'..'9')
539  __ lea(eax, Operand(current_character(), -'0'));
540  __ cmp(eax, '9' - '0');
541  BranchOrBacktrack(above, on_no_match);
542  return true;
543  case 'D':
544  // Match non ASCII-digits
545  __ lea(eax, Operand(current_character(), -'0'));
546  __ cmp(eax, '9' - '0');
547  BranchOrBacktrack(below_equal, on_no_match);
548  return true;
549  case '.': {
550  // Match non-newlines (not 0x0a('\n'), 0x0d('\r'), 0x2028 and 0x2029)
551  __ mov(eax, current_character());
552  __ xor_(eax, Immediate(0x01));
553  // See if current character is '\n'^1 or '\r'^1, i.e., 0x0b or 0x0c
554  __ sub(eax, Immediate(0x0b));
555  __ cmp(eax, 0x0c - 0x0b);
556  BranchOrBacktrack(below_equal, on_no_match);
557  if (mode_ == UC16) {
558  // Compare original value to 0x2028 and 0x2029, using the already
559  // computed (current_char ^ 0x01 - 0x0b). I.e., check for
560  // 0x201d (0x2028 - 0x0b) or 0x201e.
561  __ sub(eax, Immediate(0x2028 - 0x0b));
562  __ cmp(eax, 0x2029 - 0x2028);
563  BranchOrBacktrack(below_equal, on_no_match);
564  }
565  return true;
566  }
567  case 'w': {
568  if (mode_ != ASCII) {
569  // Table is 128 entries, so all ASCII characters can be tested.
570  __ cmp(current_character(), Immediate('z'));
571  BranchOrBacktrack(above, on_no_match);
572  }
573  ASSERT_EQ(0, word_character_map[0]); // Character '\0' is not a word char.
574  ExternalReference word_map = ExternalReference::re_word_character_map();
575  __ test_b(current_character(),
576  Operand::StaticArray(current_character(), times_1, word_map));
577  BranchOrBacktrack(zero, on_no_match);
578  return true;
579  }
580  case 'W': {
581  Label done;
582  if (mode_ != ASCII) {
583  // Table is 128 entries, so all ASCII characters can be tested.
584  __ cmp(current_character(), Immediate('z'));
585  __ j(above, &done);
586  }
587  ASSERT_EQ(0, word_character_map[0]); // Character '\0' is not a word char.
588  ExternalReference word_map = ExternalReference::re_word_character_map();
589  __ test_b(current_character(),
590  Operand::StaticArray(current_character(), times_1, word_map));
591  BranchOrBacktrack(not_zero, on_no_match);
592  if (mode_ != ASCII) {
593  __ bind(&done);
594  }
595  return true;
596  }
597  // Non-standard classes (with no syntactic shorthand) used internally.
598  case '*':
599  // Match any character.
600  return true;
601  case 'n': {
602  // Match newlines (0x0a('\n'), 0x0d('\r'), 0x2028 or 0x2029).
603  // The opposite of '.'.
604  __ mov(eax, current_character());
605  __ xor_(eax, Immediate(0x01));
606  // See if current character is '\n'^1 or '\r'^1, i.e., 0x0b or 0x0c
607  __ sub(eax, Immediate(0x0b));
608  __ cmp(eax, 0x0c - 0x0b);
609  if (mode_ == ASCII) {
610  BranchOrBacktrack(above, on_no_match);
611  } else {
612  Label done;
613  BranchOrBacktrack(below_equal, &done);
614  ASSERT_EQ(UC16, mode_);
615  // Compare original value to 0x2028 and 0x2029, using the already
616  // computed (current_char ^ 0x01 - 0x0b). I.e., check for
617  // 0x201d (0x2028 - 0x0b) or 0x201e.
618  __ sub(eax, Immediate(0x2028 - 0x0b));
619  __ cmp(eax, 1);
620  BranchOrBacktrack(above, on_no_match);
621  __ bind(&done);
622  }
623  return true;
624  }
625  // No custom implementation (yet): s(UC16), S(UC16).
626  default:
627  return false;
628  }
629 }
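// The '.' and 'n' cases above share the same folding trick: XOR-ing with 0x01
// maps '\n' (0x0a) and '\r' (0x0d) onto the adjacent pair 0x0b/0x0c, and
// swaps 0x2028/0x2029, so each pair collapses into one unsigned range check.
// As a sketch (hypothetical helper; the second check only matters in UC16
// mode):
//
//   static bool IsLineTerminator(uint32_t c) {
//     uint32_t x = (c ^ 0x01) - 0x0b;       // '\n', '\r'  ->  0, 1
//     if (x <= 0x0c - 0x0b) return true;
//     return (x - (0x2028 - 0x0b)) <= 1;    // 0x2028 or 0x2029
//   }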
630 
631 
632 void RegExpMacroAssemblerIA32::Fail() {
633  STATIC_ASSERT(FAILURE == 0); // Return value for failure is zero.
634  if (!global()) {
635  __ Move(eax, Immediate(FAILURE));
636  }
637  __ jmp(&exit_label_);
638 }
639 
640 
641 Handle<HeapObject> RegExpMacroAssemblerIA32::GetCode(Handle<String> source) {
642  Label return_eax;
643  // Finalize code - write the entry point code now we know how many
644  // registers we need.
645 
646  // Entry code:
647  __ bind(&entry_label_);
648 
649  // Tell the system that we have a stack frame. Because the type is MANUAL, no
650  // code is generated.
651  FrameScope scope(masm_, StackFrame::MANUAL);
652 
653  // Actually emit code to start a new stack frame.
654  __ push(ebp);
655  __ mov(ebp, esp);
656  // Save callee-save registers. Order here should correspond to order of
657  // kBackup_ebx etc.
658  __ push(esi);
659  __ push(edi);
660  __ push(ebx); // Callee-save on MacOS.
661  __ push(Immediate(0)); // Number of successful matches in a global regexp.
662  __ push(Immediate(0)); // Make room for "input start - 1" constant.
663 
664  // Check if we have space on the stack for registers.
665  Label stack_limit_hit;
666  Label stack_ok;
667 
668  ExternalReference stack_limit =
669  ExternalReference::address_of_stack_limit(isolate());
670  __ mov(ecx, esp);
671  __ sub(ecx, Operand::StaticVariable(stack_limit));
672  // Handle it if the stack pointer is already below the stack limit.
673  __ j(below_equal, &stack_limit_hit);
674  // Check if there is room for the variable number of registers above
675  // the stack limit.
676  __ cmp(ecx, num_registers_ * kPointerSize);
677  __ j(above_equal, &stack_ok);
678  // Exit with OutOfMemory exception. There is not enough space on the stack
679  // for our working registers.
680  __ mov(eax, EXCEPTION);
681  __ jmp(&return_eax);
682 
683  __ bind(&stack_limit_hit);
684  CallCheckStackGuardState(ebx);
685  __ or_(eax, eax);
686  // If returned value is non-zero, we exit with the returned value as result.
687  __ j(not_zero, &return_eax);
688 
689  __ bind(&stack_ok);
690  // Load start index for later use.
691  __ mov(ebx, Operand(ebp, kStartIndex));
692 
693  // Allocate space on stack for registers.
694  __ sub(esp, Immediate(num_registers_ * kPointerSize));
695  // Load string length.
696  __ mov(esi, Operand(ebp, kInputEnd));
697  // Load input position.
698  __ mov(edi, Operand(ebp, kInputStart));
699  // Set up edi to be negative offset from string end.
700  __ sub(edi, esi);
701 
702  // Set eax to address of char before start of the string.
703  // (effectively string position -1).
704  __ neg(ebx);
705  if (mode_ == UC16) {
706  __ lea(eax, Operand(edi, ebx, times_2, -char_size()));
707  } else {
708  __ lea(eax, Operand(edi, ebx, times_1, -char_size()));
709  }
710  // Store this value in a local variable, for use when clearing
711  // position registers.
712  __ mov(Operand(ebp, kInputStartMinusOne), eax);
713 
714 #if V8_OS_WIN
715  // Ensure that we write to each stack page, in order. Skipping a page
716  // on Windows can cause segmentation faults. Assuming page size is 4k.
717  const int kPageSize = 4096;
718  const int kRegistersPerPage = kPageSize / kPointerSize;
719  for (int i = num_saved_registers_ + kRegistersPerPage - 1;
720  i < num_registers_;
721  i += kRegistersPerPage) {
722  __ mov(register_location(i), eax); // One write every page.
723  }
724 #endif // V8_OS_WIN
725 
726  Label load_char_start_regexp, start_regexp;
727  // Load newline if index is at start, previous character otherwise.
728  __ cmp(Operand(ebp, kStartIndex), Immediate(0));
729  __ j(not_equal, &load_char_start_regexp, Label::kNear);
730  __ mov(current_character(), '\n');
731  __ jmp(&start_regexp, Label::kNear);
732 
733  // Global regexp restarts matching here.
734  __ bind(&load_char_start_regexp);
735  // Load previous char as initial value of current character register.
736  LoadCurrentCharacterUnchecked(-1, 1);
737  __ bind(&start_regexp);
738 
739  // Initialize on-stack registers.
740  if (num_saved_registers_ > 0) { // Always is, if generated from a regexp.
741  // Fill saved registers with initial value = start offset - 1
742  // Fill in stack push order, to avoid accessing across an unwritten
743  // page (a problem on Windows).
744  if (num_saved_registers_ > 8) {
745  __ mov(ecx, kRegisterZero);
746  Label init_loop;
747  __ bind(&init_loop);
748  __ mov(Operand(ebp, ecx, times_1, 0), eax);
749  __ sub(ecx, Immediate(kPointerSize));
750  __ cmp(ecx, kRegisterZero - num_saved_registers_ * kPointerSize);
751  __ j(greater, &init_loop);
752  } else { // Unroll the loop.
753  for (int i = 0; i < num_saved_registers_; i++) {
754  __ mov(register_location(i), eax);
755  }
756  }
757  }
758 
759  // Initialize backtrack stack pointer.
760  __ mov(backtrack_stackpointer(), Operand(ebp, kStackHighEnd));
761 
762  __ jmp(&start_label_);
763 
764  // Exit code:
765  if (success_label_.is_linked()) {
766  // Save captures when successful.
767  __ bind(&success_label_);
768  if (num_saved_registers_ > 0) {
769  // copy captures to output
770  __ mov(ebx, Operand(ebp, kRegisterOutput));
771  __ mov(ecx, Operand(ebp, kInputEnd));
772  __ mov(edx, Operand(ebp, kStartIndex));
773  __ sub(ecx, Operand(ebp, kInputStart));
774  if (mode_ == UC16) {
775  __ lea(ecx, Operand(ecx, edx, times_2, 0));
776  } else {
777  __ add(ecx, edx);
778  }
779  for (int i = 0; i < num_saved_registers_; i++) {
780  __ mov(eax, register_location(i));
781  if (i == 0 && global_with_zero_length_check()) {
782  // Keep capture start in edx for the zero-length check later.
783  __ mov(edx, eax);
784  }
785  // Convert to index from start of string, not end.
786  __ add(eax, ecx);
787  if (mode_ == UC16) {
788  __ sar(eax, 1); // Convert byte index to character index.
789  }
790  __ mov(Operand(ebx, i * kPointerSize), eax);
791  }
792  }
793 
794  if (global()) {
795  // Restart matching if the regular expression is flagged as global.
796  // Increment success counter.
797  __ inc(Operand(ebp, kSuccessfulCaptures));
798  // Capture results have been stored, so the number of remaining global
799  // output registers is reduced by the number of stored captures.
800  __ mov(ecx, Operand(ebp, kNumOutputRegisters));
801  __ sub(ecx, Immediate(num_saved_registers_));
802  // Check whether we have enough room for another set of capture results.
803  __ cmp(ecx, Immediate(num_saved_registers_));
804  __ j(less, &exit_label_);
805 
806  __ mov(Operand(ebp, kNumOutputRegisters), ecx);
807  // Advance the location for output.
808  __ add(Operand(ebp, kRegisterOutput),
809  Immediate(num_saved_registers_ * kPointerSize));
810 
811  // Prepare eax to initialize registers with its value in the next run.
812  __ mov(eax, Operand(ebp, kInputStartMinusOne));
813 
814  if (global_with_zero_length_check()) {
815  // Special case for zero-length matches.
816  // edx: capture start index
817  __ cmp(edi, edx);
818  // Not a zero-length match, restart.
819  __ j(not_equal, &load_char_start_regexp);
820  // edi (offset from the end) is zero if we already reached the end.
821  __ test(edi, edi);
822  __ j(zero, &exit_label_, Label::kNear);
823  // Advance current position after a zero-length match.
824  if (mode_ == UC16) {
825  __ add(edi, Immediate(2));
826  } else {
827  __ inc(edi);
828  }
829  }
830 
831  __ jmp(&load_char_start_regexp);
832  } else {
833  __ mov(eax, Immediate(SUCCESS));
834  }
835  }
836 
837  __ bind(&exit_label_);
838  if (global()) {
839  // Return the number of successful captures.
840  __ mov(eax, Operand(ebp, kSuccessfulCaptures));
841  }
842 
843  __ bind(&return_eax);
844  // Skip esp past regexp registers.
845  __ lea(esp, Operand(ebp, kBackup_ebx));
846  // Restore callee-save registers.
847  __ pop(ebx);
848  __ pop(edi);
849  __ pop(esi);
850  // Exit function frame, restore previous one.
851  __ pop(ebp);
852  __ ret(0);
853 
854  // Backtrack code (branch target for conditional backtracks).
855  if (backtrack_label_.is_linked()) {
856  __ bind(&backtrack_label_);
857  Backtrack();
858  }
859 
860  Label exit_with_exception;
861 
862  // Preempt-code
863  if (check_preempt_label_.is_linked()) {
864  SafeCallTarget(&check_preempt_label_);
865 
866  __ push(backtrack_stackpointer());
867  __ push(edi);
868 
869  CallCheckStackGuardState(ebx);
870  __ or_(eax, eax);
871  // If returning non-zero, we should end execution with the given
872  // result as return value.
873  __ j(not_zero, &return_eax);
874 
875  __ pop(edi);
876  __ pop(backtrack_stackpointer());
877  // String might have moved: Reload esi from frame.
878  __ mov(esi, Operand(ebp, kInputEnd));
879  SafeReturn();
880  }
881 
882  // Backtrack stack overflow code.
883  if (stack_overflow_label_.is_linked()) {
884  SafeCallTarget(&stack_overflow_label_);
885  // Reached if the backtrack-stack limit has been hit.
886 
887  Label grow_failed;
888  // Save registers before calling C function
889  __ push(esi);
890  __ push(edi);
891 
892  // Call GrowStack(backtrack_stackpointer())
893  static const int num_arguments = 3;
894  __ PrepareCallCFunction(num_arguments, ebx);
895  __ mov(Operand(esp, 2 * kPointerSize),
896  Immediate(ExternalReference::isolate_address(isolate())));
897  __ lea(eax, Operand(ebp, kStackHighEnd));
898  __ mov(Operand(esp, 1 * kPointerSize), eax);
899  __ mov(Operand(esp, 0 * kPointerSize), backtrack_stackpointer());
900  ExternalReference grow_stack =
901  ExternalReference::re_grow_stack(isolate());
902  __ CallCFunction(grow_stack, num_arguments);
903  // If it returns NULL, we have failed to grow the stack, and
904  // must exit with a stack-overflow exception.
905  __ or_(eax, eax);
906  __ j(equal, &exit_with_exception);
907  // Otherwise use return value as new stack pointer.
908  __ mov(backtrack_stackpointer(), eax);
909  // Restore saved registers and continue.
910  __ pop(edi);
911  __ pop(esi);
912  SafeReturn();
913  }
914 
915  if (exit_with_exception.is_linked()) {
916  // If any of the code above needed to exit with an exception.
917  __ bind(&exit_with_exception);
918  // Exit with Result EXCEPTION(-1) to signal thrown exception.
919  __ mov(eax, EXCEPTION);
920  __ jmp(&return_eax);
921  }
922 
923  CodeDesc code_desc;
924  masm_->GetCode(&code_desc);
925  Handle<Code> code =
926  isolate()->factory()->NewCode(code_desc,
927  Code::ComputeFlags(Code::REGEXP),
928  masm_->CodeObject());
929  PROFILE(isolate(), RegExpCodeCreateEvent(*code, *source));
930  return Handle<HeapObject>::cast(code);
931 }
932 
933 
934 void RegExpMacroAssemblerIA32::GoTo(Label* to) {
935  BranchOrBacktrack(no_condition, to);
936 }
937 
938 
939 void RegExpMacroAssemblerIA32::IfRegisterGE(int reg,
940  int comparand,
941  Label* if_ge) {
942  __ cmp(register_location(reg), Immediate(comparand));
943  BranchOrBacktrack(greater_equal, if_ge);
944 }
945 
946 
947 void RegExpMacroAssemblerIA32::IfRegisterLT(int reg,
948  int comparand,
949  Label* if_lt) {
950  __ cmp(register_location(reg), Immediate(comparand));
951  BranchOrBacktrack(less, if_lt);
952 }
953 
954 
955 void RegExpMacroAssemblerIA32::IfRegisterEqPos(int reg,
956  Label* if_eq) {
957  __ cmp(edi, register_location(reg));
958  BranchOrBacktrack(equal, if_eq);
959 }
960 
961 
962 RegExpMacroAssembler::IrregexpImplementation
963  RegExpMacroAssemblerIA32::Implementation() {
964  return kIA32Implementation;
965 }
966 
967 
968 void RegExpMacroAssemblerIA32::LoadCurrentCharacter(int cp_offset,
969  Label* on_end_of_input,
970  bool check_bounds,
971  int characters) {
972  ASSERT(cp_offset >= -1); // ^ and \b can look behind one character.
973  ASSERT(cp_offset < (1<<30)); // Be sane! (And ensure negation works)
974  if (check_bounds) {
975  CheckPosition(cp_offset + characters - 1, on_end_of_input);
976  }
977  LoadCurrentCharacterUnchecked(cp_offset, characters);
978 }
979 
980 
981 void RegExpMacroAssemblerIA32::PopCurrentPosition() {
982  Pop(edi);
983 }
984 
985 
986 void RegExpMacroAssemblerIA32::PopRegister(int register_index) {
987  Pop(eax);
988  __ mov(register_location(register_index), eax);
989 }
990 
991 
992 void RegExpMacroAssemblerIA32::PushBacktrack(Label* label) {
993  Push(Immediate::CodeRelativeOffset(label));
994  CheckStackLimit();
995 }
996 
997 
998 void RegExpMacroAssemblerIA32::PushCurrentPosition() {
999  Push(edi);
1000 }
1001 
1002 
1003 void RegExpMacroAssemblerIA32::PushRegister(int register_index,
1004  StackCheckFlag check_stack_limit) {
1005  __ mov(eax, register_location(register_index));
1006  Push(eax);
1007  if (check_stack_limit) CheckStackLimit();
1008 }
1009 
1010 
1011 void RegExpMacroAssemblerIA32::ReadCurrentPositionFromRegister(int reg) {
1012  __ mov(edi, register_location(reg));
1013 }
1014 
1015 
1016 void RegExpMacroAssemblerIA32::ReadStackPointerFromRegister(int reg) {
1017  __ mov(backtrack_stackpointer(), register_location(reg));
1018  __ add(backtrack_stackpointer(), Operand(ebp, kStackHighEnd));
1019 }
1020 
1021 void RegExpMacroAssemblerIA32::SetCurrentPositionFromEnd(int by) {
1022  Label after_position;
1023  __ cmp(edi, -by * char_size());
1024  __ j(greater_equal, &after_position, Label::kNear);
1025  __ mov(edi, -by * char_size());
1026  // On RegExp code entry (where this operation is used), the character before
1027  // the current position is expected to be already loaded.
1028  // We have advanced the position, so it's safe to read backwards.
1029  LoadCurrentCharacterUnchecked(-1, 1);
1030  __ bind(&after_position);
1031 }
1032 
1033 
1034 void RegExpMacroAssemblerIA32::SetRegister(int register_index, int to) {
1035  ASSERT(register_index >= num_saved_registers_); // Reserved for positions!
1036  __ mov(register_location(register_index), Immediate(to));
1037 }
1038 
1039 
1040 bool RegExpMacroAssemblerIA32::Succeed() {
1041  __ jmp(&success_label_);
1042  return global();
1043 }
1044 
1045 
1046 void RegExpMacroAssemblerIA32::WriteCurrentPositionToRegister(int reg,
1047  int cp_offset) {
1048  if (cp_offset == 0) {
1049  __ mov(register_location(reg), edi);
1050  } else {
1051  __ lea(eax, Operand(edi, cp_offset * char_size()));
1052  __ mov(register_location(reg), eax);
1053  }
1054 }
1055 
1056 
1057 void RegExpMacroAssemblerIA32::ClearRegisters(int reg_from, int reg_to) {
1058  ASSERT(reg_from <= reg_to);
1059  __ mov(eax, Operand(ebp, kInputStartMinusOne));
1060  for (int reg = reg_from; reg <= reg_to; reg++) {
1061  __ mov(register_location(reg), eax);
1062  }
1063 }
1064 
1065 
1066 void RegExpMacroAssemblerIA32::WriteStackPointerToRegister(int reg) {
1067  __ mov(eax, backtrack_stackpointer());
1068  __ sub(eax, Operand(ebp, kStackHighEnd));
1069  __ mov(register_location(reg), eax);
1070 }
1071 
1072 
1073 // Private methods:
1074 
1075 void RegExpMacroAssemblerIA32::CallCheckStackGuardState(Register scratch) {
1076  static const int num_arguments = 3;
1077  __ PrepareCallCFunction(num_arguments, scratch);
1078  // RegExp code frame pointer.
1079  __ mov(Operand(esp, 2 * kPointerSize), ebp);
1080  // Code* of self.
1081  __ mov(Operand(esp, 1 * kPointerSize), Immediate(masm_->CodeObject()));
1082  // Next address on the stack (will be address of return address).
1083  __ lea(eax, Operand(esp, -kPointerSize));
1084  __ mov(Operand(esp, 0 * kPointerSize), eax);
1085  ExternalReference check_stack_guard =
1086  ExternalReference::re_check_stack_guard_state(isolate());
1087  __ CallCFunction(check_stack_guard, num_arguments);
1088 }
1089 
1090 
1091 // Helper function for reading a value out of a stack frame.
1092 template <typename T>
1093 static T& frame_entry(Address re_frame, int frame_offset) {
1094  return reinterpret_cast<T&>(Memory::int32_at(re_frame + frame_offset));
1095 }
1096 
1097 
1098 int RegExpMacroAssemblerIA32::CheckStackGuardState(Address* return_address,
1099  Code* re_code,
1100  Address re_frame) {
1101  Isolate* isolate = frame_entry<Isolate*>(re_frame, kIsolate);
1102  if (isolate->stack_guard()->IsStackOverflow()) {
1103  isolate->StackOverflow();
1104  return EXCEPTION;
1105  }
1106 
1107  // If this is not a real stack overflow, the stack guard was used to
1108  // interrupt execution for another purpose.
1109 
1110  // If this is a direct call from JavaScript retry the RegExp forcing the call
1111  // through the runtime system. Currently the direct call cannot handle a GC.
1112  if (frame_entry<int>(re_frame, kDirectCall) == 1) {
1113  return RETRY;
1114  }
1115 
1116  // Prepare for possible GC.
1117  HandleScope handles(isolate);
1118  Handle<Code> code_handle(re_code);
1119 
1120  Handle<String> subject(frame_entry<String*>(re_frame, kInputString));
1121 
1122  // Current string.
1123  bool is_ascii = subject->IsOneByteRepresentationUnderneath();
1124 
1125  ASSERT(re_code->instruction_start() <= *return_address);
1126  ASSERT(*return_address <=
1127  re_code->instruction_start() + re_code->instruction_size());
1128 
1129  MaybeObject* result = Execution::HandleStackGuardInterrupt(isolate);
1130 
1131  if (*code_handle != re_code) { // Return address no longer valid
1132  int delta = code_handle->address() - re_code->address();
1133  // Overwrite the return address on the stack.
1134  *return_address += delta;
1135  }
1136 
1137  if (result->IsException()) {
1138  return EXCEPTION;
1139  }
1140 
1141  Handle<String> subject_tmp = subject;
1142  int slice_offset = 0;
1143 
1144  // Extract the underlying string and the slice offset.
1145  if (StringShape(*subject_tmp).IsCons()) {
1146  subject_tmp = Handle<String>(ConsString::cast(*subject_tmp)->first());
1147  } else if (StringShape(*subject_tmp).IsSliced()) {
1148  SlicedString* slice = SlicedString::cast(*subject_tmp);
1149  subject_tmp = Handle<String>(slice->parent());
1150  slice_offset = slice->offset();
1151  }
1152 
1153  // String might have changed.
1154  if (subject_tmp->IsOneByteRepresentation() != is_ascii) {
1155  // If we changed between an ASCII and an UC16 string, the specialized
1156  // code cannot be used, and we need to restart regexp matching from
1157  // scratch (including, potentially, compiling a new version of the code).
1158  return RETRY;
1159  }
1160 
1161  // Otherwise, the content of the string might have moved. It must still
1162  // be a sequential or external string with the same content.
1163  // Update the start and end pointers in the stack frame to the current
1164  // location (whether it has actually moved or not).
1165  ASSERT(StringShape(*subject_tmp).IsSequential() ||
1166  StringShape(*subject_tmp).IsExternal());
1167 
1168  // The original start address of the characters to match.
1169  const byte* start_address = frame_entry<const byte*>(re_frame, kInputStart);
1170 
1171  // Find the current start address of the same character at the current string
1172  // position.
1173  int start_index = frame_entry<int>(re_frame, kStartIndex);
1174  const byte* new_address = StringCharacterPosition(*subject_tmp,
1175  start_index + slice_offset);
1176 
1177  if (start_address != new_address) {
1178  // If there is a difference, update the object pointer and start and end
1179  // addresses in the RegExp stack frame to match the new value.
1180  const byte* end_address = frame_entry<const byte* >(re_frame, kInputEnd);
1181  int byte_length = static_cast<int>(end_address - start_address);
1182  frame_entry<const String*>(re_frame, kInputString) = *subject;
1183  frame_entry<const byte*>(re_frame, kInputStart) = new_address;
1184  frame_entry<const byte*>(re_frame, kInputEnd) = new_address + byte_length;
1185  } else if (frame_entry<const String*>(re_frame, kInputString) != *subject) {
1186  // Subject string might have been a ConsString that underwent
1187  // short-circuiting during GC. That will not change start_address but
1188  // will change the pointer inside the subject handle.
1189  frame_entry<const String*>(re_frame, kInputString) = *subject;
1190  }
1191 
1192  return 0;
1193 }
1194 
1195 
1196 Operand RegExpMacroAssemblerIA32::register_location(int register_index) {
1197  ASSERT(register_index < (1<<30));
1198  if (num_registers_ <= register_index) {
1199  num_registers_ = register_index + 1;
1200  }
1201  return Operand(ebp, kRegisterZero - register_index * kPointerSize);
1202 }
1203 
1204 
1205 void RegExpMacroAssemblerIA32::CheckPosition(int cp_offset,
1206  Label* on_outside_input) {
1207  __ cmp(edi, -cp_offset * char_size());
1208  BranchOrBacktrack(greater_equal, on_outside_input);
1209 }
1210 
1211 
1212 void RegExpMacroAssemblerIA32::BranchOrBacktrack(Condition condition,
1213  Label* to) {
1214  if (condition < 0) { // No condition
1215  if (to == NULL) {
1216  Backtrack();
1217  return;
1218  }
1219  __ jmp(to);
1220  return;
1221  }
1222  if (to == NULL) {
1223  __ j(condition, &backtrack_label_);
1224  return;
1225  }
1226  __ j(condition, to);
1227 }
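// Usage pattern of the helper above: BranchOrBacktrack(no_condition, NULL)
// backtracks unconditionally, BranchOrBacktrack(cond, NULL) jumps to the
// shared backtrack code when cond holds, and BranchOrBacktrack(cond, label)
// is an ordinary conditional branch to label.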
1228 
1229 
1230 void RegExpMacroAssemblerIA32::SafeCall(Label* to) {
1231  Label return_to;
1232  __ push(Immediate::CodeRelativeOffset(&return_to));
1233  __ jmp(to);
1234  __ bind(&return_to);
1235 }
1236 
1237 
1238 void RegExpMacroAssemblerIA32::SafeReturn() {
1239  __ pop(ebx);
1240  __ add(ebx, Immediate(masm_->CodeObject()));
1241  __ jmp(ebx);
1242 }
1243 
1244 
1245 void RegExpMacroAssemblerIA32::SafeCallTarget(Label* name) {
1246  __ bind(name);
1247 }
1248 
1249 
1250 void RegExpMacroAssemblerIA32::Push(Register source) {
1251  ASSERT(!source.is(backtrack_stackpointer()));
1252  // Notice: This updates flags, unlike normal Push.
1253  __ sub(backtrack_stackpointer(), Immediate(kPointerSize));
1254  __ mov(Operand(backtrack_stackpointer(), 0), source);
1255 }
1256 
1257 
1258 void RegExpMacroAssemblerIA32::Push(Immediate value) {
1259  // Notice: This updates flags, unlike normal Push.
1260  __ sub(backtrack_stackpointer(), Immediate(kPointerSize));
1261  __ mov(Operand(backtrack_stackpointer(), 0), value);
1262 }
1263 
1264 
1265 void RegExpMacroAssemblerIA32::Pop(Register target) {
1266  ASSERT(!target.is(backtrack_stackpointer()));
1267  __ mov(target, Operand(backtrack_stackpointer(), 0));
1268  // Notice: This updates flags, unlike normal Pop.
1269  __ add(backtrack_stackpointer(), Immediate(kPointerSize));
1270 }
1271 
1272 
1273 void RegExpMacroAssemblerIA32::CheckPreemption() {
1274  // Check for preemption.
1275  Label no_preempt;
1276  ExternalReference stack_limit =
1277  ExternalReference::address_of_stack_limit(isolate());
1278  __ cmp(esp, Operand::StaticVariable(stack_limit));
1279  __ j(above, &no_preempt);
1280 
1281  SafeCall(&check_preempt_label_);
1282 
1283  __ bind(&no_preempt);
1284 }
1285 
1286 
1287 void RegExpMacroAssemblerIA32::CheckStackLimit() {
1288  Label no_stack_overflow;
1289  ExternalReference stack_limit =
1290  ExternalReference::address_of_regexp_stack_limit(isolate());
1291  __ cmp(backtrack_stackpointer(), Operand::StaticVariable(stack_limit));
1292  __ j(above, &no_stack_overflow);
1293 
1294  SafeCall(&stack_overflow_label_);
1295 
1296  __ bind(&no_stack_overflow);
1297 }
1298 
1299 
1300 void RegExpMacroAssemblerIA32::LoadCurrentCharacterUnchecked(int cp_offset,
1301  int characters) {
1302  if (mode_ == ASCII) {
1303  if (characters == 4) {
1304  __ mov(current_character(), Operand(esi, edi, times_1, cp_offset));
1305  } else if (characters == 2) {
1306  __ movzx_w(current_character(), Operand(esi, edi, times_1, cp_offset));
1307  } else {
1308  ASSERT(characters == 1);
1309  __ movzx_b(current_character(), Operand(esi, edi, times_1, cp_offset));
1310  }
1311  } else {
1312  ASSERT(mode_ == UC16);
1313  if (characters == 2) {
1314  __ mov(current_character(),
1315  Operand(esi, edi, times_1, cp_offset * sizeof(uc16)));
1316  } else {
1317  ASSERT(characters == 1);
1318  __ movzx_w(current_character(),
1319  Operand(esi, edi, times_1, cp_offset * sizeof(uc16)));
1320  }
1321  }
1322 }
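// In one-byte mode the loader above can fetch up to four characters with a
// single (possibly unaligned) 32-bit load, and two with a 16-bit load, which
// callers can use to test several characters at once. Conceptually (sketch
// only):
//
//   const uint8_t* p = input_end + current_position + cp_offset;  // esi+edi+offset
//   uint32_t four_chars = *reinterpret_cast<const uint32_t*>(p);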
1323 
1324 
1325 #undef __
1326 
1327 #endif // V8_INTERPRETED_REGEXP
1328 
1329 }} // namespace v8::internal
1330 
1331 #endif // V8_TARGET_ARCH_IA32