v8 3.14.5 (node 0.10.28)
V8 is Google's open source JavaScript engine.
code-stubs-ia32.h
1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #ifndef V8_IA32_CODE_STUBS_IA32_H_
29 #define V8_IA32_CODE_STUBS_IA32_H_
30 
31 #include "macro-assembler.h"
32 #include "code-stubs.h"
33 #include "ic-inl.h"
34 
35 namespace v8 {
36 namespace internal {
37 
38 
39 // Compute a transcendental math function natively, or call the
40 // TranscendentalCache runtime function.
41 class TranscendentalCacheStub: public CodeStub {
42  public:
43  enum ArgumentType {
44  TAGGED = 0,
45  UNTAGGED = 1 << TranscendentalCache::kTranscendentalTypeBits
46  };
47 
48  TranscendentalCacheStub(TranscendentalCache::Type type,
49                          ArgumentType argument_type)
50  : type_(type), argument_type_(argument_type) {}
51  void Generate(MacroAssembler* masm);
52  static void GenerateOperation(MacroAssembler* masm,
53                                TranscendentalCache::Type type);
54  private:
55  TranscendentalCache::Type type_;
56  ArgumentType argument_type_;
57 
58  Major MajorKey() { return TranscendentalCache; }
59  int MinorKey() { return type_ | argument_type_; }
60  Runtime::FunctionId RuntimeFunction();
61 };
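// Illustrative sketch, not part of the original header: because UNTAGGED is
// defined as 1 << TranscendentalCache::kTranscendentalTypeBits, the cache
// type and the argument type occupy disjoint bit ranges, so MinorKey() can
// combine them with a plain bitwise OR. A hypothetical caller:
//
//   TranscendentalCacheStub stub(TranscendentalCache::SIN,
//                                TranscendentalCacheStub::UNTAGGED);
//   // minor key == TranscendentalCache::SIN
//   //              | (1 << TranscendentalCache::kTranscendentalTypeBits)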
62 
63 
64 class StoreBufferOverflowStub: public CodeStub {
65  public:
66  explicit StoreBufferOverflowStub(SaveFPRegsMode save_fp)
67      : save_doubles_(save_fp) { }
68 
69  void Generate(MacroAssembler* masm);
70 
71  virtual bool IsPregenerated() { return true; }
72  static void GenerateFixedRegStubsAheadOfTime();
73  virtual bool SometimesSetsUpAFrame() { return false; }
74 
75  private:
76  SaveFPRegsMode save_doubles_;
77 
78  Major MajorKey() { return StoreBufferOverflow; }
79  int MinorKey() { return (save_doubles_ == kSaveFPRegs) ? 1 : 0; }
80 };
81 
82 
83 class UnaryOpStub: public CodeStub {
84  public:
85  UnaryOpStub(Token::Value op,
86              UnaryOverwriteMode mode,
87              UnaryOpIC::TypeInfo operand_type = UnaryOpIC::UNINITIALIZED)
88      : op_(op),
89  mode_(mode),
90  operand_type_(operand_type) {
91  }
92 
93  private:
94  Token::Value op_;
95  UnaryOverwriteMode mode_;
96 
97  // Operand type information determined at runtime.
98  UnaryOpIC::TypeInfo operand_type_;
99 
100  virtual void PrintName(StringStream* stream);
101 
102  class ModeBits: public BitField<UnaryOverwriteMode, 0, 1> {};
103  class OpBits: public BitField<Token::Value, 1, 7> {};
104  class OperandTypeInfoBits: public BitField<UnaryOpIC::TypeInfo, 8, 3> {};
105 
106  Major MajorKey() { return UnaryOp; }
107  int MinorKey() {
108  return ModeBits::encode(mode_)
109  | OpBits::encode(op_)
110  | OperandTypeInfoBits::encode(operand_type_);
111  }
112 
113  // Note: A lot of the helper functions below will vanish when we use virtual
114 // functions instead of switches more often.
115  void Generate(MacroAssembler* masm);
116 
117  void GenerateTypeTransition(MacroAssembler* masm);
118 
119  void GenerateSmiStub(MacroAssembler* masm);
120  void GenerateSmiStubSub(MacroAssembler* masm);
121  void GenerateSmiStubBitNot(MacroAssembler* masm);
122  void GenerateSmiCodeSub(MacroAssembler* masm,
123  Label* non_smi,
124  Label* undo,
125  Label* slow,
126  Label::Distance non_smi_near = Label::kFar,
127  Label::Distance undo_near = Label::kFar,
128  Label::Distance slow_near = Label::kFar);
129  void GenerateSmiCodeBitNot(MacroAssembler* masm,
130  Label* non_smi,
131  Label::Distance non_smi_near = Label::kFar);
132  void GenerateSmiCodeUndo(MacroAssembler* masm);
133 
134  void GenerateHeapNumberStub(MacroAssembler* masm);
135  void GenerateHeapNumberStubSub(MacroAssembler* masm);
136  void GenerateHeapNumberStubBitNot(MacroAssembler* masm);
137  void GenerateHeapNumberCodeSub(MacroAssembler* masm, Label* slow);
138  void GenerateHeapNumberCodeBitNot(MacroAssembler* masm, Label* slow);
139 
140  void GenerateGenericStub(MacroAssembler* masm);
141  void GenerateGenericStubSub(MacroAssembler* masm);
142  void GenerateGenericStubBitNot(MacroAssembler* masm);
143  void GenerateGenericCodeFallback(MacroAssembler* masm);
144 
145  virtual int GetCodeKind() { return Code::UNARY_OP_IC; }
146 
147  virtual InlineCacheState GetICState() {
148  return UnaryOpIC::ToState(operand_type_);
149  }
150 
151  virtual void FinishCode(Handle<Code> code) {
152  code->set_unary_op_type(operand_type_);
153  }
154 };
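// Illustrative sketch, not part of the original header: the BitField helpers
// above lay the minor key out as [operand type: bits 8..10][op: bits 1..7]
// [overwrite mode: bit 0]. For example, a hypothetical negation stub
//
//   UnaryOpStub stub(Token::SUB, UNARY_OVERWRITE);
//
// gets the key ModeBits::encode(UNARY_OVERWRITE) | OpBits::encode(Token::SUB)
// | OperandTypeInfoBits::encode(UnaryOpIC::UNINITIALIZED), and the same
// BitField classes can decode each field back out of that key.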
155 
156 
157 class BinaryOpStub: public CodeStub {
158  public:
159  BinaryOpStub(Token::Value op, OverwriteMode mode)
160      : op_(op),
161  mode_(mode),
162  operands_type_(BinaryOpIC::UNINITIALIZED),
163  result_type_(BinaryOpIC::UNINITIALIZED) {
164  use_sse3_ = CpuFeatures::IsSupported(SSE3);
165  ASSERT(OpBits::is_valid(Token::NUM_TOKENS));
166  }
167 
168  BinaryOpStub(
169      int key,
170      BinaryOpIC::TypeInfo operands_type,
171      BinaryOpIC::TypeInfo result_type = BinaryOpIC::UNINITIALIZED)
172      : op_(OpBits::decode(key)),
173  mode_(ModeBits::decode(key)),
174  use_sse3_(SSE3Bits::decode(key)),
175  operands_type_(operands_type),
176  result_type_(result_type) { }
177 
178  private:
179  enum SmiCodeGenerateHeapNumberResults {
180  ALLOW_HEAPNUMBER_RESULTS,
181  NO_HEAPNUMBER_RESULTS
182  };
183 
184  Token::Value op_;
185  OverwriteMode mode_;
186  bool use_sse3_;
187 
188  // Operand type information determined at runtime.
189  BinaryOpIC::TypeInfo operands_type_;
190  BinaryOpIC::TypeInfo result_type_;
191 
192  virtual void PrintName(StringStream* stream);
193 
194  // Minor key encoding in 16 bits RRRTTTSOOOOOOOMM.
195  class ModeBits: public BitField<OverwriteMode, 0, 2> {};
196  class OpBits: public BitField<Token::Value, 2, 7> {};
197  class SSE3Bits: public BitField<bool, 9, 1> {};
198  class OperandTypeInfoBits: public BitField<BinaryOpIC::TypeInfo, 10, 3> {};
199  class ResultTypeInfoBits: public BitField<BinaryOpIC::TypeInfo, 13, 3> {};
200 
201  Major MajorKey() { return BinaryOp; }
202  int MinorKey() {
203  return OpBits::encode(op_)
204  | ModeBits::encode(mode_)
205  | SSE3Bits::encode(use_sse3_)
206  | OperandTypeInfoBits::encode(operands_type_)
207  | ResultTypeInfoBits::encode(result_type_);
208  }
209 
210  void Generate(MacroAssembler* masm);
211  void GenerateGeneric(MacroAssembler* masm);
212  void GenerateSmiCode(MacroAssembler* masm,
213  Label* slow,
214  SmiCodeGenerateHeapNumberResults heapnumber_results);
215  void GenerateLoadArguments(MacroAssembler* masm);
216  void GenerateReturn(MacroAssembler* masm);
217  void GenerateUninitializedStub(MacroAssembler* masm);
218  void GenerateSmiStub(MacroAssembler* masm);
219  void GenerateInt32Stub(MacroAssembler* masm);
220  void GenerateHeapNumberStub(MacroAssembler* masm);
221  void GenerateOddballStub(MacroAssembler* masm);
222  void GenerateStringStub(MacroAssembler* masm);
223  void GenerateBothStringStub(MacroAssembler* masm);
224  void GenerateGenericStub(MacroAssembler* masm);
225  void GenerateAddStrings(MacroAssembler* masm);
226 
227  void GenerateHeapResultAllocation(MacroAssembler* masm, Label* alloc_failure);
228  void GenerateRegisterArgsPush(MacroAssembler* masm);
229  void GenerateTypeTransition(MacroAssembler* masm);
230  void GenerateTypeTransitionWithSavedArgs(MacroAssembler* masm);
231 
232  virtual int GetCodeKind() { return Code::BINARY_OP_IC; }
233 
234  virtual InlineCacheState GetICState() {
235  return BinaryOpIC::ToState(operands_type_);
236  }
237 
238  virtual void FinishCode(Handle<Code> code) {
239  code->set_binary_op_type(operands_type_);
240  code->set_binary_op_result_type(result_type_);
241  }
242 
243  friend class CodeGenerator;
244 };
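// Illustrative breakdown, derived from the BitField declarations above.
// Reading the 16-bit minor key "RRRTTTSOOOOOOOMM" from most to least
// significant bit:
//   bits 13..15  R  result type info   (ResultTypeInfoBits)
//   bits 10..12  T  operand type info  (OperandTypeInfoBits)
//   bit       9  S  SSE3 available     (SSE3Bits)
//   bits   2..8  O  Token::Value op    (OpBits)
//   bits   0..1  M  OverwriteMode      (ModeBits)
// The second constructor reverses this packing with the *Bits::decode(key)
// calls, so a stub can be reconstructed from a previously recorded key.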
245 
246 
247 class StringHelper : public AllStatic {
248  public:
249  // Generate code for copying characters using a simple loop. This should only
250  // be used in places where the number of characters is small and the
251  // additional setup and checking in GenerateCopyCharactersREP adds too much
252  // overhead. Copying of overlapping regions is not supported.
253  static void GenerateCopyCharacters(MacroAssembler* masm,
254  Register dest,
255  Register src,
256  Register count,
257  Register scratch,
258  bool ascii);
259 
260  // Generate code for copying characters using the rep movs instruction.
261  // Copies ecx characters from esi to edi. Copying of overlapping regions is
262  // not supported.
263  static void GenerateCopyCharactersREP(MacroAssembler* masm,
264  Register dest, // Must be edi.
265  Register src, // Must be esi.
266  Register count, // Must be ecx.
267  Register scratch, // Neither of above.
268  bool ascii);
269 
270  // Probe the symbol table for a two character string. If the string
271  // requires non-standard hashing a jump to the label not_probed is
272  // performed and registers c1 and c2 are preserved. In all other
273  // cases they are clobbered. If the string is not found by probing a
274  // jump to the label not_found is performed. This jump does not
275  // guarantee that the string is not in the symbol table. If the
276  // string is found the code falls through with the string in
277  // register eax.
278  static void GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
279  Register c1,
280  Register c2,
281  Register scratch1,
282  Register scratch2,
283  Register scratch3,
284  Label* not_probed,
285  Label* not_found);
286 
287  // Generate string hash.
288  static void GenerateHashInit(MacroAssembler* masm,
289  Register hash,
290  Register character,
291  Register scratch);
292  static void GenerateHashAddCharacter(MacroAssembler* masm,
293  Register hash,
294  Register character,
295  Register scratch);
296  static void GenerateHashGetHash(MacroAssembler* masm,
297  Register hash,
298  Register scratch);
299 
300  private:
301  DISALLOW_IMPLICIT_CONSTRUCTORS(StringHelper);
302 };
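// Illustrative call, not part of the original header: GenerateCopyCharactersREP
// hard-codes the x86 string-move registers, so a hypothetical caller must pass
// exactly edi/esi/ecx plus one unrelated scratch register, e.g.
//
//   StringHelper::GenerateCopyCharactersREP(masm, edi, esi, ecx, ebx,
//                                           true /* ascii */);
//
// GenerateCopyCharacters imposes no register constraints and is the better
// choice for short copies where the rep movs setup cost would dominate.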
303 
304 
305 // Flag that indicates how to generate code for the stub StringAddStub.
306 enum StringAddFlags {
307  NO_STRING_ADD_FLAGS = 0,
308  // Omit left string check in stub (left is definitely a string).
309  NO_STRING_CHECK_LEFT_IN_STUB = 1 << 0,
310  // Omit right string check in stub (right is definitely a string).
311  NO_STRING_CHECK_RIGHT_IN_STUB = 1 << 1,
312  // Omit both string checks in stub.
313  NO_STRING_CHECK_IN_STUB =
314      NO_STRING_CHECK_LEFT_IN_STUB | NO_STRING_CHECK_RIGHT_IN_STUB
315 };
316 
317 
318 class StringAddStub: public CodeStub {
319  public:
320  explicit StringAddStub(StringAddFlags flags) : flags_(flags) {}
321 
322  private:
323  Major MajorKey() { return StringAdd; }
324  int MinorKey() { return flags_; }
325 
326  void Generate(MacroAssembler* masm);
327 
328  void GenerateConvertArgument(MacroAssembler* masm,
329  int stack_offset,
330  Register arg,
331  Register scratch1,
332  Register scratch2,
333  Register scratch3,
334  Label* slow);
335 
336  const StringAddFlags flags_;
337 };
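// Illustrative note, not part of the original header: the flags combine by
// bitwise OR, so requesting both one-sided omissions is equivalent to
// NO_STRING_CHECK_IN_STUB:
//
//   StringAddStub stub(static_cast<StringAddFlags>(
//       NO_STRING_CHECK_LEFT_IN_STUB | NO_STRING_CHECK_RIGHT_IN_STUB));
//   // same minor key as StringAddStub(NO_STRING_CHECK_IN_STUB)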
338 
339 
340 class SubStringStub: public CodeStub {
341  public:
342  SubStringStub() {}
343 
344  private:
345  Major MajorKey() { return SubString; }
346  int MinorKey() { return 0; }
347 
348  void Generate(MacroAssembler* masm);
349 };
350 
351 
352 class StringCompareStub: public CodeStub {
353  public:
354  StringCompareStub() {}
355 
356  // Compares two flat ASCII strings and returns result in eax.
357  static void GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
358                                              Register left,
359  Register right,
360  Register scratch1,
361  Register scratch2,
362  Register scratch3);
363 
364  // Compares two flat ASCII strings for equality and returns result
365  // in eax.
366  static void GenerateFlatAsciiStringEquals(MacroAssembler* masm,
367                                            Register left,
368  Register right,
369  Register scratch1,
370  Register scratch2);
371 
372  private:
373  virtual Major MajorKey() { return StringCompare; }
374  virtual int MinorKey() { return 0; }
375  virtual void Generate(MacroAssembler* masm);
376 
377  static void GenerateAsciiCharsCompareLoop(
378  MacroAssembler* masm,
379  Register left,
380  Register right,
381  Register length,
382  Register scratch,
383  Label* chars_not_equal,
384  Label::Distance chars_not_equal_near = Label::kFar);
385 };
386 
387 
388 class NumberToStringStub: public CodeStub {
389  public:
390  NumberToStringStub() { }
391 
392  // Generate code to do a lookup in the number string cache. If the number in
393  // the register object is found in the cache the generated code falls through
394  // with the result in the result register. The object and the result register
395  // can be the same. If the number is not found in the cache the code jumps to
396  // the label not_found with only the content of register object unchanged.
397  static void GenerateLookupNumberStringCache(MacroAssembler* masm,
398                                              Register object,
399  Register result,
400  Register scratch1,
401  Register scratch2,
402  bool object_is_smi,
403  Label* not_found);
404 
405  private:
406  Major MajorKey() { return NumberToString; }
407  int MinorKey() { return 0; }
408 
409  void Generate(MacroAssembler* masm);
410 };
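// Illustrative sketch, not part of the original header: a hypothetical caller
// that already holds the number in eax and wants the cached string in eax as
// well (object and result may be the same register, per the comment above)
// could emit
//
//   Label not_in_cache;
//   NumberToStringStub::GenerateLookupNumberStringCache(
//       masm, eax, eax, ebx, edx, false /* object_is_smi */, &not_in_cache);
//   // falls through here with the cached string in eax
//   masm->bind(&not_in_cache);  // cache miss: go to the runtime instead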
411 
412 
413 class StringDictionaryLookupStub: public CodeStub {
414  public:
415  enum LookupMode { POSITIVE_LOOKUP, NEGATIVE_LOOKUP };
416 
417  StringDictionaryLookupStub(Register dictionary,
418                             Register result,
419  Register index,
420  LookupMode mode)
421  : dictionary_(dictionary), result_(result), index_(index), mode_(mode) { }
422 
423  void Generate(MacroAssembler* masm);
424 
425  static void GenerateNegativeLookup(MacroAssembler* masm,
426  Label* miss,
427  Label* done,
428  Register properties,
429  Handle<String> name,
430  Register r0);
431 
432  static void GeneratePositiveLookup(MacroAssembler* masm,
433  Label* miss,
434  Label* done,
435  Register elements,
436  Register name,
437  Register r0,
438  Register r1);
439 
440  virtual bool SometimesSetsUpAFrame() { return false; }
441 
442  private:
443  static const int kInlinedProbes = 4;
444  static const int kTotalProbes = 20;
445 
446  static const int kCapacityOffset =
447      StringDictionary::kHeaderSize +
448      StringDictionary::kCapacityIndex * kPointerSize;
449 
450  static const int kElementsStartOffset =
451      StringDictionary::kHeaderSize +
452      StringDictionary::kElementsStartIndex * kPointerSize;
453 
454  Major MajorKey() { return StringDictionaryLookup; }
455 
456  int MinorKey() {
457  return DictionaryBits::encode(dictionary_.code()) |
458  ResultBits::encode(result_.code()) |
459  IndexBits::encode(index_.code()) |
460  LookupModeBits::encode(mode_);
461  }
462 
463  class DictionaryBits: public BitField<int, 0, 3> {};
464  class ResultBits: public BitField<int, 3, 3> {};
465  class IndexBits: public BitField<int, 6, 3> {};
466  class LookupModeBits: public BitField<LookupMode, 9, 1> {};
467 
468  Register dictionary_;
469  Register result_;
470  Register index_;
471  LookupMode mode_;
472 };
473 
474 
475 class RecordWriteStub: public CodeStub {
476  public:
477  RecordWriteStub(Register object,
478                  Register value,
479  Register address,
480  RememberedSetAction remembered_set_action,
481  SaveFPRegsMode fp_mode)
482  : object_(object),
483  value_(value),
484  address_(address),
485  remembered_set_action_(remembered_set_action),
486  save_fp_regs_mode_(fp_mode),
487  regs_(object, // An input reg.
488  address, // An input reg.
489  value) { // One scratch reg.
490  }
491 
492  enum Mode {
493  STORE_BUFFER_ONLY,
494  INCREMENTAL,
495  INCREMENTAL_COMPACTION
496  };
497 
498  virtual bool IsPregenerated();
499  static void GenerateFixedRegStubsAheadOfTime();
500  virtual bool SometimesSetsUpAFrame() { return false; }
501 
502  static const byte kTwoByteNopInstruction = 0x3c; // Cmpb al, #imm8.
503  static const byte kTwoByteJumpInstruction = 0xeb; // Jmp #imm8.
504 
505  static const byte kFiveByteNopInstruction = 0x3d; // Cmpl eax, #imm32.
506  static const byte kFiveByteJumpInstruction = 0xe9; // Jmp #imm32.
507 
508  static Mode GetMode(Code* stub) {
509  byte first_instruction = stub->instruction_start()[0];
510  byte second_instruction = stub->instruction_start()[2];
511 
512  if (first_instruction == kTwoByteJumpInstruction) {
513  return INCREMENTAL;
514  }
515 
516  ASSERT(first_instruction == kTwoByteNopInstruction);
517 
518  if (second_instruction == kFiveByteJumpInstruction) {
519  return INCREMENTAL_COMPACTION;
520  }
521 
522  ASSERT(second_instruction == kFiveByteNopInstruction);
523 
524  return STORE_BUFFER_ONLY;
525  }
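// Illustrative summary, derived from GetMode() above and the instruction
// constants: the first seven bytes of a RecordWrite stub encode its current
// mode, and Patch() below switches states by rewriting bytes 0 and 2.
//   STORE_BUFFER_ONLY:       3c ??  3d ?? ?? ?? ??   (cmpb al,#imm8; cmpl eax,#imm32)
//   INCREMENTAL:             eb ??  ...              (jmp rel8)
//   INCREMENTAL_COMPACTION:  3c ??  e9 ?? ?? ?? ??   (cmpb al,#imm8; jmp rel32)
// The cmp encodings act as nops here, so in the nop states execution simply
// continues past the skip points.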
526 
527  static void Patch(Code* stub, Mode mode) {
528  switch (mode) {
529  case STORE_BUFFER_ONLY:
530  ASSERT(GetMode(stub) == INCREMENTAL ||
531         GetMode(stub) == INCREMENTAL_COMPACTION);
532  stub->instruction_start()[0] = kTwoByteNopInstruction;
533  stub->instruction_start()[2] = kFiveByteNopInstruction;
534  break;
535  case INCREMENTAL:
536  ASSERT(GetMode(stub) == STORE_BUFFER_ONLY);
537  stub->instruction_start()[0] = kTwoByteJumpInstruction;
538  break;
539  case INCREMENTAL_COMPACTION:
540  ASSERT(GetMode(stub) == STORE_BUFFER_ONLY);
541  stub->instruction_start()[0] = kTwoByteNopInstruction;
542  stub->instruction_start()[2] = kFiveByteJumpInstruction;
543  break;
544  }
545  ASSERT(GetMode(stub) == mode);
546  CPU::FlushICache(stub->instruction_start(), 7);
547  }
548 
549  private:
550  // This is a helper class for freeing up 3 scratch registers, where the third
551  // is always ecx (needed for shift operations). The input is two registers
552  // that must be preserved and one scratch register provided by the caller.
553  class RegisterAllocation {
554  public:
555  RegisterAllocation(Register object,
556  Register address,
557  Register scratch0)
558  : object_orig_(object),
559  address_orig_(address),
560  scratch0_orig_(scratch0),
561  object_(object),
562  address_(address),
563  scratch0_(scratch0) {
564  ASSERT(!AreAliased(scratch0, object, address, no_reg));
565  scratch1_ = GetRegThatIsNotEcxOr(object_, address_, scratch0_);
566  if (scratch0.is(ecx)) {
567  scratch0_ = GetRegThatIsNotEcxOr(object_, address_, scratch1_);
568  }
569  if (object.is(ecx)) {
570  object_ = GetRegThatIsNotEcxOr(address_, scratch0_, scratch1_);
571  }
572  if (address.is(ecx)) {
573  address_ = GetRegThatIsNotEcxOr(object_, scratch0_, scratch1_);
574  }
575  ASSERT(!AreAliased(scratch0_, object_, address_, ecx));
576  }
577 
578  void Save(MacroAssembler* masm) {
579  ASSERT(!address_orig_.is(object_));
580  ASSERT(object_.is(object_orig_) || address_.is(address_orig_));
581  ASSERT(!AreAliased(object_, address_, scratch1_, scratch0_));
582  ASSERT(!AreAliased(object_orig_, address_, scratch1_, scratch0_));
583  ASSERT(!AreAliased(object_, address_orig_, scratch1_, scratch0_));
584  // We don't have to save scratch0_orig_ because it was given to us as
585  // a scratch register. But if we had to switch to a different reg then
586  // we should save the new scratch0_.
587  if (!scratch0_.is(scratch0_orig_)) masm->push(scratch0_);
588  if (!ecx.is(scratch0_orig_) &&
589  !ecx.is(object_orig_) &&
590  !ecx.is(address_orig_)) {
591  masm->push(ecx);
592  }
593  masm->push(scratch1_);
594  if (!address_.is(address_orig_)) {
595  masm->push(address_);
596  masm->mov(address_, address_orig_);
597  }
598  if (!object_.is(object_orig_)) {
599  masm->push(object_);
600  masm->mov(object_, object_orig_);
601  }
602  }
603 
604  void Restore(MacroAssembler* masm) {
605  // These will have been preserved the entire time, so we just need to move
606  // them back. Only in one case is the orig_ reg different from the plain
607  // one, since only one of them can alias with ecx.
608  if (!object_.is(object_orig_)) {
609  masm->mov(object_orig_, object_);
610  masm->pop(object_);
611  }
612  if (!address_.is(address_orig_)) {
613  masm->mov(address_orig_, address_);
614  masm->pop(address_);
615  }
616  masm->pop(scratch1_);
617  if (!ecx.is(scratch0_orig_) &&
618  !ecx.is(object_orig_) &&
619  !ecx.is(address_orig_)) {
620  masm->pop(ecx);
621  }
622  if (!scratch0_.is(scratch0_orig_)) masm->pop(scratch0_);
623  }
624 
625  // If we have to call into C then we need to save and restore all caller-
626  // saved registers that were not already preserved. The caller saved
627  // registers are eax, ecx and edx. The three scratch registers (incl. ecx)
628  // will be restored by other means so we don't bother pushing them here.
629  void SaveCallerSaveRegisters(MacroAssembler* masm, SaveFPRegsMode mode) {
630  if (!scratch0_.is(eax) && !scratch1_.is(eax)) masm->push(eax);
631  if (!scratch0_.is(edx) && !scratch1_.is(edx)) masm->push(edx);
632  if (mode == kSaveFPRegs) {
633  CpuFeatures::Scope scope(SSE2);
634  masm->sub(esp,
635  Immediate(kDoubleSize * (XMMRegister::kNumRegisters - 1)));
636  // Save all XMM registers except XMM0.
637  for (int i = XMMRegister::kNumRegisters - 1; i > 0; i--) {
638  XMMRegister reg = XMMRegister::from_code(i);
639  masm->movdbl(Operand(esp, (i - 1) * kDoubleSize), reg);
640  }
641  }
642  }
643 
644  inline void RestoreCallerSaveRegisters(MacroAssembler*masm,
645  SaveFPRegsMode mode) {
646  if (mode == kSaveFPRegs) {
647  CpuFeatures::Scope scope(SSE2);
648  // Restore all XMM registers except XMM0.
649  for (int i = XMMRegister::kNumRegisters - 1; i > 0; i--) {
650  XMMRegister reg = XMMRegister::from_code(i);
651  masm->movdbl(reg, Operand(esp, (i - 1) * kDoubleSize));
652  }
653  masm->add(esp,
654  Immediate(kDoubleSize * (XMMRegister::kNumRegisters - 1)));
655  }
656  if (!scratch0_.is(edx) && !scratch1_.is(edx)) masm->pop(edx);
657  if (!scratch0_.is(eax) && !scratch1_.is(eax)) masm->pop(eax);
658  }
659 
660  inline Register object() { return object_; }
661  inline Register address() { return address_; }
662  inline Register scratch0() { return scratch0_; }
663  inline Register scratch1() { return scratch1_; }
664 
665  private:
666  Register object_orig_;
667  Register address_orig_;
668  Register scratch0_orig_;
669  Register object_;
670  Register address_;
671  Register scratch0_;
672  Register scratch1_;
673  // Third scratch register is always ecx.
674 
675  Register GetRegThatIsNotEcxOr(Register r1,
676  Register r2,
677  Register r3) {
678  for (int i = 0; i < Register::kNumAllocatableRegisters; i++) {
679  Register candidate = Register::FromAllocationIndex(i);
680  if (candidate.is(ecx)) continue;
681  if (candidate.is(r1)) continue;
682  if (candidate.is(r2)) continue;
683  if (candidate.is(r3)) continue;
684  return candidate;
685  }
686  UNREACHABLE();
687  return no_reg;
688  }
689  friend class RecordWriteStub;
690  };
691 
692  enum OnNoNeedToInformIncrementalMarker {
693  kReturnOnNoNeedToInformIncrementalMarker,
694  kUpdateRememberedSetOnNoNeedToInformIncrementalMarker
695  }
696 ;
697  void Generate(MacroAssembler* masm);
698  void GenerateIncremental(MacroAssembler* masm, Mode mode);
699  void CheckNeedsToInformIncrementalMarker(
700  MacroAssembler* masm,
701  OnNoNeedToInformIncrementalMarker on_no_need,
702  Mode mode);
703  void InformIncrementalMarker(MacroAssembler* masm, Mode mode);
704 
705  Major MajorKey() { return RecordWrite; }
706 
707  int MinorKey() {
708  return ObjectBits::encode(object_.code()) |
709  ValueBits::encode(value_.code()) |
710  AddressBits::encode(address_.code()) |
711  RememberedSetActionBits::encode(remembered_set_action_) |
712  SaveFPRegsModeBits::encode(save_fp_regs_mode_);
713  }
714 
715  void Activate(Code* code) {
716  code->GetHeap()->incremental_marking()->ActivateGeneratedStub(code);
717  }
718 
719  class ObjectBits: public BitField<int, 0, 3> {};
720  class ValueBits: public BitField<int, 3, 3> {};
721  class AddressBits: public BitField<int, 6, 3> {};
722  class RememberedSetActionBits: public BitField<RememberedSetAction, 9, 1> {};
723  class SaveFPRegsModeBits: public BitField<SaveFPRegsMode, 10, 1> {};
724 
725  Register object_;
726  Register value_;
727  Register address_;
728  RememberedSetAction remembered_set_action_;
729  SaveFPRegsMode save_fp_regs_mode_;
730  RegisterAllocation regs_;
731 };
732 
733 
734 } } // namespace v8::internal
735 
736 #endif // V8_IA32_CODE_STUBS_IA32_H_