v8 3.25.30 (node 0.11.13)
V8 is Google's open source JavaScript engine
assembler.h
1 // Copyright (c) 1994-2006 Sun Microsystems Inc.
2 // All Rights Reserved.
3 //
4 // Redistribution and use in source and binary forms, with or without
5 // modification, are permitted provided that the following conditions are
6 // met:
7 //
8 // - Redistributions of source code must retain the above copyright notice,
9 // this list of conditions and the following disclaimer.
10 //
11 // - Redistribution in binary form must reproduce the above copyright
12 // notice, this list of conditions and the following disclaimer in the
13 // documentation and/or other materials provided with the distribution.
14 //
15 // - Neither the name of Sun Microsystems or the names of contributors may
16 // be used to endorse or promote products derived from this software without
17 // specific prior written permission.
18 //
19 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
20 // IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
21 // THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
22 // PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
23 // CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
24 // EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
25 // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
26 // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
27 // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
28 // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
29 // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 
31 // The original source code covered by the above license has been
32 // modified significantly by Google Inc.
33 // Copyright 2012 the V8 project authors. All rights reserved.
34 
35 #ifndef V8_ASSEMBLER_H_
36 #define V8_ASSEMBLER_H_
37 
38 #include "v8.h"
39 
40 #include "allocation.h"
41 #include "builtins.h"
42 #include "gdb-jit.h"
43 #include "isolate.h"
44 #include "runtime.h"
45 #include "token.h"
46 
47 namespace v8 {
48 
49 class ApiFunction;
50 
51 namespace internal {
52 
53 class StatsCounter;
54 // -----------------------------------------------------------------------------
55 // Platform independent assembler base class.
56 
57 class AssemblerBase: public Malloced {
58  public:
59  AssemblerBase(Isolate* isolate, void* buffer, int buffer_size);
60  virtual ~AssemblerBase();
61 
62  Isolate* isolate() const { return isolate_; }
63  int jit_cookie() const { return jit_cookie_; }
64 
65  bool emit_debug_code() const { return emit_debug_code_; }
66  void set_emit_debug_code(bool value) { emit_debug_code_ = value; }
67 
68  bool predictable_code_size() const { return predictable_code_size_; }
69  void set_predictable_code_size(bool value) { predictable_code_size_ = value; }
70 
71  uint64_t enabled_cpu_features() const { return enabled_cpu_features_; }
72  void set_enabled_cpu_features(uint64_t features) {
73  enabled_cpu_features_ = features;
74  }
75  bool IsEnabled(CpuFeature f) {
76  return (enabled_cpu_features_ & (static_cast<uint64_t>(1) << f)) != 0;
77  }
78 
79  // Overwrite a host NaN with a quiet target NaN. Used by mksnapshot for
80  // cross-snapshotting.
81  static void QuietNaN(HeapObject* nan) { }
82 
83  int pc_offset() const { return static_cast<int>(pc_ - buffer_); }
84 
85  // This function is called when code generation is aborted, so that
86 // the assembler can clean up internal data structures.
87  virtual void AbortedCodeGeneration() { }
88 
89  static const int kMinimalBufferSize = 4*KB;
90 
91  protected:
92  // The buffer into which code and relocation info are generated. It could
93  // either be owned by the assembler or be provided externally.
94  byte* buffer_;
95  int buffer_size_;
96  bool own_buffer_;
97 
98  // The program counter, which points into the buffer above and moves forward.
99  byte* pc_;
100 
101  private:
102  Isolate* isolate_;
103  int jit_cookie_;
104  uint64_t enabled_cpu_features_;
105  bool emit_debug_code_;
106  bool predictable_code_size_;
107 };
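The enabled_cpu_features_ field is a plain 64-bit bitmask indexed by CpuFeature, exactly as IsEnabled() tests it. A minimal standalone sketch of that encoding (the local names are illustrative only):

  uint64_t features = 0;
  features |= static_cast<uint64_t>(1) << 3;                      // enable feature #3
  bool has3 = (features & (static_cast<uint64_t>(1) << 3)) != 0;  // true
  bool has5 = (features & (static_cast<uint64_t>(1) << 5)) != 0;  // false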
108 
109 
110 // Avoids using instructions that vary in size in unpredictable ways between the
111 // snapshot and the running VM.
112 class PredictableCodeSizeScope {
113  public:
114  PredictableCodeSizeScope(AssemblerBase* assembler, int expected_size);
115  ~PredictableCodeSizeScope();
116 
117  private:
118  AssemblerBase* assembler_;
119  int expected_size_;
120  int start_offset_;
121  bool old_value_;
122 };
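A usage sketch for the scope above, assuming masm is an assembler derived from AssemblerBase; the byte count is illustrative:

  {
    // While the scope is alive, the emitted code must account for exactly
    // expected_size bytes of pc_offset() growth.
    PredictableCodeSizeScope scope(&masm, 7 /* expected_size */);
    // ... emit a fixed-size instruction sequence ...
  }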
123 
124 
125 // Enable a specified feature within a scope.
126 class CpuFeatureScope BASE_EMBEDDED {
127  public:
128 #ifdef DEBUG
129  CpuFeatureScope(AssemblerBase* assembler, CpuFeature f);
130  ~CpuFeatureScope();
131 
132  private:
133  AssemblerBase* assembler_;
134  uint64_t old_enabled_;
135 #else
136  CpuFeatureScope(AssemblerBase* assembler, CpuFeature f) {}
137 #endif
138 };
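A usage sketch, assuming the target back end defines a CpuFeature value such as SSE4_1 and a CpuFeatures::IsSupported() predicate (both declared outside this header):

  if (CpuFeatures::IsSupported(SSE4_1)) {
    // Inside this block the assembler may emit SSE4.1 instructions; the
    // previous feature set is restored when the scope is destroyed.
    CpuFeatureScope scope(&masm, SSE4_1);
    // ... emit SSE4.1-dependent code ...
  } else {
    // ... emit a fallback sequence ...
  }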
139 
140 
141 // Enable an unsupported feature within a scope for cross-compiling for a
142 // different CPU.
143 class PlatformFeatureScope BASE_EMBEDDED {
144  public:
145  explicit PlatformFeatureScope(CpuFeature f);
146  ~PlatformFeatureScope();
147 
148  private:
149  uint64_t old_cross_compile_;
150 };
151 
152 
153 // -----------------------------------------------------------------------------
154 // Labels represent pc locations; they are typically jump or call targets.
155 // After declaration, a label can be freely used to denote known or (yet)
156 // unknown pc location. Assembler::bind() is used to bind a label to the
157 // current pc. A label can be bound only once.
158 
159 class Label BASE_EMBEDDED {
160  public:
161  enum Distance {
162  kNear, kFar
163  };
164 
165  INLINE(Label()) {
166  Unuse();
167  UnuseNear();
168  }
169 
170  INLINE(~Label()) {
171  ASSERT(!is_linked());
172  ASSERT(!is_near_linked());
173  }
174 
175  INLINE(void Unuse()) { pos_ = 0; }
176  INLINE(void UnuseNear()) { near_link_pos_ = 0; }
177 
178  INLINE(bool is_bound() const) { return pos_ < 0; }
179  INLINE(bool is_unused() const) { return pos_ == 0 && near_link_pos_ == 0; }
180  INLINE(bool is_linked() const) { return pos_ > 0; }
181  INLINE(bool is_near_linked() const) { return near_link_pos_ > 0; }
182 
183  // Returns the position of bound or linked labels. Cannot be used
184  // for unused labels.
185  int pos() const;
186  int near_link_pos() const { return near_link_pos_ - 1; }
187 
188  private:
189  // pos_ encodes both the binding state (via its sign)
190  // and the binding position (via its value) of a label.
191  //
192  // pos_ < 0 bound label, pos() returns the jump target position
193  // pos_ == 0 unused label
194  // pos_ > 0 linked label, pos() returns the last reference position
195  int pos_;
196 
197  // Behaves like |pos_| in the "> 0" case, but for near jumps to this label.
198  int near_link_pos_;
199 
200  void bind_to(int pos) {
201  pos_ = -pos - 1;
202  ASSERT(is_bound());
203  }
204  void link_to(int pos, Distance distance = kFar) {
205  if (distance == kNear) {
206  near_link_pos_ = pos + 1;
207  ASSERT(is_near_linked());
208  } else {
209  pos_ = pos + 1;
210  ASSERT(is_linked());
211  }
212  }
213 
214  friend class Assembler;
215  friend class Displacement;
216  friend class RegExpMacroAssemblerIrregexp;
217 
218 #if V8_TARGET_ARCH_ARM64
219  // On ARM64, the Assembler keeps track of pointers to Labels to resolve
220  // branches to distant targets. Copying labels would confuse the Assembler.
221  DISALLOW_COPY_AND_ASSIGN(Label); // NOLINT
222 #endif
223 };
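A sketch of the forward-branch pattern described above, assuming an architecture Assembler bound to the usual __ macro and offering jmp(); only bind() is guaranteed by the comment, the rest is illustrative:

  Label done;
  __ jmp(&done, Label::kNear);  // links the label (pos_ > 0 / near_link_pos_ > 0)
  // ... code that is skipped ...
  __ bind(&done);               // binds it to the current pc (pos_ < 0)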
224 
225 
227 
228 
229 // -----------------------------------------------------------------------------
230 // Relocation information
231 
232 
233 // Relocation information consists of the address (pc) of the datum
234 // to which the relocation information applies, the relocation mode
235 // (rmode), and an optional data field. The relocation mode may be
236 // "descriptive" and not indicate a need for relocation, but simply
237 // describe a property of the datum. Such rmodes are useful for GC
238 // and nice disassembly output.
239 
240 class RelocInfo BASE_EMBEDDED {
241  public:
242  // The constant kNoPosition is used with the collecting of source positions
243  // in the relocation information. Two types of source positions are collected
244  // "position" (RelocMode position) and "statement position" (RelocMode
245  // statement_position). The "position" is collected at places in the source
246  // code which are of interest when making stack traces to pin-point the source
247  // location of a stack frame as close as possible. The "statement position" is
248 // collected at the beginning of each statement, and is used to indicate
249  // possible break locations. kNoPosition is used to indicate an
250  // invalid/uninitialized position value.
251  static const int kNoPosition = -1;
252 
253  // This string is used to add padding comments to the reloc info in cases
254 // where we are not sure we will have enough space for patching during
255  // lazy deoptimization. This is the case if we have indirect calls for which
256  // we do not normally record relocation info.
257  static const char* const kFillerCommentString;
258 
259  // The minimum size of a comment is equal to three bytes for the extra tagged
260  // pc + the tag for the data, and kPointerSize for the actual pointer to the
261  // comment.
262  static const int kMinRelocCommentSize = 3 + kPointerSize;
263 
264  // The maximum size for a call instruction including pc-jump.
265  static const int kMaxCallSize = 6;
266 
267  // The maximum pc delta that will use the short encoding.
268  static const int kMaxSmallPCDelta;
269 
270  enum Mode {
271  // Please note the order is important (see IsCodeTarget, IsGCRelocMode).
272  CODE_TARGET, // Code target which is not any of the above.
273  CODE_TARGET_WITH_ID,
274  CONSTRUCT_CALL, // code target that is a call to a JavaScript constructor.
275  DEBUG_BREAK, // Code target for the debugger statement.
276  EMBEDDED_OBJECT,
277  CELL,
278 
279  // Everything after runtime_entry (inclusive) is not GC'ed.
280  RUNTIME_ENTRY,
281  JS_RETURN, // Marks start of the ExitJSFrame code.
282  COMMENT,
283  POSITION, // See comment for kNoPosition above.
284  STATEMENT_POSITION, // See comment for kNoPosition above.
285  DEBUG_BREAK_SLOT, // Additional code inserted for debug break slot.
286  EXTERNAL_REFERENCE, // The address of an external C++ function.
287  INTERNAL_REFERENCE, // An address inside the same function.
288 
289  // Marks constant and veneer pools. Only used on ARM and ARM64.
290  // They use a custom noncompact encoding.
291  CONST_POOL,
292  VENEER_POOL,
293 
294  // add more as needed
295  // Pseudo-types
296  NUMBER_OF_MODES, // There are at most 15 modes with noncompact encoding.
297  NONE32, // never recorded 32-bit value
298  NONE64, // never recorded 64-bit value
299  CODE_AGE_SEQUENCE, // Not stored in RelocInfo array, used explicitly by
300  // code aging.
301  FIRST_REAL_RELOC_MODE = CODE_TARGET,
302  LAST_REAL_RELOC_MODE = VENEER_POOL,
303  FIRST_PSEUDO_RELOC_MODE = CODE_AGE_SEQUENCE,
304  LAST_PSEUDO_RELOC_MODE = CODE_AGE_SEQUENCE,
305  LAST_CODE_ENUM = DEBUG_BREAK,
306  LAST_GCED_ENUM = CELL,
307  // Modes <= LAST_COMPACT_ENUM are guaranteed to have compact encoding.
308  LAST_COMPACT_ENUM = CODE_TARGET_WITH_ID,
309  LAST_STANDARD_NONCOMPACT_ENUM = INTERNAL_REFERENCE
310  };
311 
312 
313  RelocInfo() {}
314 
315  RelocInfo(byte* pc, Mode rmode, intptr_t data, Code* host)
316  : pc_(pc), rmode_(rmode), data_(data), host_(host) {
317  }
318  RelocInfo(byte* pc, double data64)
319  : pc_(pc), rmode_(NONE64), data64_(data64), host_(NULL) {
320  }
321 
322  static inline bool IsRealRelocMode(Mode mode) {
323  return mode >= FIRST_REAL_RELOC_MODE &&
324  mode <= LAST_REAL_RELOC_MODE;
325  }
326  static inline bool IsPseudoRelocMode(Mode mode) {
327  ASSERT(!IsRealRelocMode(mode));
328  return mode >= FIRST_PSEUDO_RELOC_MODE &&
329  mode <= LAST_PSEUDO_RELOC_MODE;
330  }
331  static inline bool IsConstructCall(Mode mode) {
332  return mode == CONSTRUCT_CALL;
333  }
334  static inline bool IsCodeTarget(Mode mode) {
335  return mode <= LAST_CODE_ENUM;
336  }
337  static inline bool IsEmbeddedObject(Mode mode) {
338  return mode == EMBEDDED_OBJECT;
339  }
340  static inline bool IsRuntimeEntry(Mode mode) {
341  return mode == RUNTIME_ENTRY;
342  }
343  // Is the relocation mode affected by GC?
344  static inline bool IsGCRelocMode(Mode mode) {
345  return mode <= LAST_GCED_ENUM;
346  }
347  static inline bool IsJSReturn(Mode mode) {
348  return mode == JS_RETURN;
349  }
350  static inline bool IsComment(Mode mode) {
351  return mode == COMMENT;
352  }
353  static inline bool IsConstPool(Mode mode) {
354  return mode == CONST_POOL;
355  }
356  static inline bool IsVeneerPool(Mode mode) {
357  return mode == VENEER_POOL;
358  }
359  static inline bool IsPosition(Mode mode) {
360  return mode == POSITION || mode == STATEMENT_POSITION;
361  }
362  static inline bool IsStatementPosition(Mode mode) {
363  return mode == STATEMENT_POSITION;
364  }
365  static inline bool IsExternalReference(Mode mode) {
366  return mode == EXTERNAL_REFERENCE;
367  }
368  static inline bool IsInternalReference(Mode mode) {
369  return mode == INTERNAL_REFERENCE;
370  }
371  static inline bool IsDebugBreakSlot(Mode mode) {
372  return mode == DEBUG_BREAK_SLOT;
373  }
374  static inline bool IsNone(Mode mode) {
375  return mode == NONE32 || mode == NONE64;
376  }
377  static inline bool IsCodeAgeSequence(Mode mode) {
378  return mode == CODE_AGE_SEQUENCE;
379  }
380  static inline int ModeMask(Mode mode) { return 1 << mode; }
381 
382  // Returns true if the first RelocInfo has the same mode and raw data as the
383  // second one.
384  static inline bool IsEqual(RelocInfo first, RelocInfo second) {
385  return first.rmode() == second.rmode() &&
386  (first.rmode() == RelocInfo::NONE64 ?
387  first.raw_data64() == second.raw_data64() :
388  first.data() == second.data());
389  }
390 
391  // Accessors
392  byte* pc() const { return pc_; }
393  void set_pc(byte* pc) { pc_ = pc; }
394  Mode rmode() const { return rmode_; }
395  intptr_t data() const { return data_; }
396  double data64() const { return data64_; }
397  uint64_t raw_data64() {
398  return BitCast<uint64_t>(data64_);
399  }
400  Code* host() const { return host_; }
401  void set_host(Code* host) { host_ = host; }
402 
403  // Apply a relocation by delta bytes
404  INLINE(void apply(intptr_t delta));
405 
406  // Is the pointer this relocation info refers to coded like a plain pointer
407  // or is it strange in some way (e.g. relative or patched into a series of
408  // instructions).
409  bool IsCodedSpecially();
410 
411  // If true, the pointer this relocation info refers to is an entry in the
412  // constant pool, otherwise the pointer is embedded in the instruction stream.
413  bool IsInConstantPool();
414 
415  // Read/modify the code target in the branch/call instruction
416  // this relocation applies to;
417  // can only be called if IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)
418  INLINE(Address target_address());
419  INLINE(void set_target_address(Address target,
420  WriteBarrierMode mode = UPDATE_WRITE_BARRIER));
421  INLINE(Object* target_object());
422  INLINE(Handle<Object> target_object_handle(Assembler* origin));
423  INLINE(void set_target_object(Object* target,
424  WriteBarrierMode mode = UPDATE_WRITE_BARRIER));
425  INLINE(Address target_runtime_entry(Assembler* origin));
426  INLINE(void set_target_runtime_entry(Address target,
427  WriteBarrierMode mode =
428  UPDATE_WRITE_BARRIER));
429  INLINE(Cell* target_cell());
430  INLINE(Handle<Cell> target_cell_handle());
431  INLINE(void set_target_cell(Cell* cell,
432  WriteBarrierMode mode = UPDATE_WRITE_BARRIER));
433  INLINE(Handle<Object> code_age_stub_handle(Assembler* origin));
434  INLINE(Code* code_age_stub());
435  INLINE(void set_code_age_stub(Code* stub));
436 
437  // Returns the address of the constant pool entry where the target address
438  // is held. This should only be called if IsInConstantPool returns true.
439  INLINE(Address constant_pool_entry_address());
440 
441  // Read the address of the word containing the target_address in an
442 // instruction stream. What this means exactly is architecture-dependent.
443  // The only architecture-independent user of this function is the serializer.
444  // The serializer uses it to find out how many raw bytes of instruction to
445  // output before the next target. Architecture-independent code shouldn't
446  // dereference the pointer it gets back from this.
447  INLINE(Address target_address_address());
448 
449  // This indicates how much space a target takes up when deserializing a code
450  // stream. For most architectures this is just the size of a pointer. For
451  // an instruction like movw/movt where the target bits are mixed into the
452  // instruction bits the size of the target will be zero, indicating that the
453  // serializer should not step forwards in memory after a target is resolved
454  // and written. In this case the target_address_address function above
455  // should return the end of the instructions to be patched, allowing the
456  // deserializer to deserialize the instructions as raw bytes and put them in
457  // place, ready to be patched with the target.
458  INLINE(int target_address_size());
459 
460  // Read/modify the reference in the instruction this relocation
461  // applies to; can only be called if rmode_ is external_reference
462  INLINE(Address target_reference());
463 
464  // Read/modify the address of a call instruction. This is used to relocate
465  // the break points where straight-line code is patched with a call
466  // instruction.
467  INLINE(Address call_address());
468  INLINE(void set_call_address(Address target));
469  INLINE(Object* call_object());
470  INLINE(void set_call_object(Object* target));
471  INLINE(Object** call_object_address());
472 
473  // Wipe out a relocation to a fixed value, used for making snapshots
474  // reproducible.
475  INLINE(void WipeOut());
476 
477  template<typename StaticVisitor> inline void Visit(Heap* heap);
478  inline void Visit(Isolate* isolate, ObjectVisitor* v);
479 
480  // Patch the code with some other code.
481  void PatchCode(byte* instructions, int instruction_count);
482 
483  // Patch the code with a call.
484  void PatchCodeWithCall(Address target, int guard_bytes);
485 
486  // Check whether this return sequence has been patched
487  // with a call to the debugger.
488  INLINE(bool IsPatchedReturnSequence());
489 
490  // Check whether this debug break slot has been patched with a call to the
491  // debugger.
492  INLINE(bool IsPatchedDebugBreakSlotSequence());
493 
494 #ifdef DEBUG
495  // Check whether the given code contains relocation information that
496  // either is position-relative or movable by the garbage collector.
497  static bool RequiresRelocation(const CodeDesc& desc);
498 #endif
499 
500 #ifdef ENABLE_DISASSEMBLER
501  // Printing
502  static const char* RelocModeName(Mode rmode);
503  void Print(Isolate* isolate, FILE* out);
504 #endif // ENABLE_DISASSEMBLER
505 #ifdef VERIFY_HEAP
506  void Verify();
507 #endif
508 
509  static const int kCodeTargetMask = (1 << (LAST_CODE_ENUM + 1)) - 1;
510  static const int kPositionMask = 1 << POSITION | 1 << STATEMENT_POSITION;
511  static const int kDataMask =
512  (1 << CODE_TARGET_WITH_ID) | kPositionMask | (1 << COMMENT);
513  static const int kApplyMask; // Modes affected by apply. Depends on arch.
514 
515  private:
516  // On ARM, note that pc_ is the address of the constant pool entry
517  // to be relocated and not the address of the instruction
518  // referencing the constant pool entry (except when rmode_ ==
519  // comment).
520  byte* pc_;
521  Mode rmode_;
522  union {
523  intptr_t data_;
524  double data64_;
525  };
526  Code* host_;
527  // External-reference pointers are also split across instruction-pairs
528  // on some platforms, but are accessed via indirect pointers. This location
529  // provides a place for that pointer to exist naturally. Its address
530  // is returned by RelocInfo::target_reference_address().
531  Address reconstructed_adr_ptr_;
532  friend class RelocIterator;
533 };
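The mode predicates and ModeMask() above combine into the bit masks consumed by RelocIterator below. A small sketch:

  // Select only embedded-object and code-target records.
  int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CODE_TARGET);
  bool wants_comment = (mode_mask & RelocInfo::ModeMask(RelocInfo::COMMENT)) != 0;  // false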
534 
535 
536 // RelocInfoWriter serializes a stream of relocation info. It writes towards
537 // lower addresses.
538 class RelocInfoWriter BASE_EMBEDDED {
539  public:
540  RelocInfoWriter() : pos_(NULL),
541  last_pc_(NULL),
542  last_id_(0),
543  last_position_(0) {}
544  RelocInfoWriter(byte* pos, byte* pc) : pos_(pos),
545  last_pc_(pc),
546  last_id_(0),
547  last_position_(0) {}
548 
549  byte* pos() const { return pos_; }
550  byte* last_pc() const { return last_pc_; }
551 
552  void Write(const RelocInfo* rinfo);
553 
554  // Update the state of the stream after reloc info buffer
555  // and/or code is moved while the stream is active.
556  void Reposition(byte* pos, byte* pc) {
557  pos_ = pos;
558  last_pc_ = pc;
559  }
560 
561  // Max size (bytes) of a written RelocInfo. Longest encoding is
562  // ExtraTag, VariableLengthPCJump, ExtraTag, pc_delta, ExtraTag, data_delta.
563  // On ia32 and arm this is 1 + 4 + 1 + 1 + 1 + 4 = 12.
564 // On x64 this is 1 + 4 + 1 + 1 + 1 + 8 = 16.
565  // Here we use the maximum of the two.
566  static const int kMaxSize = 16;
567 
568  private:
569  inline uint32_t WriteVariableLengthPCJump(uint32_t pc_delta);
570  inline void WriteTaggedPC(uint32_t pc_delta, int tag);
571  inline void WriteExtraTaggedPC(uint32_t pc_delta, int extra_tag);
572  inline void WriteExtraTaggedIntData(int data_delta, int top_tag);
573  inline void WriteExtraTaggedPoolData(int data, int pool_type);
574  inline void WriteExtraTaggedData(intptr_t data_delta, int top_tag);
575  inline void WriteTaggedData(intptr_t data_delta, int tag);
576  inline void WriteExtraTag(int extra_tag, int top_tag);
577 
578  byte* pos_;
579  byte* last_pc_;
580  int last_id_;
581  int last_position_;
582  DISALLOW_COPY_AND_ASSIGN(RelocInfoWriter);
583 };
584 
585 
586 // A RelocIterator iterates over relocation information.
587 // Typical use:
588 //
589 // for (RelocIterator it(code); !it.done(); it.next()) {
590 // // do something with it.rinfo() here
591 // }
592 //
593 // A mask can be specified to skip unwanted modes.
594 class RelocIterator: public Malloced {
595  public:
596  // Create a new iterator positioned at
597  // the beginning of the reloc info.
598  // Relocation information with mode k is included in the
599  // iteration iff bit k of mode_mask is set.
600  explicit RelocIterator(Code* code, int mode_mask = -1);
601  explicit RelocIterator(const CodeDesc& desc, int mode_mask = -1);
602 
603  // Iteration
604  bool done() const { return done_; }
605  void next();
606 
607  // Return pointer valid until next next().
608  RelocInfo* rinfo() {
609  ASSERT(!done());
610  return &rinfo_;
611  }
612 
613  private:
614  // Advance* moves the position before/after reading.
615  // *Read* reads from current byte(s) into rinfo_.
616  // *Get* just reads and returns info on current byte.
617  void Advance(int bytes = 1) { pos_ -= bytes; }
618  int AdvanceGetTag();
619  int GetExtraTag();
620  int GetTopTag();
621  void ReadTaggedPC();
622  void AdvanceReadPC();
623  void AdvanceReadId();
624  void AdvanceReadPoolData();
625  void AdvanceReadPosition();
626  void AdvanceReadData();
627  void AdvanceReadVariableLengthPCJump();
628  int GetLocatableTypeTag();
629  void ReadTaggedId();
630  void ReadTaggedPosition();
631 
632  // If the given mode is wanted, set it in rinfo_ and return true.
633  // Else return false. Used for efficiently skipping unwanted modes.
634  bool SetMode(RelocInfo::Mode mode) {
635  return (mode_mask_ & (1 << mode)) ? (rinfo_.rmode_ = mode, true) : false;
636  }
637 
638  byte* pos_;
639  byte* end_;
640  byte* code_age_sequence_;
641  RelocInfo rinfo_;
642  bool done_;
643  int mode_mask_;
644  int last_id_;
645  int last_position_;
646  DISALLOW_COPY_AND_ASSIGN(RelocIterator);
647 };
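Fleshing out the "typical use" comment above: a sketch that visits only the GC-relevant records of a Code object (code is assumed to be a valid Code*):

  int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
             RelocInfo::ModeMask(RelocInfo::CELL);
  for (RelocIterator it(code, mask); !it.done(); it.next()) {
    RelocInfo* rinfo = it.rinfo();  // only valid until the next call to next()
    if (RelocInfo::IsEmbeddedObject(rinfo->rmode())) {
      // ... inspect rinfo->target_object() ...
    }
  }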
648 
649 
650 //------------------------------------------------------------------------------
651 // External function
652 
653 //----------------------------------------------------------------------------
654 class IC_Utility;
655 class SCTableReference;
656 #ifdef ENABLE_DEBUGGER_SUPPORT
657 class Debug_Address;
658 #endif
659 
660 
661 // An ExternalReference represents a C++ address used in the generated
662 // code. All references to C++ functions and variables must be encapsulated in
663 // an ExternalReference instance. This is done in order to track the origin of
664 // all external references in the code so that they can be bound to the correct
665 // addresses when deserializing a heap.
666 class ExternalReference BASE_EMBEDDED {
667  public:
668  // Used in the simulator to support different native api calls.
669  enum Type {
670  // Builtin call.
671  // MaybeObject* f(v8::internal::Arguments).
672  BUILTIN_CALL, // default
673 
674  // Builtin that takes float arguments and returns an int.
675  // int f(double, double).
676  BUILTIN_COMPARE_CALL,
677 
678  // Builtin call that returns floating point.
679  // double f(double, double).
680  BUILTIN_FP_FP_CALL,
681 
682  // Builtin call that returns floating point.
683  // double f(double).
684  BUILTIN_FP_CALL,
685 
686  // Builtin call that returns floating point.
687  // double f(double, int).
688  BUILTIN_FP_INT_CALL,
689 
690  // Direct call to API function callback.
691  // void f(v8::FunctionCallbackInfo&)
692  DIRECT_API_CALL,
693 
694  // Call to function callback via InvokeFunctionCallback.
695  // void f(v8::FunctionCallbackInfo&, v8::FunctionCallback)
696  PROFILING_API_CALL,
697 
698  // Direct call to accessor getter callback.
699  // void f(Local<String> property, PropertyCallbackInfo& info)
700  DIRECT_GETTER_CALL,
701 
702  // Call to accessor getter callback via InvokeAccessorGetterCallback.
703  // void f(Local<String> property, PropertyCallbackInfo& info,
704  // AccessorGetterCallback callback)
705  PROFILING_GETTER_CALL
706  };
707 
708  static void SetUp();
709  static void InitializeMathExpData();
710  static void TearDownMathExpData();
711 
712  typedef void* ExternalReferenceRedirector(void* original, Type type);
713 
714  ExternalReference() : address_(NULL) {}
715 
716  ExternalReference(Builtins::CFunctionId id, Isolate* isolate);
717 
718  ExternalReference(ApiFunction* ptr, Type type, Isolate* isolate);
719 
720  ExternalReference(Builtins::Name name, Isolate* isolate);
721 
722  ExternalReference(Runtime::FunctionId id, Isolate* isolate);
723 
724  ExternalReference(const Runtime::Function* f, Isolate* isolate);
725 
726  ExternalReference(const IC_Utility& ic_utility, Isolate* isolate);
727 
728 #ifdef ENABLE_DEBUGGER_SUPPORT
729  ExternalReference(const Debug_Address& debug_address, Isolate* isolate);
730 #endif
731 
732  explicit ExternalReference(StatsCounter* counter);
733 
734  ExternalReference(Isolate::AddressId id, Isolate* isolate);
735 
736  explicit ExternalReference(const SCTableReference& table_ref);
737 
738  // Isolate as an external reference.
739  static ExternalReference isolate_address(Isolate* isolate);
740 
741  // One-of-a-kind references. These references are not part of a general
742  // pattern. This means that they have to be added to the
743  // ExternalReferenceTable in serialize.cc manually.
744 
745  static ExternalReference incremental_marking_record_write_function(
746  Isolate* isolate);
747  static ExternalReference store_buffer_overflow_function(
748  Isolate* isolate);
749  static ExternalReference flush_icache_function(Isolate* isolate);
750  static ExternalReference perform_gc_function(Isolate* isolate);
751  static ExternalReference out_of_memory_function(Isolate* isolate);
752  static ExternalReference delete_handle_scope_extensions(Isolate* isolate);
753 
754  static ExternalReference get_date_field_function(Isolate* isolate);
755  static ExternalReference date_cache_stamp(Isolate* isolate);
756 
757  static ExternalReference get_make_code_young_function(Isolate* isolate);
758  static ExternalReference get_mark_code_as_executed_function(Isolate* isolate);
759 
760  // Deoptimization support.
761  static ExternalReference new_deoptimizer_function(Isolate* isolate);
762  static ExternalReference compute_output_frames_function(Isolate* isolate);
763 
764  // Log support.
765  static ExternalReference log_enter_external_function(Isolate* isolate);
766  static ExternalReference log_leave_external_function(Isolate* isolate);
767 
768  // Static data in the keyed lookup cache.
769  static ExternalReference keyed_lookup_cache_keys(Isolate* isolate);
770  static ExternalReference keyed_lookup_cache_field_offsets(Isolate* isolate);
771 
772  // Static variable Heap::roots_array_start()
773  static ExternalReference roots_array_start(Isolate* isolate);
774 
775  // Static variable Heap::allocation_sites_list_address()
776  static ExternalReference allocation_sites_list_address(Isolate* isolate);
777 
778  // Static variable StackGuard::address_of_jslimit()
779  static ExternalReference address_of_stack_limit(Isolate* isolate);
780 
781  // Static variable StackGuard::address_of_real_jslimit()
782  static ExternalReference address_of_real_stack_limit(Isolate* isolate);
783 
784  // Static variable RegExpStack::limit_address()
785  static ExternalReference address_of_regexp_stack_limit(Isolate* isolate);
786 
787  // Static variables for RegExp.
788  static ExternalReference address_of_static_offsets_vector(Isolate* isolate);
789  static ExternalReference address_of_regexp_stack_memory_address(
790  Isolate* isolate);
791  static ExternalReference address_of_regexp_stack_memory_size(
792  Isolate* isolate);
793 
794  // Static variable Heap::NewSpaceStart()
795  static ExternalReference new_space_start(Isolate* isolate);
796  static ExternalReference new_space_mask(Isolate* isolate);
797  static ExternalReference heap_always_allocate_scope_depth(Isolate* isolate);
798  static ExternalReference new_space_mark_bits(Isolate* isolate);
799 
800  // Write barrier.
801  static ExternalReference store_buffer_top(Isolate* isolate);
802 
803  // Used for fast allocation in generated code.
804  static ExternalReference new_space_allocation_top_address(Isolate* isolate);
805  static ExternalReference new_space_allocation_limit_address(Isolate* isolate);
806  static ExternalReference old_pointer_space_allocation_top_address(
807  Isolate* isolate);
808  static ExternalReference old_pointer_space_allocation_limit_address(
809  Isolate* isolate);
810  static ExternalReference old_data_space_allocation_top_address(
811  Isolate* isolate);
812  static ExternalReference old_data_space_allocation_limit_address(
813  Isolate* isolate);
814  static ExternalReference new_space_high_promotion_mode_active_address(
815  Isolate* isolate);
816 
817  static ExternalReference mod_two_doubles_operation(Isolate* isolate);
818  static ExternalReference power_double_double_function(Isolate* isolate);
819  static ExternalReference power_double_int_function(Isolate* isolate);
820 
821  static ExternalReference handle_scope_next_address(Isolate* isolate);
822  static ExternalReference handle_scope_limit_address(Isolate* isolate);
823  static ExternalReference handle_scope_level_address(Isolate* isolate);
824 
825  static ExternalReference scheduled_exception_address(Isolate* isolate);
826  static ExternalReference address_of_pending_message_obj(Isolate* isolate);
827  static ExternalReference address_of_has_pending_message(Isolate* isolate);
828  static ExternalReference address_of_pending_message_script(Isolate* isolate);
829 
830  // Static variables containing common double constants.
831  static ExternalReference address_of_min_int();
832  static ExternalReference address_of_one_half();
833  static ExternalReference address_of_minus_one_half();
834  static ExternalReference address_of_minus_zero();
835  static ExternalReference address_of_zero();
836  static ExternalReference address_of_uint8_max_value();
837  static ExternalReference address_of_negative_infinity();
838  static ExternalReference address_of_canonical_non_hole_nan();
839  static ExternalReference address_of_the_hole_nan();
840  static ExternalReference address_of_uint32_bias();
841 
842  static ExternalReference math_log_double_function(Isolate* isolate);
843 
844  static ExternalReference math_exp_constants(int constant_index);
845  static ExternalReference math_exp_log_table();
846 
847  static ExternalReference page_flags(Page* page);
848 
849  static ExternalReference ForDeoptEntry(Address entry);
850 
851  static ExternalReference cpu_features();
852 
853  Address address() const { return reinterpret_cast<Address>(address_); }
854 
855 #ifdef ENABLE_DEBUGGER_SUPPORT
856  // Function Debug::Break()
857  static ExternalReference debug_break(Isolate* isolate);
858 
859  // Used to check if single stepping is enabled in generated code.
860  static ExternalReference debug_step_in_fp_address(Isolate* isolate);
861 #endif
862 
863 #ifndef V8_INTERPRETED_REGEXP
864  // C functions called from RegExp generated code.
865 
866  // Function NativeRegExpMacroAssembler::CaseInsensitiveCompareUC16()
867  static ExternalReference re_case_insensitive_compare_uc16(Isolate* isolate);
868 
869  // Function RegExpMacroAssembler*::CheckStackGuardState()
870  static ExternalReference re_check_stack_guard_state(Isolate* isolate);
871 
872  // Function NativeRegExpMacroAssembler::GrowStack()
873  static ExternalReference re_grow_stack(Isolate* isolate);
874 
875  // byte NativeRegExpMacroAssembler::word_character_bitmap
876  static ExternalReference re_word_character_map();
877 
878 #endif
879 
880  // This lets you register a function that rewrites all external references.
881  // Used by the ARM simulator to catch calls to external references.
882  static void set_redirector(Isolate* isolate,
883  ExternalReferenceRedirector* redirector) {
884  // We can't stack them.
885  ASSERT(isolate->external_reference_redirector() == NULL);
886  isolate->set_external_reference_redirector(
887  reinterpret_cast<ExternalReferenceRedirectorPointer*>(redirector));
888  }
889 
890  static ExternalReference stress_deopt_count(Isolate* isolate);
891 
892  bool operator==(const ExternalReference& other) const {
893  return address_ == other.address_;
894  }
895 
896  bool operator!=(const ExternalReference& other) const {
897  return !(*this == other);
898  }
899 
900  private:
901  explicit ExternalReference(void* address)
902  : address_(address) {}
903 
904  static void* Redirect(Isolate* isolate,
905  void* address,
906  Type type = ExternalReference::BUILTIN_CALL) {
907  ExternalReferenceRedirector* redirector =
908  reinterpret_cast<ExternalReferenceRedirector*>(
909  isolate->external_reference_redirector());
910  if (redirector == NULL) return address;
911  void* answer = (*redirector)(address, type);
912  return answer;
913  }
914 
915  static void* Redirect(Isolate* isolate,
916  Address address_arg,
917  Type type = ExternalReference::BUILTIN_CALL) {
918  ExternalReferenceRedirector* redirector =
919  reinterpret_cast<ExternalReferenceRedirector*>(
920  isolate->external_reference_redirector());
921  void* address = reinterpret_cast<void*>(address_arg);
922  void* answer = (redirector == NULL) ?
923  address :
924  (*redirector)(address, type);
925  return answer;
926  }
927 
928  void* address_;
929 };
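A sketch of how generated code obtains a raw C++ address through this class, using one of the accessors declared above; isolate is assumed to be the current Isolate*:

  ExternalReference stack_limit =
      ExternalReference::address_of_stack_limit(isolate);
  Address raw = stack_limit.address();  // the value that ends up embedded in code
  // References built from C++ functions (e.g. the ApiFunction or Builtins
  // constructors) pass through Redirect(), so a simulator registered via
  // set_redirector() can interpose a trampoline instead of the real address.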
930 
931 
932 // -----------------------------------------------------------------------------
933 // Position recording support
934 
935 struct PositionState {
936  PositionState() : current_position(RelocInfo::kNoPosition),
937  written_position(RelocInfo::kNoPosition),
938  current_statement_position(RelocInfo::kNoPosition),
939  written_statement_position(RelocInfo::kNoPosition) {}
940 
941  int current_position;
942  int written_position;
943 
944  int current_statement_position;
945  int written_statement_position;
946 };
947 
948 
949 class PositionsRecorder BASE_EMBEDDED {
950  public:
951  explicit PositionsRecorder(Assembler* assembler)
952  : assembler_(assembler) {
953 #ifdef ENABLE_GDB_JIT_INTERFACE
954  gdbjit_lineinfo_ = NULL;
955 #endif
956  jit_handler_data_ = NULL;
957  }
958 
959 #ifdef ENABLE_GDB_JIT_INTERFACE
960  ~PositionsRecorder() {
961  delete gdbjit_lineinfo_;
962  }
963 
964  void StartGDBJITLineInfoRecording() {
965  if (FLAG_gdbjit) {
966  gdbjit_lineinfo_ = new GDBJITLineInfo();
967  }
968  }
969 
970  GDBJITLineInfo* DetachGDBJITLineInfo() {
971  GDBJITLineInfo* lineinfo = gdbjit_lineinfo_;
972  gdbjit_lineinfo_ = NULL; // To prevent deallocation in destructor.
973  return lineinfo;
974  }
975 #endif
976  void AttachJITHandlerData(void* user_data) {
977  jit_handler_data_ = user_data;
978  }
979 
980  void* DetachJITHandlerData() {
981  void* old_data = jit_handler_data_;
982  jit_handler_data_ = NULL;
983  return old_data;
984  }
985  // Set current position to pos.
986  void RecordPosition(int pos);
987 
988  // Set current statement position to pos.
989  void RecordStatementPosition(int pos);
990 
991  // Write recorded positions to relocation information.
992  bool WriteRecordedPositions();
993 
994  int current_position() const { return state_.current_position; }
995 
996  int current_statement_position() const {
997  return state_.current_statement_position;
998  }
999 
1000  private:
1001  Assembler* assembler_;
1002  PositionState state_;
1003 #ifdef ENABLE_GDB_JIT_INTERFACE
1004  GDBJITLineInfo* gdbjit_lineinfo_;
1005 #endif
1006 
1007  // Currently jit_handler_data_ is used to store JITHandler-specific data
1008  // over the lifetime of a PositionsRecorder
1009  void* jit_handler_data_;
1010  friend class PreservePositionScope;
1011 
1012  DISALLOW_COPY_AND_ASSIGN(PositionsRecorder);
1013 };
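A sketch of the recording flow, assuming the code generator reaches this recorder through an accessor such as masm->positions_recorder() (the accessor name is an assumption; positions are character offsets into the script source):

  PositionsRecorder* recorder = masm->positions_recorder();
  recorder->RecordStatementPosition(statement_pos);  // break location candidate
  recorder->RecordPosition(expression_pos);          // finer-grained position
  // Flush both as POSITION / STATEMENT_POSITION reloc entries right before
  // emitting the instruction they describe.
  recorder->WriteRecordedPositions();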
1014 
1015 
1016 class PreservePositionScope BASE_EMBEDDED {
1017  public:
1018  explicit PreservePositionScope(PositionsRecorder* positions_recorder)
1019  : positions_recorder_(positions_recorder),
1020  saved_state_(positions_recorder->state_) {}
1021 
1022  ~PreservePositionScope() {
1023  positions_recorder_->state_ = saved_state_;
1024  }
1025 
1026  private:
1027  PositionsRecorder* positions_recorder_;
1028  const PositionState saved_state_;
1029 
1030  DISALLOW_COPY_AND_ASSIGN(PreservePositionScope);
1031 };
1032 
1033 
1034 // -----------------------------------------------------------------------------
1035 // Utility functions
1036 
1037 inline int NumberOfBitsSet(uint32_t x) {
1038  unsigned int num_bits_set;
1039  for (num_bits_set = 0; x; x >>= 1) {
1040  num_bits_set += x & 1;
1041  }
1042  return num_bits_set;
1043 }
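For example, 0x29 is 101001 in binary, so:

  int n = NumberOfBitsSet(0x29);  // n == 3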
1044 
1045 bool EvalComparison(Token::Value op, double op1, double op2);
1046 
1047 // Computes pow(x, y) with the special cases in the spec for Math.pow.
1048 double power_helper(double x, double y);
1049 double power_double_int(double x, int y);
1050 double power_double_double(double x, double y);
1051 
1052 // Helper class for generating code or data associated with the code
1053 // right after a call instruction. As an example this can be used to
1054 // generate safepoint data after calls for crankshaft.
1055 class CallWrapper BASE_EMBEDDED {
1056  public:
1057  CallWrapper() { }
1058  virtual ~CallWrapper() { }
1059  // Called just before emitting a call. Argument is the size of the generated
1060  // call code.
1061  virtual void BeforeCall(int call_size) const = 0;
1062  // Called just after emitting a call, i.e., at the return site for the call.
1063  virtual void AfterCall() const = 0;
1064 };
1065 
1066 class NullCallWrapper : public CallWrapper {
1067  public:
1068  NullCallWrapper() { }
1069  virtual ~NullCallWrapper() { }
1070  virtual void BeforeCall(int call_size) const { }
1071  virtual void AfterCall() const { }
1072 };
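A sketch of a concrete wrapper in the spirit of the comment above CallWrapper; the std::vector recording target (requires <vector>) and its use are illustrative, not the actual Crankshaft safepoint machinery:

  class RecordingCallWrapper : public CallWrapper {
   public:
    RecordingCallWrapper(AssemblerBase* assm, std::vector<int>* return_offsets)
        : assm_(assm), return_offsets_(return_offsets) {}
    virtual void BeforeCall(int call_size) const {
      // call_size is the number of bytes the upcoming call sequence occupies.
    }
    virtual void AfterCall() const {
      // Runs at the return site; remember its offset, e.g. for safepoint data.
      return_offsets_->push_back(assm_->pc_offset());
    }
   private:
    AssemblerBase* assm_;
    std::vector<int>* return_offsets_;
  };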
1073 
1074 
1075 // The multiplier and shift for signed division via multiplication, see Warren's
1076 // "Hacker's Delight", chapter 10.
1077 class MultiplierAndShift {
1078  public:
1079  explicit MultiplierAndShift(int32_t d);
1080  int32_t multiplier() const { return multiplier_; }
1081  int32_t shift() const { return shift_; }
1082 
1083  private:
1084  int32_t multiplier_;
1085  int32_t shift_;
1086 };
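The intended use of multiplier() and shift() is the standard Hacker's Delight recipe: take the high 32 bits of the 64-bit product, apply the sign-dependent correction terms, shift arithmetically, then add one when the quotient is negative so the result truncates toward zero. A standalone sketch (assumes |d| >= 2 and arithmetic right shifts; not the exact instruction sequence V8 emits):

  int32_t DivideByConstant(int32_t n, int32_t d) {
    MultiplierAndShift ms(d);
    int64_t product = static_cast<int64_t>(ms.multiplier()) * n;
    int32_t q = static_cast<int32_t>(product >> 32);  // high half of the product
    if (d > 0 && ms.multiplier() < 0) q += n;          // correction terms from
    if (d < 0 && ms.multiplier() > 0) q -= n;          // "Hacker's Delight", ch. 10
    q >>= ms.shift();
    q += static_cast<uint32_t>(q) >> 31;               // +1 if q < 0: truncate toward zero
    return q;                                          // == n / d
  }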
1087 
1088 
1089 } } // namespace v8::internal
1090 
1091 #endif // V8_ASSEMBLER_H_