v8 3.25.30 (node 0.11.13)
V8 is Google's open source JavaScript engine
v8globals.h
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_V8GLOBALS_H_
#define V8_V8GLOBALS_H_

#include "globals.h"
#include "checks.h"

namespace v8 {
namespace internal {

// This file contains constants and global declarations related to the
// V8 system.

// Mask for the sign bit in a smi.
const intptr_t kSmiSignMask = kIntptrSignBit;

const int kObjectAlignmentBits = kPointerSizeLog2;
const intptr_t kObjectAlignment = 1 << kObjectAlignmentBits;
const intptr_t kObjectAlignmentMask = kObjectAlignment - 1;

// Desired alignment for pointers.
const intptr_t kPointerAlignment = (1 << kPointerSizeLog2);
const intptr_t kPointerAlignmentMask = kPointerAlignment - 1;

// Desired alignment for double values.
const intptr_t kDoubleAlignment = 8;
const intptr_t kDoubleAlignmentMask = kDoubleAlignment - 1;

// Desired alignment for generated code is 32 bytes (to improve cache line
// utilization).
const int kCodeAlignmentBits = 5;
const intptr_t kCodeAlignment = 1 << kCodeAlignmentBits;
const intptr_t kCodeAlignmentMask = kCodeAlignment - 1;

// Tag information for Failure.
const int kFailureTag = 3;
const int kFailureTagSize = 2;
const intptr_t kFailureTagMask = (1 << kFailureTagSize) - 1;


// Zap-value: The value used for zapping dead objects.
// Should be a recognizable hex value tagged as a failure.
#ifdef V8_HOST_ARCH_64_BIT
const Address kZapValue =
    reinterpret_cast<Address>(V8_UINT64_C(0xdeadbeedbeadbeef));
const Address kHandleZapValue =
    reinterpret_cast<Address>(V8_UINT64_C(0x1baddead0baddeaf));
const Address kGlobalHandleZapValue =
    reinterpret_cast<Address>(V8_UINT64_C(0x1baffed00baffedf));
const Address kFromSpaceZapValue =
    reinterpret_cast<Address>(V8_UINT64_C(0x1beefdad0beefdaf));
const uint64_t kDebugZapValue = V8_UINT64_C(0xbadbaddbbadbaddb);
const uint64_t kSlotsZapValue = V8_UINT64_C(0xbeefdeadbeefdeef);
const uint64_t kFreeListZapValue = 0xfeed1eaffeed1eaf;
#else
const Address kZapValue = reinterpret_cast<Address>(0xdeadbeef);
const Address kHandleZapValue = reinterpret_cast<Address>(0xbaddeaf);
const Address kGlobalHandleZapValue = reinterpret_cast<Address>(0xbaffedf);
const Address kFromSpaceZapValue = reinterpret_cast<Address>(0xbeefdaf);
const uint32_t kSlotsZapValue = 0xbeefdeef;
const uint32_t kDebugZapValue = 0xbadbaddb;
const uint32_t kFreeListZapValue = 0xfeed1eaf;
#endif

const int kCodeZapValue = 0xbadc0de;

// Number of bits to represent the page size for paged spaces. The value of 20
// gives 1MB per page.
const int kPageSizeBits = 20;

// On Intel architecture, cache line size is 64 bytes.
// On ARM it may be less (32 bytes), but as far as this constant is
// used for aligning data, it doesn't hurt to align on a greater value.
#define PROCESSOR_CACHE_LINE_SIZE 64

// Constants relevant to double precision floating point numbers.
// If looking only at the top 32 bits, the QNaN mask is bits 19 to 30.
const uint32_t kQuietNaNHighBitsMask = 0xfff << (51 - 32);
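// A worked value, assuming the standard IEEE-754 layout of the high word
// (sign in bit 31, exponent in bits 30..20, top mantissa bit in bit 19):
// the expression above evaluates to 0xfff << 19 == 0x7ff80000, i.e. the mask
// covers the 11 exponent bits plus the most significant mantissa bit, which
// is exactly the part of the high word that is all ones for a quiet NaN.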


// -----------------------------------------------------------------------------
// Forward declarations for frequently used classes

class AccessorInfo;
class Allocation;
class Arguments;
class Assembler;
class Code;
class CodeGenerator;
class CodeStub;
class Context;
class Debug;
class Debugger;
class DebugInfo;
class Descriptor;
class DescriptorArray;
class TransitionArray;
class ExternalReference;
class FixedArray;
class MemoryChunk;
class NameDictionary;
template <typename T> class Handle;
class Heap;
class HeapObject;
class IC;
class InterceptorInfo;
class Isolate;
class JSReceiver;
class JSArray;
class JSFunction;
class JSObject;
class LargeObjectSpace;
class LookupResult;
class MacroAssembler;
class Map;
class MapSpace;
class NewSpace;
class Object;
class MaybeObject;
class OldSpace;
class Foreign;
class Scope;
class ScopeInfo;
class Script;
class Smi;
template <typename Config, class Allocator = FreeStoreAllocationPolicy>
    class SplayTree;
class String;
class Name;
class Struct;
class Variable;
class RelocInfo;
class Deserializer;
class MessageLocation;
class VirtualMemory;
class Mutex;
class RecursiveMutex;

typedef bool (*WeakSlotCallback)(Object** pointer);

typedef bool (*WeakSlotCallbackWithHeap)(Heap* heap, Object** pointer);

// -----------------------------------------------------------------------------
// Miscellaneous

// NOTE: SpaceIterator depends on AllocationSpace enumeration values being
// consecutive.
enum AllocationSpace {
  NEW_SPACE,            // Semispaces collected with copying collector.
  OLD_POINTER_SPACE,    // May contain pointers to new space.
  OLD_DATA_SPACE,       // Must not have pointers to new space.
  CODE_SPACE,           // No pointers to new space, marked executable.
  MAP_SPACE,            // Only and all map objects.
  CELL_SPACE,           // Only and all cell objects.
  PROPERTY_CELL_SPACE,  // Only and all global property cell objects.
  LO_SPACE,             // Promoted large objects.

  FIRST_SPACE = NEW_SPACE,
  LAST_SPACE = LO_SPACE,
  FIRST_PAGED_SPACE = OLD_POINTER_SPACE,
  LAST_PAGED_SPACE = PROPERTY_CELL_SPACE
};
const int kSpaceTagSize = 3;
const int kSpaceTagMask = (1 << kSpaceTagSize) - 1;


// A flag that indicates whether objects should be pretenured when
// allocated (allocated directly into the old generation) or not
// (allocated in the young generation if the object size and type
// allows).
enum PretenureFlag { NOT_TENURED, TENURED };

enum MinimumCapacity {
  USE_DEFAULT_MINIMUM_CAPACITY,
  USE_CUSTOM_MINIMUM_CAPACITY
};

enum GarbageCollector { SCAVENGER, MARK_COMPACTOR };

enum Executability { NOT_EXECUTABLE, EXECUTABLE };

enum VisitMode {
  VISIT_ALL,
  VISIT_ALL_IN_SCAVENGE,
  VISIT_ALL_IN_SWEEP_NEWSPACE,
  VISIT_ONLY_STRONG
};

// Flag indicating whether code is built into the VM (one of the natives files).
enum NativesFlag { NOT_NATIVES_CODE, NATIVES_CODE };


// A CodeDesc describes a buffer holding instructions and relocation
// information. The instructions start at the beginning of the buffer
// and grow forward, the relocation information starts at the end of
// the buffer and grows backward.
//
//  |<--------------- buffer_size ---------------->|
//  |<-- instr_size -->|        |<-- reloc_size -->|
//  +==================+========+==================+
//  |   instructions   |  free  |    reloc info    |
//  +==================+========+==================+
//  ^
//  |
//  buffer
struct CodeDesc {
  byte* buffer;
  int buffer_size;
  int instr_size;
  int reloc_size;
  Assembler* origin;
};
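// Reading the diagram in terms of these fields (an illustrative sketch, using
// only the members declared above): the instructions occupy
// [buffer, buffer + instr_size), the relocation info occupies
// [buffer + buffer_size - reloc_size, buffer + buffer_size), and the free gap
// in between is buffer_size - instr_size - reloc_size bytes.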


// Callback function used for iterating objects in heap spaces,
// for example, scanning heap objects.
typedef int (*HeapObjectCallback)(HeapObject* obj);


// Callback function used for checking constraints when copying/relocating
// objects. Returns true if an object can be copied/relocated from its
// old_addr to a new_addr.
typedef bool (*ConstraintCallback)(Address new_addr, Address old_addr);


// Callback function on inline caches, used for iterating over inline caches
// in compiled code.
typedef void (*InlineCacheCallback)(Code* code, Address ic);


// State for inline cache call sites. Aliased as IC::State.
enum InlineCacheState {
  // Has never been executed.
  UNINITIALIZED,
  // Has been executed but monomorphic state has been delayed.
  PREMONOMORPHIC,
  // Has been executed and only one receiver type has been seen.
  MONOMORPHIC,
  // Like MONOMORPHIC but check failed due to prototype.
  MONOMORPHIC_PROTOTYPE_FAILURE,
  // Multiple receiver types have been seen.
  POLYMORPHIC,
  // Many receiver types have been seen.
  MEGAMORPHIC,
  // A generic handler is installed and no extra type feedback is recorded.
  GENERIC,
  // Special state for debug break or step in prepare stubs.
  DEBUG_STUB
};


enum CallFunctionFlags {
  NO_CALL_FUNCTION_FLAGS,
  // The call target is cached in the instruction stream.
  RECORD_CALL_TARGET,

  // Always wrap the receiver and call to the JSFunction. Only use this flag
  // if both the receiver type and the target method are statically known.
  WRAP_AND_CALL
};


enum InlineCacheHolderFlag {
  OWN_MAP,       // For fast properties objects.
  PROTOTYPE_MAP  // For slow properties objects (except GlobalObjects).
};


// The Store Buffer (GC).
typedef enum {
  kStoreBufferFullEvent,
  kStoreBufferStartScanningPagesEvent,
  kStoreBufferScanningPageEvent
} StoreBufferEvent;


typedef void (*StoreBufferCallback)(Heap* heap,
                                    MemoryChunk* page,
                                    StoreBufferEvent event);


// Union used for fast testing of specific double values.
union DoubleRepresentation {
  double value;
  int64_t bits;
  DoubleRepresentation(double x) { value = x; }
  bool operator==(const DoubleRepresentation& other) const {
    return bits == other.bits;
  }
};
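// Illustrative behaviour of the bit-level comparison above: two values that
// are equal as doubles can still differ here, and vice versa. For example,
//   DoubleRepresentation zero(0.0), minus_zero(-0.0);
// here zero == minus_zero is false, because the sign bit makes their int64_t
// images differ (0x0 vs 0x8000000000000000), even though 0.0 == -0.0 holds
// for the doubles themselves.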


// Union used for customized checking of the IEEE double types
// inlined within v8 runtime, rather than going to the underlying
// platform headers and libraries
union IeeeDoubleLittleEndianArchType {
  double d;
  struct {
    unsigned int man_low  :32;
    unsigned int man_high :20;
    unsigned int exp      :11;
    unsigned int sign     :1;
  } bits;
};


union IeeeDoubleBigEndianArchType {
  double d;
  struct {
    unsigned int sign     :1;
    unsigned int exp      :11;
    unsigned int man_high :20;
    unsigned int man_low  :32;
  } bits;
};
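// A worked reading of the bit-fields, assuming the usual IEEE-754 double
// layout: for d == 1.0 the fields decode as sign == 0, exp == 0x3ff (the
// biased exponent for 2^0) and man_high == man_low == 0; for d == -2.0 they
// decode as sign == 1, exp == 0x400 and a zero mantissa.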


// AccessorCallback
struct AccessorDescriptor {
  MaybeObject* (*getter)(Isolate* isolate, Object* object, void* data);
  MaybeObject* (*setter)(
      Isolate* isolate, JSObject* object, Object* value, void* data);
  void* data;
};


// Logging and profiling. A StateTag represents a possible state of
// the VM. The logger maintains a stack of these. Creating a VMState
// object enters a state by pushing on the stack, and destroying a
// VMState object leaves a state by popping the current state from the
// stack.

enum StateTag {
  JS,
  GC,
  COMPILER,
  OTHER,
  EXTERNAL,
  IDLE
};
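// A sketch of the usual push/pop pattern described above, assuming the
// VMState<StateTag> RAII helper declared in vm-state.h:
//   {
//     VMState<COMPILER> state(isolate);  // logger now reports COMPILER
//     ...                                // do the work attributed to it
//   }                                    // destructor restores the old tag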


// -----------------------------------------------------------------------------
// Macros

// Testers for tagged values.

#define HAS_SMI_TAG(value) \
  ((reinterpret_cast<intptr_t>(value) & kSmiTagMask) == kSmiTag)

#define HAS_FAILURE_TAG(value) \
  ((reinterpret_cast<intptr_t>(value) & kFailureTagMask) == kFailureTag)
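// How these expand under the standard tagging scheme (kSmiTag and kSmiTagMask
// come from globals.h, where kSmiTag == 0 and kSmiTagMask == 1): a word passes
// HAS_SMI_TAG when its lowest bit is 0, i.e. it holds a small integer shifted
// into the upper bits, and it passes HAS_FAILURE_TAG when its two lowest bits
// are 0b11 (kFailureTag == 3 above).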

// OBJECT_POINTER_ALIGN returns the value aligned as a HeapObject pointer
#define OBJECT_POINTER_ALIGN(value) \
  (((value) + kObjectAlignmentMask) & ~kObjectAlignmentMask)

// POINTER_SIZE_ALIGN returns the value aligned as a pointer.
#define POINTER_SIZE_ALIGN(value) \
  (((value) + kPointerAlignmentMask) & ~kPointerAlignmentMask)

// CODE_POINTER_ALIGN returns the value aligned as a generated code segment.
#define CODE_POINTER_ALIGN(value) \
  (((value) + kCodeAlignmentMask) & ~kCodeAlignmentMask)
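// Worked examples, assuming a 32-bit build where kObjectAlignment == 4 and
// kCodeAlignment == 32:
//   OBJECT_POINTER_ALIGN(13) == (13 + 3) & ~3    == 16
//   CODE_POINTER_ALIGN(100)  == (100 + 31) & ~31 == 128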

// Support for tracking C++ memory allocation. Insert TRACK_MEMORY("Fisk")
// inside a C++ class and new and delete will be overloaded so logging is
// performed.
// This file (v8globals.h) is included before log.h, so we use direct calls to
// the Logger rather than the LOG macro.
#ifdef DEBUG
#define TRACK_MEMORY(name) \
  void* operator new(size_t size) { \
    void* result = ::operator new(size); \
    Logger::NewEventStatic(name, result, size); \
    return result; \
  } \
  void operator delete(void* object) { \
    Logger::DeleteEventStatic(name, object); \
    ::operator delete(object); \
  }
#else
#define TRACK_MEMORY(name)
#endif
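// The intended use, following the comment above ("Fisk" is just the sample
// tag from that comment; the class name here is purely illustrative):
//   class SomeTrackedThing {
//    public:
//     TRACK_MEMORY("Fisk")
//     // ... rest of the class ...
//   };
// In DEBUG builds every `new SomeTrackedThing` and matching `delete` is then
// reported to the Logger; in release builds the macro expands to nothing.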


// Feature flags bit positions. They are mostly based on the CPUID spec.
// On X86/X64, values below 32 are bits in EDX, values above 32 are bits in ECX.
enum CpuFeature { SSE4_1 = 32 + 19,  // x86
                  SSE3 = 32 + 0,     // x86
                  SSE2 = 26,         // x86
                  CMOV = 15,         // x86
                  VFP3 = 1,          // ARM
                  ARMv7 = 2,         // ARM
                  SUDIV = 3,         // ARM
                  UNALIGNED_ACCESSES = 4,  // ARM
                  MOVW_MOVT_IMMEDIATE_LOADS = 5,  // ARM
                  VFP32DREGS = 6,    // ARM
                  NEON = 7,          // ARM
                  SAHF = 0,          // x86
                  FPU = 1 };         // MIPS
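// Decoding a few of the x86 entries against the CPUID feature bits they name:
// SSE2 = 26 is bit 26 of EDX, while SSE3 = 32 + 0 and SSE4_1 = 32 + 19 are
// bits 0 and 19 of ECX; the "+ 32" offset is how this enum folds the two
// registers into a single bit-index space.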


// Used to specify if a macro instruction must perform a smi check on tagged
// values.
enum SmiCheckType {
  DONT_DO_SMI_CHECK,
  DO_SMI_CHECK
};


enum ScopeType {
  EVAL_SCOPE,      // The top-level scope for an eval source.
  FUNCTION_SCOPE,  // The top-level scope for a function.
  MODULE_SCOPE,    // The scope introduced by a module literal.
  GLOBAL_SCOPE,    // The top-level scope for a program or a top-level eval.
  CATCH_SCOPE,     // The scope introduced by catch.
  BLOCK_SCOPE,     // The scope introduced by a new block.
  WITH_SCOPE       // The scope introduced by with.
};


const uint32_t kHoleNanUpper32 = 0x7FFFFFFF;
const uint32_t kHoleNanLower32 = 0xFFFFFFFF;
const uint32_t kNaNOrInfinityLowerBoundUpper32 = 0x7FF00000;

const uint64_t kHoleNanInt64 =
    (static_cast<uint64_t>(kHoleNanUpper32) << 32) | kHoleNanLower32;
const uint64_t kLastNonNaNInt64 =
    (static_cast<uint64_t>(kNaNOrInfinityLowerBoundUpper32) << 32);
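// Written out, kHoleNanInt64 is 0x7FFFFFFFFFFFFFFF and kLastNonNaNInt64 is
// 0x7FF0000000000000. Ignoring the sign bit, any double whose bit pattern is
// strictly greater than kLastNonNaNInt64 has an all-ones exponent and a
// non-zero mantissa, i.e. it is a NaN; the "hole" is the particular NaN
// pattern assembled from kHoleNanUpper32 and kHoleNanLower32.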


// The order of this enum has to be kept in sync with the predicates below.
enum VariableMode {
  // User declared variables:
  VAR,             // declared via 'var', and 'function' declarations

  CONST_LEGACY,    // declared via legacy 'const' declarations

  LET,             // declared via 'let' declarations (first lexical)

  CONST,           // declared via 'const' declarations

  MODULE,          // declared via 'module' declaration (last lexical)

  // Variables introduced by the compiler:
  INTERNAL,        // like VAR, but not user-visible (may or may not
                   // be in a context)

  TEMPORARY,       // temporary variables (not user-visible), stack-allocated
                   // unless the scope as a whole has forced context allocation

  DYNAMIC,         // always require dynamic lookup (we don't know
                   // the declaration)

  DYNAMIC_GLOBAL,  // requires dynamic lookup, but we know that the
                   // variable is global unless it has been shadowed
                   // by an eval-introduced variable

  DYNAMIC_LOCAL    // requires dynamic lookup, but we know that the
                   // variable is local and where it is unless it
                   // has been shadowed by an eval-introduced
                   // variable
};


inline bool IsDynamicVariableMode(VariableMode mode) {
  return mode >= DYNAMIC && mode <= DYNAMIC_LOCAL;
}


inline bool IsDeclaredVariableMode(VariableMode mode) {
  return mode >= VAR && mode <= MODULE;
}


inline bool IsLexicalVariableMode(VariableMode mode) {
  return mode >= LET && mode <= MODULE;
}


inline bool IsImmutableVariableMode(VariableMode mode) {
  return (mode >= CONST && mode <= MODULE) || mode == CONST_LEGACY;
}
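// A concrete reading of the four predicates for mode == LET: it is a declared
// mode (VAR <= LET <= MODULE) and a lexical mode (LET <= LET <= MODULE), but
// it is neither immutable (LET comes before CONST and is not CONST_LEGACY)
// nor dynamic (LET comes before DYNAMIC).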


// ES6 Draft Rev3 10.2 specifies declarative environment records with mutable
// and immutable bindings that can be in two states: initialized and
// uninitialized. In ES5 only immutable bindings have these two states. When
// accessing a binding, it needs to be checked for initialization. However in
// the following cases the binding is initialized immediately after creation
// so the initialization check can always be skipped:
// 1. Var declared local variables.
//      var foo;
// 2. A local variable introduced by a function declaration.
//      function foo() {}
// 3. Parameters
//      function x(foo) {}
// 4. Catch bound variables.
//      try {} catch (foo) {}
// 5. Function variables of named function expressions.
//      var x = function foo() {}
// 6. Implicit binding of 'this'.
// 7. Implicit binding of 'arguments' in functions.
//
// ES5 specified object environment records which are introduced by ES elements
// such as Program and WithStatement that associate identifier bindings with the
// properties of some object. In the specification only mutable bindings exist
// (which may be non-writable) and have no distinct initialization step. However
// V8 allows const declarations in global code with distinct creation and
// initialization steps which are represented by non-writable properties in the
// global object. As a result these bindings also need to be checked for
// initialization.
//
// The following enum specifies a flag that indicates if the binding needs a
// distinct initialization step (kNeedsInitialization) or if the binding is
// immediately initialized upon creation (kCreatedInitialized).
enum InitializationFlag {
  kNeedsInitialization,
  kCreatedInitialized
};


enum ClearExceptionFlag {
  KEEP_EXCEPTION,
  CLEAR_EXCEPTION
};


enum MinusZeroMode {
  TREAT_MINUS_ZERO_AS_ZERO,
  FAIL_ON_MINUS_ZERO
};

} }  // namespace v8::internal

#endif  // V8_V8GLOBALS_H_