#ifndef V8_V8GLOBALS_H_
#define V8_V8GLOBALS_H_

#ifdef V8_HOST_ARCH_64_BIT
const Address kZapValue =
    reinterpret_cast<Address>(V8_UINT64_C(0xdeadbeedbeadbeef));
const Address kHandleZapValue =
    reinterpret_cast<Address>(V8_UINT64_C(0x1baddead0baddeaf));
const Address kFromSpaceZapValue =
    reinterpret_cast<Address>(V8_UINT64_C(0x1beefdad0beefdaf));
#endif  // V8_HOST_ARCH_64_BIT (32-bit variants elided here)
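
// The zap constants above are sentinel bit patterns written over freed or
// invalid memory so that a stale dereference faults at an address that is
// easy to recognize in a crash dump. Minimal sketch of the idea (ZapSlot
// is a hypothetical helper, not a V8 API; assumes a 64-bit host):
inline void ZapSlot(Address* slot) {
  *slot = kZapValue;  // any later use of *slot crashes on 0xdeadbeedbeadbeef
}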

class ExternalReference;

template <typename T> class Handle;

template <typename Config, class Allocator = FreeStoreAllocationPolicy>
    class SplayTree;

#define STATE_TAG_LIST(V) \
  V(JS) V(GC) V(COMPILER) \
  V(PARALLEL_COMPILER_PROLOGUE) \
  V(OTHER) V(EXTERNAL)

#define DEF_STATE_TAG(name) name,
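
// STATE_TAG_LIST and DEF_STATE_TAG are an X-macro pair: the list macro
// names each tag once, and the helper stamps those names into an enum.
// Sketch of the expansion, mirroring the enum StateTag definition in
// this header:
enum StateTag {
  STATE_TAG_LIST(DEF_STATE_TAG)  // expands to JS, GC, COMPILER, ...
};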

#define HAS_SMI_TAG(value) \
  ((reinterpret_cast<intptr_t>(value) & kSmiTagMask) == kSmiTag)

#define HAS_FAILURE_TAG(value) \
  ((reinterpret_cast<intptr_t>(value) & kFailureTagMask) == kFailureTag)
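
// Both macros test the low-order tag bits of a value word: smis (small
// integers) are stored shifted left so their tag bits are zero, while
// heap object pointers carry a nonzero tag. Hypothetical check, assuming
// the classic one-bit smi encoding (kSmiTag == 0, kSmiTagMask == 1):
inline bool ExampleSmiTagCheck() {
  intptr_t smi_42 = 42 << 1;                          // smi 42: tag bit clear
  void* heap_word = reinterpret_cast<void*>(0x1001);  // pointer: tag bit set
  return HAS_SMI_TAG(smi_42) && !HAS_SMI_TAG(heap_word);  // true
}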

#define OBJECT_POINTER_ALIGN(value) \
  (((value) + kObjectAlignmentMask) & ~kObjectAlignmentMask)

#define POINTER_SIZE_ALIGN(value) \
  (((value) + kPointerAlignmentMask) & ~kPointerAlignmentMask)

#define CODE_POINTER_ALIGN(value) \
  (((value) + kCodeAlignmentMask) & ~kCodeAlignmentMask)
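
// All three use the standard align-up idiom: adding mask == alignment - 1
// and clearing the low bits rounds up to the next multiple of a
// power-of-two alignment. Worked example assuming kObjectAlignment == 4
// (mask 3); the real value is configured per platform:
//   OBJECT_POINTER_ALIGN(13) == (13 + 3) & ~3 == 16
//   OBJECT_POINTER_ALIGN(16) == (16 + 3) & ~3 == 16   // already aligned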

#ifdef DEBUG
#define TRACK_MEMORY(name) \
  void* operator new(size_t size) { \
    void* result = ::operator new(size); \
    Logger::NewEventStatic(name, result, size); \
    return result; \
  } \
  void operator delete(void* object) { \
    Logger::DeleteEventStatic(name, object); \
    ::operator delete(object); \
  }
#else
#define TRACK_MEMORY(name)
#endif
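
// A class opts in by placing TRACK_MEMORY in its body: debug builds then
// report every new/delete of that class to the Logger, while release
// builds compile the macro away. Hypothetical usage (SampleNode is an
// invented name, not a V8 class):
class SampleNode {
 public:
  TRACK_MEMORY("SampleNode")
  int payload;
};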

#endif  // V8_V8GLOBALS_H_

bool (*WeakSlotCallbackWithHeap)(Heap* heap, Object** pointer)   (see the callback sketch after this list)
const intptr_t kSmiSignMask
const intptr_t kDoubleAlignmentMask
const uint32_t kNaNOrInfinityLowerBoundUpper32
DoubleRepresentation(double x)   (see the bit-inspection sketch after this list)
const intptr_t kCodeAlignmentMask
const intptr_t kIntptrSignBit
const int kProcessorCacheLineSize
struct v8::internal::IeeeDoubleBigEndianArchType::bits (anonymous bit-field struct member; see the layout sketch after this list)
const int kPointerSizeLog2
const intptr_t kCodeAlignment
const intptr_t kObjectAlignmentMask
void (*StoreBufferCallback)(Heap* heap, MemoryChunk* page, StoreBufferEvent event)
const uint32_t kSlotsZapValue
const uint64_t kHoleNanInt64
int (*HeapObjectCallback)(HeapObject* obj)
const intptr_t kFailureTagMask
const int kFailureTagSize
const uint32_t kHoleNanUpper32
const uint32_t kHoleNanLower32
bool IsLexicalVariableMode(VariableMode mode)   (see the predicate sketch after this list)
#define DEF_STATE_TAG(name)
const uint32_t kQuietNaNHighBitsMask
const Address kFromSpaceZapValue
bool IsDeclaredVariableMode(VariableMode mode)
const intptr_t kPointerAlignmentMask
const uint32_t kFreeListZapValue
void (*InlineCacheCallback)(Code* code, Address ic)
const intptr_t kObjectAlignment
const uint64_t kLastNonNaNInt64
const intptr_t kPointerAlignment
const int kObjectAlignmentBits
const int kCodeAlignmentBits
bool (*ConstraintCallback)(Address new_addr, Address old_addr)
bool IsDynamicVariableMode(VariableMode mode)
const uint32_t kDebugZapValue
bool IsImmutableVariableMode(VariableMode mode)
const intptr_t kDoubleAlignment
#define STATE_TAG_LIST(V)
bool (*WeakSlotCallback)(Object** pointer)
const Address kHandleZapValue
struct v8::internal::IeeeDoubleLittleEndianArchType::bits (anonymous bit-field struct member)
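
DoubleRepresentation and the hole-NaN constants work together: the union
exposes a double's raw bit pattern so the runtime can compare it against
kHoleNanInt64, the NaN pattern V8 uses to mark holes in fast double arrays.
A minimal sketch, assuming the union's shape from this header and taking
the two 32-bit halves as parameters instead of hard-coding their values:

#include <stdint.h>

union DoubleRepresentation {
  int64_t bits;
  double value;
  DoubleRepresentation(double x) { value = x; }
};

// True iff d carries the hole-NaN pattern composed from the two halves
// (kHoleNanUpper32 / kHoleNanLower32 in this header).
bool IsTheHoleNan(double d, uint32_t hole_upper32, uint32_t hole_lower32) {
  uint64_t hole = (static_cast<uint64_t>(hole_upper32) << 32) | hole_lower32;
  DoubleRepresentation rep(d);
  return static_cast<uint64_t>(rep.bits) == hole;
}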
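
The IeeeDouble*ArchType structs (the anonymous bit-field members named bits
above) give named access to the sign, exponent, and mantissa fields of a
double; the two variants order the bit-fields to match the platform's byte
order. A sketch of the little-endian layout with the standard IEEE 754
field widths:

// Little-endian variant: 52 mantissa bits split 32/20 across two words,
// then 11 exponent bits and the sign bit. (Reading a union member other
// than the one last written is implementation-defined in C++, which is
// why V8 confines this to known compilers.)
struct IeeeDoubleLittleEndianArchType {
  union {
    double d;
    struct {
      unsigned int man_low  : 32;  // low 32 bits of the mantissa
      unsigned int man_high : 20;  // high 20 bits of the mantissa
      unsigned int exp      : 11;  // biased exponent
      unsigned int sign     : 1;   // sign bit
    } bits;
  } u;
};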
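
The function-pointer typedefs (WeakSlotCallback, WeakSlotCallbackWithHeap,
StoreBufferCallback, and the rest) define the contracts for GC and
heap-iteration hooks. A hypothetical WeakSlotCallback implementation, just
to show the shape; KeepNonNullSlots is an invented name:

class Object;  // V8's internal heap object type, opaque in this sketch

// Treat a weak slot as live iff it still points at something; a GC phase
// holding this callback would clear every slot for which it returns false.
static bool KeepNonNullSlots(Object** pointer) {
  return *pointer != NULL;
}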
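
IsLexicalVariableMode and its siblings classify VariableMode values by
comparing against contiguous ranges of enumerators, so they rely on the
enum's declaration order. A sketch over an illustrative subset (the real
enum in this header has more members, and these ranges mirror the style of
the predicates rather than their exact boundaries):

enum VariableMode { VAR, CONST, LET, CONST_HARMONY,
                    DYNAMIC, DYNAMIC_GLOBAL, DYNAMIC_LOCAL };

inline bool IsLexicalVariableMode(VariableMode mode) {
  return mode >= LET && mode <= CONST_HARMONY;      // block-scoped bindings
}

inline bool IsDynamicVariableMode(VariableMode mode) {
  return mode >= DYNAMIC && mode <= DYNAMIC_LOCAL;  // need runtime lookup
}

inline bool IsImmutableVariableMode(VariableMode mode) {
  return mode == CONST || mode == CONST_HARMONY;    // const bindings
}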