#ifndef V8_STORE_BUFFER_H_
#define V8_STORE_BUFFER_H_

class StoreBuffer;

// Callback invoked for each old-to-new pointer slot that is found.
typedef void (*ObjectSlotCallback)(HeapObject** from, HeapObject* to);

// Callback used to report store buffer events for a page.
typedef void (*StoreBufferCallback)(Heap* heap,
                                    MemoryChunk* page,
                                    StoreBufferEvent event);

// Pointer-to-member callback used to visit a region of pointers on a page.
typedef void (StoreBuffer::*RegionCallback)(Address start,
                                            Address end,
                                            ObjectSlotCallback slot_callback);

// Implements the write barrier by collecting addresses of old-to-new pointer
// slots, so the scavenger does not have to scan all of old space.
class StoreBuffer {
 public:
  static void StoreBufferOverflow(Isolate* isolate);

  // Enters an address directly into the old store buffer; used while the GC
  // rebuilds the buffer.
  void EnterDirectlyIntoStoreBuffer(Address addr);

  // Iterates over all pointers that go from old space to new space, invoking
  // the callback for each slot.
  void IteratePointersToNewSpace(ObjectSlotCallback callback);

  void SetTop(Object*** top) {
    old_top_ = reinterpret_cast<Address*>(top);
  }

  bool old_buffer_is_sorted() { return old_buffer_is_sorted_; }
  bool old_buffer_is_filtered() { return old_buffer_is_filtered_; }

  void EnsureSpace(intptr_t space_needed);

  bool PrepareForIteration();

  // Slow, for asserts only: tests whether the given cell address is currently
  // present in the store buffer.
  bool CellIsInStoreBuffer(Address cell);

  static const int kStoreBufferOverflowBit;
  static const int kStoreBufferSize;
  static const int kStoreBufferLength;
  static const int kOldStoreBufferLength;
  static const int kHashSetLengthLog2;
  static const int kHashSetLength;

 private:
  bool old_buffer_is_sorted_;
  bool old_buffer_is_filtered_;

  // Top of the old store buffer; see SetTop() above.
  Address* old_top_;

  // When true, pointers discovered by GC callbacks are re-entered into the
  // store buffer; toggled by StoreBufferRebuildScope.
  bool store_buffer_rebuilding_enabled_;
  StoreBufferCallback callback_;

  // When false, store buffer entries must not be moved or removed; toggled by
  // DontMoveStoreBufferEntriesScope.
  bool may_move_store_buffer_entries_;

  // Two hash sets used to filter duplicate addresses out of the store buffer.
  uintptr_t* hash_set_1_;
  uintptr_t* hash_set_2_;
  bool hash_sets_are_empty_;

  void ClearFilteringHashSets();

  void CheckForFullBuffer();
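
  // Illustrative sketch (not the V8 implementation): a duplicate filter over
  // two small hash sets might test-and-insert an address like this, assuming
  // hypothetical kHashSetLength-sized open-addressing tables:
  //
  //   bool SeenBefore(uintptr_t addr) {
  //     size_t i = (addr >> kPointerSizeLog2) & (kHashSetLength - 1);
  //     if (hash_set_1_[i] == addr) return true;
  //     if (hash_set_2_[i] == addr) return true;
  //     hash_set_2_[i] = hash_set_1_[i];  // evict into the second set
  //     hash_set_1_[i] = addr;
  //     hash_sets_are_empty_ = false;
  //     return false;
  //   }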

  // Samples the old store buffer and exempts pages that occur too often (more
  // than threshold times), so they are scanned in full during scavenge instead
  // of continuing to fill the buffer.
  void ExemptPopularPages(int prime_sample_step, int threshold);

  void FindPointersToNewSpaceInRegion(Address start,
                                      Address end,
                                      ObjectSlotCallback slot_callback);

  // The page-based variants visit each pointer-bearing region of the page via
  // region_callback, which reports old-to-new slots through slot_callback.
  void IteratePointersOnPage(PagedSpace* space,
                             Page* page,
                             RegionCallback region_callback,
                             ObjectSlotCallback slot_callback);

  void FindPointersToNewSpaceInMaps(Address start,
                                    Address end,
                                    ObjectSlotCallback slot_callback);

  void FindPointersToNewSpaceInMapsRegion(Address start,
                                          Address end,
                                          ObjectSlotCallback slot_callback);

  void FindPointersToNewSpaceOnPage(PagedSpace* space,
                                    Page* page,
                                    RegionCallback region_callback,
                                    ObjectSlotCallback slot_callback);

  friend class StoreBufferRebuildScope;
  friend class DontMoveStoreBufferEntriesScope;
};
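
// Usage sketch (illustrative, not from this header): during a scavenge the
// heap might walk all old-to-new slots with a callback that matches
// ObjectSlotCallback; UpdateSlot and heap->store_buffer() are assumed names
// here.
//
//   static void UpdateSlot(HeapObject** slot, HeapObject* target) {
//     // Forward target if it was copied and rewrite *slot accordingly.
//   }
//   ...
//   heap->store_buffer()->IteratePointersToNewSpace(&UpdateSlot);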

// RAII scope that lets GC callbacks re-enter surviving pointers into the
// store buffer while it is being rebuilt; the previous state is restored on
// destruction.
class StoreBufferRebuildScope {
 public:
  StoreBufferRebuildScope(Heap* heap,
                          StoreBuffer* store_buffer,
                          StoreBufferCallback callback)
      : store_buffer_(store_buffer),
        stored_state_(store_buffer->store_buffer_rebuilding_enabled_),
        stored_callback_(store_buffer->callback_) {
    store_buffer_->store_buffer_rebuilding_enabled_ = true;
    store_buffer_->callback_ = callback;
  }

  ~StoreBufferRebuildScope() {
    store_buffer_->callback_ = stored_callback_;
    store_buffer_->store_buffer_rebuilding_enabled_ = stored_state_;
    store_buffer_->CheckForFullBuffer();
  }

 private:
  StoreBuffer* store_buffer_;
  bool stored_state_;
  StoreBufferCallback stored_callback_;
};
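
// Usage sketch (illustrative): a GC phase that rebuilds the store buffer can
// bracket its work with the scope; MyRebuildCallback and heap->store_buffer()
// are assumed names, with MyRebuildCallback matching the StoreBufferCallback
// signature.
//
//   {
//     StoreBufferRebuildScope scope(heap, heap->store_buffer(),
//                                   &MyRebuildCallback);
//     // ... visit pointers; surviving old-to-new slots are re-entered via
//     // store_buffer->EnterDirectlyIntoStoreBuffer(slot_address) ...
//   }  // Destructor restores the previous callback/flag and checks whether
//      // the buffer overflowed.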

// RAII scope that forbids moving or removing store buffer entries for its
// duration; the previous state is restored on destruction.
class DontMoveStoreBufferEntriesScope {
 public:
  explicit DontMoveStoreBufferEntriesScope(StoreBuffer* store_buffer)
      : store_buffer_(store_buffer),
        stored_state_(store_buffer->may_move_store_buffer_entries_) {
    store_buffer_->may_move_store_buffer_entries_ = false;
  }

  ~DontMoveStoreBufferEntriesScope() {
    store_buffer_->may_move_store_buffer_entries_ = stored_state_;
  }

 private:
  StoreBuffer* store_buffer_;
  bool stored_state_;
};
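
// Usage sketch (illustrative): code that iterates the store buffer contents
// and must not have entries compacted underneath it can pin them in place;
// heap->store_buffer() is an assumed accessor.
//
//   {
//     DontMoveStoreBufferEntriesScope no_move(heap->store_buffer());
//     // ... walk the buffer; entries stay put until the scope exits ...
//   }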

#endif  // V8_STORE_BUFFER_H_