28 #ifndef V8_SMALL_POINTER_LIST_H_
29 #define V8_SMALL_POINTER_LIST_H_
// NOTE(review): fragment of SmallPointerList<T>::Reserve(int capacity, Zone* zone).
// The signature, several interior lines and closing braces were lost in
// extraction; the stray leading numbers are the original file's line numbers.
// Capacities 0 and 1 are representable inline (empty / singleton tags), so no
// heap storage is needed.
52 if (capacity < 2)
return;
// Already heap-backed: grow the existing PointerList in place if needed.
53 if ((data_ & kTagMask) == kListTag) {
54 if (list()->capacity() >= capacity)
return;
55 int old_length = list()->length();
// AddBlock is used purely to reserve storage: append placeholder entries,
// then Rewind so the logical length is unchanged.
56 list()->
AddBlock(
NULL, capacity - list()->capacity(), zone);
57 list()->Rewind(old_length);
// (original lines 58-60 are missing here — presumably an early return and the
//  zone allocation of a new PointerList named `list`; TODO confirm against the
//  original file.)
// Migrate an inline singleton value into the freshly allocated list.
61 if ((data_ & kTagMask) == kSingletonTag) {
62 list->
Add(single_value(), zone);
// Store the list pointer with kListTag in the low bits; pointer alignment
// must leave those bits free (see kTagMask).
65 data_ =
reinterpret_cast<intptr_t
>(list) | kListTag;
// NOTE(review): fragment of SmallPointerList<T>::Sort(). Only a heap-backed
// list needs sorting — the empty and singleton encodings are trivially
// ordered, so they fall through untouched.
73 if ((data_ & kTagMask) == kListTag) {
74 list()->
Sort(compare_value);
// NOTE(review): body of SmallPointerList<T>::length() (signature/braces lost
// in extraction). Element count is answered from the tag bits alone for the
// two inline encodings; only the list encoding dereferences the heap.
81 if ((data_ & kTagMask) == kEmptyTag)
return 0;
82 if ((data_ & kTagMask) == kSingletonTag)
return 1;
83 return list()->length();
// NOTE(review): fragment of SmallPointerList<T>::Add(T* pointer, Zone* zone).
// Interior lines (returns, braces, the zone allocation of `list`) are missing.
// Empty -> store the pointer inline, tagged as a singleton. This relies on
// kSingletonTag == 0 so the pointer value is stored unmodified.
88 if ((data_ & kTagMask) == kEmptyTag) {
89 data_ =
reinterpret_cast<intptr_t
>(pointer) | kSingletonTag;
// Singleton -> promote to a heap PointerList holding the old value plus the
// new one. (The allocation of `list` is in a missing line — TODO confirm.)
92 if ((data_ & kTagMask) == kSingletonTag) {
94 list->
Add(single_value(), zone);
95 list->
Add(pointer, zone);
// Retag data_ as a list pointer.
97 data_ =
reinterpret_cast<intptr_t
>(list) | kListTag;
// Already a list -> plain append.
100 list()->
Add(pointer, zone);
// NOTE(review): fragment of the indexed accessor (at / operator[]).
// Indexing an empty list is a programmer error.
106 ASSERT((data_ & kTagMask) != kEmptyTag);
// Singleton: the only valid index yields the inline value. (The ASSERT on the
// index itself is in a missing line — presumably i == 0; TODO confirm.)
107 if ((data_ & kTagMask) == kSingletonTag) {
109 return single_value();
// List encoding: delegate bounds-checked access to PointerList.
111 return list()->
at(i);
// NOTE(review): fragment of SmallPointerList<T>::RemoveElement(T* pointer).
// Removing from an empty list is a no-op.
119 if ((data_ & kTagMask) == kEmptyTag)
return;
// Singleton: only becomes empty if the stored pointer matches. The body that
// resets data_ (and the list-encoding branch) are in missing lines.
120 if ((data_ & kTagMask) == kSingletonTag) {
121 if (pointer == single_value()) {
// NOTE(review): fragment of SmallPointerList<T>::RemoveLast().
// Popping from an empty list is a programmer error.
130 ASSERT((data_ & kTagMask) != kEmptyTag);
// Singleton: capture the inline value before (in a missing line, presumably)
// resetting data_ to the empty encoding, then return it.
131 if ((data_ & kTagMask) == kSingletonTag) {
132 T* result = single_value();
// List encoding: delegate to PointerList::RemoveLast.
136 return list()->RemoveLast();
// NOTE(review): fragment of SmallPointerList<T>::Rewind(pos) — truncate the
// logical length to pos. Branch bodies are in missing lines.
// Empty: nothing to truncate (presumably asserts pos == 0 — TODO confirm).
140 if ((data_ & kTagMask) == kEmptyTag) {
// Singleton: pos may only keep (1) or drop (0) the single element.
144 if ((data_ & kTagMask) == kSingletonTag) {
145 ASSERT(pos == 0 || pos == 1);
// NOTE(review): fragment of CountOccurrences(T* pointer, int start, int end).
// Empty encoding: no elements, so no occurrences in any range.
155 if ((data_ & kTagMask) == kEmptyTag)
return 0;
// Singleton: the single element occupies index 0, so it is only counted when
// the [start, end] range covers index 0. (The else-branch returning 0 and the
// list-encoding delegation are in missing lines.)
156 if ((data_ & kTagMask) == kSingletonTag) {
157 if (start == 0 && end >= 0) {
158 return (single_value() == pointer) ? 1 : 0;
// Comparison adapter passed to PointerList::Sort: dereferences the element
// slots and delegates to the project's Compare<T> on the pointed-to values.
// (Closing brace lost in extraction.)
168 static int compare_value(
T*
const* a,
T*
const* b) {
169 return Compare<T>(**a, **b);
// Tagging scheme: the two low bits of data_ encode the representation.
// kSingletonTag is deliberately 0 so a singleton T* is stored unmodified
// (see the STATIC_ASSERT in single_value()); this requires stored pointers
// to be at least 4-byte aligned so the tag bits are free.
172 static const intptr_t kEmptyTag = 1;
173 static const intptr_t kSingletonTag = 0;
174 static const intptr_t kListTag = 2;
// Mask selecting the two tag bits; kValueMask strips them to recover the
// pointer payload.
175 static const intptr_t kTagMask = 3;
176 static const intptr_t kValueMask = ~kTagMask;
// Decode the inline singleton value. Valid only in the singleton encoding;
// because kSingletonTag == 0 (checked at compile time below), data_ IS the
// pointer and no mask is needed. (Closing brace lost in extraction.)
180 T* single_value()
const {
181 ASSERT((data_ & kTagMask) == kSingletonTag);
182 STATIC_ASSERT(kSingletonTag == 0);
183 return reinterpret_cast<T*
>(data_);
// Decode the heap-backed PointerList. Valid only in the list encoding; the
// tag bits must be masked off before the value is usable as a pointer.
// (Closing brace lost in extraction.)
186 PointerList* list()
const {
187 ASSERT((data_ & kTagMask) == kListTag);
188 return reinterpret_cast<PointerList*
>(data_ & kValueMask);
198 #endif // V8_SMALL_POINTER_LIST_H_
T * operator[](int i) const
void RemoveElement(T *pointer)
SmallPointerList(int capacity, Zone *zone)
#define ASSERT(condition)
bool IsAligned(T value, U alignment)
int CountOccurrences(T *pointer, int start, int end) const
#define T(name, string, precedence)
void Sort(int(*cmp)(const T *x, const T *y))
int CountOccurrences(const T &elm, int start, int end) const
const intptr_t kPointerAlignment
void Reserve(int capacity, Zone *zone)
void Add(T *pointer, Zone *zone)
void Add(const T &element, AllocationPolicy allocator=AllocationPolicy())
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 enable use of VFP2 instructions if available enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of MIPS FPU instructions if NULL
Vector< T > AddBlock(T value, int count, AllocationPolicy allocator=AllocationPolicy())
bool RemoveElement(const T &elm)