#ifndef V8_SPLAY_TREE_INL_H_
#define V8_SPLAY_TREE_INL_H_

#include "splay-tree.h"

namespace v8 {
namespace internal {

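// As the code below assumes, the Config template parameter supplies the
// key/value types and the ordering: typedefs Key and Value, a static
// Key kNoKey, a static Value NoValue(), and a static int
// Compare(const Key& a, const Key& b) returning a negative, zero, or
// positive result (see splay-tree.h for the full contract).
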
template<typename Config, class Allocator>
SplayTree<Config, Allocator>::~SplayTree() {
  NodeDeleter deleter;
  ForEachNode(&deleter);
}

template<typename Config, class Allocator>
bool SplayTree<Config, Allocator>::Insert(const Key& key,
                                          Locator* locator) {
  if (is_empty()) {
    // If the tree is empty, insert the new node.
    root_ = new(allocator_) Node(key, Config::NoValue());
  } else {
    // Splay on the key to move the last node on the search path
    // for the key to the root of the tree.
    Splay(key);
    // Ignore repeated insertions with the same key.
    int cmp = Config::Compare(key, root_->key_);
    if (cmp == 0) {
      locator->bind(root_);
      return false;
    }
    // Insert the new node.
    Node* node = new(allocator_) Node(key, Config::NoValue());
    InsertInternal(cmp, node);
  }
  locator->bind(root_);
  return true;
}

template<typename Config, class Allocator>
void SplayTree<Config, Allocator>::InsertInternal(int cmp, Node* node) {
  // The root is the last node visited on the search path for node's
  // key, so node becomes the new root, adopting the old root on the
  // side indicated by the comparison.
  if (cmp > 0) {
    node->left_ = root_;
    node->right_ = root_->right_;
    root_->right_ = NULL;
  } else {
    node->right_ = root_;
    node->left_ = root_->left_;
    root_->left_ = NULL;
  }
  root_ = node;
}

template<typename Config, class Allocator>
bool SplayTree<Config, Allocator>::FindInternal(const Key& key) {
  if (is_empty())
    return false;
  Splay(key);
  return Config::Compare(key, root_->key_) == 0;
}

template<typename Config, class Allocator>
bool SplayTree<Config, Allocator>::Find(const Key& key, Locator* locator) {
  if (FindInternal(key)) {
    locator->bind(root_);
    return true;
  } else {
    return false;
  }
}

template<typename Config, class Allocator>
bool SplayTree<Config, Allocator>::FindGreatestLessThan(const Key& key,
                                                        Locator* locator) {
  if (is_empty())
    return false;
  // Splay on the key to move the node with the given key or the last
  // node on the search path to the top of the tree.
  Splay(key);
  // Now the result is either the root node or the greatest node in
  // the left subtree.
  int cmp = Config::Compare(root_->key_, key);
  if (cmp <= 0) {
    locator->bind(root_);
    return true;
  } else {
    Node* temp = root_;
    root_ = root_->left_;
    bool result = FindGreatest(locator);
    root_ = temp;
    return result;
  }
}

template<typename Config, class Allocator>
bool SplayTree<Config, Allocator>::FindLeastGreaterThan(const Key& key,
                                                        Locator* locator) {
  if (is_empty())
    return false;
  // Splay on the key to move the node with the given key or the last
  // node on the search path to the top of the tree.
  Splay(key);
  // Now the result is either the root node or the least node in
  // the right subtree.
  int cmp = Config::Compare(root_->key_, key);
  if (cmp >= 0) {
    locator->bind(root_);
    return true;
  } else {
    Node* temp = root_;
    root_ = root_->right_;
    bool result = FindLeast(locator);
    root_ = temp;
    return result;
  }
}

template<typename Config, class Allocator>
bool SplayTree<Config, Allocator>::FindGreatest(Locator* locator) {
  if (is_empty())
    return false;
  Node* current = root_;
  while (current->right_ != NULL)
    current = current->right_;
  locator->bind(current);
  return true;
}

template<typename Config, class Allocator>
bool SplayTree<Config, Allocator>::FindLeast(Locator* locator) {
  if (is_empty())
    return false;
  Node* current = root_;
  while (current->left_ != NULL)
    current = current->left_;
  locator->bind(current);
  return true;
}

template<typename Config, class Allocator>
bool SplayTree<Config, Allocator>::Move(const Key& old_key,
                                        const Key& new_key) {
  if (!FindInternal(old_key))
    return false;
  Node* node_to_move = root_;
  RemoveRootNode(old_key);
  Splay(new_key);
  int cmp = Config::Compare(new_key, root_->key_);
  if (cmp == 0) {
    // A node with the target key already exists; the move fails.
    delete node_to_move;
    return false;
  }
  node_to_move->key_ = new_key;
  InsertInternal(cmp, node_to_move);
  return true;
}

template<typename Config, class Allocator>
bool SplayTree<Config, Allocator>::Remove(const Key& key) {
  if (!FindInternal(key))
    return false;
  Node* node_to_remove = root_;
  RemoveRootNode(key);
  delete node_to_remove;
  return true;
}

template<typename Config, class Allocator>
void SplayTree<Config, Allocator>::RemoveRootNode(const Key& key) {
  if (root_->left_ == NULL) {
    // No left child, so the new tree is just the right child.
    root_ = root_->right_;
  } else {
    // Left child exists.
    Node* right = root_->right_;
    // Make the original left child the new root.
    root_ = root_->left_;
    // Splay to make sure that the new root has an empty right child.
    Splay(key);
    // Insert the original right child as the right child of the new
    // root.
    root_->right_ = right;
  }
}

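// The routine below is the classic top-down splay operation from
// Sleator and Tarjan's "Self-Adjusting Binary Search Trees" (JACM,
// 1985): each call restructures the tree so that the node with the
// given key, or the last node on its search path, becomes the root,
// giving O(log n) amortized time per operation.
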
template<typename Config, class Allocator>
void SplayTree<Config, Allocator>::Splay(const Key& key) {
  if (is_empty())
    return;
  // Create a dummy node.  The use of the dummy node is a bit
  // counter-intuitive: The right child of the dummy node will hold
  // the L tree of the algorithm.  The left child of the dummy node
  // will hold the R tree of the algorithm.  Using a dummy node, left
  // and right will always be nodes and we avoid special cases.
  Node dummy_node(Config::kNoKey, Config::NoValue());
  Node* dummy = &dummy_node;
  Node* left = dummy;
  Node* right = dummy;
  Node* current = root_;
  while (true) {
    int cmp = Config::Compare(key, current->key_);
    if (cmp < 0) {
      if (current->left_ == NULL)
        break;
      if (Config::Compare(key, current->left_->key_) < 0) {
        // Rotate right.
        Node* temp = current->left_;
        current->left_ = temp->right_;
        temp->right_ = current;
        current = temp;
        if (current->left_ == NULL)
          break;
      }
      // Link right.
      right->left_ = current;
      right = current;
      current = current->left_;
    } else if (cmp > 0) {
      if (current->right_ == NULL)
        break;
      if (Config::Compare(key, current->right_->key_) > 0) {
        // Rotate left.
        Node* temp = current->right_;
        current->right_ = temp->left_;
        temp->left_ = current;
        current = temp;
        if (current->right_ == NULL)
          break;
      }
      // Link left.
      left->right_ = current;
      left = current;
      current = current->right_;
    } else {
      break;
    }
  }
  // Assemble.
  left->right_ = current->left_;
  right->left_ = current->right_;
  current->left_ = dummy->right_;
  current->right_ = dummy->left_;
  root_ = current;
}

template <typename Config, class Allocator> template <class Callback>
void SplayTree<Config, Allocator>::ForEach(Callback* callback) {
  NodeToPairAdaptor<Callback> callback_adaptor(callback);
  ForEachNode(&callback_adaptor);
}

template <typename Config, class Allocator> template <class Callback>
void SplayTree<Config, Allocator>::ForEachNode(Callback* callback) {
  // Pre-order walk using an explicit work list instead of recursion.
  List<Node*, Allocator> nodes_to_visit(10, allocator_);
  if (root_ != NULL) nodes_to_visit.Add(root_, allocator_);
  int pos = 0;
  while (pos < nodes_to_visit.length()) {
    Node* node = nodes_to_visit[pos++];
    if (node->left() != NULL) nodes_to_visit.Add(node->left(), allocator_);
    if (node->right() != NULL) nodes_to_visit.Add(node->right(), allocator_);
    callback->Call(node);
  }
}

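// Example usage (a sketch, not part of the original file): a minimal
// Config for an int -> int map.  IntKeyConfig and the driver lines are
// illustrative assumptions; only the Config contract itself comes from
// splay-tree.h.
//
//   struct IntKeyConfig {
//     typedef int Key;
//     typedef int Value;
//     static const int kNoKey = 0;
//     static int NoValue() { return 0; }
//     static int Compare(int a, int b) {
//       if (a < b) return -1;
//       if (a > b) return 1;
//       return 0;
//     }
//   };
//
//   SplayTree<IntKeyConfig, FreeStoreAllocationPolicy> tree;
//   SplayTree<IntKeyConfig, FreeStoreAllocationPolicy>::Locator locator;
//   if (tree.Insert(42, &locator)) locator.set_value(7);  // new key inserted
//   if (tree.Find(42, &locator)) UseValue(locator.value());  // key now at root
//   tree.Remove(42);  // returns true if the key was present
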
} }  // namespace v8::internal

#endif  // V8_SPLAY_TREE_INL_H_