Replace rtree path cache with LRU cache.
Rework rtree_ctx_t to encapsulate an rtree leaf LRU lookup cache rather than a single-path element lookup cache. The replacement is logically much simpler, as well as slightly faster in the fast path case and less prone to degraded performance during non-trivial sequences of lookups.
This commit is contained in:
@@ -54,22 +54,16 @@ struct rtree_level_s {
 	unsigned		cumbits;
 };
 
+struct rtree_ctx_cache_elm_s {
+	uintptr_t		leafkey;
+	rtree_elm_t		*leaf;
+};
+
 struct rtree_ctx_s {
-	/* If false, key/elms have not yet been initialized by a lookup. */
-	bool			valid;
-	/* Key that corresponds to the tree path recorded in elms. */
-	uintptr_t		key;
-	/* Memoized rtree_start_level(key). */
-	unsigned		start_level;
-	/*
-	 * A path through rtree, driven by key.  Only elements that could
-	 * actually be used for subsequent lookups are initialized, i.e. if
-	 * start_level = rtree_start_level(key) is non-zero, the first
-	 * start_level elements are uninitialized.  The last element contains a
-	 * pointer to the leaf node element that corresponds to key, so that
-	 * exact matches require no tree node offset computation.
-	 */
-	rtree_elm_t		*elms[RTREE_HEIGHT_MAX + 1];
+#ifndef _MSC_VER
+	JEMALLOC_ALIGNED(CACHELINE)
+#endif
+	rtree_ctx_cache_elm_t	cache[RTREE_CTX_NCACHE];
 };
 
 struct rtree_s {
Reference in New Issue
Block a user