Split rtree_elm_lookup_hard() out of rtree_elm_lookup().

Anything but a hit in the first element of the lookup cache is
expensive enough to negate the benefits of inlining.
Author: Jason Evans
Date:   2017-02-03 20:12:49 -08:00
Parent: 4a346f5593
Commit: c511a44e99

4 changed files with 111 additions and 101 deletions

@@ -13,6 +13,8 @@ rtree_elm_t *rtree_subtree_read_hard(tsdn_t *tsdn, rtree_t *rtree,
     unsigned level);
 rtree_elm_t *rtree_child_read_hard(tsdn_t *tsdn, rtree_t *rtree,
     rtree_elm_t *elm, unsigned level);
+rtree_elm_t *rtree_elm_lookup_hard(tsdn_t *tsdn, rtree_t *rtree,
+    rtree_ctx_t *rtree_ctx, uintptr_t key, bool dependent, bool init_missing);
 void rtree_elm_witness_acquire(tsdn_t *tsdn, const rtree_t *rtree,
     uintptr_t key, const rtree_elm_t *elm);
 void rtree_elm_witness_access(tsdn_t *tsdn, const rtree_t *rtree,
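
The hunk above shows only the new out-of-line declaration. As a rough illustration of the pattern the commit message describes (keep only the first-slot cache check inline, and push every other outcome into a *_hard() function), here is a minimal, self-contained C sketch. The cache layout, the move-to-front behavior, and names such as elm_lookup and CACHE_NELMS are assumptions made for illustration, not jemalloc's actual rtree code.

/*
 * Minimal sketch of the split described above: only the first-slot
 * cache check is inlined; everything else goes through an out-of-line
 * *_hard() function.  Structures and names are illustrative stand-ins,
 * not jemalloc's actual rtree code.
 */
#include <stddef.h>
#include <stdint.h>

#define CACHE_NELMS 8

typedef struct elm_s {
    uintptr_t key;
    void *value;
} elm_t;

typedef struct cache_s {
    /* Slot 0 is the hot slot; a hit there is the only inlined path. */
    uintptr_t keys[CACHE_NELMS];
    elm_t *elms[CACHE_NELMS];
} cache_t;

/*
 * Slow path, deliberately not inline: scan the remaining slots and,
 * on a complete miss, fall back to a full tree walk (elided here).
 */
static elm_t *
elm_lookup_hard(cache_t *cache, uintptr_t key) {
    for (size_t i = 1; i < CACHE_NELMS; i++) {
        if (cache->keys[i] == key) {
            elm_t *elm = cache->elms[i];
            /* Move the hit to slot 0 so a repeat takes the fast path. */
            cache->keys[i] = cache->keys[0];
            cache->elms[i] = cache->elms[0];
            cache->keys[0] = key;
            cache->elms[0] = elm;
            return elm;
        }
    }
    return NULL;    /* A real implementation would walk the radix tree. */
}

/*
 * Fast path, small enough that inlining it into every call site pays
 * off: one compare, one branch, then punt to elm_lookup_hard().
 */
static inline elm_t *
elm_lookup(cache_t *cache, uintptr_t key) {
    if (cache->keys[0] == key) {
        return cache->elms[0];
    }
    return elm_lookup_hard(cache, key);
}

int
main(void) {
    cache_t cache = {{0}, {NULL}};
    elm_t e = {42, NULL};

    /*
     * Seed a non-hot slot, then look up twice: the first call takes
     * the hard path and promotes the entry; the second hits slot 0.
     */
    cache.keys[3] = 42;
    cache.elms[3] = &e;
    if (elm_lookup(&cache, 42) != &e || elm_lookup(&cache, 42) != &e) {
        return 1;
    }
    return 0;
}

The design point is call-site size: with this shape, the common hit compiles to roughly a compare and a branch, while every other case pays one out-of-line call, matching the commit message's observation that anything past the first cache element is already expensive enough to negate the benefits of inlining.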