Optimize rtree_get().

Specialize fast path to avoid code that cannot execute for dependent
loads.

Manually unroll.
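
The message is terse, so a word on what it means: the rtree is a radix tree keyed by pointer-sized addresses, and each level of a lookup is a load whose address depends on the result of the previous load. Those dependent loads cannot be overlapped, so any NULL checks, subkey bookkeeping, or loop control sitting between them lands directly on the critical path. The sketch below is purely illustrative and is not jemalloc's code (node_t, subkey(), LEVEL_BITS, TREE_HEIGHT, tree_get_generic(), and tree_get_fast() are invented for the example); it only shows the shape of the transformation the message describes: a generic per-level loop versus a specialized, manually unrolled fast path that keeps nothing but the chained loads.

/*
 * Illustrative sketch only -- not jemalloc's code.  node_t, subkey(),
 * LEVEL_BITS, and the fixed two-level height are assumptions made for
 * the example.
 */
#include <stddef.h>
#include <stdint.h>

#define LEVEL_BITS	8
#define LEVEL_MASK	((1U << LEVEL_BITS) - 1)
#define TREE_HEIGHT	2	/* assumed fixed at tree-creation time */

typedef struct node_s node_t;
struct node_s {
	union {
		node_t	*child;	/* interior levels */
		void	*val;	/* leaf level */
	} slots[1U << LEVEL_BITS];
};

/* Subkey that indexes the given level, most-significant bits first. */
static inline uintptr_t
subkey(uintptr_t key, unsigned level)
{
	unsigned shift = (sizeof(uintptr_t) << 3) - ((level + 1) * LEVEL_BITS);

	return ((key >> shift) & LEVEL_MASK);
}

/* Generic lookup: NULL checks and loop control sit between the loads. */
static void *
tree_get_generic(node_t *root, uintptr_t key)
{
	node_t *node = root;
	unsigned i;

	for (i = 0; i < TREE_HEIGHT; i++) {
		if (node == NULL)
			return (NULL);	/* cannot happen once fully built */
		if (i == TREE_HEIGHT - 1)
			return (node->slots[subkey(key, i)].val);
		node = node->slots[subkey(key, i)].child;
	}
	return (NULL);
}

/*
 * Specialized fast path, manually unrolled: with the height known and the
 * impossible NULL cases dropped, only the two dependent loads remain.
 */
static void *
tree_get_fast(node_t *root, uintptr_t key)
{
	node_t *node = root->slots[subkey(key, 0)].child;

	return (node->slots[subkey(key, 1)].val);
}

The RTREE_HEIGHT_MAX assertion added in the first hunk below fits this picture: fully unrolling the lookup is only practical when the number of levels is bounded by a compile-time constant.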
Jason Evans
2016-03-22 17:54:35 -07:00
parent 18903c592f
commit 6c460ad91b
3 changed files with 134 additions and 35 deletions


@@ -15,6 +15,8 @@ rtree_new(rtree_t *rtree, unsigned bits, rtree_node_alloc_t *alloc,
 {
 	unsigned bits_in_leaf, height, i;
 
+	assert(RTREE_HEIGHT_MAX == ((ZU(1) << (LG_SIZEOF_PTR+3)) /
+	    RTREE_BITS_PER_LEVEL));
 	assert(bits > 0 && bits <= (sizeof(uintptr_t) << 3));
 
 	bits_in_leaf = (bits % RTREE_BITS_PER_LEVEL) == 0 ? RTREE_BITS_PER_LEVEL


@@ -14,6 +14,7 @@
 		malloc_write("<jemalloc>: Unreachable code reached\n"); \
 		abort(); \
 	} \
+	unreachable(); \
 } while (0)
 
 #define not_implemented() do { \
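
For context on the last hunk: adding unreachable() after the config_debug block lets the compiler know that control never flows out of not_reached(), so in non-debug builds, where the abort() branch is compiled away, code following the macro can still be treated as dead. A minimal sketch of how such a macro is commonly defined (an assumption, not necessarily jemalloc's actual definition):

/* Sketch only; jemalloc's real definition may differ. */
#if defined(__GNUC__) || defined(__clang__)
#  define unreachable()	__builtin_unreachable()
#else
#  define unreachable()	/* no-op fallback */
#endif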