Determine rtree levels at compile time.

Rather than dynamically building a table to aid per level computations,
define a constant table at compile time.  Omit both high and low
insignificant bits.  Use one to three tree levels, depending on the
number of significant bits.
This commit is contained in:
Jason Evans 2017-02-06 13:17:12 -08:00
parent ff4db5014e
commit f5cf9b19c8
9 changed files with 244 additions and 268 deletions

View File

@ -406,6 +406,74 @@ case "${host_cpu}" in
esac esac
AC_DEFINE_UNQUOTED([CPU_SPINWAIT], [$CPU_SPINWAIT]) AC_DEFINE_UNQUOTED([CPU_SPINWAIT], [$CPU_SPINWAIT])
case "${host_cpu}" in
aarch64)
AC_MSG_CHECKING([number of significant virtual address bits])
LG_VADDR=48
AC_MSG_RESULT([$LG_VADDR])
;;
x86_64)
AC_CACHE_CHECK([number of significant virtual address bits],
[je_cv_lg_vaddr],
AC_RUN_IFELSE([AC_LANG_PROGRAM(
[[
#include <stdio.h>
#ifdef _WIN32
#include <limits.h>
#include <intrin.h>
typedef unsigned __int32 uint32_t;
#else
#include <stdint.h>
#endif
]], [[
uint32_t r[[4]];
uint32_t eax_in = 0x80000008U;
#ifdef _WIN32
__cpuid((int *)r, (int)eax_in);
#else
asm volatile ("cpuid"
: "=a" (r[[0]]), "=b" (r[[1]]), "=c" (r[[2]]), "=d" (r[[3]])
: "a" (eax_in), "c" (0)
);
#endif
uint32_t eax_out = r[[0]];
uint32_t vaddr = ((eax_out & 0x0000ff00U) >> 8);
FILE *f = fopen("conftest.out", "w");
if (f == NULL) {
return 1;
}
fprintf(f, "%u", vaddr);
fclose(f);
return 0;
]])],
[je_cv_lg_vaddr=`cat conftest.out`],
[je_cv_lg_vaddr=error],
[je_cv_lg_vaddr=57]))
if test "x${je_cv_lg_vaddr}" != "x" ; then
LG_VADDR="${je_cv_lg_vaddr}"
fi
if test "x${LG_VADDR}" != "xerror" ; then
AC_DEFINE_UNQUOTED([LG_VADDR], [$LG_VADDR])
else
AC_MSG_ERROR([cannot determine number of significant virtual address bits])
fi
;;
*)
AC_MSG_CHECKING([number of significant virtual address bits])
if test "x${LG_SIZEOF_PTR}" = "x3" ; then
LG_VADDR=64
elif test "x${LG_SIZEOF_PTR}" = "x2" ; then
LG_VADDR=32
elif test "x${LG_SIZEOF_PTR}" = "xLG_SIZEOF_PTR_WIN" ; then
LG_VADDR="(1U << (LG_SIZEOF_PTR_WIN+3))"
else
AC_MSG_ERROR([Unsupported lg(pointer size): ${LG_SIZEOF_PTR}])
fi
AC_MSG_RESULT([$LG_VADDR])
;;
esac
AC_DEFINE_UNQUOTED([LG_VADDR], [$LG_VADDR])
LD_PRELOAD_VAR="LD_PRELOAD" LD_PRELOAD_VAR="LD_PRELOAD"
so="so" so="so"
importlib="${so}" importlib="${so}"

View File

@ -22,6 +22,13 @@
*/ */
#undef CPU_SPINWAIT #undef CPU_SPINWAIT
/*
* Number of significant bits in virtual addresses. This may be less than the
* total number of bits in a pointer, e.g. on x64, for which the uppermost 16
* bits are the same as bit 47.
*/
#undef LG_VADDR
/* Defined if C11 atomics are available. */ /* Defined if C11 atomics are available. */
#undef JEMALLOC_C11ATOMICS #undef JEMALLOC_C11ATOMICS

View File

@ -1,7 +1,29 @@
#ifndef JEMALLOC_INTERNAL_RTREE_EXTERNS_H #ifndef JEMALLOC_INTERNAL_RTREE_EXTERNS_H
#define JEMALLOC_INTERNAL_RTREE_EXTERNS_H #define JEMALLOC_INTERNAL_RTREE_EXTERNS_H
bool rtree_new(rtree_t *rtree, unsigned bits); /*
* Split the bits into one to three partitions depending on number of
* significant bits.  If the number of bits does not divide evenly into the
* number of levels, place one remainder bit per level starting at the leaf
* level.
*/
static const rtree_level_t rtree_levels[] = {
#if RTREE_NSB <= 10
{RTREE_NSB, RTREE_NHIB + RTREE_NSB}
#elif RTREE_NSB <= 36
{RTREE_NSB/2, RTREE_NHIB + RTREE_NSB/2},
{RTREE_NSB/2 + RTREE_NSB%2, RTREE_NHIB + RTREE_NSB}
#elif RTREE_NSB <= 52
{RTREE_NSB/3, RTREE_NHIB + RTREE_NSB/3},
{RTREE_NSB/3 + RTREE_NSB%3/2,
RTREE_NHIB + RTREE_NSB/3*2 + RTREE_NSB%3/2},
{RTREE_NSB/3 + RTREE_NSB%3 - RTREE_NSB%3/2, RTREE_NHIB + RTREE_NSB}
#else
# error Unsupported number of significant virtual address bits
#endif
};
bool rtree_new(rtree_t *rtree);
#ifdef JEMALLOC_JET #ifdef JEMALLOC_JET
typedef rtree_elm_t *(rtree_node_alloc_t)(tsdn_t *, rtree_t *, size_t); typedef rtree_elm_t *(rtree_node_alloc_t)(tsdn_t *, rtree_t *, size_t);
extern rtree_node_alloc_t *rtree_node_alloc; extern rtree_node_alloc_t *rtree_node_alloc;

View File

@ -2,8 +2,8 @@
#define JEMALLOC_INTERNAL_RTREE_INLINES_H #define JEMALLOC_INTERNAL_RTREE_INLINES_H
#ifndef JEMALLOC_ENABLE_INLINE #ifndef JEMALLOC_ENABLE_INLINE
uintptr_t rtree_leafkey(rtree_t *rtree, uintptr_t key); uintptr_t rtree_leafkey(uintptr_t key);
uintptr_t rtree_subkey(rtree_t *rtree, uintptr_t key, unsigned level); uintptr_t rtree_subkey(uintptr_t key, unsigned level);
extent_t *rtree_elm_read(rtree_elm_t *elm, bool dependent); extent_t *rtree_elm_read(rtree_elm_t *elm, bool dependent);
void rtree_elm_write(rtree_elm_t *elm, const extent_t *extent); void rtree_elm_write(rtree_elm_t *elm, const extent_t *extent);
rtree_elm_t *rtree_elm_lookup(tsdn_t *tsdn, rtree_t *rtree, rtree_elm_t *rtree_elm_lookup(tsdn_t *tsdn, rtree_t *rtree,
@ -25,21 +25,21 @@ void rtree_clear(tsdn_t *tsdn, rtree_t *rtree, rtree_ctx_t *rtree_ctx,
#if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_RTREE_C_)) #if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_RTREE_C_))
JEMALLOC_ALWAYS_INLINE uintptr_t JEMALLOC_ALWAYS_INLINE uintptr_t
rtree_leafkey(rtree_t *rtree, uintptr_t key) { rtree_leafkey(uintptr_t key) {
unsigned ptrbits = ZU(1) << (LG_SIZEOF_PTR+3); unsigned ptrbits = ZU(1) << (LG_SIZEOF_PTR+3);
unsigned cumbits = (rtree->levels[rtree->height-1].cumbits - unsigned cumbits = (rtree_levels[RTREE_HEIGHT-1].cumbits -
rtree->levels[rtree->height-1].bits); rtree_levels[RTREE_HEIGHT-1].bits);
unsigned maskbits = ptrbits - cumbits; unsigned maskbits = ptrbits - cumbits;
uintptr_t mask = ~((ZU(1) << maskbits) - 1); uintptr_t mask = ~((ZU(1) << maskbits) - 1);
return (key & mask); return (key & mask);
} }
JEMALLOC_ALWAYS_INLINE uintptr_t JEMALLOC_ALWAYS_INLINE uintptr_t
rtree_subkey(rtree_t *rtree, uintptr_t key, unsigned level) { rtree_subkey(uintptr_t key, unsigned level) {
unsigned ptrbits = ZU(1) << (LG_SIZEOF_PTR+3); unsigned ptrbits = ZU(1) << (LG_SIZEOF_PTR+3);
unsigned cumbits = rtree->levels[level].cumbits; unsigned cumbits = rtree_levels[level].cumbits;
unsigned shiftbits = ptrbits - cumbits; unsigned shiftbits = ptrbits - cumbits;
unsigned maskbits = rtree->levels[level].bits; unsigned maskbits = rtree_levels[level].bits;
unsigned mask = (ZU(1) << maskbits) - 1; unsigned mask = (ZU(1) << maskbits) - 1;
return ((key >> shiftbits) & mask); return ((key >> shiftbits) & mask);
} }
@ -82,7 +82,7 @@ rtree_elm_lookup(tsdn_t *tsdn, rtree_t *rtree, rtree_ctx_t *rtree_ctx,
assert(!dependent || !init_missing); assert(!dependent || !init_missing);
if (likely(key != 0)) { if (likely(key != 0)) {
uintptr_t leafkey = rtree_leafkey(rtree, key); uintptr_t leafkey = rtree_leafkey(key);
#define RTREE_CACHE_CHECK(i) do { \ #define RTREE_CACHE_CHECK(i) do { \
if (likely(rtree_ctx->cache[i].leafkey == leafkey)) { \ if (likely(rtree_ctx->cache[i].leafkey == leafkey)) { \
rtree_elm_t *leaf = rtree_ctx->cache[i].leaf; \ rtree_elm_t *leaf = rtree_ctx->cache[i].leaf; \
@ -94,8 +94,8 @@ rtree_elm_lookup(tsdn_t *tsdn, rtree_t *rtree, rtree_ctx_t *rtree_ctx,
rtree_ctx->cache[0].leafkey = leafkey; \ rtree_ctx->cache[0].leafkey = leafkey; \
rtree_ctx->cache[0].leaf = leaf; \ rtree_ctx->cache[0].leaf = leaf; \
\ \
uintptr_t subkey = rtree_subkey(rtree, \ uintptr_t subkey = rtree_subkey(key, \
key, rtree->height-1); \ RTREE_HEIGHT-1); \
return &leaf[subkey]; \ return &leaf[subkey]; \
} \ } \
} \ } \

View File

@ -41,12 +41,10 @@ struct rtree_ctx_s {
}; };
struct rtree_s { struct rtree_s {
unsigned height;
union { union {
void *root_pun; void *root_pun;
rtree_elm_t *root; rtree_elm_t *root;
}; };
rtree_level_t levels[RTREE_HEIGHT_MAX];
malloc_mutex_t init_lock; malloc_mutex_t init_lock;
}; };

View File

@ -16,15 +16,14 @@ typedef struct rtree_ctx_cache_elm_s rtree_ctx_cache_elm_t;
typedef struct rtree_ctx_s rtree_ctx_t; typedef struct rtree_ctx_s rtree_ctx_t;
typedef struct rtree_s rtree_t; typedef struct rtree_s rtree_t;
/* /* Number of high insignificant bits. */
* RTREE_BITS_PER_LEVEL must be a power of two that is no larger than the #define RTREE_NHIB ((1U << (LG_SIZEOF_PTR+3)) - LG_VADDR)
 * machine address width. /* Number of low insignificant bits. */
*/ #define RTREE_NLIB LG_PAGE
#define LG_RTREE_BITS_PER_LEVEL 4 /* Number of significant bits. */
#define RTREE_BITS_PER_LEVEL (1U << LG_RTREE_BITS_PER_LEVEL) #define RTREE_NSB (LG_VADDR - RTREE_NLIB)
/* Maximum rtree height. */ /* Number of levels in radix tree. */
#define RTREE_HEIGHT_MAX \ #define RTREE_HEIGHT (sizeof(rtree_levels)/sizeof(rtree_level_t))
((1U << (LG_SIZEOF_PTR+3)) / RTREE_BITS_PER_LEVEL)
/* /*
* Number of leafkey/leaf pairs to cache. Each entry supports an entire leaf, * Number of leafkey/leaf pairs to cache. Each entry supports an entire leaf,

View File

@ -1522,8 +1522,7 @@ extent_merge_wrapper(tsdn_t *tsdn, arena_t *arena,
bool bool
extent_boot(void) { extent_boot(void) {
if (rtree_new(&extents_rtree, (unsigned)((ZU(1) << (LG_SIZEOF_PTR+3)) - if (rtree_new(&extents_rtree)) {
LG_PAGE))) {
return true; return true;
} }

View File

@ -6,46 +6,11 @@
* used. * used.
*/ */
bool bool
rtree_new(rtree_t *rtree, unsigned bits) { rtree_new(rtree_t *rtree) {
unsigned bits_in_leaf, height, i;
assert(RTREE_HEIGHT_MAX == ((ZU(1) << (LG_SIZEOF_PTR+3)) /
RTREE_BITS_PER_LEVEL));
assert(bits > 0 && bits <= (sizeof(uintptr_t) << 3));
bits_in_leaf = (bits % RTREE_BITS_PER_LEVEL) == 0 ? RTREE_BITS_PER_LEVEL
: (bits % RTREE_BITS_PER_LEVEL);
if (bits > bits_in_leaf) {
height = 1 + (bits - bits_in_leaf) / RTREE_BITS_PER_LEVEL;
if ((height-1) * RTREE_BITS_PER_LEVEL + bits_in_leaf != bits) {
height++;
}
} else {
height = 1;
}
assert((height-1) * RTREE_BITS_PER_LEVEL + bits_in_leaf == bits);
rtree->height = height;
rtree->root_pun = NULL; rtree->root_pun = NULL;
if (malloc_mutex_init(&rtree->init_lock, "rtree", WITNESS_RANK_RTREE)) {
/* Root level. */ return true;
rtree->levels[0].bits = (height > 1) ? RTREE_BITS_PER_LEVEL :
bits_in_leaf;
rtree->levels[0].cumbits = rtree->levels[0].bits;
/* Interior levels. */
for (i = 1; i < height-1; i++) {
rtree->levels[i].bits = RTREE_BITS_PER_LEVEL;
rtree->levels[i].cumbits = rtree->levels[i-1].cumbits +
RTREE_BITS_PER_LEVEL;
} }
/* Leaf level. */
if (height > 1) {
rtree->levels[height-1].bits = bits_in_leaf;
rtree->levels[height-1].cumbits = bits;
}
malloc_mutex_init(&rtree->init_lock, "rtree", WITNESS_RANK_RTREE);
return false; return false;
} }
@ -84,10 +49,10 @@ rtree_node_dalloc_t *rtree_node_dalloc = JEMALLOC_N(rtree_node_dalloc_impl);
static void static void
rtree_delete_subtree(tsdn_t *tsdn, rtree_t *rtree, rtree_elm_t *node, rtree_delete_subtree(tsdn_t *tsdn, rtree_t *rtree, rtree_elm_t *node,
unsigned level) { unsigned level) {
if (level + 1 < rtree->height) { if (level + 1 < RTREE_HEIGHT) {
size_t nchildren, i; size_t nchildren, i;
nchildren = ZU(1) << rtree->levels[level].bits; nchildren = ZU(1) << rtree_levels[level].bits;
for (i = 0; i < nchildren; i++) { for (i = 0; i < nchildren; i++) {
rtree_elm_t *child = node[i].child; rtree_elm_t *child = node[i].child;
if (child != NULL) { if (child != NULL) {
@ -116,7 +81,7 @@ rtree_node_init(tsdn_t *tsdn, rtree_t *rtree, unsigned level,
node = atomic_read_p((void**)elmp); node = atomic_read_p((void**)elmp);
if (node == NULL) { if (node == NULL) {
node = rtree_node_alloc(tsdn, rtree, ZU(1) << node = rtree_node_alloc(tsdn, rtree, ZU(1) <<
rtree->levels[level].bits); rtree_levels[level].bits);
if (node == NULL) { if (node == NULL) {
malloc_mutex_unlock(tsdn, &rtree->init_lock); malloc_mutex_unlock(tsdn, &rtree->init_lock);
return NULL; return NULL;
@ -186,24 +151,18 @@ rtree_elm_lookup_hard(tsdn_t *tsdn, rtree_t *rtree, rtree_ctx_t *rtree_ctx,
rtree_elm_t *node = init_missing ? rtree_subtree_read(tsdn, rtree, rtree_elm_t *node = init_missing ? rtree_subtree_read(tsdn, rtree,
dependent) : rtree_subtree_tryread(rtree, dependent); dependent) : rtree_subtree_tryread(rtree, dependent);
#define RTREE_GET_BIAS (RTREE_HEIGHT_MAX - rtree->height) #define RTREE_GET_SUBTREE(level) { \
switch (RTREE_GET_BIAS) { assert(level < RTREE_HEIGHT-1); \
#define RTREE_GET_SUBTREE(level) \
case level: { \
assert(level < (RTREE_HEIGHT_MAX-1)); \
if (!dependent && unlikely(!rtree_node_valid(node))) { \ if (!dependent && unlikely(!rtree_node_valid(node))) { \
return NULL; \ return NULL; \
} \ } \
uintptr_t subkey = rtree_subkey(rtree, key, level - \ uintptr_t subkey = rtree_subkey(key, level); \
RTREE_GET_BIAS); \
node = init_missing ? rtree_child_read(tsdn, rtree, \ node = init_missing ? rtree_child_read(tsdn, rtree, \
&node[subkey], level - RTREE_GET_BIAS, dependent) : \ &node[subkey], level, dependent) : \
rtree_child_tryread(&node[subkey], dependent); \ rtree_child_tryread(&node[subkey], dependent); \
/* Fall through. */ \
} }
#define RTREE_GET_LEAF(level) \ #define RTREE_GET_LEAF(level) { \
case level: { \ assert(level == RTREE_HEIGHT-1); \
assert(level == (RTREE_HEIGHT_MAX-1)); \
if (!dependent && unlikely(!rtree_node_valid(node))) { \ if (!dependent && unlikely(!rtree_node_valid(node))) { \
return NULL; \ return NULL; \
} \ } \
@ -218,68 +177,27 @@ rtree_elm_lookup_hard(tsdn_t *tsdn, rtree_t *rtree, rtree_ctx_t *rtree_ctx,
sizeof(rtree_ctx_cache_elm_t) * \ sizeof(rtree_ctx_cache_elm_t) * \
(RTREE_CTX_NCACHE-1)); \ (RTREE_CTX_NCACHE-1)); \
} \ } \
uintptr_t leafkey = rtree_leafkey(rtree, key); \ uintptr_t leafkey = rtree_leafkey(key); \
rtree_ctx->cache[0].leafkey = leafkey; \ rtree_ctx->cache[0].leafkey = leafkey; \
rtree_ctx->cache[0].leaf = node; \ rtree_ctx->cache[0].leaf = node; \
} \ } \
uintptr_t subkey = rtree_subkey(rtree, key, level - \ uintptr_t subkey = rtree_subkey(key, level); \
RTREE_GET_BIAS); \
return &node[subkey]; \ return &node[subkey]; \
} }
#if RTREE_HEIGHT_MAX > 1 if (RTREE_HEIGHT > 1) {
RTREE_GET_SUBTREE(0) RTREE_GET_SUBTREE(0)
#endif }
#if RTREE_HEIGHT_MAX > 2 if (RTREE_HEIGHT > 2) {
RTREE_GET_SUBTREE(1) RTREE_GET_SUBTREE(1)
#endif }
#if RTREE_HEIGHT_MAX > 3 if (RTREE_HEIGHT > 3) {
RTREE_GET_SUBTREE(2) for (unsigned i = 2; i < RTREE_HEIGHT-1; i++) {
#endif RTREE_GET_SUBTREE(i)
#if RTREE_HEIGHT_MAX > 4 }
RTREE_GET_SUBTREE(3) }
#endif RTREE_GET_LEAF(RTREE_HEIGHT-1)
#if RTREE_HEIGHT_MAX > 5
RTREE_GET_SUBTREE(4)
#endif
#if RTREE_HEIGHT_MAX > 6
RTREE_GET_SUBTREE(5)
#endif
#if RTREE_HEIGHT_MAX > 7
RTREE_GET_SUBTREE(6)
#endif
#if RTREE_HEIGHT_MAX > 8
RTREE_GET_SUBTREE(7)
#endif
#if RTREE_HEIGHT_MAX > 9
RTREE_GET_SUBTREE(8)
#endif
#if RTREE_HEIGHT_MAX > 10
RTREE_GET_SUBTREE(9)
#endif
#if RTREE_HEIGHT_MAX > 11
RTREE_GET_SUBTREE(10)
#endif
#if RTREE_HEIGHT_MAX > 12
RTREE_GET_SUBTREE(11)
#endif
#if RTREE_HEIGHT_MAX > 13
RTREE_GET_SUBTREE(12)
#endif
#if RTREE_HEIGHT_MAX > 14
RTREE_GET_SUBTREE(13)
#endif
#if RTREE_HEIGHT_MAX > 15
RTREE_GET_SUBTREE(14)
#endif
#if RTREE_HEIGHT_MAX > 16
# error Unsupported RTREE_HEIGHT_MAX
#endif
RTREE_GET_LEAF(RTREE_HEIGHT_MAX-1)
#undef RTREE_GET_SUBTREE #undef RTREE_GET_SUBTREE
#undef RTREE_GET_LEAF #undef RTREE_GET_LEAF
default: not_reached();
}
#undef RTREE_GET_BIAS
not_reached(); not_reached();
} }

View File

@ -33,31 +33,26 @@ rtree_node_dalloc_intercept(tsdn_t *tsdn, rtree_t *rtree, rtree_elm_t *node) {
TEST_BEGIN(test_rtree_read_empty) { TEST_BEGIN(test_rtree_read_empty) {
tsdn_t *tsdn; tsdn_t *tsdn;
unsigned i;
tsdn = tsdn_fetch(); tsdn = tsdn_fetch();
for (i = 1; i <= (sizeof(uintptr_t) << 3); i++) {
rtree_t rtree; rtree_t rtree;
rtree_ctx_t rtree_ctx = RTREE_CTX_INITIALIZER; rtree_ctx_t rtree_ctx = RTREE_CTX_INITIALIZER;
test_rtree = &rtree; test_rtree = &rtree;
assert_false(rtree_new(&rtree, i), assert_false(rtree_new(&rtree), "Unexpected rtree_new() failure");
"Unexpected rtree_new() failure");
assert_ptr_null(rtree_read(tsdn, &rtree, &rtree_ctx, 0, false), assert_ptr_null(rtree_read(tsdn, &rtree, &rtree_ctx, 0, false),
"rtree_read() should return NULL for empty tree"); "rtree_read() should return NULL for empty tree");
rtree_delete(tsdn, &rtree); rtree_delete(tsdn, &rtree);
test_rtree = NULL; test_rtree = NULL;
} }
}
TEST_END TEST_END
#define NTHREADS 8 #define NTHREADS 8
#define MAX_NBITS 18 #define MAX_NBITS 30
#define NITERS 1000 #define NITERS 1000
#define SEED 42 #define SEED 42
typedef struct { typedef struct {
unsigned nbits;
rtree_t rtree; rtree_t rtree;
uint32_t seed; uint32_t seed;
} thd_start_arg_t; } thd_start_arg_t;
@ -77,7 +72,8 @@ thd_start(void *varg) {
tsdn = tsdn_fetch(); tsdn = tsdn_fetch();
for (i = 0; i < NITERS; i++) { for (i = 0; i < NITERS; i++) {
uintptr_t key = (uintptr_t)gen_rand64(sfmt); uintptr_t key = (uintptr_t)(gen_rand64(sfmt) & ((ZU(1) <<
MAX_NBITS) - ZU(1)));
if (i % 2 == 0) { if (i % 2 == 0) {
rtree_elm_t *elm; rtree_elm_t *elm;
@ -110,165 +106,134 @@ TEST_BEGIN(test_rtree_concurrent) {
thd_t thds[NTHREADS]; thd_t thds[NTHREADS];
sfmt_t *sfmt; sfmt_t *sfmt;
tsdn_t *tsdn; tsdn_t *tsdn;
unsigned i, j;
sfmt = init_gen_rand(SEED); sfmt = init_gen_rand(SEED);
tsdn = tsdn_fetch(); tsdn = tsdn_fetch();
for (i = 1; i < MAX_NBITS; i++) {
arg.nbits = i;
test_rtree = &arg.rtree; test_rtree = &arg.rtree;
assert_false(rtree_new(&arg.rtree, arg.nbits), assert_false(rtree_new(&arg.rtree), "Unexpected rtree_new() failure");
"Unexpected rtree_new() failure");
arg.seed = gen_rand32(sfmt); arg.seed = gen_rand32(sfmt);
for (j = 0; j < NTHREADS; j++) { for (unsigned i = 0; i < NTHREADS; i++) {
thd_create(&thds[j], thd_start, (void *)&arg); thd_create(&thds[i], thd_start, (void *)&arg);
} }
for (j = 0; j < NTHREADS; j++) { for (unsigned i = 0; i < NTHREADS; i++) {
thd_join(thds[j], NULL); thd_join(thds[i], NULL);
} }
rtree_delete(tsdn, &arg.rtree); rtree_delete(tsdn, &arg.rtree);
test_rtree = NULL; test_rtree = NULL;
}
fini_gen_rand(sfmt); fini_gen_rand(sfmt);
} }
TEST_END TEST_END
#undef NTHREADS #undef NTHREADS
#undef MAX_NBITS
#undef NITERS #undef NITERS
#undef SEED #undef SEED
TEST_BEGIN(test_rtree_extrema) { TEST_BEGIN(test_rtree_extrema) {
unsigned i;
extent_t extent_a, extent_b; extent_t extent_a, extent_b;
tsdn_t *tsdn; tsdn_t *tsdn;
tsdn = tsdn_fetch(); tsdn = tsdn_fetch();
for (i = 1; i <= (sizeof(uintptr_t) << 3); i++) {
rtree_t rtree; rtree_t rtree;
rtree_ctx_t rtree_ctx = RTREE_CTX_INITIALIZER; rtree_ctx_t rtree_ctx = RTREE_CTX_INITIALIZER;
test_rtree = &rtree; test_rtree = &rtree;
assert_false(rtree_new(&rtree, i), assert_false(rtree_new(&rtree), "Unexpected rtree_new() failure");
"Unexpected rtree_new() failure");
assert_false(rtree_write(tsdn, &rtree, &rtree_ctx, 0, assert_false(rtree_write(tsdn, &rtree, &rtree_ctx, 0, &extent_a),
&extent_a), "Unexpected rtree_write() failure, i=%u", i); "Unexpected rtree_write() failure");
assert_ptr_eq(rtree_read(tsdn, &rtree, &rtree_ctx, 0, true), assert_ptr_eq(rtree_read(tsdn, &rtree, &rtree_ctx, 0, true), &extent_a,
&extent_a, "rtree_read() should return previously set value");
"rtree_read() should return previously set value, i=%u", i);
assert_false(rtree_write(tsdn, &rtree, &rtree_ctx, assert_false(rtree_write(tsdn, &rtree, &rtree_ctx, ~((uintptr_t)0),
~((uintptr_t)0), &extent_b), &extent_b), "Unexpected rtree_write() failure");
"Unexpected rtree_write() failure, i=%u", i); assert_ptr_eq(rtree_read(tsdn, &rtree, &rtree_ctx, ~((uintptr_t)0),
assert_ptr_eq(rtree_read(tsdn, &rtree, &rtree_ctx, true), &extent_b,
~((uintptr_t)0), true), &extent_b, "rtree_read() should return previously set value");
"rtree_read() should return previously set value, i=%u", i);
rtree_delete(tsdn, &rtree); rtree_delete(tsdn, &rtree);
test_rtree = NULL; test_rtree = NULL;
} }
}
TEST_END TEST_END
TEST_BEGIN(test_rtree_bits) { TEST_BEGIN(test_rtree_bits) {
tsdn_t *tsdn; tsdn_t *tsdn = tsdn_fetch();
unsigned i, j, k;
tsdn = tsdn_fetch(); uintptr_t keys[] = {0, 1, (((uintptr_t)1) << LG_PAGE) - 1};
for (i = 1; i < (sizeof(uintptr_t) << 3); i++) {
uintptr_t keys[] = {0, 1,
(((uintptr_t)1) << (sizeof(uintptr_t)*8-i)) - 1};
extent_t extent; extent_t extent;
rtree_t rtree; rtree_t rtree;
rtree_ctx_t rtree_ctx = RTREE_CTX_INITIALIZER; rtree_ctx_t rtree_ctx = RTREE_CTX_INITIALIZER;
test_rtree = &rtree; test_rtree = &rtree;
assert_false(rtree_new(&rtree, i), assert_false(rtree_new(&rtree),
"Unexpected rtree_new() failure"); "Unexpected rtree_new() failure");
for (j = 0; j < sizeof(keys)/sizeof(uintptr_t); j++) { for (unsigned i = 0; i < sizeof(keys)/sizeof(uintptr_t); i++) {
assert_false(rtree_write(tsdn, &rtree, &rtree_ctx, assert_false(rtree_write(tsdn, &rtree, &rtree_ctx, keys[i],
keys[j], &extent), &extent), "Unexpected rtree_write() failure");
"Unexpected rtree_write() failure"); for (unsigned j = 0; j < sizeof(keys)/sizeof(uintptr_t); j++) {
for (k = 0; k < sizeof(keys)/sizeof(uintptr_t); k++) { assert_ptr_eq(rtree_read(tsdn, &rtree, &rtree_ctx,
assert_ptr_eq(rtree_read(tsdn, &rtree, keys[j], true), &extent,
&rtree_ctx, keys[k], true), &extent,
"rtree_read() should return previously set " "rtree_read() should return previously set "
"value and ignore insignificant key bits; " "value and ignore insignificant key bits; "
"i=%u, j=%u, k=%u, set key=%#"FMTxPTR", " "i=%u, j=%u, set key=%#"FMTxPTR", get "
"get key=%#"FMTxPTR, i, j, k, keys[j], "key=%#"FMTxPTR, i, j, keys[i], keys[j]);
keys[k]);
} }
assert_ptr_null(rtree_read(tsdn, &rtree, &rtree_ctx, assert_ptr_null(rtree_read(tsdn, &rtree, &rtree_ctx,
(((uintptr_t)1) << (sizeof(uintptr_t)*8-i)), false), (((uintptr_t)1) << LG_PAGE), false),
"Only leftmost rtree leaf should be set; " "Only leftmost rtree leaf should be set; i=%u", i);
"i=%u, j=%u", i, j); rtree_clear(tsdn, &rtree, &rtree_ctx, keys[i]);
rtree_clear(tsdn, &rtree, &rtree_ctx, keys[j]);
} }
rtree_delete(tsdn, &rtree); rtree_delete(tsdn, &rtree);
test_rtree = NULL; test_rtree = NULL;
} }
}
TEST_END TEST_END
TEST_BEGIN(test_rtree_random) { TEST_BEGIN(test_rtree_random) {
unsigned i;
sfmt_t *sfmt;
tsdn_t *tsdn;
#define NSET 16 #define NSET 16
#define SEED 42 #define SEED 42
sfmt_t *sfmt = init_gen_rand(SEED);
sfmt = init_gen_rand(SEED); tsdn_t *tsdn = tsdn_fetch();
tsdn = tsdn_fetch();
for (i = 1; i <= (sizeof(uintptr_t) << 3); i++) {
uintptr_t keys[NSET]; uintptr_t keys[NSET];
extent_t extent; extent_t extent;
unsigned j;
rtree_t rtree; rtree_t rtree;
rtree_ctx_t rtree_ctx = RTREE_CTX_INITIALIZER; rtree_ctx_t rtree_ctx = RTREE_CTX_INITIALIZER;
rtree_elm_t *elm; rtree_elm_t *elm;
test_rtree = &rtree; test_rtree = &rtree;
assert_false(rtree_new(&rtree, i), assert_false(rtree_new(&rtree), "Unexpected rtree_new() failure");
"Unexpected rtree_new() failure");
for (j = 0; j < NSET; j++) { for (unsigned i = 0; i < NSET; i++) {
keys[j] = (uintptr_t)gen_rand64(sfmt); keys[i] = (uintptr_t)gen_rand64(sfmt);
elm = rtree_elm_acquire(tsdn, &rtree, &rtree_ctx, elm = rtree_elm_acquire(tsdn, &rtree, &rtree_ctx, keys[i],
keys[j], false, true); false, true);
assert_ptr_not_null(elm, assert_ptr_not_null(elm,
"Unexpected rtree_elm_acquire() failure"); "Unexpected rtree_elm_acquire() failure");
rtree_elm_write_acquired(tsdn, &rtree, elm, &extent); rtree_elm_write_acquired(tsdn, &rtree, elm, &extent);
rtree_elm_release(tsdn, &rtree, elm); rtree_elm_release(tsdn, &rtree, elm);
assert_ptr_eq(rtree_read(tsdn, &rtree, &rtree_ctx, assert_ptr_eq(rtree_read(tsdn, &rtree, &rtree_ctx, keys[i],
keys[j], true), &extent, true), &extent,
"rtree_read() should return previously set value"); "rtree_read() should return previously set value");
} }
for (j = 0; j < NSET; j++) { for (unsigned i = 0; i < NSET; i++) {
assert_ptr_eq(rtree_read(tsdn, &rtree, &rtree_ctx, assert_ptr_eq(rtree_read(tsdn, &rtree, &rtree_ctx, keys[i],
keys[j], true), &extent, true), &extent,
"rtree_read() should return previously set value, " "rtree_read() should return previously set value, i=%u", i);
"j=%u", j);
} }
for (j = 0; j < NSET; j++) { for (unsigned i = 0; i < NSET; i++) {
rtree_clear(tsdn, &rtree, &rtree_ctx, keys[j]); rtree_clear(tsdn, &rtree, &rtree_ctx, keys[i]);
assert_ptr_null(rtree_read(tsdn, &rtree, &rtree_ctx, assert_ptr_null(rtree_read(tsdn, &rtree, &rtree_ctx, keys[i],
keys[j], true), true), "rtree_read() should return previously set value");
"rtree_read() should return previously set value");
} }
for (j = 0; j < NSET; j++) { for (unsigned i = 0; i < NSET; i++) {
assert_ptr_null(rtree_read(tsdn, &rtree, &rtree_ctx, assert_ptr_null(rtree_read(tsdn, &rtree, &rtree_ctx, keys[i],
keys[j], true), true), "rtree_read() should return previously set value");
"rtree_read() should return previously set value");
} }
rtree_delete(tsdn, &rtree); rtree_delete(tsdn, &rtree);
test_rtree = NULL; test_rtree = NULL;
}
fini_gen_rand(sfmt); fini_gen_rand(sfmt);
#undef NSET #undef NSET
#undef SEED #undef SEED