From 2d2b4e98c947f9fcaf4a9fd2215b685057e89212 Mon Sep 17 00:00:00 2001
From: Jason Evans <jasone@canonware.com>
Date: Mon, 28 Mar 2016 03:06:35 -0700
Subject: [PATCH] Add element acquire/release capabilities to rtree.

This makes it possible to acquire short-term "ownership" of rtree
elements: a thread can read an extent pointer *and* read the extent's
contents with a guarantee that the element will not be modified until
the ownership is released.  This is intended as a mechanism for
resolving rtree read/write races rather than as a way to lock extents.
---
 include/jemalloc/internal/chunk.h             |   2 +-
 include/jemalloc/internal/private_symbols.txt |  14 +-
 include/jemalloc/internal/rtree.h             | 233 ++++++++++++------
 src/chunk.c                                   |  12 +-
 src/rtree.c                                   |  23 +-
 test/unit/rtree.c                             | 153 +++++++++---
 6 files changed, 302 insertions(+), 135 deletions(-)

diff --git a/include/jemalloc/internal/chunk.h b/include/jemalloc/internal/chunk.h
index 4666a649..9e5502ac 100644
--- a/include/jemalloc/internal/chunk.h
+++ b/include/jemalloc/internal/chunk.h
@@ -87,7 +87,7 @@ JEMALLOC_INLINE extent_t *
 chunk_lookup(const void *ptr, bool dependent)
 {
 
-	return (rtree_get(&chunks_rtree, (uintptr_t)ptr, dependent));
+	return (rtree_read(&chunks_rtree, (uintptr_t)ptr, dependent));
 }
 
 #endif
diff --git a/include/jemalloc/internal/private_symbols.txt b/include/jemalloc/internal/private_symbols.txt
index 61b29b9d..478bc2ab 100644
--- a/include/jemalloc/internal/private_symbols.txt
+++ b/include/jemalloc/internal/private_symbols.txt
@@ -457,18 +457,24 @@ register_zone
 rtree_child_read
 rtree_child_read_hard
 rtree_child_tryread
+rtree_clear
 rtree_delete
-rtree_get
 rtree_new
 rtree_node_valid
-rtree_set
+rtree_elm_acquire
+rtree_elm_lookup
+rtree_elm_read
+rtree_elm_read_acquired
+rtree_elm_release
+rtree_elm_write
+rtree_elm_write_acquired
+rtree_read
 rtree_start_level
 rtree_subkey
 rtree_subtree_read
 rtree_subtree_read_hard
 rtree_subtree_tryread
-rtree_val_read
-rtree_val_write
+rtree_write
 run_quantize_ceil
 run_quantize_floor
 s2u
diff --git a/include/jemalloc/internal/rtree.h b/include/jemalloc/internal/rtree.h
index 45e49b74..59a7ab3c 100644
--- a/include/jemalloc/internal/rtree.h
+++ b/include/jemalloc/internal/rtree.h
@@ -6,7 +6,7 @@
  */
 #ifdef JEMALLOC_H_TYPES
 
-typedef struct rtree_node_elm_s rtree_node_elm_t;
+typedef struct rtree_elm_s rtree_elm_t;
 typedef struct rtree_level_s rtree_level_t;
 typedef struct rtree_s rtree_t;
 
@@ -21,25 +21,24 @@ typedef struct rtree_s rtree_t;
 	((1U << (LG_SIZEOF_PTR+3)) / RTREE_BITS_PER_LEVEL)
 
 /* Used for two-stage lock-free node initialization. */
-#define	RTREE_NODE_INITIALIZING	((rtree_node_elm_t *)0x1)
+#define	RTREE_NODE_INITIALIZING	((rtree_elm_t *)0x1)
 
 /*
  * The node allocation callback function's argument is the number of contiguous
- * rtree_node_elm_t structures to allocate, and the resulting memory must be
- * zeroed.
+ * rtree_elm_t structures to allocate, and the resulting memory must be zeroed.
  */
-typedef rtree_node_elm_t *(rtree_node_alloc_t)(size_t);
-typedef void (rtree_node_dalloc_t)(rtree_node_elm_t *);
+typedef rtree_elm_t *(rtree_node_alloc_t)(size_t);
+typedef void (rtree_node_dalloc_t)(rtree_elm_t *);
 
 #endif /* JEMALLOC_H_TYPES */
 /******************************************************************************/
 #ifdef JEMALLOC_H_STRUCTS
 
-struct rtree_node_elm_s {
+struct rtree_elm_s {
 	union {
-		void			*pun;
-		rtree_node_elm_t	*child;
-		extent_t		*val;
+		void		*pun;
+		rtree_elm_t	*child;
+		extent_t	*extent;
 	};
 };
 
@@ -60,15 +59,15 @@ struct rtree_level_s {
 	 *
 	 *   levels[1] : [<unused> | 0x00000001**** | 0x00000002**** | ...]
 	 *
-	 *   levels[2] : [val(0x000000000000) | val(0x000000000001) | ...]
+	 *   levels[2] : [extent(0x000000000000) | extent(0x000000000001) | ...]
 	 *
 	 * This has practical implications on x64, which currently uses only the
 	 * lower 47 bits of virtual address space in userland, thus leaving
 	 * subtrees[0] unused and avoiding a level of tree traversal.
 	 */
 	union {
-		void			*subtree_pun;
-		rtree_node_elm_t	*subtree;
+		void		*subtree_pun;
+		rtree_elm_t	*subtree;
 	};
 	/* Number of key bits distinguished by this level. */
 	unsigned		bits;
@@ -98,10 +97,9 @@ struct rtree_s {
 
 bool	rtree_new(rtree_t *rtree, unsigned bits, rtree_node_alloc_t *alloc,
     rtree_node_dalloc_t *dalloc);
 void	rtree_delete(rtree_t *rtree);
-rtree_node_elm_t	*rtree_subtree_read_hard(rtree_t *rtree,
-    unsigned level);
-rtree_node_elm_t	*rtree_child_read_hard(rtree_t *rtree,
-    rtree_node_elm_t *elm, unsigned level);
+rtree_elm_t	*rtree_subtree_read_hard(rtree_t *rtree, unsigned level);
+rtree_elm_t	*rtree_child_read_hard(rtree_t *rtree, rtree_elm_t *elm,
+    unsigned level);
 
 #endif /* JEMALLOC_H_EXTERNS */
 /******************************************************************************/
@@ -111,22 +109,27 @@ rtree_node_elm_t	*rtree_child_read_hard(rtree_t *rtree,
 unsigned	rtree_start_level(rtree_t *rtree, uintptr_t key);
 uintptr_t	rtree_subkey(rtree_t *rtree, uintptr_t key, unsigned level);
 
-bool	rtree_node_valid(rtree_node_elm_t *node);
-rtree_node_elm_t	*rtree_child_tryread(rtree_node_elm_t *elm,
-    bool dependent);
-rtree_node_elm_t	*rtree_child_read(rtree_t *rtree, rtree_node_elm_t *elm,
+bool	rtree_node_valid(rtree_elm_t *node);
+rtree_elm_t	*rtree_child_tryread(rtree_elm_t *elm, bool dependent);
+rtree_elm_t	*rtree_child_read(rtree_t *rtree, rtree_elm_t *elm,
     unsigned level, bool dependent);
-extent_t	*rtree_val_read(rtree_t *rtree, rtree_node_elm_t *elm,
+extent_t	*rtree_elm_read(rtree_elm_t *elm, bool dependent);
+void	rtree_elm_write(rtree_elm_t *elm, const extent_t *extent);
+rtree_elm_t	*rtree_subtree_tryread(rtree_t *rtree, unsigned level,
     bool dependent);
-void	rtree_val_write(rtree_t *rtree, rtree_node_elm_t *elm,
-    const extent_t *val);
-rtree_node_elm_t	*rtree_subtree_tryread(rtree_t *rtree, unsigned level,
-    bool dependent);
-rtree_node_elm_t	*rtree_subtree_read(rtree_t *rtree, unsigned level,
+rtree_elm_t	*rtree_subtree_read(rtree_t *rtree, unsigned level,
     bool dependent);
+rtree_elm_t	*rtree_elm_lookup(rtree_t *rtree, uintptr_t key,
+    bool dependent, bool init_missing);
-extent_t	*rtree_get(rtree_t *rtree, uintptr_t key, bool dependent);
-bool	rtree_set(rtree_t *rtree, uintptr_t key, const extent_t *val);
+bool	rtree_write(rtree_t *rtree, uintptr_t key, const extent_t *extent);
+extent_t	*rtree_read(rtree_t *rtree, uintptr_t key, bool dependent);
+rtree_elm_t	*rtree_elm_acquire(rtree_t *rtree, uintptr_t key,
+    bool dependent, bool init_missing);
+extent_t	*rtree_elm_read_acquired(rtree_elm_t *elm);
+void	rtree_elm_write_acquired(rtree_elm_t *elm, const extent_t *extent);
+void	rtree_elm_release(rtree_elm_t *elm);
+void	rtree_clear(rtree_t *rtree, uintptr_t key);
 #endif
 
 #if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_RTREE_C_))
@@ -154,18 +157,18 @@ rtree_subkey(rtree_t *rtree, uintptr_t key, unsigned level)
 }
 
 JEMALLOC_ALWAYS_INLINE bool
-rtree_node_valid(rtree_node_elm_t *node)
+rtree_node_valid(rtree_elm_t *node)
 {
 
 	return ((uintptr_t)node > (uintptr_t)RTREE_NODE_INITIALIZING);
 }
 
-JEMALLOC_ALWAYS_INLINE rtree_node_elm_t *
-rtree_child_tryread(rtree_node_elm_t *elm, bool dependent)
+JEMALLOC_ALWAYS_INLINE rtree_elm_t *
+rtree_child_tryread(rtree_elm_t *elm, bool dependent)
 {
-	rtree_node_elm_t *child;
+	rtree_elm_t *child;
 
-	/* Double-checked read (first read may be stale. */
+	/* Double-checked read (first read may be stale). */
 	child = elm->child;
 	if (!dependent && !rtree_node_valid(child))
 		child = atomic_read_p(&elm->pun);
@@ -173,11 +176,11 @@ rtree_child_tryread(rtree_node_elm_t *elm, bool dependent)
 	return (child);
 }
 
-JEMALLOC_ALWAYS_INLINE rtree_node_elm_t *
-rtree_child_read(rtree_t *rtree, rtree_node_elm_t *elm, unsigned level,
+JEMALLOC_ALWAYS_INLINE rtree_elm_t *
+rtree_child_read(rtree_t *rtree, rtree_elm_t *elm, unsigned level,
     bool dependent)
 {
-	rtree_node_elm_t *child;
+	rtree_elm_t *child;
 
 	child = rtree_child_tryread(elm, dependent);
 	if (!dependent && unlikely(!rtree_node_valid(child)))
@@ -187,40 +190,46 @@ rtree_child_read(rtree_t *rtree, rtree_node_elm_t *elm, unsigned level,
 }
 
 JEMALLOC_ALWAYS_INLINE extent_t *
-rtree_val_read(rtree_t *rtree, rtree_node_elm_t *elm, bool dependent)
+rtree_elm_read(rtree_elm_t *elm, bool dependent)
 {
+	extent_t *extent;
 
 	if (dependent) {
 		/*
-		 * Reading a val on behalf of a pointer to a valid allocation is
-		 * guaranteed to be a clean read even without synchronization,
-		 * because the rtree update became visible in memory before the
-		 * pointer came into existence.
+		 * Reading a value on behalf of a pointer to a valid allocation
+		 * is guaranteed to be a clean read even without
+		 * synchronization, because the rtree update became visible in
+		 * memory before the pointer came into existence.
 		 */
-		return (elm->val);
+		extent = elm->extent;
 	} else {
 		/*
 		 * An arbitrary read, e.g. on behalf of ivsalloc(), may not be
 		 * dependent on a previous rtree write, which means a stale read
 		 * could result if synchronization were omitted here.
 		 */
-		return (atomic_read_p(&elm->pun));
+		extent = (extent_t *)atomic_read_p(&elm->pun);
 	}
+
+	/* Mask the lock bit. */
+	extent = (extent_t *)((uintptr_t)extent & ~((uintptr_t)0x1));
+
+	return (extent);
 }
 
 JEMALLOC_INLINE void
-rtree_val_write(rtree_t *rtree, rtree_node_elm_t *elm, const extent_t *val)
+rtree_elm_write(rtree_elm_t *elm, const extent_t *extent)
 {
 
-	atomic_write_p(&elm->pun, val);
+	atomic_write_p(&elm->pun, extent);
 }
 
-JEMALLOC_ALWAYS_INLINE rtree_node_elm_t *
+JEMALLOC_ALWAYS_INLINE rtree_elm_t *
 rtree_subtree_tryread(rtree_t *rtree, unsigned level, bool dependent)
 {
-	rtree_node_elm_t *subtree;
+	rtree_elm_t *subtree;
 
-	/* Double-checked read (first read may be stale. */
+	/* Double-checked read (first read may be stale). */
 	subtree = rtree->levels[level].subtree;
 	if (!dependent && unlikely(!rtree_node_valid(subtree)))
 		subtree = atomic_read_p(&rtree->levels[level].subtree_pun);
@@ -228,10 +237,10 @@ rtree_subtree_tryread(rtree_t *rtree, unsigned level, bool dependent)
 	return (subtree);
 }
 
-JEMALLOC_ALWAYS_INLINE rtree_node_elm_t *
+JEMALLOC_ALWAYS_INLINE rtree_elm_t *
 rtree_subtree_read(rtree_t *rtree, unsigned level, bool dependent)
 {
-	rtree_node_elm_t *subtree;
+	rtree_elm_t *subtree;
 
 	subtree = rtree_subtree_tryread(rtree, level, dependent);
 	if (!dependent && unlikely(!rtree_node_valid(subtree)))
@@ -240,16 +249,20 @@ rtree_subtree_read(rtree_t *rtree, unsigned level, bool dependent)
 	return (subtree);
 }
 
-JEMALLOC_ALWAYS_INLINE extent_t *
-rtree_get(rtree_t *rtree, uintptr_t key, bool dependent)
+JEMALLOC_ALWAYS_INLINE rtree_elm_t *
+rtree_elm_lookup(rtree_t *rtree, uintptr_t key, bool dependent,
+    bool init_missing)
 {
 	uintptr_t subkey;
 	unsigned start_level;
-	rtree_node_elm_t *node;
+	rtree_elm_t *node;
+
+	assert(!dependent || !init_missing);
 
 	start_level = rtree_start_level(rtree, key);
 
-	node = rtree_subtree_tryread(rtree, start_level, dependent);
+	node = init_missing ? rtree_subtree_read(rtree, start_level, dependent)
+	    : rtree_subtree_tryread(rtree, start_level, dependent);
#define	RTREE_GET_BIAS	(RTREE_HEIGHT_MAX - rtree->height)
 	switch (start_level + RTREE_GET_BIAS) {
#define	RTREE_GET_SUBTREE(level)					\
@@ -259,7 +272,9 @@ rtree_get(rtree_t *rtree, uintptr_t key, bool dependent)
 			return (NULL);					\
 		subkey = rtree_subkey(rtree, key, level -		\
 		    RTREE_GET_BIAS);					\
-		node = rtree_child_tryread(&node[subkey], dependent);	\
+		node = init_missing ? rtree_child_read(rtree,		\
+		    &node[subkey], level - RTREE_GET_BIAS, dependent) : \
+		    rtree_child_tryread(&node[subkey], dependent);	\
 		/* Fall through. */
#define	RTREE_GET_LEAF(level)						\
 	case level:							\
@@ -272,8 +287,7 @@ rtree_get(rtree_t *rtree, uintptr_t key, bool dependent)
 		 * node is a leaf, so it contains values rather than	\
 		 * child pointers.					\
 		 */							\
-		return (rtree_val_read(rtree, &node[subkey],		\
-		    dependent));
+		return (&node[subkey]);
#if RTREE_HEIGHT_MAX > 1
 	RTREE_GET_SUBTREE(0)
#endif
@@ -332,33 +346,94 @@ rtree_get(rtree_t *rtree, uintptr_t key, bool dependent)
 }
 
 JEMALLOC_INLINE bool
-rtree_set(rtree_t *rtree, uintptr_t key, const extent_t *val)
+rtree_write(rtree_t *rtree, uintptr_t key, const extent_t *extent)
 {
-	uintptr_t subkey;
-	unsigned i, start_level;
-	rtree_node_elm_t *node, *child;
+	rtree_elm_t *elm;
 
-	start_level = rtree_start_level(rtree, key);
+	assert(extent != NULL); /* Use rtree_clear() for this case. */
+	assert(((uintptr_t)extent & (uintptr_t)0x1) == (uintptr_t)0x0);
 
-	node = rtree_subtree_read(rtree, start_level, false);
-	if (node == NULL)
+	elm = rtree_elm_lookup(rtree, key, false, true);
+	if (elm == NULL)
 		return (true);
-	for (i = start_level; /**/; i++, node = child) {
-		subkey = rtree_subkey(rtree, key, i);
-		if (i == rtree->height - 1) {
-			/*
-			 * node is a leaf, so it contains values rather than
-			 * child pointers.
-			 */
-			rtree_val_write(rtree, &node[subkey], val);
-			return (false);
-		}
-		assert(i + 1 < rtree->height);
-		child = rtree_child_read(rtree, &node[subkey], i, false);
-		if (child == NULL)
-			return (true);
+	assert(rtree_elm_read(elm, false) == NULL);
+	rtree_elm_write(elm, extent);
+
+	return (false);
+}
+
+JEMALLOC_ALWAYS_INLINE extent_t *
+rtree_read(rtree_t *rtree, uintptr_t key, bool dependent)
+{
+	rtree_elm_t *elm;
+
+	elm = rtree_elm_lookup(rtree, key, dependent, false);
+	if (elm == NULL)
+		return (NULL);
+
+	return (rtree_elm_read(elm, dependent));
+}
+
+JEMALLOC_INLINE rtree_elm_t *
+rtree_elm_acquire(rtree_t *rtree, uintptr_t key, bool dependent,
+    bool init_missing)
+{
+	rtree_elm_t *elm;
+
+	elm = rtree_elm_lookup(rtree, key, dependent, init_missing);
+	if (!dependent && elm == NULL)
+		return (NULL);
+	{
+		extent_t *extent;
+		void *s;
+
+		do {
+			extent = rtree_elm_read(elm, false);
+			/* The least significant bit serves as a lock. */
+			s = (void *)((uintptr_t)extent | (uintptr_t)0x1);
+		} while (atomic_cas_p(&elm->pun, (void *)extent, s));
 	}
-	not_reached();
+
+	return (elm);
+}
+
+JEMALLOC_INLINE extent_t *
+rtree_elm_read_acquired(rtree_elm_t *elm)
+{
+	extent_t *extent;
+
+	assert(((uintptr_t)elm->pun & (uintptr_t)0x1) == (uintptr_t)0x1);
+	extent = (extent_t *)((uintptr_t)elm->pun & ~((uintptr_t)0x1));
+	assert(((uintptr_t)extent & (uintptr_t)0x1) == (uintptr_t)0x0);
+
+	return (extent);
+}
+
+JEMALLOC_INLINE void
+rtree_elm_write_acquired(rtree_elm_t *elm, const extent_t *extent)
+{
+
+	assert(((uintptr_t)extent & (uintptr_t)0x1) == (uintptr_t)0x0);
+	assert(((uintptr_t)elm->pun & (uintptr_t)0x1) == (uintptr_t)0x1);
+	elm->pun = (void *)((uintptr_t)extent | (uintptr_t)0x1);
+	assert(rtree_elm_read_acquired(elm) == extent);
+}
+
+JEMALLOC_INLINE void
+rtree_elm_release(rtree_elm_t *elm)
+{
+
+	rtree_elm_write(elm, rtree_elm_read_acquired(elm));
+}
+
+JEMALLOC_INLINE void
+rtree_clear(rtree_t *rtree, uintptr_t key)
+{
+	rtree_elm_t *elm;
+
+	elm = rtree_elm_acquire(rtree, key, true, false);
+	rtree_elm_write_acquired(elm, NULL);
+	rtree_elm_release(elm);
 }
 #endif
diff --git a/src/chunk.c b/src/chunk.c
index d3a600a5..31b86456 100644
--- a/src/chunk.c
+++ b/src/chunk.c
@@ -146,7 +146,7 @@ chunk_register(tsdn_t *tsdn, const void *chunk, const extent_t *extent)
 
 	assert(extent_addr_get(extent) == chunk);
 
-	if (rtree_set(&chunks_rtree, (uintptr_t)chunk, extent))
+	if (rtree_write(&chunks_rtree, (uintptr_t)chunk, extent))
 		return (true);
 	if (config_prof && opt_prof) {
 		size_t size = extent_size_get(extent);
@@ -170,10 +170,8 @@ chunk_register(tsdn_t *tsdn, const void *chunk, const extent_t *extent)
 void
 chunk_deregister(const void *chunk, const extent_t *extent)
 {
-	bool err;
 
-	err = rtree_set(&chunks_rtree, (uintptr_t)chunk, NULL);
-	assert(!err);
+	rtree_clear(&chunks_rtree, (uintptr_t)chunk);
 	if (config_prof && opt_prof) {
 		size_t size = extent_size_get(extent);
 		size_t nsub = (size == 0) ? 1 : size / chunksize;
@@ -684,12 +682,12 @@ chunk_merge_default(void *chunk_a, size_t size_a, void *chunk_b, size_t size_b,
 	return (false);
 }
 
-static rtree_node_elm_t *
+static rtree_elm_t *
 chunks_rtree_node_alloc(size_t nelms)
 {
 
-	return ((rtree_node_elm_t *)base_alloc(tsdn_fetch(), nelms *
-	    sizeof(rtree_node_elm_t)));
+	return ((rtree_elm_t *)base_alloc(tsdn_fetch(), nelms *
+	    sizeof(rtree_elm_t)));
 }
 
 bool
diff --git a/src/rtree.c b/src/rtree.c
index 3166b45f..71c69c41 100644
--- a/src/rtree.c
+++ b/src/rtree.c
@@ -8,7 +8,10 @@ hmin(unsigned ha, unsigned hb)
 	return (ha < hb ? ha : hb);
 }
 
-/* Only the most significant bits of keys passed to rtree_[gs]et() are used. */
+/*
+ * Only the most significant bits of keys passed to rtree_{read,write}() are
+ * used.
+ */
 bool
 rtree_new(rtree_t *rtree, unsigned bits, rtree_node_alloc_t *alloc,
     rtree_node_dalloc_t *dalloc)
@@ -62,7 +65,7 @@ rtree_new(rtree_t *rtree, unsigned bits, rtree_node_alloc_t *alloc,
 }
 
 static void
-rtree_delete_subtree(rtree_t *rtree, rtree_node_elm_t *node, unsigned level)
+rtree_delete_subtree(rtree_t *rtree, rtree_elm_t *node, unsigned level)
 {
 
 	if (level + 1 < rtree->height) {
@@ -70,7 +73,7 @@ rtree_delete_subtree(rtree_t *rtree, rtree_node_elm_t *node, unsigned level)
 
 		nchildren = ZU(1) << rtree->levels[level].bits;
 		for (i = 0; i < nchildren; i++) {
-			rtree_node_elm_t *child = node[i].child;
+			rtree_elm_t *child = node[i].child;
 			if (child != NULL)
 				rtree_delete_subtree(rtree, child, level + 1);
 		}
@@ -84,16 +87,16 @@ rtree_delete(rtree_t *rtree)
 	unsigned i;
 
 	for (i = 0; i < rtree->height; i++) {
-		rtree_node_elm_t *subtree = rtree->levels[i].subtree;
+		rtree_elm_t *subtree = rtree->levels[i].subtree;
 		if (subtree != NULL)
 			rtree_delete_subtree(rtree, subtree, i);
 	}
 }
 
-static rtree_node_elm_t *
-rtree_node_init(rtree_t *rtree, unsigned level, rtree_node_elm_t **elmp)
+static rtree_elm_t *
+rtree_node_init(rtree_t *rtree, unsigned level, rtree_elm_t **elmp)
 {
-	rtree_node_elm_t *node;
+	rtree_elm_t *node;
 
 	if (atomic_cas_p((void **)elmp, NULL, RTREE_NODE_INITIALIZING)) {
 		/*
@@ -114,15 +117,15 @@ rtree_node_init(rtree_t *rtree, unsigned level, rtree_node_elm_t **elmp)
 	return (node);
 }
 
-rtree_node_elm_t *
+rtree_elm_t *
 rtree_subtree_read_hard(rtree_t *rtree, unsigned level)
 {
 
 	return (rtree_node_init(rtree, level, &rtree->levels[level].subtree));
 }
 
-rtree_node_elm_t *
-rtree_child_read_hard(rtree_t *rtree, rtree_node_elm_t *elm, unsigned level)
+rtree_elm_t *
+rtree_child_read_hard(rtree_t *rtree, rtree_elm_t *elm, unsigned level)
 {
 
 	return (rtree_node_init(rtree, level, &elm->child));
diff --git a/test/unit/rtree.c b/test/unit/rtree.c
index 30b1c541..671e2c8a 100644
--- a/test/unit/rtree.c
+++ b/test/unit/rtree.c
@@ -1,20 +1,24 @@
 #include "test/jemalloc_test.h"
 
-static rtree_node_elm_t *
+static rtree_elm_t *
 node_alloc(size_t nelms)
 {
+	rtree_elm_t *node;
 
-	return ((rtree_node_elm_t *)calloc(nelms, sizeof(rtree_node_elm_t)));
+	node = (rtree_elm_t *)calloc(nelms, sizeof(rtree_elm_t));
+	assert_ptr_not_null(node, "Unexpected calloc() failure");
+
+	return (node);
 }
 
 static void
-node_dalloc(rtree_node_elm_t *node)
+node_dalloc(rtree_elm_t *node)
 {
 
 	free(node);
 }
 
-TEST_BEGIN(test_rtree_get_empty)
+TEST_BEGIN(test_rtree_read_empty)
 {
 	unsigned i;
 
@@ -22,13 +26,89 @@ TEST_BEGIN(test_rtree_get_empty)
 		rtree_t rtree;
 
 		assert_false(rtree_new(&rtree, i, node_alloc, node_dalloc),
 		    "Unexpected rtree_new() failure");
-		assert_ptr_null(rtree_get(&rtree, 0, false),
-		    "rtree_get() should return NULL for empty tree");
+		assert_ptr_null(rtree_read(&rtree, 0, false),
+		    "rtree_read() should return NULL for empty tree");
 		rtree_delete(&rtree);
 	}
 }
 TEST_END
 
+#define	NTHREADS	8
+#define	MAX_NBITS	18
+#define	NITERS		1000
+#define	SEED		42
+
+typedef struct {
+	unsigned	nbits;
+	rtree_t		rtree;
+	uint32_t	seed;
+} thd_start_arg_t;
+
+static void *
+thd_start(void *varg)
+{
+	thd_start_arg_t *arg = (thd_start_arg_t *)varg;
+	sfmt_t *sfmt;
+	extent_t *extent;
+	unsigned i;
+
+	sfmt = init_gen_rand(arg->seed);
+	extent = (extent_t *)malloc(sizeof(extent_t));
+	assert_ptr_not_null(extent, "Unexpected malloc() failure");
+
+	for (i = 0; i < NITERS; i++) {
+		uintptr_t key = (uintptr_t)gen_rand64(sfmt);
+		if (i % 2 == 0) {
+			rtree_elm_t *elm;
+
+			elm = rtree_elm_acquire(&arg->rtree, key, false, true);
+			assert_ptr_not_null(elm,
+			    "Unexpected rtree_elm_acquire() failure");
+			rtree_elm_write_acquired(elm, extent);
+			rtree_elm_release(elm);
+
+			elm = rtree_elm_acquire(&arg->rtree, key, true, false);
+			assert_ptr_not_null(elm,
+			    "Unexpected rtree_elm_acquire() failure");
+			rtree_elm_read_acquired(elm);
+			rtree_elm_release(elm);
+		} else
+			rtree_read(&arg->rtree, key, false);
+	}
+
+	free(extent);
+	fini_gen_rand(sfmt);
+	return (NULL);
+}
+
+TEST_BEGIN(test_rtree_concurrent)
+{
+	thd_start_arg_t arg;
+	thd_t thds[NTHREADS];
+	sfmt_t *sfmt;
+	unsigned i, j;
+
+	sfmt = init_gen_rand(SEED);
+	for (i = 1; i < MAX_NBITS; i++) {
+		arg.nbits = i;
+		assert_false(rtree_new(&arg.rtree, arg.nbits, node_alloc,
+		    node_dalloc), "Unexpected rtree_new() failure");
+		arg.seed = gen_rand32(sfmt);
+		for (j = 0; j < NTHREADS; j++)
+			thd_create(&thds[j], thd_start, (void *)&arg);
+		for (j = 0; j < NTHREADS; j++)
+			thd_join(thds[j], NULL);
+		rtree_delete(&arg.rtree);
+	}
+	fini_gen_rand(sfmt);
+}
+TEST_END
+
+#undef NTHREADS
+#undef MAX_NBITS
+#undef NITERS
+#undef SEED
+
 TEST_BEGIN(test_rtree_extrema)
 {
 	unsigned i;
@@ -39,16 +119,16 @@ TEST_BEGIN(test_rtree_extrema)
 		assert_false(rtree_new(&rtree, i, node_alloc, node_dalloc),
 		    "Unexpected rtree_new() failure");
 
-		assert_false(rtree_set(&rtree, 0, &extent_a),
-		    "Unexpected rtree_set() failure");
-		assert_ptr_eq(rtree_get(&rtree, 0, true), &extent_a,
-		    "rtree_get() should return previously set value");
+		assert_false(rtree_write(&rtree, 0, &extent_a),
+		    "Unexpected rtree_write() failure, i=%u", i);
+		assert_ptr_eq(rtree_read(&rtree, 0, true), &extent_a,
+		    "rtree_read() should return previously set value, i=%u", i);
 
-		assert_false(rtree_set(&rtree, ~((uintptr_t)0), &extent_b),
-		    "Unexpected rtree_set() failure");
-		assert_ptr_eq(rtree_get(&rtree, ~((uintptr_t)0), true),
+		assert_false(rtree_write(&rtree, ~((uintptr_t)0), &extent_b),
+		    "Unexpected rtree_write() failure, i=%u", i);
+		assert_ptr_eq(rtree_read(&rtree, ~((uintptr_t)0), true),
 		    &extent_b,
-		    "rtree_get() should return previously set value");
+		    "rtree_read() should return previously set value, i=%u", i);
 
 		rtree_delete(&rtree);
 	}
@@ -69,22 +149,21 @@ TEST_BEGIN(test_rtree_bits)
 		    "Unexpected rtree_new() failure");
 
 		for (j = 0; j < sizeof(keys)/sizeof(uintptr_t); j++) {
-			assert_false(rtree_set(&rtree, keys[j], &extent),
-			    "Unexpected rtree_set() failure");
+			assert_false(rtree_write(&rtree, keys[j], &extent),
+			    "Unexpected rtree_write() failure");
 			for (k = 0; k < sizeof(keys)/sizeof(uintptr_t); k++) {
-				assert_ptr_eq(rtree_get(&rtree, keys[k], true),
-				    &extent, "rtree_get() should return "
+				assert_ptr_eq(rtree_read(&rtree, keys[k], true),
+				    &extent, "rtree_read() should return "
 				    "previously set value and ignore "
 				    "insignificant key bits; i=%u, j=%u, k=%u, "
 				    "set key=%#"FMTxPTR", get key=%#"FMTxPTR, i,
 				    j, k, keys[j], keys[k]);
 			}
-			assert_ptr_null(rtree_get(&rtree,
+			assert_ptr_null(rtree_read(&rtree,
 			    (((uintptr_t)1) << (sizeof(uintptr_t)*8-i)), false),
 			    "Only leftmost rtree leaf should be set; "
 			    "i=%u, j=%u", i, j);
-			assert_false(rtree_set(&rtree, keys[j], NULL),
-			    "Unexpected rtree_set() failure");
+			rtree_clear(&rtree, keys[j]);
 		}
 
 		rtree_delete(&rtree);
@@ -105,31 +184,36 @@ TEST_BEGIN(test_rtree_random)
 		extent_t extent;
 		unsigned j;
 		rtree_t rtree;
+		rtree_elm_t *elm;
 
 		assert_false(rtree_new(&rtree, i, node_alloc, node_dalloc),
failure"); for (j = 0; j < NSET; j++) { keys[j] = (uintptr_t)gen_rand64(sfmt); - assert_false(rtree_set(&rtree, keys[j], &extent), - "Unexpected rtree_set() failure"); - assert_ptr_eq(rtree_get(&rtree, keys[j], true), &extent, - "rtree_get() should return previously set value"); + elm = rtree_elm_acquire(&rtree, keys[j], false, true); + assert_ptr_not_null(elm, + "Unexpected rtree_elm_acquire() failure"); + rtree_elm_write_acquired(elm, &extent); + rtree_elm_release(elm); + assert_ptr_eq(rtree_read(&rtree, keys[j], true), + &extent, + "rtree_read() should return previously set value"); } for (j = 0; j < NSET; j++) { - assert_ptr_eq(rtree_get(&rtree, keys[j], true), &extent, - "rtree_get() should return previously set value"); + assert_ptr_eq(rtree_read(&rtree, keys[j], true), + &extent, "rtree_read() should return previously " + "set value, j=%u", j); } for (j = 0; j < NSET; j++) { - assert_false(rtree_set(&rtree, keys[j], NULL), - "Unexpected rtree_set() failure"); - assert_ptr_null(rtree_get(&rtree, keys[j], true), - "rtree_get() should return previously set value"); + rtree_clear(&rtree, keys[j]); + assert_ptr_null(rtree_read(&rtree, keys[j], true), + "rtree_read() should return previously set value"); } for (j = 0; j < NSET; j++) { - assert_ptr_null(rtree_get(&rtree, keys[j], true), - "rtree_get() should return previously set value"); + assert_ptr_null(rtree_read(&rtree, keys[j], true), + "rtree_read() should return previously set value"); } rtree_delete(&rtree); @@ -145,7 +229,8 @@ main(void) { return (test( - test_rtree_get_empty, + test_rtree_read_empty, + test_rtree_concurrent, test_rtree_extrema, test_rtree_bits, test_rtree_random));