Split rtree_elm_t into rtree_{node,leaf}_elm_t.
This allows leaf elements to differ in size from internal node elements.

In principle it would be more correct to use a different type for each level of the tree, but due to implementation details related to atomic operations, we use casts anyway, thus counteracting the value of additional type correctness. Furthermore, such a scheme would require function code generation (via cpp macros), as well as either unwieldy type names for leaves or type aliases, e.g.

    typedef struct rtree_elm_d2_s rtree_leaf_elm_t;

This alternate strategy would be more correct, and with less code duplication, but probably not worth the complexity.
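Concretely, the split permits layouts along these lines. This is an illustrative sketch using standard C11 atomics; the field names and types here are assumptions for exposition, not the exact jemalloc definitions (jemalloc wraps its atomics in its own atomic_p_t type):

#include <stdatomic.h>

/* Interior node element: just a child pointer.  Depending on the level it
 * points at another node or at a leaf, hence the casts mentioned above. */
typedef struct rtree_node_elm_s {
    _Atomic(void *) child;
} rtree_node_elm_t;

/* Leaf element: maps a page address to its extent.  It can now be sized
 * (and padded/extended) independently of the interior node elements. */
typedef struct rtree_leaf_elm_s {
    _Atomic(void *) extent;
} rtree_leaf_elm_t;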
src/extent.c (74 lines changed):

--- a/src/extent.c
+++ b/src/extent.c
@@ -450,8 +450,8 @@ extent_activate_locked(tsdn_t *tsdn, arena_t *arena, extents_t *extents,
 static bool
 extent_rtree_acquire(tsdn_t *tsdn, rtree_ctx_t *rtree_ctx,
     const extent_t *extent, bool dependent, bool init_missing,
-    rtree_elm_t **r_elm_a, rtree_elm_t **r_elm_b) {
-    *r_elm_a = rtree_elm_acquire(tsdn, &extents_rtree, rtree_ctx,
+    rtree_leaf_elm_t **r_elm_a, rtree_leaf_elm_t **r_elm_b) {
+    *r_elm_a = rtree_leaf_elm_acquire(tsdn, &extents_rtree, rtree_ctx,
         (uintptr_t)extent_base_get(extent), dependent, init_missing);
     if (!dependent && *r_elm_a == NULL) {
         return true;
@@ -459,11 +459,11 @@ extent_rtree_acquire(tsdn_t *tsdn, rtree_ctx_t *rtree_ctx,
     assert(*r_elm_a != NULL);
 
     if (extent_size_get(extent) > PAGE) {
-        *r_elm_b = rtree_elm_acquire(tsdn, &extents_rtree, rtree_ctx,
-            (uintptr_t)extent_last_get(extent), dependent,
+        *r_elm_b = rtree_leaf_elm_acquire(tsdn, &extents_rtree,
+            rtree_ctx, (uintptr_t)extent_last_get(extent), dependent,
             init_missing);
         if (!dependent && *r_elm_b == NULL) {
-            rtree_elm_release(tsdn, &extents_rtree, *r_elm_a);
+            rtree_leaf_elm_release(tsdn, &extents_rtree, *r_elm_a);
             return true;
         }
         assert(*r_elm_b != NULL);
@@ -475,19 +475,21 @@ extent_rtree_acquire(tsdn_t *tsdn, rtree_ctx_t *rtree_ctx,
 }
 
 static void
-extent_rtree_write_acquired(tsdn_t *tsdn, rtree_elm_t *elm_a,
-    rtree_elm_t *elm_b, const extent_t *extent) {
-    rtree_elm_write_acquired(tsdn, &extents_rtree, elm_a, extent);
+extent_rtree_write_acquired(tsdn_t *tsdn, rtree_leaf_elm_t *elm_a,
+    rtree_leaf_elm_t *elm_b, const extent_t *extent) {
+    rtree_leaf_elm_write_acquired(tsdn, &extents_rtree, elm_a, extent);
     if (elm_b != NULL) {
-        rtree_elm_write_acquired(tsdn, &extents_rtree, elm_b, extent);
+        rtree_leaf_elm_write_acquired(tsdn, &extents_rtree, elm_b,
+            extent);
     }
 }
 
 static void
-extent_rtree_release(tsdn_t *tsdn, rtree_elm_t *elm_a, rtree_elm_t *elm_b) {
-    rtree_elm_release(tsdn, &extents_rtree, elm_a);
+extent_rtree_release(tsdn_t *tsdn, rtree_leaf_elm_t *elm_a,
+    rtree_leaf_elm_t *elm_b) {
+    rtree_leaf_elm_release(tsdn, &extents_rtree, elm_a);
     if (elm_b != NULL) {
-        rtree_elm_release(tsdn, &extents_rtree, elm_b);
+        rtree_leaf_elm_release(tsdn, &extents_rtree, elm_b);
     }
 }
 
@@ -543,7 +545,7 @@ static bool
 extent_register_impl(tsdn_t *tsdn, const extent_t *extent, bool gdump_add) {
     rtree_ctx_t rtree_ctx_fallback;
     rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn, &rtree_ctx_fallback);
-    rtree_elm_t *elm_a, *elm_b;
+    rtree_leaf_elm_t *elm_a, *elm_b;
 
     if (extent_rtree_acquire(tsdn, rtree_ctx, extent, false, true, &elm_a,
         &elm_b)) {
@@ -596,7 +598,7 @@ static void
 extent_deregister(tsdn_t *tsdn, extent_t *extent) {
     rtree_ctx_t rtree_ctx_fallback;
     rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn, &rtree_ctx_fallback);
-    rtree_elm_t *elm_a, *elm_b;
+    rtree_leaf_elm_t *elm_a, *elm_b;
 
     extent_rtree_acquire(tsdn, rtree_ctx, extent, true, false, &elm_a,
         &elm_b);
@@ -651,13 +653,13 @@ extent_recycle_extract(tsdn_t *tsdn, arena_t *arena,
     extent_hooks_assure_initialized(arena, r_extent_hooks);
     extent_t *extent;
     if (new_addr != NULL) {
-        rtree_elm_t *elm;
+        rtree_leaf_elm_t *elm;
 
-        elm = rtree_elm_acquire(tsdn, &extents_rtree, rtree_ctx,
+        elm = rtree_leaf_elm_acquire(tsdn, &extents_rtree, rtree_ctx,
             (uintptr_t)new_addr, false, false);
         if (elm != NULL) {
-            extent = rtree_elm_read_acquired(tsdn, &extents_rtree,
-                elm);
+            extent = rtree_leaf_elm_read_acquired(tsdn,
+                &extents_rtree, elm);
             if (extent != NULL) {
                 assert(extent_base_get(extent) == new_addr);
                 if (extent_arena_get(extent) != arena ||
@@ -667,7 +669,7 @@ extent_recycle_extract(tsdn_t *tsdn, arena_t *arena,
                 extent = NULL;
             }
         }
-        rtree_elm_release(tsdn, &extents_rtree, elm);
+        rtree_leaf_elm_release(tsdn, &extents_rtree, elm);
     } else {
         extent = NULL;
     }
@@ -1156,11 +1158,11 @@ extent_try_coalesce(tsdn_t *tsdn, arena_t *arena,
         again = false;
 
         /* Try to coalesce forward. */
-        rtree_elm_t *next_elm = rtree_elm_acquire(tsdn, &extents_rtree,
-            rtree_ctx, (uintptr_t)extent_past_get(extent), false,
-            false);
+        rtree_leaf_elm_t *next_elm = rtree_leaf_elm_acquire(tsdn,
+            &extents_rtree, rtree_ctx,
+            (uintptr_t)extent_past_get(extent), false, false);
         if (next_elm != NULL) {
-            extent_t *next = rtree_elm_read_acquired(tsdn,
+            extent_t *next = rtree_leaf_elm_read_acquired(tsdn,
                 &extents_rtree, next_elm);
             /*
              * extents->mtx only protects against races for
@@ -1169,7 +1171,7 @@ extent_try_coalesce(tsdn_t *tsdn, arena_t *arena,
             */
             bool can_coalesce = (next != NULL &&
                 extent_can_coalesce(arena, extents, extent, next));
-            rtree_elm_release(tsdn, &extents_rtree, next_elm);
+            rtree_leaf_elm_release(tsdn, &extents_rtree, next_elm);
             if (can_coalesce && !extent_coalesce(tsdn, arena,
                 r_extent_hooks, extents, extent, next, true)) {
                 if (extents->delay_coalesce) {
@@ -1182,15 +1184,15 @@ extent_try_coalesce(tsdn_t *tsdn, arena_t *arena,
         }
 
         /* Try to coalesce backward. */
-        rtree_elm_t *prev_elm = rtree_elm_acquire(tsdn, &extents_rtree,
-            rtree_ctx, (uintptr_t)extent_before_get(extent), false,
-            false);
+        rtree_leaf_elm_t *prev_elm = rtree_leaf_elm_acquire(tsdn,
+            &extents_rtree, rtree_ctx,
+            (uintptr_t)extent_before_get(extent), false, false);
         if (prev_elm != NULL) {
-            extent_t *prev = rtree_elm_read_acquired(tsdn,
+            extent_t *prev = rtree_leaf_elm_read_acquired(tsdn,
                 &extents_rtree, prev_elm);
             bool can_coalesce = (prev != NULL &&
                 extent_can_coalesce(arena, extents, extent, prev));
-            rtree_elm_release(tsdn, &extents_rtree, prev_elm);
+            rtree_leaf_elm_release(tsdn, &extents_rtree, prev_elm);
             if (can_coalesce && !extent_coalesce(tsdn, arena,
                 r_extent_hooks, extents, extent, prev, false)) {
                 extent = prev;
@@ -1472,7 +1474,7 @@ extent_split_wrapper(tsdn_t *tsdn, arena_t *arena,
     extent_t *trail;
     rtree_ctx_t rtree_ctx_fallback;
     rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn, &rtree_ctx_fallback);
-    rtree_elm_t *lead_elm_a, *lead_elm_b, *trail_elm_a, *trail_elm_b;
+    rtree_leaf_elm_t *lead_elm_a, *lead_elm_b, *trail_elm_a, *trail_elm_b;
 
     extent_hooks_assure_initialized(arena, r_extent_hooks);
 
@@ -1590,19 +1592,21 @@ extent_merge_wrapper(tsdn_t *tsdn, arena_t *arena,
     */
     rtree_ctx_t rtree_ctx_fallback;
     rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn, &rtree_ctx_fallback);
-    rtree_elm_t *a_elm_a, *a_elm_b, *b_elm_a, *b_elm_b;
+    rtree_leaf_elm_t *a_elm_a, *a_elm_b, *b_elm_a, *b_elm_b;
     extent_rtree_acquire(tsdn, rtree_ctx, a, true, false, &a_elm_a,
         &a_elm_b);
     extent_rtree_acquire(tsdn, rtree_ctx, b, true, false, &b_elm_a,
         &b_elm_b);
 
     if (a_elm_b != NULL) {
-        rtree_elm_write_acquired(tsdn, &extents_rtree, a_elm_b, NULL);
-        rtree_elm_release(tsdn, &extents_rtree, a_elm_b);
+        rtree_leaf_elm_write_acquired(tsdn, &extents_rtree, a_elm_b,
+            NULL);
+        rtree_leaf_elm_release(tsdn, &extents_rtree, a_elm_b);
     }
     if (b_elm_b != NULL) {
-        rtree_elm_write_acquired(tsdn, &extents_rtree, b_elm_a, NULL);
-        rtree_elm_release(tsdn, &extents_rtree, b_elm_a);
+        rtree_leaf_elm_write_acquired(tsdn, &extents_rtree, b_elm_a,
+            NULL);
+        rtree_leaf_elm_release(tsdn, &extents_rtree, b_elm_a);
     } else {
         b_elm_b = b_elm_a;
     }
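The extent.c side of the change is a mechanical retyping: every element the extent code touches is a leaf, so rtree_elm_* calls become rtree_leaf_elm_* calls with otherwise identical structure. The elm_a/elm_b pairing exists because an extent is registered under both its first and its last page, so lookups at either boundary (e.g. the forward/backward probes in extent_try_coalesce) find it; single-page extents need only one entry, which is why elm_b may be NULL. A toy model of that invariant, with a plain array standing in for the rtree (this is not jemalloc code; all names are invented):

#include <assert.h>
#include <stddef.h>
#include <stdint.h>

#define TOY_PAGE ((size_t)4096)
#define TOY_NPAGES 1024

static void *toy_leaf[TOY_NPAGES]; /* stand-in for the rtree leaves */

/* Register an extent under its first and (if multi-page) last page. */
static void
toy_register(uintptr_t base, size_t size, void *extent) {
    toy_leaf[base / TOY_PAGE] = extent;
    if (size > TOY_PAGE) {
        toy_leaf[(base + size - TOY_PAGE) / TOY_PAGE] = extent;
    }
}

int
main(void) {
    int e;
    toy_register(2 * TOY_PAGE, 3 * TOY_PAGE, &e);
    /* Both boundary pages resolve to the extent; interior pages need not. */
    assert(toy_leaf[2] == &e && toy_leaf[4] == &e);
    return 0;
}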
src/rtree.c (355 lines changed):

--- a/src/rtree.c
+++ b/src/rtree.c
@@ -19,10 +19,10 @@ rtree_new(rtree_t *rtree) {
 #undef rtree_node_alloc
 #define rtree_node_alloc JEMALLOC_N(rtree_node_alloc_impl)
 #endif
-static rtree_elm_t *
+static rtree_node_elm_t *
 rtree_node_alloc(tsdn_t *tsdn, rtree_t *rtree, size_t nelms) {
-    return (rtree_elm_t *)base_alloc(tsdn, b0get(), nelms *
-        sizeof(rtree_elm_t), CACHELINE);
+    return (rtree_node_elm_t *)base_alloc(tsdn, b0get(), nelms *
+        sizeof(rtree_node_elm_t), CACHELINE);
 }
 #ifdef JEMALLOC_JET
 #undef rtree_node_alloc
@@ -35,7 +35,7 @@ rtree_node_alloc_t *rtree_node_alloc = JEMALLOC_N(rtree_node_alloc_impl);
 #define rtree_node_dalloc JEMALLOC_N(rtree_node_dalloc_impl)
 #endif
 UNUSED static void
-rtree_node_dalloc(tsdn_t *tsdn, rtree_t *rtree, rtree_elm_t *node) {
+rtree_node_dalloc(tsdn_t *tsdn, rtree_t *rtree, rtree_node_elm_t *node) {
     /* Nodes are never deleted during normal operation. */
     not_reached();
 }
@@ -46,46 +46,92 @@ rtree_node_dalloc_t *rtree_node_dalloc = JEMALLOC_N(rtree_node_dalloc_impl);
 #endif
 
 #ifdef JEMALLOC_JET
-static void
-rtree_delete_subtree(tsdn_t *tsdn, rtree_t *rtree, rtree_elm_t *node,
-    unsigned level) {
-    if (level + 1 < RTREE_HEIGHT) {
-        size_t nchildren, i;
+#undef rtree_leaf_alloc
+#define rtree_leaf_alloc JEMALLOC_N(rtree_leaf_alloc_impl)
+#endif
+static rtree_leaf_elm_t *
+rtree_leaf_alloc(tsdn_t *tsdn, rtree_t *rtree, size_t nelms) {
+    return (rtree_leaf_elm_t *)base_alloc(tsdn, b0get(), nelms *
+        sizeof(rtree_leaf_elm_t), CACHELINE);
+}
+#ifdef JEMALLOC_JET
+#undef rtree_leaf_alloc
+#define rtree_leaf_alloc JEMALLOC_N(rtree_leaf_alloc)
+rtree_leaf_alloc_t *rtree_leaf_alloc = JEMALLOC_N(rtree_leaf_alloc_impl);
+#endif
 
-        nchildren = ZU(1) << rtree_levels[level].bits;
-        for (i = 0; i < nchildren; i++) {
-            rtree_elm_t *child = (rtree_elm_t *)atomic_load_p(
-                &node[i].child_or_extent, ATOMIC_RELAXED);
-            if (child != NULL) {
-                rtree_delete_subtree(tsdn, rtree, child, level +
+#ifdef JEMALLOC_JET
+#undef rtree_leaf_dalloc
+#define rtree_leaf_dalloc JEMALLOC_N(rtree_leaf_dalloc_impl)
+#endif
+UNUSED static void
+rtree_leaf_dalloc(tsdn_t *tsdn, rtree_t *rtree, rtree_leaf_elm_t *leaf) {
+    /* Leaves are never deleted during normal operation. */
+    not_reached();
+}
+#ifdef JEMALLOC_JET
+#undef rtree_leaf_dalloc
+#define rtree_leaf_dalloc JEMALLOC_N(rtree_leaf_dalloc)
+rtree_leaf_dalloc_t *rtree_leaf_dalloc = JEMALLOC_N(rtree_leaf_dalloc_impl);
+#endif
+
+#ifdef JEMALLOC_JET
+static void
+rtree_delete_subtree(tsdn_t *tsdn, rtree_t *rtree, rtree_node_elm_t *subtree,
+    unsigned level) {
+    size_t nchildren = ZU(1) << rtree_levels[level].bits;
+    if (level + 2 < RTREE_HEIGHT) {
+        for (size_t i = 0; i < nchildren; i++) {
+            rtree_node_elm_t *node =
+                (rtree_node_elm_t *)atomic_load_p(&subtree[i].child,
+                ATOMIC_RELAXED);
+            if (node != NULL) {
+                rtree_delete_subtree(tsdn, rtree, node, level +
                     1);
             }
         }
+    } else {
+        for (size_t i = 0; i < nchildren; i++) {
+            rtree_leaf_elm_t *leaf =
+                (rtree_leaf_elm_t *)atomic_load_p(&subtree[i].child,
+                ATOMIC_RELAXED);
+            if (leaf != NULL) {
+                rtree_leaf_dalloc(tsdn, rtree, leaf);
+            }
+        }
     }
-    rtree_node_dalloc(tsdn, rtree, node);
+
+    rtree_node_dalloc(tsdn, rtree, subtree);
 }
 
 void
 rtree_delete(tsdn_t *tsdn, rtree_t *rtree) {
-    rtree_elm_t *rtree_root = (rtree_elm_t *)atomic_load_p(&rtree->root,
-        ATOMIC_RELAXED);
-    if (rtree_root != NULL) {
-        rtree_delete_subtree(tsdn, rtree, rtree_root, 0);
+    if (RTREE_HEIGHT > 1) {
+        rtree_node_elm_t *node = (rtree_node_elm_t *)atomic_load_p(
+            &rtree->root, ATOMIC_RELAXED);
+        if (node != NULL) {
+            rtree_delete_subtree(tsdn, rtree, node, 0);
+        }
+    } else {
+        rtree_leaf_elm_t *leaf =
+            (rtree_leaf_elm_t *)atomic_load_p(&rtree->root,
+            ATOMIC_RELAXED);
+        if (leaf != NULL) {
+            rtree_leaf_dalloc(tsdn, rtree, leaf);
+        }
     }
 }
 #endif
 
-static rtree_elm_t *
+static rtree_node_elm_t *
 rtree_node_init(tsdn_t *tsdn, rtree_t *rtree, unsigned level,
     atomic_p_t *elmp) {
-    rtree_elm_t *node;
-
     malloc_mutex_lock(tsdn, &rtree->init_lock);
     /*
      * If *elmp is non-null, then it was initialized with the init lock
      * held, so we can get by with 'relaxed' here.
      */
-    node = atomic_load_p(elmp, ATOMIC_RELAXED);
+    rtree_node_elm_t *node = atomic_load_p(elmp, ATOMIC_RELAXED);
     if (node == NULL) {
         node = rtree_node_alloc(tsdn, rtree, ZU(1) <<
             rtree_levels[level].bits);
@@ -104,90 +150,186 @@ rtree_node_init(tsdn_t *tsdn, rtree_t *rtree, unsigned level,
     return node;
 }
 
+static rtree_leaf_elm_t *
+rtree_leaf_init(tsdn_t *tsdn, rtree_t *rtree, atomic_p_t *elmp) {
+    malloc_mutex_lock(tsdn, &rtree->init_lock);
+    /*
+     * If *elmp is non-null, then it was initialized with the init lock
+     * held, so we can get by with 'relaxed' here.
+     */
+    rtree_leaf_elm_t *leaf = atomic_load_p(elmp, ATOMIC_RELAXED);
+    if (leaf == NULL) {
+        leaf = rtree_leaf_alloc(tsdn, rtree, ZU(1) <<
+            rtree_levels[RTREE_HEIGHT-1].bits);
+        if (leaf == NULL) {
+            malloc_mutex_unlock(tsdn, &rtree->init_lock);
+            return NULL;
+        }
+        /*
+         * Even though we hold the lock, a later reader might not; we
+         * need release semantics.
+         */
+        atomic_store_p(elmp, leaf, ATOMIC_RELEASE);
+    }
+    malloc_mutex_unlock(tsdn, &rtree->init_lock);
+
+    return leaf;
+}
+
 static bool
-rtree_node_valid(rtree_elm_t *node) {
+rtree_node_valid(rtree_node_elm_t *node) {
     return ((uintptr_t)node != (uintptr_t)0);
 }
 
-static rtree_elm_t *
-rtree_child_tryread(rtree_elm_t *elm, bool dependent) {
-    rtree_elm_t *child;
+static bool
+rtree_leaf_valid(rtree_leaf_elm_t *leaf) {
+    return ((uintptr_t)leaf != (uintptr_t)0);
+}
+
+static rtree_node_elm_t *
+rtree_child_node_tryread(rtree_node_elm_t *elm, bool dependent) {
+    rtree_node_elm_t *node;
 
     if (dependent) {
-        child = (rtree_elm_t *)atomic_load_p(&elm->child_or_extent,
+        node = (rtree_node_elm_t *)atomic_load_p(&elm->child,
             ATOMIC_RELAXED);
     } else {
-        child = (rtree_elm_t *)atomic_load_p(&elm->child_or_extent,
+        node = (rtree_node_elm_t *)atomic_load_p(&elm->child,
             ATOMIC_ACQUIRE);
     }
 
-    assert(!dependent || child != NULL);
-    return child;
+    assert(!dependent || node != NULL);
+    return node;
 }
 
-static rtree_elm_t *
-rtree_child_read(tsdn_t *tsdn, rtree_t *rtree, rtree_elm_t *elm, unsigned level,
-    bool dependent) {
-    rtree_elm_t *child;
+static rtree_node_elm_t *
+rtree_child_node_read(tsdn_t *tsdn, rtree_t *rtree, rtree_node_elm_t *elm,
+    unsigned level, bool dependent) {
+    rtree_node_elm_t *node;
 
-    child = rtree_child_tryread(elm, dependent);
-    if (!dependent && unlikely(!rtree_node_valid(child))) {
-        child = rtree_node_init(tsdn, rtree, level + 1,
-            &elm->child_or_extent);
+    node = rtree_child_node_tryread(elm, dependent);
+    if (!dependent && unlikely(!rtree_node_valid(node))) {
+        node = rtree_node_init(tsdn, rtree, level + 1, &elm->child);
     }
-    assert(!dependent || child != NULL);
-    return child;
+    assert(!dependent || node != NULL);
+    return node;
 }
 
-static rtree_elm_t *
-rtree_subtree_tryread(rtree_t *rtree, bool dependent) {
-    rtree_elm_t *subtree;
+static rtree_leaf_elm_t *
+rtree_child_leaf_tryread(rtree_node_elm_t *elm, bool dependent) {
+    rtree_leaf_elm_t *leaf;
 
     if (dependent) {
-        subtree = (rtree_elm_t *)atomic_load_p(&rtree->root,
+        leaf = (rtree_leaf_elm_t *)atomic_load_p(&elm->child,
             ATOMIC_RELAXED);
     } else {
-        subtree = (rtree_elm_t *)atomic_load_p(&rtree->root,
+        leaf = (rtree_leaf_elm_t *)atomic_load_p(&elm->child,
             ATOMIC_ACQUIRE);
     }
-    assert(!dependent || subtree != NULL);
-    return subtree;
+
+    assert(!dependent || leaf != NULL);
+    return leaf;
 }
 
-static rtree_elm_t *
-rtree_subtree_read(tsdn_t *tsdn, rtree_t *rtree, bool dependent) {
-    rtree_elm_t *subtree = rtree_subtree_tryread(rtree, dependent);
-    if (!dependent && unlikely(!rtree_node_valid(subtree))) {
-        subtree = rtree_node_init(tsdn, rtree, 0, &rtree->root);
+static rtree_leaf_elm_t *
+rtree_child_leaf_read(tsdn_t *tsdn, rtree_t *rtree, rtree_node_elm_t *elm,
+    unsigned level, bool dependent) {
+    rtree_leaf_elm_t *leaf;
+
+    leaf = rtree_child_leaf_tryread(elm, dependent);
+    if (!dependent && unlikely(!rtree_leaf_valid(leaf))) {
+        leaf = rtree_leaf_init(tsdn, rtree, &elm->child);
     }
-    assert(!dependent || subtree != NULL);
-    return subtree;
+    assert(!dependent || leaf != NULL);
+    return leaf;
 }
 
-rtree_elm_t *
-rtree_elm_lookup_hard(tsdn_t *tsdn, rtree_t *rtree, rtree_ctx_t *rtree_ctx,
+UNUSED static rtree_node_elm_t *
+rtree_root_node_tryread(rtree_t *rtree, bool dependent) {
+    rtree_node_elm_t *node;
+    if (dependent) {
+        node = (rtree_node_elm_t *)atomic_load_p(&rtree->root,
+            ATOMIC_RELAXED);
+    } else {
+        node = (rtree_node_elm_t *)atomic_load_p(&rtree->root,
+            ATOMIC_ACQUIRE);
+    }
+    assert(!dependent || node != NULL);
+    return node;
+}
+
+UNUSED static rtree_node_elm_t *
+rtree_root_node_read(tsdn_t *tsdn, rtree_t *rtree, bool dependent) {
+    rtree_node_elm_t *node = rtree_root_node_tryread(rtree, dependent);
+    if (!dependent && unlikely(!rtree_node_valid(node))) {
+        node = rtree_node_init(tsdn, rtree, 0, &rtree->root);
+    }
+    assert(!dependent || node != NULL);
+    return node;
+}
+
+UNUSED static rtree_leaf_elm_t *
+rtree_root_leaf_tryread(rtree_t *rtree, bool dependent) {
+    rtree_leaf_elm_t *leaf;
+    if (dependent) {
+        leaf = (rtree_leaf_elm_t *)atomic_load_p(&rtree->root,
+            ATOMIC_RELAXED);
+    } else {
+        leaf = (rtree_leaf_elm_t *)atomic_load_p(&rtree->root,
+            ATOMIC_ACQUIRE);
+    }
+    assert(!dependent || leaf != NULL);
+    return leaf;
+}
+
+UNUSED static rtree_leaf_elm_t *
+rtree_root_leaf_read(tsdn_t *tsdn, rtree_t *rtree, bool dependent) {
+    rtree_leaf_elm_t *leaf = rtree_root_leaf_tryread(rtree, dependent);
+    if (!dependent && unlikely(!rtree_leaf_valid(leaf))) {
+        leaf = rtree_leaf_init(tsdn, rtree, &rtree->root);
+    }
+    assert(!dependent || leaf != NULL);
+    return leaf;
+}
+
+rtree_leaf_elm_t *
+rtree_leaf_elm_lookup_hard(tsdn_t *tsdn, rtree_t *rtree, rtree_ctx_t *rtree_ctx,
     uintptr_t key, bool dependent, bool init_missing) {
-    rtree_elm_t *node = init_missing ? rtree_subtree_read(tsdn, rtree,
-        dependent) : rtree_subtree_tryread(rtree, dependent);
-#define RTREE_GET_SUBTREE(level) { \
+    rtree_node_elm_t *node;
+    rtree_leaf_elm_t *leaf;
+#if RTREE_HEIGHT > 1
+    node = init_missing ? rtree_root_node_read(tsdn, rtree, dependent) :
+        rtree_root_node_tryread(rtree, dependent);
+#else
+    leaf = init_missing ? rtree_root_leaf_read(tsdn, rtree, dependent) :
+        rtree_root_leaf_tryread(rtree, dependent);
+#endif
+
+#define RTREE_GET_CHILD(level) { \
         assert(level < RTREE_HEIGHT-1); \
         if (!dependent && unlikely(!rtree_node_valid(node))) { \
             return NULL; \
         } \
         uintptr_t subkey = rtree_subkey(key, level); \
-        node = init_missing ? rtree_child_read(tsdn, rtree, \
-            &node[subkey], level, dependent) : \
-            rtree_child_tryread(&node[subkey], dependent); \
+        if (level + 2 < RTREE_HEIGHT) { \
+            node = init_missing ? \
+                rtree_child_node_read(tsdn, rtree, \
+                &node[subkey], level, dependent) : \
+                rtree_child_node_tryread(&node[subkey], \
+                dependent); \
+        } else { \
+            leaf = init_missing ? \
+                rtree_child_leaf_read(tsdn, rtree, \
+                &node[subkey], level, dependent) : \
+                rtree_child_leaf_tryread(&node[subkey], \
+                dependent); \
+        } \
     }
 #define RTREE_GET_LEAF(level) { \
         assert(level == RTREE_HEIGHT-1); \
-        if (!dependent && unlikely(!rtree_node_valid(node))) { \
+        if (!dependent && unlikely(!rtree_leaf_valid(leaf))) { \
             return NULL; \
         } \
-        /* \
-         * node is a leaf, so it contains values rather than \
-         * child pointers. \
-         */ \
         if (RTREE_CTX_NCACHE > 1) { \
             memmove(&rtree_ctx->cache[1], \
                 &rtree_ctx->cache[0], \
@@ -196,29 +338,29 @@ rtree_elm_lookup_hard(tsdn_t *tsdn, rtree_t *rtree, rtree_ctx_t *rtree_ctx,
         } \
         uintptr_t leafkey = rtree_leafkey(key); \
         rtree_ctx->cache[0].leafkey = leafkey; \
-        rtree_ctx->cache[0].leaf = node; \
+        rtree_ctx->cache[0].leaf = leaf; \
        uintptr_t subkey = rtree_subkey(key, level); \
-        return &node[subkey]; \
+        return &leaf[subkey]; \
     }
     if (RTREE_HEIGHT > 1) {
-        RTREE_GET_SUBTREE(0)
+        RTREE_GET_CHILD(0)
     }
     if (RTREE_HEIGHT > 2) {
-        RTREE_GET_SUBTREE(1)
+        RTREE_GET_CHILD(1)
     }
     if (RTREE_HEIGHT > 3) {
         for (unsigned i = 2; i < RTREE_HEIGHT-1; i++) {
-            RTREE_GET_SUBTREE(i)
+            RTREE_GET_CHILD(i)
         }
     }
     RTREE_GET_LEAF(RTREE_HEIGHT-1)
-#undef RTREE_GET_SUBTREE
+#undef RTREE_GET_CHILD
 #undef RTREE_GET_LEAF
     not_reached();
 }
 
 static int
-rtree_elm_witness_comp(const witness_t *a, void *oa, const witness_t *b,
+rtree_leaf_elm_witness_comp(const witness_t *a, void *oa, const witness_t *b,
     void *ob) {
     uintptr_t ka = (uintptr_t)oa;
     uintptr_t kb = (uintptr_t)ob;
@@ -230,23 +372,24 @@ rtree_elm_witness_comp(const witness_t *a, void *oa, const witness_t *b,
 }
 
 static witness_t *
-rtree_elm_witness_alloc(tsd_t *tsd, uintptr_t key, const rtree_elm_t *elm) {
+rtree_leaf_elm_witness_alloc(tsd_t *tsd, uintptr_t key,
+    const rtree_leaf_elm_t *elm) {
     witness_t *witness;
     size_t i;
-    rtree_elm_witness_tsd_t *witnesses = tsd_rtree_elm_witnessesp_get(tsd);
+    rtree_leaf_elm_witness_tsd_t *witnesses =
+        tsd_rtree_leaf_elm_witnessesp_get(tsd);
 
     /* Iterate over entire array to detect double allocation attempts. */
     witness = NULL;
-    for (i = 0; i < sizeof(rtree_elm_witness_tsd_t) / sizeof(witness_t);
-        i++) {
-        rtree_elm_witness_t *rew = &witnesses->witnesses[i];
+    for (i = 0; i < RTREE_ELM_ACQUIRE_MAX; i++) {
+        rtree_leaf_elm_witness_t *rew = &witnesses->witnesses[i];
 
         assert(rew->elm != elm);
         if (rew->elm == NULL && witness == NULL) {
             rew->elm = elm;
             witness = &rew->witness;
-            witness_init(witness, "rtree_elm",
-                WITNESS_RANK_RTREE_ELM, rtree_elm_witness_comp,
+            witness_init(witness, "rtree_leaf_elm",
+                WITNESS_RANK_RTREE_ELM, rtree_leaf_elm_witness_comp,
                 (void *)key);
         }
     }
@@ -255,13 +398,13 @@ rtree_elm_witness_alloc(tsd_t *tsd, uintptr_t key, const rtree_elm_t *elm) {
 }
 
 static witness_t *
-rtree_elm_witness_find(tsd_t *tsd, const rtree_elm_t *elm) {
+rtree_leaf_elm_witness_find(tsd_t *tsd, const rtree_leaf_elm_t *elm) {
     size_t i;
-    rtree_elm_witness_tsd_t *witnesses = tsd_rtree_elm_witnessesp_get(tsd);
+    rtree_leaf_elm_witness_tsd_t *witnesses =
+        tsd_rtree_leaf_elm_witnessesp_get(tsd);
 
-    for (i = 0; i < sizeof(rtree_elm_witness_tsd_t) / sizeof(witness_t);
-        i++) {
-        rtree_elm_witness_t *rew = &witnesses->witnesses[i];
+    for (i = 0; i < RTREE_ELM_ACQUIRE_MAX; i++) {
+        rtree_leaf_elm_witness_t *rew = &witnesses->witnesses[i];
 
         if (rew->elm == elm) {
             return &rew->witness;
@@ -271,19 +414,19 @@ rtree_elm_witness_find(tsd_t *tsd, const rtree_elm_t *elm) {
 }
 
 static void
-rtree_elm_witness_dalloc(tsd_t *tsd, witness_t *witness,
-    const rtree_elm_t *elm) {
+rtree_leaf_elm_witness_dalloc(tsd_t *tsd, witness_t *witness,
+    const rtree_leaf_elm_t *elm) {
     size_t i;
-    rtree_elm_witness_tsd_t *witnesses = tsd_rtree_elm_witnessesp_get(tsd);
+    rtree_leaf_elm_witness_tsd_t *witnesses =
+        tsd_rtree_leaf_elm_witnessesp_get(tsd);
 
-    for (i = 0; i < sizeof(rtree_elm_witness_tsd_t) / sizeof(witness_t);
-        i++) {
-        rtree_elm_witness_t *rew = &witnesses->witnesses[i];
+    for (i = 0; i < RTREE_ELM_ACQUIRE_MAX; i++) {
+        rtree_leaf_elm_witness_t *rew = &witnesses->witnesses[i];
 
         if (rew->elm == elm) {
             rew->elm = NULL;
-            witness_init(&rew->witness, "rtree_elm",
-                WITNESS_RANK_RTREE_ELM, rtree_elm_witness_comp,
+            witness_init(&rew->witness, "rtree_leaf_elm",
+                WITNESS_RANK_RTREE_ELM, rtree_leaf_elm_witness_comp,
                 NULL);
             return;
         }
@@ -292,41 +435,41 @@ rtree_elm_witness_dalloc(tsd_t *tsd, witness_t *witness,
 }
 
 void
-rtree_elm_witness_acquire(tsdn_t *tsdn, const rtree_t *rtree, uintptr_t key,
-    const rtree_elm_t *elm) {
+rtree_leaf_elm_witness_acquire(tsdn_t *tsdn, const rtree_t *rtree,
+    uintptr_t key, const rtree_leaf_elm_t *elm) {
     witness_t *witness;
 
     if (tsdn_null(tsdn)) {
         return;
     }
 
-    witness = rtree_elm_witness_alloc(tsdn_tsd(tsdn), key, elm);
+    witness = rtree_leaf_elm_witness_alloc(tsdn_tsd(tsdn), key, elm);
     witness_lock(tsdn, witness);
 }
 
 void
-rtree_elm_witness_access(tsdn_t *tsdn, const rtree_t *rtree,
-    const rtree_elm_t *elm) {
+rtree_leaf_elm_witness_access(tsdn_t *tsdn, const rtree_t *rtree,
+    const rtree_leaf_elm_t *elm) {
     witness_t *witness;
 
     if (tsdn_null(tsdn)) {
        return;
     }
 
-    witness = rtree_elm_witness_find(tsdn_tsd(tsdn), elm);
+    witness = rtree_leaf_elm_witness_find(tsdn_tsd(tsdn), elm);
     witness_assert_owner(tsdn, witness);
 }
 
 void
-rtree_elm_witness_release(tsdn_t *tsdn, const rtree_t *rtree,
-    const rtree_elm_t *elm) {
+rtree_leaf_elm_witness_release(tsdn_t *tsdn, const rtree_t *rtree,
+    const rtree_leaf_elm_t *elm) {
     witness_t *witness;
 
     if (tsdn_null(tsdn)) {
        return;
     }
 
-    witness = rtree_elm_witness_find(tsdn_tsd(tsdn), elm);
+    witness = rtree_leaf_elm_witness_find(tsdn_tsd(tsdn), elm);
     witness_unlock(tsdn, witness);
-    rtree_elm_witness_dalloc(tsdn_tsd(tsdn), witness, elm);
+    rtree_leaf_elm_witness_dalloc(tsdn_tsd(tsdn), witness, elm);
 }
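A note on the synchronization in rtree_node_init()/rtree_leaf_init() above: initialization is double-checked under init_lock, with a relaxed load once the lock is held and a release store to publish the new node or leaf to readers that never take the lock. The following is a self-contained sketch of the same pattern using standard C11 atomics and pthreads rather than jemalloc's atomic_p_t/malloc_mutex wrappers (all names here are illustrative):

#include <pthread.h>
#include <stdatomic.h>
#include <stdlib.h>

static pthread_mutex_t init_lock = PTHREAD_MUTEX_INITIALIZER;

static void *
lazy_init(_Atomic(void *) *slotp, size_t sz) {
    pthread_mutex_lock(&init_lock);
    /* Under the lock a relaxed load suffices: any prior publication
     * happened while this same lock was held. */
    void *p = atomic_load_explicit(slotp, memory_order_relaxed);
    if (p == NULL) {
        p = calloc(1, sz);
        if (p == NULL) {
            pthread_mutex_unlock(&init_lock);
            return NULL;
        }
        /* Later readers may skip the lock entirely, so publish with
         * release semantics (paired with their acquire loads). */
        atomic_store_explicit(slotp, p, memory_order_release);
    }
    pthread_mutex_unlock(&init_lock);
    return p;
}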