2017-01-20 13:41:41 +08:00
|
|
|
#define JEMALLOC_RTREE_C_
|
2010-09-06 01:35:13 +08:00
|
|
|
#include "jemalloc/internal/jemalloc_internal.h"
|
|
|
|
|
2015-01-31 14:54:08 +08:00
|
|
|
/* Return the smaller of two unsigned values. */
static unsigned
hmin(unsigned ha, unsigned hb) {
	if (ha < hb) {
		return ha;
	}
	return hb;
}
|
|
|
|
|
2016-03-28 18:06:35 +08:00
|
|
|
/*
 * Only the most significant bits of keys passed to rtree_{read,write}() are
 * used.
 */
|
2015-01-31 14:54:08 +08:00
|
|
|
/*
 * Initialize *rtree to map keys that have the given number of significant
 * bits.  Returns false on success (jemalloc's bool error convention); this
 * implementation cannot fail, so it always returns false.
 */
bool
rtree_new(rtree_t *rtree, unsigned bits) {
	unsigned bits_in_leaf, height, i;

	assert(RTREE_HEIGHT_MAX == ((ZU(1) << (LG_SIZEOF_PTR+3)) /
	    RTREE_BITS_PER_LEVEL));
	assert(bits > 0 && bits <= (sizeof(uintptr_t) << 3));

	/*
	 * The leaf level absorbs the remainder bits (or a full level's worth
	 * when bits divides evenly).
	 */
	bits_in_leaf = (bits % RTREE_BITS_PER_LEVEL) == 0 ? RTREE_BITS_PER_LEVEL
	    : (bits % RTREE_BITS_PER_LEVEL);
	if (bits > bits_in_leaf) {
		height = 1 + (bits - bits_in_leaf) / RTREE_BITS_PER_LEVEL;
		if ((height-1) * RTREE_BITS_PER_LEVEL + bits_in_leaf != bits) {
			height++;
		}
	} else {
		height = 1;
	}
	/* All bits must be exactly consumed by the chosen level widths. */
	assert((height-1) * RTREE_BITS_PER_LEVEL + bits_in_leaf == bits);

	rtree->height = height;

	/* Root level. */
	rtree->levels[0].subtree = NULL;
	rtree->levels[0].bits = (height > 1) ? RTREE_BITS_PER_LEVEL :
	    bits_in_leaf;
	rtree->levels[0].cumbits = rtree->levels[0].bits;
	/* Interior levels. */
	for (i = 1; i < height-1; i++) {
		rtree->levels[i].subtree = NULL;
		rtree->levels[i].bits = RTREE_BITS_PER_LEVEL;
		rtree->levels[i].cumbits = rtree->levels[i-1].cumbits +
		    RTREE_BITS_PER_LEVEL;
	}
	/* Leaf level. */
	if (height > 1) {
		rtree->levels[height-1].subtree = NULL;
		rtree->levels[height-1].bits = bits_in_leaf;
		rtree->levels[height-1].cumbits = bits;
	}

	/* Compute lookup table to be used by rtree_[ctx_]start_level(). */
	for (i = 0; i < RTREE_HEIGHT_MAX; i++) {
		rtree->start_level[i] = hmin(RTREE_HEIGHT_MAX - 1 - i, height -
		    1);
	}
	rtree->start_level[RTREE_HEIGHT_MAX] = 0;

	malloc_mutex_init(&rtree->init_lock, "rtree", WITNESS_RANK_RTREE);

	return false;
}
|
2012-10-10 05:46:22 +08:00
|
|
|
|
2016-04-16 15:36:11 +08:00
|
|
|
#ifdef JEMALLOC_JET
#undef rtree_node_alloc
#define rtree_node_alloc JEMALLOC_N(rtree_node_alloc_impl)
#endif
/*
 * Allocate an array of nelms rtree_elm_t from the base allocator,
 * CACHELINE-aligned.
 */
static rtree_elm_t *
rtree_node_alloc(tsdn_t *tsdn, rtree_t *rtree, size_t nelms) {
	return (rtree_elm_t *)base_alloc(tsdn, b0get(), nelms *
	    sizeof(rtree_elm_t), CACHELINE);
}
#ifdef JEMALLOC_JET
#undef rtree_node_alloc
#define rtree_node_alloc JEMALLOC_N(rtree_node_alloc)
/* Under JEMALLOC_JET the allocator is an interposable function pointer. */
rtree_node_alloc_t *rtree_node_alloc = JEMALLOC_N(rtree_node_alloc_impl);
#endif
|
|
|
|
|
|
|
|
#ifdef JEMALLOC_JET
#undef rtree_node_dalloc
#define rtree_node_dalloc JEMALLOC_N(rtree_node_dalloc_impl)
#endif
/*
 * Discard an rtree node.  Reaching this default implementation is a bug;
 * tests built with JEMALLOC_JET interpose their own via the function pointer
 * below.
 */
UNUSED static void
rtree_node_dalloc(tsdn_t *tsdn, rtree_t *rtree, rtree_elm_t *node) {
	/* Nodes are never deleted during normal operation. */
	not_reached();
}
#ifdef JEMALLOC_JET
#undef rtree_node_dalloc
#define rtree_node_dalloc JEMALLOC_N(rtree_node_dalloc)
rtree_node_dalloc_t *rtree_node_dalloc = JEMALLOC_N(rtree_node_dalloc_impl);
#endif
|
|
|
|
|
|
|
|
#ifdef JEMALLOC_JET
/*
 * Recursively tear down the subtree rooted at node (which lives at the given
 * level), then release node itself.
 */
static void
rtree_delete_subtree(tsdn_t *tsdn, rtree_t *rtree, rtree_elm_t *node,
    unsigned level) {
	if (level + 1 < rtree->height) {
		size_t fanout, j;

		fanout = ZU(1) << rtree->levels[level].bits;
		for (j = 0; j < fanout; j++) {
			rtree_elm_t *child = node[j].child;
			if (child == NULL) {
				continue;
			}
			rtree_delete_subtree(tsdn, rtree, child, level + 1);
		}
	}
	rtree_node_dalloc(tsdn, rtree, node);
}

/* Release every subtree the rtree ever instantiated (test-only helper). */
void
rtree_delete(tsdn_t *tsdn, rtree_t *rtree) {
	unsigned lvl;

	for (lvl = 0; lvl < rtree->height; lvl++) {
		rtree_elm_t *root = rtree->levels[lvl].subtree;
		if (root != NULL) {
			rtree_delete_subtree(tsdn, rtree, root, lvl);
		}
	}
}
#endif
|
2014-01-03 08:08:28 +08:00
|
|
|
|
2016-03-28 18:06:35 +08:00
|
|
|
/*
 * Lazily create the node array pointed to by *elmp (at the given tree level),
 * serializing concurrent initializers via rtree->init_lock.
 *
 * Returns the node, or NULL if allocation failed.
 */
static rtree_elm_t *
rtree_node_init(tsdn_t *tsdn, rtree_t *rtree, unsigned level,
    rtree_elm_t **elmp) {
	rtree_elm_t *node;

	malloc_mutex_lock(tsdn, &rtree->init_lock);
	/* Re-read under the lock; another thread may have initialized *elmp. */
	node = atomic_read_p((void**)elmp);
	if (node == NULL) {
		node = rtree_node_alloc(tsdn, rtree, ZU(1) <<
		    rtree->levels[level].bits);
		if (node == NULL) {
			malloc_mutex_unlock(tsdn, &rtree->init_lock);
			return NULL;
		}
		/* Publish the fully constructed node for lock-free readers. */
		atomic_write_p((void **)elmp, node);
	}
	malloc_mutex_unlock(tsdn, &rtree->init_lock);

	return node;
}
|
|
|
|
|
2017-02-04 11:44:33 +08:00
|
|
|
static unsigned
|
|
|
|
rtree_start_level(const rtree_t *rtree, uintptr_t key) {
|
|
|
|
unsigned start_level;
|
|
|
|
|
|
|
|
if (unlikely(key == 0)) {
|
|
|
|
return rtree->height - 1;
|
|
|
|
}
|
|
|
|
|
|
|
|
start_level = rtree->start_level[(lg_floor(key) + 1) >>
|
|
|
|
LG_RTREE_BITS_PER_LEVEL];
|
|
|
|
assert(start_level < rtree->height);
|
|
|
|
return start_level;
|
2012-10-10 05:46:22 +08:00
|
|
|
}
|
|
|
|
|
2017-02-04 11:44:33 +08:00
|
|
|
static bool
|
|
|
|
rtree_node_valid(rtree_elm_t *node) {
|
|
|
|
return ((uintptr_t)node != (uintptr_t)0);
|
|
|
|
}
|
|
|
|
|
|
|
|
/*
 * Read elm's child pointer without initializing it on demand.
 *
 * dependent: the caller guarantees the child must already exist (its own
 * correctness depends on a prior write), so the plain read suffices and the
 * result is asserted non-NULL.  May return NULL when !dependent.
 */
static rtree_elm_t *
rtree_child_tryread(rtree_elm_t *elm, bool dependent) {
	rtree_elm_t *child;

	/* Double-checked read (first read may be stale). */
	child = elm->child;
	if (!dependent && !rtree_node_valid(child)) {
		child = (rtree_elm_t *)atomic_read_p(&elm->pun);
	}
	assert(!dependent || child != NULL);
	return child;
}
|
|
|
|
|
|
|
|
static rtree_elm_t *
|
|
|
|
rtree_child_read(tsdn_t *tsdn, rtree_t *rtree, rtree_elm_t *elm, unsigned level,
|
|
|
|
bool dependent) {
|
|
|
|
rtree_elm_t *child;
|
|
|
|
|
|
|
|
child = rtree_child_tryread(elm, dependent);
|
|
|
|
if (!dependent && unlikely(!rtree_node_valid(child))) {
|
|
|
|
child = rtree_node_init(tsdn, rtree, level+1, &elm->child);
|
|
|
|
}
|
|
|
|
assert(!dependent || child != NULL);
|
|
|
|
return child;
|
|
|
|
}
|
|
|
|
|
|
|
|
/*
 * Read the root node of the subtree at the given level without initializing
 * it on demand; see rtree_child_tryread() for the dependent contract.
 */
static rtree_elm_t *
rtree_subtree_tryread(rtree_t *rtree, unsigned level, bool dependent) {
	rtree_elm_t *subtree;

	/* Double-checked read (first read may be stale). */
	subtree = rtree->levels[level].subtree;
	if (!dependent && unlikely(!rtree_node_valid(subtree))) {
		subtree = (rtree_elm_t *)atomic_read_p(
		    &rtree->levels[level].subtree_pun);
	}
	assert(!dependent || subtree != NULL);
	return subtree;
}
|
|
|
|
|
|
|
|
static rtree_elm_t *
|
|
|
|
rtree_subtree_read(tsdn_t *tsdn, rtree_t *rtree, unsigned level,
|
|
|
|
bool dependent) {
|
|
|
|
rtree_elm_t *subtree;
|
|
|
|
|
|
|
|
subtree = rtree_subtree_tryread(rtree, level, dependent);
|
|
|
|
if (!dependent && unlikely(!rtree_node_valid(subtree))) {
|
|
|
|
subtree = rtree_node_init(tsdn, rtree, level,
|
|
|
|
&rtree->levels[level].subtree);
|
|
|
|
}
|
|
|
|
assert(!dependent || subtree != NULL);
|
|
|
|
return subtree;
|
2012-10-10 05:46:22 +08:00
|
|
|
}
|
2016-04-18 03:55:10 +08:00
|
|
|
|
2017-02-04 12:12:49 +08:00
|
|
|
/*
 * Slow-path lookup of the leaf element for key, walking from the start level
 * down to the leaves (used when the rtree_ctx cache misses).  The switch is
 * unrolled one case per possible level via macros so the compiler can
 * specialize each depth; control falls through successive RTREE_GET_SUBTREE
 * cases until RTREE_GET_LEAF returns.
 *
 * dependent: the caller guarantees the mapping exists, so NULL is never
 * returned.  init_missing: create intervening nodes on demand instead of
 * failing.
 */
rtree_elm_t *
rtree_elm_lookup_hard(tsdn_t *tsdn, rtree_t *rtree, rtree_ctx_t *rtree_ctx,
    uintptr_t key, bool dependent, bool init_missing) {
	unsigned start_level = rtree_start_level(rtree, key);
	rtree_elm_t *node = init_missing ? rtree_subtree_read(tsdn, rtree,
	    start_level, dependent) : rtree_subtree_tryread(rtree, start_level,
	    dependent);

/*
 * Bias level numbers so that the case labels below can be written in terms of
 * RTREE_HEIGHT_MAX regardless of this tree's actual height.
 */
#define RTREE_GET_BIAS (RTREE_HEIGHT_MAX - rtree->height)
	switch (start_level + RTREE_GET_BIAS) {
#define RTREE_GET_SUBTREE(level)					\
	case level: {							\
		assert(level < (RTREE_HEIGHT_MAX-1));			\
		if (!dependent && unlikely(!rtree_node_valid(node))) {	\
			return NULL;					\
		}							\
		uintptr_t subkey = rtree_subkey(rtree, key, level -	\
		    RTREE_GET_BIAS);					\
		node = init_missing ? rtree_child_read(tsdn, rtree,	\
		    &node[subkey], level - RTREE_GET_BIAS, dependent) :	\
		    rtree_child_tryread(&node[subkey], dependent);	\
		/* Fall through. */					\
	}
#define RTREE_GET_LEAF(level)						\
	case level: {							\
		assert(level == (RTREE_HEIGHT_MAX-1));			\
		if (!dependent && unlikely(!rtree_node_valid(node))) {	\
			return NULL;					\
		}							\
		/*							\
		 * node is a leaf, so it contains values rather than	\
		 * child pointers.					\
		 */							\
		if (likely(key != 0)) {					\
			if (RTREE_CTX_NCACHE > 1) {			\
				memmove(&rtree_ctx->cache[1],		\
				    &rtree_ctx->cache[0],		\
				    sizeof(rtree_ctx_cache_elm_t) *	\
				    (RTREE_CTX_NCACHE-1));		\
			}						\
			uintptr_t leafkey = rtree_leafkey(rtree, key);	\
			rtree_ctx->cache[0].leafkey = leafkey;		\
			rtree_ctx->cache[0].leaf = node;		\
		}							\
		uintptr_t subkey = rtree_subkey(rtree, key, level -	\
		    RTREE_GET_BIAS);					\
		return &node[subkey];					\
	}
#if RTREE_HEIGHT_MAX > 1
	RTREE_GET_SUBTREE(0)
#endif
#if RTREE_HEIGHT_MAX > 2
	RTREE_GET_SUBTREE(1)
#endif
#if RTREE_HEIGHT_MAX > 3
	RTREE_GET_SUBTREE(2)
#endif
#if RTREE_HEIGHT_MAX > 4
	RTREE_GET_SUBTREE(3)
#endif
#if RTREE_HEIGHT_MAX > 5
	RTREE_GET_SUBTREE(4)
#endif
#if RTREE_HEIGHT_MAX > 6
	RTREE_GET_SUBTREE(5)
#endif
#if RTREE_HEIGHT_MAX > 7
	RTREE_GET_SUBTREE(6)
#endif
#if RTREE_HEIGHT_MAX > 8
	RTREE_GET_SUBTREE(7)
#endif
#if RTREE_HEIGHT_MAX > 9
	RTREE_GET_SUBTREE(8)
#endif
#if RTREE_HEIGHT_MAX > 10
	RTREE_GET_SUBTREE(9)
#endif
#if RTREE_HEIGHT_MAX > 11
	RTREE_GET_SUBTREE(10)
#endif
#if RTREE_HEIGHT_MAX > 12
	RTREE_GET_SUBTREE(11)
#endif
#if RTREE_HEIGHT_MAX > 13
	RTREE_GET_SUBTREE(12)
#endif
#if RTREE_HEIGHT_MAX > 14
	RTREE_GET_SUBTREE(13)
#endif
#if RTREE_HEIGHT_MAX > 15
	RTREE_GET_SUBTREE(14)
#endif
#if RTREE_HEIGHT_MAX > 16
#  error Unsupported RTREE_HEIGHT_MAX
#endif
	RTREE_GET_LEAF(RTREE_HEIGHT_MAX-1)
#undef RTREE_GET_SUBTREE
#undef RTREE_GET_LEAF
	default: not_reached();
	}
#undef RTREE_GET_BIAS
	not_reached();
}
|
|
|
|
|
2016-04-18 03:55:10 +08:00
|
|
|
static int
|
|
|
|
rtree_elm_witness_comp(const witness_t *a, void *oa, const witness_t *b,
|
2017-01-16 08:56:30 +08:00
|
|
|
void *ob) {
|
2016-04-18 03:55:10 +08:00
|
|
|
uintptr_t ka = (uintptr_t)oa;
|
|
|
|
uintptr_t kb = (uintptr_t)ob;
|
|
|
|
|
|
|
|
assert(ka != 0);
|
|
|
|
assert(kb != 0);
|
|
|
|
|
2017-01-20 10:15:45 +08:00
|
|
|
return (ka > kb) - (ka < kb);
|
2016-04-18 03:55:10 +08:00
|
|
|
}
|
|
|
|
|
|
|
|
static witness_t *
|
2017-01-16 08:56:30 +08:00
|
|
|
rtree_elm_witness_alloc(tsd_t *tsd, uintptr_t key, const rtree_elm_t *elm) {
|
2016-04-18 03:55:10 +08:00
|
|
|
witness_t *witness;
|
|
|
|
size_t i;
|
|
|
|
rtree_elm_witness_tsd_t *witnesses = tsd_rtree_elm_witnessesp_get(tsd);
|
|
|
|
|
|
|
|
/* Iterate over entire array to detect double allocation attempts. */
|
|
|
|
witness = NULL;
|
|
|
|
for (i = 0; i < sizeof(rtree_elm_witness_tsd_t) / sizeof(witness_t);
|
|
|
|
i++) {
|
|
|
|
rtree_elm_witness_t *rew = &witnesses->witnesses[i];
|
|
|
|
|
|
|
|
assert(rew->elm != elm);
|
|
|
|
if (rew->elm == NULL && witness == NULL) {
|
|
|
|
rew->elm = elm;
|
|
|
|
witness = &rew->witness;
|
|
|
|
witness_init(witness, "rtree_elm",
|
|
|
|
WITNESS_RANK_RTREE_ELM, rtree_elm_witness_comp,
|
|
|
|
(void *)key);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
assert(witness != NULL);
|
2017-01-20 10:15:45 +08:00
|
|
|
return witness;
|
2016-04-18 03:55:10 +08:00
|
|
|
}
|
|
|
|
|
|
|
|
static witness_t *
|
2017-01-16 08:56:30 +08:00
|
|
|
rtree_elm_witness_find(tsd_t *tsd, const rtree_elm_t *elm) {
|
2016-04-18 03:55:10 +08:00
|
|
|
size_t i;
|
|
|
|
rtree_elm_witness_tsd_t *witnesses = tsd_rtree_elm_witnessesp_get(tsd);
|
|
|
|
|
|
|
|
for (i = 0; i < sizeof(rtree_elm_witness_tsd_t) / sizeof(witness_t);
|
|
|
|
i++) {
|
|
|
|
rtree_elm_witness_t *rew = &witnesses->witnesses[i];
|
|
|
|
|
2017-01-16 08:56:30 +08:00
|
|
|
if (rew->elm == elm) {
|
2017-01-20 10:15:45 +08:00
|
|
|
return &rew->witness;
|
2017-01-16 08:56:30 +08:00
|
|
|
}
|
2016-04-18 03:55:10 +08:00
|
|
|
}
|
|
|
|
not_reached();
|
|
|
|
}
|
|
|
|
|
|
|
|
static void
|
2017-01-16 08:56:30 +08:00
|
|
|
rtree_elm_witness_dalloc(tsd_t *tsd, witness_t *witness,
|
|
|
|
const rtree_elm_t *elm) {
|
2016-04-18 03:55:10 +08:00
|
|
|
size_t i;
|
|
|
|
rtree_elm_witness_tsd_t *witnesses = tsd_rtree_elm_witnessesp_get(tsd);
|
|
|
|
|
|
|
|
for (i = 0; i < sizeof(rtree_elm_witness_tsd_t) / sizeof(witness_t);
|
|
|
|
i++) {
|
|
|
|
rtree_elm_witness_t *rew = &witnesses->witnesses[i];
|
|
|
|
|
|
|
|
if (rew->elm == elm) {
|
|
|
|
rew->elm = NULL;
|
|
|
|
witness_init(&rew->witness, "rtree_elm",
|
|
|
|
WITNESS_RANK_RTREE_ELM, rtree_elm_witness_comp,
|
|
|
|
NULL);
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
not_reached();
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
rtree_elm_witness_acquire(tsdn_t *tsdn, const rtree_t *rtree, uintptr_t key,
|
2017-01-16 08:56:30 +08:00
|
|
|
const rtree_elm_t *elm) {
|
2016-04-18 03:55:10 +08:00
|
|
|
witness_t *witness;
|
|
|
|
|
2017-01-16 08:56:30 +08:00
|
|
|
if (tsdn_null(tsdn)) {
|
2016-04-18 03:55:10 +08:00
|
|
|
return;
|
2017-01-16 08:56:30 +08:00
|
|
|
}
|
2016-04-18 03:55:10 +08:00
|
|
|
|
|
|
|
witness = rtree_elm_witness_alloc(tsdn_tsd(tsdn), key, elm);
|
|
|
|
witness_lock(tsdn, witness);
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
rtree_elm_witness_access(tsdn_t *tsdn, const rtree_t *rtree,
|
2017-01-16 08:56:30 +08:00
|
|
|
const rtree_elm_t *elm) {
|
2016-04-18 03:55:10 +08:00
|
|
|
witness_t *witness;
|
|
|
|
|
2017-01-16 08:56:30 +08:00
|
|
|
if (tsdn_null(tsdn)) {
|
2016-04-18 03:55:10 +08:00
|
|
|
return;
|
2017-01-16 08:56:30 +08:00
|
|
|
}
|
2016-04-18 03:55:10 +08:00
|
|
|
|
|
|
|
witness = rtree_elm_witness_find(tsdn_tsd(tsdn), elm);
|
|
|
|
witness_assert_owner(tsdn, witness);
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
rtree_elm_witness_release(tsdn_t *tsdn, const rtree_t *rtree,
|
2017-01-16 08:56:30 +08:00
|
|
|
const rtree_elm_t *elm) {
|
2016-04-18 03:55:10 +08:00
|
|
|
witness_t *witness;
|
|
|
|
|
2017-01-16 08:56:30 +08:00
|
|
|
if (tsdn_null(tsdn)) {
|
2016-04-18 03:55:10 +08:00
|
|
|
return;
|
2017-01-16 08:56:30 +08:00
|
|
|
}
|
2016-04-18 03:55:10 +08:00
|
|
|
|
|
|
|
witness = rtree_elm_witness_find(tsdn_tsd(tsdn), elm);
|
|
|
|
witness_unlock(tsdn, witness);
|
|
|
|
rtree_elm_witness_dalloc(tsdn_tsd(tsdn), witness, elm);
|
|
|
|
}
|