Split rtree_elm_t into rtree_{node,leaf}_elm_t.
This allows leaf elements to differ in size from internal node elements.

In principle it would be more correct to use a different type for each level of the tree, but due to implementation details related to atomic operations we use casts anyway, which counteracts the value of the additional type correctness. Furthermore, such a scheme would require function code generation (via cpp macros), as well as either unwieldy type names for leaves or type aliases, e.g.

    typedef struct rtree_elm_d2_s rtree_leaf_elm_t;

This alternate strategy would be more correct and would involve less code duplication, but it is probably not worth the complexity.
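For orientation, here is a minimal sketch of the two element shapes this commit introduces, condensed from the struct and comment changes in the diff below. The field layout and the low-bit lock convention are taken from the patch; atomic_p_t is jemalloc's atomic void-pointer wrapper, and a plain _Atomic pointer stands in for it here so the sketch is self-contained.

/* Sketch of the split (mirrors the rtree structs hunk further down). */
typedef _Atomic(void *) atomic_p_t;   /* stand-in for jemalloc's atomic_p_t */

typedef struct rtree_node_elm_s rtree_node_elm_t;
typedef struct rtree_leaf_elm_s rtree_leaf_elm_t;

/* Interior node element: points at the next level (more nodes, or leaves). */
struct rtree_node_elm_s {
    atomic_p_t child;   /* rtree_node_elm_t * or rtree_leaf_elm_t *, cast at use sites. */
};

/* Leaf element: holds the extent mapping; the low bit doubles as a per-element lock. */
struct rtree_leaf_elm_s {
    atomic_p_t extent;  /* extent_t *, low bit set while acquired. */
};

Because the root and the interior children are stored as untyped atomic pointers and cast on access, the extra per-level type safety mentioned in the message would not buy much, which is why one node type plus one leaf type was chosen.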
parent f50d6009fe
commit 944c8a3383
@@ -413,17 +413,19 @@ psz2ind
 psz2u
 rtree_clear
 rtree_delete
-rtree_elm_acquire
-rtree_elm_lookup
-rtree_elm_lookup_hard
-rtree_elm_read
-rtree_elm_read_acquired
-rtree_elm_release
-rtree_elm_witness_access
-rtree_elm_witness_acquire
-rtree_elm_witness_release
-rtree_elm_write
-rtree_elm_write_acquired
+rtree_leaf_alloc
+rtree_leaf_dalloc
+rtree_leaf_elm_acquire
+rtree_leaf_elm_lookup
+rtree_leaf_elm_lookup_hard
+rtree_leaf_elm_read
+rtree_leaf_elm_read_acquired
+rtree_leaf_elm_release
+rtree_leaf_elm_witness_access
+rtree_leaf_elm_witness_acquire
+rtree_leaf_elm_witness_release
+rtree_leaf_elm_write
+rtree_leaf_elm_write_acquired
 rtree_leafkey
 rtree_new
 rtree_node_alloc
@@ -513,7 +515,7 @@ tsd_prof_tdata_get
 tsd_prof_tdata_set
 tsd_prof_tdatap_get
 tsd_rtree_ctxp_get
-tsd_rtree_elm_witnessesp_get
+tsd_rtree_leaf_elm_witnessesp_get
 tsd_set
 tsd_tcache_enabled_get
 tsd_tcache_enabled_set
@@ -8,36 +8,40 @@
  * level.
  */
 static const rtree_level_t rtree_levels[] = {
-#if RTREE_NSB <= 10
+#if RTREE_HEIGHT == 1
 {RTREE_NSB, RTREE_NHIB + RTREE_NSB}
-#elif RTREE_NSB <= 36
+#elif RTREE_HEIGHT == 2
 {RTREE_NSB/2, RTREE_NHIB + RTREE_NSB/2},
 {RTREE_NSB/2 + RTREE_NSB%2, RTREE_NHIB + RTREE_NSB}
-#elif RTREE_NSB <= 52
+#elif RTREE_HEIGHT == 3
 {RTREE_NSB/3, RTREE_NHIB + RTREE_NSB/3},
 {RTREE_NSB/3 + RTREE_NSB%3/2,
 RTREE_NHIB + RTREE_NSB/3*2 + RTREE_NSB%3/2},
 {RTREE_NSB/3 + RTREE_NSB%3 - RTREE_NSB%3/2, RTREE_NHIB + RTREE_NSB}
 #else
-# error Unsupported number of significant virtual address bits
+# error Unsupported rtree height
 #endif
 };
 
 bool rtree_new(rtree_t *rtree);
 #ifdef JEMALLOC_JET
-typedef rtree_elm_t *(rtree_node_alloc_t)(tsdn_t *, rtree_t *, size_t);
+typedef rtree_node_elm_t *(rtree_node_alloc_t)(tsdn_t *, rtree_t *, size_t);
 extern rtree_node_alloc_t *rtree_node_alloc;
-typedef void (rtree_node_dalloc_t)(tsdn_t *, rtree_t *, rtree_elm_t *);
+typedef rtree_leaf_elm_t *(rtree_leaf_alloc_t)(tsdn_t *, rtree_t *, size_t);
+extern rtree_leaf_alloc_t *rtree_leaf_alloc;
+typedef void (rtree_node_dalloc_t)(tsdn_t *, rtree_t *, rtree_node_elm_t *);
 extern rtree_node_dalloc_t *rtree_node_dalloc;
+typedef void (rtree_leaf_dalloc_t)(tsdn_t *, rtree_t *, rtree_leaf_elm_t *);
+extern rtree_leaf_dalloc_t *rtree_leaf_dalloc;
 void rtree_delete(tsdn_t *tsdn, rtree_t *rtree);
 #endif
-rtree_elm_t *rtree_elm_lookup_hard(tsdn_t *tsdn, rtree_t *rtree,
+rtree_leaf_elm_t *rtree_leaf_elm_lookup_hard(tsdn_t *tsdn, rtree_t *rtree,
 rtree_ctx_t *rtree_ctx, uintptr_t key, bool dependent, bool init_missing);
-void rtree_elm_witness_acquire(tsdn_t *tsdn, const rtree_t *rtree,
-uintptr_t key, const rtree_elm_t *elm);
-void rtree_elm_witness_access(tsdn_t *tsdn, const rtree_t *rtree,
-const rtree_elm_t *elm);
-void rtree_elm_witness_release(tsdn_t *tsdn, const rtree_t *rtree,
-const rtree_elm_t *elm);
+void rtree_leaf_elm_witness_acquire(tsdn_t *tsdn, const rtree_t *rtree,
+uintptr_t key, const rtree_leaf_elm_t *elm);
+void rtree_leaf_elm_witness_access(tsdn_t *tsdn, const rtree_t *rtree,
+const rtree_leaf_elm_t *elm);
+void rtree_leaf_elm_witness_release(tsdn_t *tsdn, const rtree_t *rtree,
+const rtree_leaf_elm_t *elm);
 
 #endif /* JEMALLOC_INTERNAL_RTREE_EXTERNS_H */
@@ -4,21 +4,22 @@
 #ifndef JEMALLOC_ENABLE_INLINE
 uintptr_t rtree_leafkey(uintptr_t key);
 uintptr_t rtree_subkey(uintptr_t key, unsigned level);
-extent_t *rtree_elm_read(rtree_elm_t *elm, bool dependent);
-void rtree_elm_write(rtree_elm_t *elm, const extent_t *extent);
-rtree_elm_t *rtree_elm_lookup(tsdn_t *tsdn, rtree_t *rtree,
+extent_t *rtree_leaf_elm_read(rtree_leaf_elm_t *elm, bool dependent);
+void rtree_leaf_elm_write(rtree_leaf_elm_t *elm, const extent_t *extent);
+rtree_leaf_elm_t *rtree_leaf_elm_lookup(tsdn_t *tsdn, rtree_t *rtree,
 rtree_ctx_t *rtree_ctx, uintptr_t key, bool dependent, bool init_missing);
 bool rtree_write(tsdn_t *tsdn, rtree_t *rtree, rtree_ctx_t *rtree_ctx,
 uintptr_t key, const extent_t *extent);
 extent_t *rtree_read(tsdn_t *tsdn, rtree_t *rtree, rtree_ctx_t *rtree_ctx,
 uintptr_t key, bool dependent);
-rtree_elm_t *rtree_elm_acquire(tsdn_t *tsdn, rtree_t *rtree,
+rtree_leaf_elm_t *rtree_leaf_elm_acquire(tsdn_t *tsdn, rtree_t *rtree,
 rtree_ctx_t *rtree_ctx, uintptr_t key, bool dependent, bool init_missing);
-extent_t *rtree_elm_read_acquired(tsdn_t *tsdn, const rtree_t *rtree,
-rtree_elm_t *elm);
-void rtree_elm_write_acquired(tsdn_t *tsdn, const rtree_t *rtree,
-rtree_elm_t *elm, const extent_t *extent);
-void rtree_elm_release(tsdn_t *tsdn, const rtree_t *rtree, rtree_elm_t *elm);
+extent_t *rtree_leaf_elm_read_acquired(tsdn_t *tsdn, const rtree_t *rtree,
+rtree_leaf_elm_t *elm);
+void rtree_leaf_elm_write_acquired(tsdn_t *tsdn, const rtree_t *rtree,
+rtree_leaf_elm_t *elm, const extent_t *extent);
+void rtree_leaf_elm_release(tsdn_t *tsdn, const rtree_t *rtree,
+rtree_leaf_elm_t *elm);
 void rtree_clear(tsdn_t *tsdn, rtree_t *rtree, rtree_ctx_t *rtree_ctx,
 uintptr_t key);
 #endif
@@ -45,7 +46,7 @@ rtree_subkey(uintptr_t key, unsigned level) {
 }
 
 JEMALLOC_ALWAYS_INLINE extent_t *
-rtree_elm_read(rtree_elm_t *elm, bool dependent) {
+rtree_leaf_elm_read(rtree_leaf_elm_t *elm, bool dependent) {
 extent_t *extent;
 
 if (dependent) {
@@ -55,7 +56,7 @@ rtree_elm_read(rtree_elm_t *elm, bool dependent) {
 * synchronization, because the rtree update became visible in
 * memory before the pointer came into existence.
 */
-extent = (extent_t *)atomic_load_p(&elm->child_or_extent,
+extent = (extent_t *)atomic_load_p(&elm->extent,
 ATOMIC_RELAXED);
 } else {
 /*
@@ -63,7 +64,7 @@ rtree_elm_read(rtree_elm_t *elm, bool dependent) {
 * dependent on a previous rtree write, which means a stale read
 * could result if synchronization were omitted here.
 */
-extent = (extent_t *)atomic_load_p(&elm->child_or_extent,
+extent = (extent_t *)atomic_load_p(&elm->extent,
 ATOMIC_ACQUIRE);
 }
 
@@ -74,12 +75,12 @@ rtree_elm_read(rtree_elm_t *elm, bool dependent) {
 }
 
 JEMALLOC_INLINE void
-rtree_elm_write(rtree_elm_t *elm, const extent_t *extent) {
-atomic_store_p(&elm->child_or_extent, (void *)extent, ATOMIC_RELEASE);
+rtree_leaf_elm_write(rtree_leaf_elm_t *elm, const extent_t *extent) {
+atomic_store_p(&elm->extent, (void *)extent, ATOMIC_RELEASE);
 }
 
-JEMALLOC_ALWAYS_INLINE rtree_elm_t *
-rtree_elm_lookup(tsdn_t *tsdn, rtree_t *rtree, rtree_ctx_t *rtree_ctx,
+JEMALLOC_ALWAYS_INLINE rtree_leaf_elm_t *
+rtree_leaf_elm_lookup(tsdn_t *tsdn, rtree_t *rtree, rtree_ctx_t *rtree_ctx,
 uintptr_t key, bool dependent, bool init_missing) {
 assert(key != 0);
 assert(!dependent || !init_missing);
@@ -87,7 +88,7 @@ rtree_elm_lookup(tsdn_t *tsdn, rtree_t *rtree, rtree_ctx_t *rtree_ctx,
 uintptr_t leafkey = rtree_leafkey(key);
 #define RTREE_CACHE_CHECK(i) do { \
 if (likely(rtree_ctx->cache[i].leafkey == leafkey)) { \
-rtree_elm_t *leaf = rtree_ctx->cache[i].leaf; \
+rtree_leaf_elm_t *leaf = rtree_ctx->cache[i].leaf; \
 if (likely(leaf != NULL)) { \
 /* Reorder. */ \
 memmove(&rtree_ctx->cache[1], \
@@ -117,24 +118,24 @@ rtree_elm_lookup(tsdn_t *tsdn, rtree_t *rtree, rtree_ctx_t *rtree_ctx,
 }
 #undef RTREE_CACHE_CHECK
 
-return rtree_elm_lookup_hard(tsdn, rtree, rtree_ctx, key, dependent,
-init_missing);
+return rtree_leaf_elm_lookup_hard(tsdn, rtree, rtree_ctx, key,
+dependent, init_missing);
 }
 
 JEMALLOC_INLINE bool
 rtree_write(tsdn_t *tsdn, rtree_t *rtree, rtree_ctx_t *rtree_ctx, uintptr_t key,
 const extent_t *extent) {
-rtree_elm_t *elm;
+rtree_leaf_elm_t *elm;
 
 assert(extent != NULL); /* Use rtree_clear() for this case. */
 assert(((uintptr_t)extent & (uintptr_t)0x1) == (uintptr_t)0x0);
 
-elm = rtree_elm_lookup(tsdn, rtree, rtree_ctx, key, false, true);
+elm = rtree_leaf_elm_lookup(tsdn, rtree, rtree_ctx, key, false, true);
 if (elm == NULL) {
 return true;
 }
-assert(rtree_elm_read(elm, false) == NULL);
-rtree_elm_write(elm, extent);
+assert(rtree_leaf_elm_read(elm, false) == NULL);
+rtree_leaf_elm_write(elm, extent);
 
 return false;
 }
@@ -142,21 +143,22 @@ rtree_write(tsdn_t *tsdn, rtree_t *rtree, rtree_ctx_t *rtree_ctx, uintptr_t key,
 JEMALLOC_ALWAYS_INLINE extent_t *
 rtree_read(tsdn_t *tsdn, rtree_t *rtree, rtree_ctx_t *rtree_ctx, uintptr_t key,
 bool dependent) {
-rtree_elm_t *elm;
+rtree_leaf_elm_t *elm;
 
-elm = rtree_elm_lookup(tsdn, rtree, rtree_ctx, key, dependent, false);
+elm = rtree_leaf_elm_lookup(tsdn, rtree, rtree_ctx, key, dependent,
+false);
 if (!dependent && elm == NULL) {
 return NULL;
 }
 
-return rtree_elm_read(elm, dependent);
+return rtree_leaf_elm_read(elm, dependent);
 }
 
-JEMALLOC_INLINE rtree_elm_t *
-rtree_elm_acquire(tsdn_t *tsdn, rtree_t *rtree, rtree_ctx_t *rtree_ctx,
+JEMALLOC_INLINE rtree_leaf_elm_t *
+rtree_leaf_elm_acquire(tsdn_t *tsdn, rtree_t *rtree, rtree_ctx_t *rtree_ctx,
 uintptr_t key, bool dependent, bool init_missing) {
-rtree_elm_t *elm = rtree_elm_lookup(tsdn, rtree, rtree_ctx, key,
-dependent, init_missing);
+rtree_leaf_elm_t *elm = rtree_leaf_elm_lookup(tsdn, rtree, rtree_ctx,
+key, dependent, init_missing);
 if (!dependent && elm == NULL) {
 return NULL;
 }
@@ -164,14 +166,14 @@ rtree_elm_acquire(tsdn_t *tsdn, rtree_t *rtree, rtree_ctx_t *rtree_ctx,
 spin_t spinner = SPIN_INITIALIZER;
 while (true) {
 /* The least significant bit serves as a lock. */
-void *extent_and_lock = atomic_load_p(&elm->child_or_extent,
+void *extent_and_lock = atomic_load_p(&elm->extent,
 ATOMIC_RELAXED);
 if (likely(((uintptr_t)extent_and_lock & (uintptr_t)0x1) == 0))
 {
 void *locked = (void *)((uintptr_t)extent_and_lock
 | (uintptr_t)0x1);
 if (likely(atomic_compare_exchange_strong_p(
-&elm->child_or_extent, &extent_and_lock, locked,
+&elm->extent, &extent_and_lock, locked,
 ATOMIC_ACQUIRE, ATOMIC_RELAXED))) {
 break;
 }
@@ -180,58 +182,61 @@ rtree_elm_acquire(tsdn_t *tsdn, rtree_t *rtree, rtree_ctx_t *rtree_ctx,
 }
 
 if (config_debug) {
-rtree_elm_witness_acquire(tsdn, rtree, key, elm);
+rtree_leaf_elm_witness_acquire(tsdn, rtree, key, elm);
 }
 
 return elm;
 }
 
 JEMALLOC_INLINE extent_t *
-rtree_elm_read_acquired(tsdn_t *tsdn, const rtree_t *rtree, rtree_elm_t *elm) {
+rtree_leaf_elm_read_acquired(tsdn_t *tsdn, const rtree_t *rtree,
+rtree_leaf_elm_t *elm) {
 extent_t *extent;
-void *ptr = atomic_load_p(&elm->child_or_extent, ATOMIC_RELAXED);
+void *ptr = atomic_load_p(&elm->extent, ATOMIC_RELAXED);
 assert(((uintptr_t)ptr & (uintptr_t)0x1) == (uintptr_t)0x1);
 extent = (extent_t *)((uintptr_t)ptr & ~((uintptr_t)0x1));
 assert(((uintptr_t)extent & (uintptr_t)0x1) == (uintptr_t)0x0);
 
 if (config_debug) {
-rtree_elm_witness_access(tsdn, rtree, elm);
+rtree_leaf_elm_witness_access(tsdn, rtree, elm);
 }
 
 return extent;
 }
 
 JEMALLOC_INLINE void
-rtree_elm_write_acquired(tsdn_t *tsdn, const rtree_t *rtree, rtree_elm_t *elm,
-const extent_t *extent) {
+rtree_leaf_elm_write_acquired(tsdn_t *tsdn, const rtree_t *rtree,
+rtree_leaf_elm_t *elm, const extent_t *extent) {
 assert(((uintptr_t)extent & (uintptr_t)0x1) == (uintptr_t)0x0);
-assert(((uintptr_t)atomic_load_p(&elm->child_or_extent, ATOMIC_RELAXED)
+assert(((uintptr_t)atomic_load_p(&elm->extent, ATOMIC_RELAXED)
 & (uintptr_t)0x1) == (uintptr_t)0x1);
 
 if (config_debug) {
-rtree_elm_witness_access(tsdn, rtree, elm);
+rtree_leaf_elm_witness_access(tsdn, rtree, elm);
 }
-atomic_store_p(&elm->child_or_extent, (void *)((uintptr_t)extent
-| (uintptr_t)0x1), ATOMIC_RELEASE);
-assert(rtree_elm_read_acquired(tsdn, rtree, elm) == extent);
+atomic_store_p(&elm->extent, (void *)((uintptr_t)extent |
+(uintptr_t)0x1), ATOMIC_RELEASE);
+assert(rtree_leaf_elm_read_acquired(tsdn, rtree, elm) == extent);
 }
 
 JEMALLOC_INLINE void
-rtree_elm_release(tsdn_t *tsdn, const rtree_t *rtree, rtree_elm_t *elm) {
-rtree_elm_write(elm, rtree_elm_read_acquired(tsdn, rtree, elm));
+rtree_leaf_elm_release(tsdn_t *tsdn, const rtree_t *rtree,
+rtree_leaf_elm_t *elm) {
+rtree_leaf_elm_write(elm, rtree_leaf_elm_read_acquired(tsdn, rtree,
+elm));
 if (config_debug) {
-rtree_elm_witness_release(tsdn, rtree, elm);
+rtree_leaf_elm_witness_release(tsdn, rtree, elm);
 }
 }
 
 JEMALLOC_INLINE void
 rtree_clear(tsdn_t *tsdn, rtree_t *rtree, rtree_ctx_t *rtree_ctx,
 uintptr_t key) {
-rtree_elm_t *elm;
+rtree_leaf_elm_t *elm;
 
-elm = rtree_elm_acquire(tsdn, rtree, rtree_ctx, key, true, false);
-rtree_elm_write_acquired(tsdn, rtree, elm, NULL);
-rtree_elm_release(tsdn, rtree, elm);
+elm = rtree_leaf_elm_acquire(tsdn, rtree, rtree_ctx, key, true, false);
+rtree_leaf_elm_write_acquired(tsdn, rtree, elm, NULL);
+rtree_leaf_elm_release(tsdn, rtree, elm);
 }
 #endif
@@ -1,18 +1,21 @@
 #ifndef JEMALLOC_INTERNAL_RTREE_STRUCTS_H
 #define JEMALLOC_INTERNAL_RTREE_STRUCTS_H
 
-struct rtree_elm_s {
-/* Either "rtree_elm_t *child;" or "extent_t *extent;". */
-atomic_p_t child_or_extent;
+struct rtree_node_elm_s {
+atomic_p_t child;
 };
 
-struct rtree_elm_witness_s {
-const rtree_elm_t *elm;
+struct rtree_leaf_elm_s {
+atomic_p_t extent;
+};
+
+struct rtree_leaf_elm_witness_s {
+const rtree_leaf_elm_t *elm;
 witness_t witness;
 };
 
-struct rtree_elm_witness_tsd_s {
-rtree_elm_witness_t witnesses[RTREE_ELM_ACQUIRE_MAX];
+struct rtree_leaf_elm_witness_tsd_s {
+rtree_leaf_elm_witness_t witnesses[RTREE_ELM_ACQUIRE_MAX];
 };
 
 struct rtree_level_s {
@@ -27,7 +30,7 @@ struct rtree_level_s {
 
 struct rtree_ctx_cache_elm_s {
 uintptr_t leafkey;
-rtree_elm_t *leaf;
+rtree_leaf_elm_t *leaf;
 };
 
 struct rtree_ctx_s {
@@ -38,7 +41,7 @@ struct rtree_ctx_s {
 };
 
 struct rtree_s {
-/* An rtree_elm_t *. */
+/* An rtree_{internal,leaf}_elm_t *. */
 atomic_p_t root;
 malloc_mutex_t init_lock;
 };
@@ -8,9 +8,10 @@
 *******************************************************************************
 */
 
-typedef struct rtree_elm_s rtree_elm_t;
-typedef struct rtree_elm_witness_s rtree_elm_witness_t;
-typedef struct rtree_elm_witness_tsd_s rtree_elm_witness_tsd_t;
+typedef struct rtree_node_elm_s rtree_node_elm_t;
+typedef struct rtree_leaf_elm_s rtree_leaf_elm_t;
+typedef struct rtree_leaf_elm_witness_s rtree_leaf_elm_witness_t;
+typedef struct rtree_leaf_elm_witness_tsd_s rtree_leaf_elm_witness_tsd_t;
 typedef struct rtree_level_s rtree_level_t;
 typedef struct rtree_ctx_cache_elm_s rtree_ctx_cache_elm_t;
 typedef struct rtree_ctx_s rtree_ctx_t;
@@ -23,7 +24,15 @@ typedef struct rtree_s rtree_t;
 /* Number of significant bits. */
 #define RTREE_NSB (LG_VADDR - RTREE_NLIB)
 /* Number of levels in radix tree. */
-#define RTREE_HEIGHT (sizeof(rtree_levels)/sizeof(rtree_level_t))
+#if RTREE_NSB <= 10
+# define RTREE_HEIGHT 1
+#elif RTREE_NSB <= 36
+# define RTREE_HEIGHT 2
+#elif RTREE_NSB <= 52
+# define RTREE_HEIGHT 3
+#else
+# error Unsupported number of significant virtual address bits
+#endif
 
 /*
 * Number of leafkey/leaf pairs to cache. Each entry supports an entire leaf,
@@ -47,16 +56,16 @@ typedef struct rtree_s rtree_t;
 
 /*
 * Maximum number of concurrently acquired elements per thread. This controls
-* how many witness_t structures are embedded in tsd. Ideally rtree_elm_t would
-* have a witness_t directly embedded, but that would dramatically bloat the
-* tree. This must contain enough entries to e.g. coalesce two extents.
+* how many witness_t structures are embedded in tsd. Ideally rtree_leaf_elm_t
+* would have a witness_t directly embedded, but that would dramatically bloat
+* the tree. This must contain enough entries to e.g. coalesce two extents.
 */
 #define RTREE_ELM_ACQUIRE_MAX 4
 
-/* Initializers for rtree_elm_witness_tsd_t. */
+/* Initializers for rtree_leaf_elm_witness_tsd_t. */
 #define RTREE_ELM_WITNESS_INITIALIZER { \
 NULL, \
-WITNESS_INITIALIZER("rtree_elm", WITNESS_RANK_RTREE_ELM) \
+WITNESS_INITIALIZER("rtree_leaf_elm", WITNESS_RANK_RTREE_ELM) \
 }
 
 #define RTREE_ELM_WITNESS_TSD_INITIALIZER { \
@@ -29,7 +29,7 @@ struct tsd_init_head_s {
 yes, no) \
 O(rtree_ctx, rtree_ctx_t, no, no) \
 O(witnesses, witness_list_t, no, yes) \
-O(rtree_elm_witnesses, rtree_elm_witness_tsd_t, \
+O(rtree_leaf_elm_witnesses, rtree_leaf_elm_witness_tsd_t, \
 no, no) \
 O(witness_fork, bool, yes, no) \
src/extent.c | 74

@@ -450,8 +450,8 @@ extent_activate_locked(tsdn_t *tsdn, arena_t *arena, extents_t *extents,
 static bool
 extent_rtree_acquire(tsdn_t *tsdn, rtree_ctx_t *rtree_ctx,
 const extent_t *extent, bool dependent, bool init_missing,
-rtree_elm_t **r_elm_a, rtree_elm_t **r_elm_b) {
-*r_elm_a = rtree_elm_acquire(tsdn, &extents_rtree, rtree_ctx,
+rtree_leaf_elm_t **r_elm_a, rtree_leaf_elm_t **r_elm_b) {
+*r_elm_a = rtree_leaf_elm_acquire(tsdn, &extents_rtree, rtree_ctx,
 (uintptr_t)extent_base_get(extent), dependent, init_missing);
 if (!dependent && *r_elm_a == NULL) {
 return true;
@@ -459,11 +459,11 @@ extent_rtree_acquire(tsdn_t *tsdn, rtree_ctx_t *rtree_ctx,
 assert(*r_elm_a != NULL);
 
 if (extent_size_get(extent) > PAGE) {
-*r_elm_b = rtree_elm_acquire(tsdn, &extents_rtree, rtree_ctx,
-(uintptr_t)extent_last_get(extent), dependent,
+*r_elm_b = rtree_leaf_elm_acquire(tsdn, &extents_rtree,
+rtree_ctx, (uintptr_t)extent_last_get(extent), dependent,
 init_missing);
 if (!dependent && *r_elm_b == NULL) {
-rtree_elm_release(tsdn, &extents_rtree, *r_elm_a);
+rtree_leaf_elm_release(tsdn, &extents_rtree, *r_elm_a);
 return true;
 }
 assert(*r_elm_b != NULL);
@@ -475,19 +475,21 @@ extent_rtree_acquire(tsdn_t *tsdn, rtree_ctx_t *rtree_ctx,
 }
 
 static void
-extent_rtree_write_acquired(tsdn_t *tsdn, rtree_elm_t *elm_a,
-rtree_elm_t *elm_b, const extent_t *extent) {
-rtree_elm_write_acquired(tsdn, &extents_rtree, elm_a, extent);
+extent_rtree_write_acquired(tsdn_t *tsdn, rtree_leaf_elm_t *elm_a,
+rtree_leaf_elm_t *elm_b, const extent_t *extent) {
+rtree_leaf_elm_write_acquired(tsdn, &extents_rtree, elm_a, extent);
 if (elm_b != NULL) {
-rtree_elm_write_acquired(tsdn, &extents_rtree, elm_b, extent);
+rtree_leaf_elm_write_acquired(tsdn, &extents_rtree, elm_b,
+extent);
 }
 }
 
 static void
-extent_rtree_release(tsdn_t *tsdn, rtree_elm_t *elm_a, rtree_elm_t *elm_b) {
-rtree_elm_release(tsdn, &extents_rtree, elm_a);
+extent_rtree_release(tsdn_t *tsdn, rtree_leaf_elm_t *elm_a,
+rtree_leaf_elm_t *elm_b) {
+rtree_leaf_elm_release(tsdn, &extents_rtree, elm_a);
 if (elm_b != NULL) {
-rtree_elm_release(tsdn, &extents_rtree, elm_b);
+rtree_leaf_elm_release(tsdn, &extents_rtree, elm_b);
 }
 }
 
@@ -543,7 +545,7 @@ static bool
 extent_register_impl(tsdn_t *tsdn, const extent_t *extent, bool gdump_add) {
 rtree_ctx_t rtree_ctx_fallback;
 rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn, &rtree_ctx_fallback);
-rtree_elm_t *elm_a, *elm_b;
+rtree_leaf_elm_t *elm_a, *elm_b;
 
 if (extent_rtree_acquire(tsdn, rtree_ctx, extent, false, true, &elm_a,
 &elm_b)) {
@@ -596,7 +598,7 @@ static void
 extent_deregister(tsdn_t *tsdn, extent_t *extent) {
 rtree_ctx_t rtree_ctx_fallback;
 rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn, &rtree_ctx_fallback);
-rtree_elm_t *elm_a, *elm_b;
+rtree_leaf_elm_t *elm_a, *elm_b;
 
 extent_rtree_acquire(tsdn, rtree_ctx, extent, true, false, &elm_a,
 &elm_b);
@@ -651,13 +653,13 @@ extent_recycle_extract(tsdn_t *tsdn, arena_t *arena,
 extent_hooks_assure_initialized(arena, r_extent_hooks);
 extent_t *extent;
 if (new_addr != NULL) {
-rtree_elm_t *elm;
+rtree_leaf_elm_t *elm;
 
-elm = rtree_elm_acquire(tsdn, &extents_rtree, rtree_ctx,
+elm = rtree_leaf_elm_acquire(tsdn, &extents_rtree, rtree_ctx,
 (uintptr_t)new_addr, false, false);
 if (elm != NULL) {
-extent = rtree_elm_read_acquired(tsdn, &extents_rtree,
-elm);
+extent = rtree_leaf_elm_read_acquired(tsdn,
+&extents_rtree, elm);
 if (extent != NULL) {
 assert(extent_base_get(extent) == new_addr);
 if (extent_arena_get(extent) != arena ||
@@ -667,7 +669,7 @@ extent_recycle_extract(tsdn_t *tsdn, arena_t *arena,
 extent = NULL;
 }
 }
-rtree_elm_release(tsdn, &extents_rtree, elm);
+rtree_leaf_elm_release(tsdn, &extents_rtree, elm);
 } else {
 extent = NULL;
 }
@@ -1156,11 +1158,11 @@ extent_try_coalesce(tsdn_t *tsdn, arena_t *arena,
 again = false;
 
 /* Try to coalesce forward. */
-rtree_elm_t *next_elm = rtree_elm_acquire(tsdn, &extents_rtree,
-rtree_ctx, (uintptr_t)extent_past_get(extent), false,
-false);
+rtree_leaf_elm_t *next_elm = rtree_leaf_elm_acquire(tsdn,
+&extents_rtree, rtree_ctx,
+(uintptr_t)extent_past_get(extent), false, false);
 if (next_elm != NULL) {
-extent_t *next = rtree_elm_read_acquired(tsdn,
+extent_t *next = rtree_leaf_elm_read_acquired(tsdn,
 &extents_rtree, next_elm);
 /*
 * extents->mtx only protects against races for
@@ -1169,7 +1171,7 @@ extent_try_coalesce(tsdn_t *tsdn, arena_t *arena,
 */
 bool can_coalesce = (next != NULL &&
 extent_can_coalesce(arena, extents, extent, next));
-rtree_elm_release(tsdn, &extents_rtree, next_elm);
+rtree_leaf_elm_release(tsdn, &extents_rtree, next_elm);
 if (can_coalesce && !extent_coalesce(tsdn, arena,
 r_extent_hooks, extents, extent, next, true)) {
 if (extents->delay_coalesce) {
@@ -1182,15 +1184,15 @@ extent_try_coalesce(tsdn_t *tsdn, arena_t *arena,
 }
 
 /* Try to coalesce backward. */
-rtree_elm_t *prev_elm = rtree_elm_acquire(tsdn, &extents_rtree,
-rtree_ctx, (uintptr_t)extent_before_get(extent), false,
-false);
+rtree_leaf_elm_t *prev_elm = rtree_leaf_elm_acquire(tsdn,
+&extents_rtree, rtree_ctx,
+(uintptr_t)extent_before_get(extent), false, false);
 if (prev_elm != NULL) {
-extent_t *prev = rtree_elm_read_acquired(tsdn,
+extent_t *prev = rtree_leaf_elm_read_acquired(tsdn,
 &extents_rtree, prev_elm);
 bool can_coalesce = (prev != NULL &&
 extent_can_coalesce(arena, extents, extent, prev));
-rtree_elm_release(tsdn, &extents_rtree, prev_elm);
+rtree_leaf_elm_release(tsdn, &extents_rtree, prev_elm);
 if (can_coalesce && !extent_coalesce(tsdn, arena,
 r_extent_hooks, extents, extent, prev, false)) {
 extent = prev;
@@ -1472,7 +1474,7 @@ extent_split_wrapper(tsdn_t *tsdn, arena_t *arena,
 extent_t *trail;
 rtree_ctx_t rtree_ctx_fallback;
 rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn, &rtree_ctx_fallback);
-rtree_elm_t *lead_elm_a, *lead_elm_b, *trail_elm_a, *trail_elm_b;
+rtree_leaf_elm_t *lead_elm_a, *lead_elm_b, *trail_elm_a, *trail_elm_b;
 
 extent_hooks_assure_initialized(arena, r_extent_hooks);
 
@@ -1590,19 +1592,21 @@ extent_merge_wrapper(tsdn_t *tsdn, arena_t *arena,
 */
 rtree_ctx_t rtree_ctx_fallback;
 rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn, &rtree_ctx_fallback);
-rtree_elm_t *a_elm_a, *a_elm_b, *b_elm_a, *b_elm_b;
+rtree_leaf_elm_t *a_elm_a, *a_elm_b, *b_elm_a, *b_elm_b;
 extent_rtree_acquire(tsdn, rtree_ctx, a, true, false, &a_elm_a,
 &a_elm_b);
 extent_rtree_acquire(tsdn, rtree_ctx, b, true, false, &b_elm_a,
 &b_elm_b);
 
 if (a_elm_b != NULL) {
-rtree_elm_write_acquired(tsdn, &extents_rtree, a_elm_b, NULL);
-rtree_elm_release(tsdn, &extents_rtree, a_elm_b);
+rtree_leaf_elm_write_acquired(tsdn, &extents_rtree, a_elm_b,
+NULL);
+rtree_leaf_elm_release(tsdn, &extents_rtree, a_elm_b);
 }
 if (b_elm_b != NULL) {
-rtree_elm_write_acquired(tsdn, &extents_rtree, b_elm_a, NULL);
-rtree_elm_release(tsdn, &extents_rtree, b_elm_a);
+rtree_leaf_elm_write_acquired(tsdn, &extents_rtree, b_elm_a,
+NULL);
+rtree_leaf_elm_release(tsdn, &extents_rtree, b_elm_a);
 } else {
 b_elm_b = b_elm_a;
 }
src/rtree.c | 351
@ -19,10 +19,10 @@ rtree_new(rtree_t *rtree) {
|
|||||||
#undef rtree_node_alloc
|
#undef rtree_node_alloc
|
||||||
#define rtree_node_alloc JEMALLOC_N(rtree_node_alloc_impl)
|
#define rtree_node_alloc JEMALLOC_N(rtree_node_alloc_impl)
|
||||||
#endif
|
#endif
|
||||||
static rtree_elm_t *
|
static rtree_node_elm_t *
|
||||||
rtree_node_alloc(tsdn_t *tsdn, rtree_t *rtree, size_t nelms) {
|
rtree_node_alloc(tsdn_t *tsdn, rtree_t *rtree, size_t nelms) {
|
||||||
return (rtree_elm_t *)base_alloc(tsdn, b0get(), nelms *
|
return (rtree_node_elm_t *)base_alloc(tsdn, b0get(), nelms *
|
||||||
sizeof(rtree_elm_t), CACHELINE);
|
sizeof(rtree_node_elm_t), CACHELINE);
|
||||||
}
|
}
|
||||||
#ifdef JEMALLOC_JET
|
#ifdef JEMALLOC_JET
|
||||||
#undef rtree_node_alloc
|
#undef rtree_node_alloc
|
||||||
@ -35,7 +35,7 @@ rtree_node_alloc_t *rtree_node_alloc = JEMALLOC_N(rtree_node_alloc_impl);
|
|||||||
#define rtree_node_dalloc JEMALLOC_N(rtree_node_dalloc_impl)
|
#define rtree_node_dalloc JEMALLOC_N(rtree_node_dalloc_impl)
|
||||||
#endif
|
#endif
|
||||||
UNUSED static void
|
UNUSED static void
|
||||||
rtree_node_dalloc(tsdn_t *tsdn, rtree_t *rtree, rtree_elm_t *node) {
|
rtree_node_dalloc(tsdn_t *tsdn, rtree_t *rtree, rtree_node_elm_t *node) {
|
||||||
/* Nodes are never deleted during normal operation. */
|
/* Nodes are never deleted during normal operation. */
|
||||||
not_reached();
|
not_reached();
|
||||||
}
|
}
|
||||||
@ -46,46 +46,92 @@ rtree_node_dalloc_t *rtree_node_dalloc = JEMALLOC_N(rtree_node_dalloc_impl);
|
|||||||
#endif
|
#endif
|
||||||
|
|
||||||
#ifdef JEMALLOC_JET
|
#ifdef JEMALLOC_JET
|
||||||
static void
|
#undef rtree_leaf_alloc
|
||||||
rtree_delete_subtree(tsdn_t *tsdn, rtree_t *rtree, rtree_elm_t *node,
|
#define rtree_leaf_alloc JEMALLOC_N(rtree_leaf_alloc_impl)
|
||||||
unsigned level) {
|
#endif
|
||||||
if (level + 1 < RTREE_HEIGHT) {
|
static rtree_leaf_elm_t *
|
||||||
size_t nchildren, i;
|
rtree_leaf_alloc(tsdn_t *tsdn, rtree_t *rtree, size_t nelms) {
|
||||||
|
return (rtree_leaf_elm_t *)base_alloc(tsdn, b0get(), nelms *
|
||||||
|
sizeof(rtree_leaf_elm_t), CACHELINE);
|
||||||
|
}
|
||||||
|
#ifdef JEMALLOC_JET
|
||||||
|
#undef rtree_leaf_alloc
|
||||||
|
#define rtree_leaf_alloc JEMALLOC_N(rtree_leaf_alloc)
|
||||||
|
rtree_leaf_alloc_t *rtree_leaf_alloc = JEMALLOC_N(rtree_leaf_alloc_impl);
|
||||||
|
#endif
|
||||||
|
|
||||||
nchildren = ZU(1) << rtree_levels[level].bits;
|
#ifdef JEMALLOC_JET
|
||||||
for (i = 0; i < nchildren; i++) {
|
#undef rtree_leaf_dalloc
|
||||||
rtree_elm_t *child = (rtree_elm_t *)atomic_load_p(
|
#define rtree_leaf_dalloc JEMALLOC_N(rtree_leaf_dalloc_impl)
|
||||||
&node[i].child_or_extent, ATOMIC_RELAXED);
|
#endif
|
||||||
if (child != NULL) {
|
UNUSED static void
|
||||||
rtree_delete_subtree(tsdn, rtree, child, level +
|
rtree_leaf_dalloc(tsdn_t *tsdn, rtree_t *rtree, rtree_leaf_elm_t *leaf) {
|
||||||
|
/* Leaves are never deleted during normal operation. */
|
||||||
|
not_reached();
|
||||||
|
}
|
||||||
|
#ifdef JEMALLOC_JET
|
||||||
|
#undef rtree_leaf_dalloc
|
||||||
|
#define rtree_leaf_dalloc JEMALLOC_N(rtree_leaf_dalloc)
|
||||||
|
rtree_leaf_dalloc_t *rtree_leaf_dalloc = JEMALLOC_N(rtree_leaf_dalloc_impl);
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#ifdef JEMALLOC_JET
|
||||||
|
static void
|
||||||
|
rtree_delete_subtree(tsdn_t *tsdn, rtree_t *rtree, rtree_node_elm_t *subtree,
|
||||||
|
unsigned level) {
|
||||||
|
size_t nchildren = ZU(1) << rtree_levels[level].bits;
|
||||||
|
if (level + 2 < RTREE_HEIGHT) {
|
||||||
|
for (size_t i = 0; i < nchildren; i++) {
|
||||||
|
rtree_node_elm_t *node =
|
||||||
|
(rtree_node_elm_t *)atomic_load_p(&subtree[i].child,
|
||||||
|
ATOMIC_RELAXED);
|
||||||
|
if (node != NULL) {
|
||||||
|
rtree_delete_subtree(tsdn, rtree, node, level +
|
||||||
1);
|
1);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
} else {
|
||||||
|
for (size_t i = 0; i < nchildren; i++) {
|
||||||
|
rtree_leaf_elm_t *leaf =
|
||||||
|
(rtree_leaf_elm_t *)atomic_load_p(&subtree[i].child,
|
||||||
|
ATOMIC_RELAXED);
|
||||||
|
if (leaf != NULL) {
|
||||||
|
rtree_leaf_dalloc(tsdn, rtree, leaf);
|
||||||
}
|
}
|
||||||
rtree_node_dalloc(tsdn, rtree, node);
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
rtree_node_dalloc(tsdn, rtree, subtree);
|
||||||
}
|
}
|
||||||
|
|
||||||
void
|
void
|
||||||
rtree_delete(tsdn_t *tsdn, rtree_t *rtree) {
|
rtree_delete(tsdn_t *tsdn, rtree_t *rtree) {
|
||||||
rtree_elm_t *rtree_root = (rtree_elm_t *)atomic_load_p(&rtree->root,
|
if (RTREE_HEIGHT > 1) {
|
||||||
|
rtree_node_elm_t *node = (rtree_node_elm_t *)atomic_load_p(
|
||||||
|
&rtree->root, ATOMIC_RELAXED);
|
||||||
|
if (node != NULL) {
|
||||||
|
rtree_delete_subtree(tsdn, rtree, node, 0);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
rtree_leaf_elm_t *leaf =
|
||||||
|
(rtree_leaf_elm_t *)atomic_load_p(&rtree->root,
|
||||||
ATOMIC_RELAXED);
|
ATOMIC_RELAXED);
|
||||||
if (rtree_root != NULL) {
|
if (leaf != NULL) {
|
||||||
rtree_delete_subtree(tsdn, rtree, rtree_root, 0);
|
rtree_leaf_dalloc(tsdn, rtree, leaf);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
static rtree_elm_t *
|
static rtree_node_elm_t *
|
||||||
rtree_node_init(tsdn_t *tsdn, rtree_t *rtree, unsigned level,
|
rtree_node_init(tsdn_t *tsdn, rtree_t *rtree, unsigned level,
|
||||||
atomic_p_t *elmp) {
|
atomic_p_t *elmp) {
|
||||||
rtree_elm_t *node;
|
|
||||||
|
|
||||||
malloc_mutex_lock(tsdn, &rtree->init_lock);
|
malloc_mutex_lock(tsdn, &rtree->init_lock);
|
||||||
/*
|
/*
|
||||||
* If *elmp is non-null, then it was initialized with the init lock
|
* If *elmp is non-null, then it was initialized with the init lock
|
||||||
* held, so we can get by with 'relaxed' here.
|
* held, so we can get by with 'relaxed' here.
|
||||||
*/
|
*/
|
||||||
node = atomic_load_p(elmp, ATOMIC_RELAXED);
|
rtree_node_elm_t *node = atomic_load_p(elmp, ATOMIC_RELAXED);
|
||||||
if (node == NULL) {
|
if (node == NULL) {
|
||||||
node = rtree_node_alloc(tsdn, rtree, ZU(1) <<
|
node = rtree_node_alloc(tsdn, rtree, ZU(1) <<
|
||||||
rtree_levels[level].bits);
|
rtree_levels[level].bits);
|
||||||
@ -104,90 +150,186 @@ rtree_node_init(tsdn_t *tsdn, rtree_t *rtree, unsigned level,
|
|||||||
return node;
|
return node;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
static rtree_leaf_elm_t *
|
||||||
|
rtree_leaf_init(tsdn_t *tsdn, rtree_t *rtree, atomic_p_t *elmp) {
|
||||||
|
malloc_mutex_lock(tsdn, &rtree->init_lock);
|
||||||
|
/*
|
||||||
|
* If *elmp is non-null, then it was initialized with the init lock
|
||||||
|
* held, so we can get by with 'relaxed' here.
|
||||||
|
*/
|
||||||
|
rtree_leaf_elm_t *leaf = atomic_load_p(elmp, ATOMIC_RELAXED);
|
||||||
|
if (leaf == NULL) {
|
||||||
|
leaf = rtree_leaf_alloc(tsdn, rtree, ZU(1) <<
|
||||||
|
rtree_levels[RTREE_HEIGHT-1].bits);
|
||||||
|
if (leaf == NULL) {
|
||||||
|
malloc_mutex_unlock(tsdn, &rtree->init_lock);
|
||||||
|
return NULL;
|
||||||
|
}
|
||||||
|
/*
|
||||||
|
* Even though we hold the lock, a later reader might not; we
|
||||||
|
* need release semantics.
|
||||||
|
*/
|
||||||
|
atomic_store_p(elmp, leaf, ATOMIC_RELEASE);
|
||||||
|
}
|
||||||
|
malloc_mutex_unlock(tsdn, &rtree->init_lock);
|
||||||
|
|
||||||
|
return leaf;
|
||||||
|
}
|
||||||
|
|
||||||
static bool
|
static bool
|
||||||
rtree_node_valid(rtree_elm_t *node) {
|
rtree_node_valid(rtree_node_elm_t *node) {
|
||||||
return ((uintptr_t)node != (uintptr_t)0);
|
return ((uintptr_t)node != (uintptr_t)0);
|
||||||
}
|
}
|
||||||
|
|
||||||
static rtree_elm_t *
|
static bool
|
||||||
rtree_child_tryread(rtree_elm_t *elm, bool dependent) {
|
rtree_leaf_valid(rtree_leaf_elm_t *leaf) {
|
||||||
rtree_elm_t *child;
|
return ((uintptr_t)leaf != (uintptr_t)0);
|
||||||
|
}
|
||||||
|
|
||||||
|
static rtree_node_elm_t *
|
||||||
|
rtree_child_node_tryread(rtree_node_elm_t *elm, bool dependent) {
|
||||||
|
rtree_node_elm_t *node;
|
||||||
|
|
||||||
if (dependent) {
|
if (dependent) {
|
||||||
child = (rtree_elm_t *)atomic_load_p(&elm->child_or_extent,
|
node = (rtree_node_elm_t *)atomic_load_p(&elm->child,
|
||||||
ATOMIC_RELAXED);
|
ATOMIC_RELAXED);
|
||||||
} else {
|
} else {
|
||||||
child = (rtree_elm_t *)atomic_load_p(&elm->child_or_extent,
|
node = (rtree_node_elm_t *)atomic_load_p(&elm->child,
|
||||||
ATOMIC_ACQUIRE);
|
ATOMIC_ACQUIRE);
|
||||||
}
|
}
|
||||||
|
|
||||||
assert(!dependent || child != NULL);
|
assert(!dependent || node != NULL);
|
||||||
return child;
|
return node;
|
||||||
}
|
}
|
||||||
|
|
||||||
static rtree_elm_t *
|
static rtree_node_elm_t *
|
||||||
rtree_child_read(tsdn_t *tsdn, rtree_t *rtree, rtree_elm_t *elm, unsigned level,
|
rtree_child_node_read(tsdn_t *tsdn, rtree_t *rtree, rtree_node_elm_t *elm,
|
||||||
bool dependent) {
|
unsigned level, bool dependent) {
|
||||||
rtree_elm_t *child;
|
rtree_node_elm_t *node;
|
||||||
|
|
||||||
child = rtree_child_tryread(elm, dependent);
|
node = rtree_child_node_tryread(elm, dependent);
|
||||||
if (!dependent && unlikely(!rtree_node_valid(child))) {
|
if (!dependent && unlikely(!rtree_node_valid(node))) {
|
||||||
child = rtree_node_init(tsdn, rtree, level + 1,
|
node = rtree_node_init(tsdn, rtree, level + 1, &elm->child);
|
||||||
&elm->child_or_extent);
|
|
||||||
}
|
}
|
||||||
assert(!dependent || child != NULL);
|
assert(!dependent || node != NULL);
|
||||||
return child;
|
return node;
|
||||||
}
|
}
|
||||||
|
|
||||||
static rtree_elm_t *
|
static rtree_leaf_elm_t *
|
||||||
rtree_subtree_tryread(rtree_t *rtree, bool dependent) {
|
rtree_child_leaf_tryread(rtree_node_elm_t *elm, bool dependent) {
|
||||||
rtree_elm_t *subtree;
|
rtree_leaf_elm_t *leaf;
|
||||||
|
|
||||||
if (dependent) {
|
if (dependent) {
|
||||||
subtree = (rtree_elm_t *)atomic_load_p(&rtree->root,
|
leaf = (rtree_leaf_elm_t *)atomic_load_p(&elm->child,
|
||||||
ATOMIC_RELAXED);
|
ATOMIC_RELAXED);
|
||||||
} else {
|
} else {
|
||||||
subtree = (rtree_elm_t *)atomic_load_p(&rtree->root,
|
leaf = (rtree_leaf_elm_t *)atomic_load_p(&elm->child,
|
||||||
ATOMIC_ACQUIRE);
|
ATOMIC_ACQUIRE);
|
||||||
}
|
}
|
||||||
assert(!dependent || subtree != NULL);
|
|
||||||
return subtree;
|
assert(!dependent || leaf != NULL);
|
||||||
|
return leaf;
|
||||||
}
|
}
|
||||||
|
|
||||||
static rtree_elm_t *
|
static rtree_leaf_elm_t *
|
||||||
rtree_subtree_read(tsdn_t *tsdn, rtree_t *rtree, bool dependent) {
|
rtree_child_leaf_read(tsdn_t *tsdn, rtree_t *rtree, rtree_node_elm_t *elm,
|
||||||
rtree_elm_t *subtree = rtree_subtree_tryread(rtree, dependent);
|
unsigned level, bool dependent) {
|
||||||
if (!dependent && unlikely(!rtree_node_valid(subtree))) {
|
rtree_leaf_elm_t *leaf;
|
||||||
subtree = rtree_node_init(tsdn, rtree, 0, &rtree->root);
|
|
||||||
|
leaf = rtree_child_leaf_tryread(elm, dependent);
|
||||||
|
if (!dependent && unlikely(!rtree_leaf_valid(leaf))) {
|
||||||
|
leaf = rtree_leaf_init(tsdn, rtree, &elm->child);
|
||||||
}
|
}
|
||||||
assert(!dependent || subtree != NULL);
|
assert(!dependent || leaf != NULL);
|
||||||
return subtree;
|
return leaf;
|
||||||
}
|
}
|
||||||
|
|
||||||
rtree_elm_t *
|
UNUSED static rtree_node_elm_t *
|
||||||
rtree_elm_lookup_hard(tsdn_t *tsdn, rtree_t *rtree, rtree_ctx_t *rtree_ctx,
|
rtree_root_node_tryread(rtree_t *rtree, bool dependent) {
|
||||||
|
rtree_node_elm_t *node;
|
||||||
|
if (dependent) {
|
||||||
|
node = (rtree_node_elm_t *)atomic_load_p(&rtree->root,
|
||||||
|
ATOMIC_RELAXED);
|
||||||
|
} else {
|
||||||
|
node = (rtree_node_elm_t *)atomic_load_p(&rtree->root,
|
||||||
|
ATOMIC_ACQUIRE);
|
||||||
|
}
|
||||||
|
assert(!dependent || node != NULL);
|
||||||
|
return node;
|
||||||
|
}
|
||||||
|
|
||||||
|
UNUSED static rtree_node_elm_t *
|
||||||
|
rtree_root_node_read(tsdn_t *tsdn, rtree_t *rtree, bool dependent) {
|
||||||
|
rtree_node_elm_t *node = rtree_root_node_tryread(rtree, dependent);
|
||||||
|
if (!dependent && unlikely(!rtree_node_valid(node))) {
|
||||||
|
node = rtree_node_init(tsdn, rtree, 0, &rtree->root);
|
||||||
|
}
|
||||||
|
assert(!dependent || node != NULL);
|
||||||
|
return node;
|
||||||
|
}
|
||||||
|
|
||||||
|
UNUSED static rtree_leaf_elm_t *
|
||||||
|
rtree_root_leaf_tryread(rtree_t *rtree, bool dependent) {
|
||||||
|
rtree_leaf_elm_t *leaf;
|
||||||
|
if (dependent) {
|
||||||
|
leaf = (rtree_leaf_elm_t *)atomic_load_p(&rtree->root,
|
||||||
|
ATOMIC_RELAXED);
|
||||||
|
} else {
|
||||||
|
leaf = (rtree_leaf_elm_t *)atomic_load_p(&rtree->root,
|
||||||
|
ATOMIC_ACQUIRE);
|
||||||
|
}
|
||||||
|
assert(!dependent || leaf != NULL);
|
||||||
|
return leaf;
|
||||||
|
}
|
||||||
|
|
||||||
|
UNUSED static rtree_leaf_elm_t *
|
||||||
|
rtree_root_leaf_read(tsdn_t *tsdn, rtree_t *rtree, bool dependent) {
|
||||||
|
rtree_leaf_elm_t *leaf = rtree_root_leaf_tryread(rtree, dependent);
|
||||||
|
if (!dependent && unlikely(!rtree_leaf_valid(leaf))) {
|
||||||
|
leaf = rtree_leaf_init(tsdn, rtree, &rtree->root);
|
||||||
|
}
|
||||||
|
assert(!dependent || leaf != NULL);
|
||||||
|
return leaf;
|
||||||
|
}
|
||||||
|
|
||||||
|
rtree_leaf_elm_t *
|
||||||
|
rtree_leaf_elm_lookup_hard(tsdn_t *tsdn, rtree_t *rtree, rtree_ctx_t *rtree_ctx,
|
||||||
uintptr_t key, bool dependent, bool init_missing) {
|
uintptr_t key, bool dependent, bool init_missing) {
|
||||||
rtree_elm_t *node = init_missing ? rtree_subtree_read(tsdn, rtree,
|
rtree_node_elm_t *node;
|
||||||
dependent) : rtree_subtree_tryread(rtree, dependent);
|
rtree_leaf_elm_t *leaf;
|
||||||
|
#if RTREE_HEIGHT > 1
|
||||||
|
node = init_missing ? rtree_root_node_read(tsdn, rtree, dependent) :
|
||||||
|
rtree_root_node_tryread(rtree, dependent);
|
||||||
|
#else
|
||||||
|
leaf = init_missing ? rtree_root_leaf_read(tsdn, rtree, dependent) :
|
||||||
|
rtree_root_leaf_tryread(rtree, dependent);
|
||||||
|
#endif
|
||||||
|
|
||||||
#define RTREE_GET_SUBTREE(level) { \
|
#define RTREE_GET_CHILD(level) { \
|
||||||
assert(level < RTREE_HEIGHT-1); \
|
assert(level < RTREE_HEIGHT-1); \
|
||||||
if (!dependent && unlikely(!rtree_node_valid(node))) { \
|
if (!dependent && unlikely(!rtree_node_valid(node))) { \
|
||||||
return NULL; \
|
return NULL; \
|
||||||
} \
|
} \
|
||||||
uintptr_t subkey = rtree_subkey(key, level); \
|
uintptr_t subkey = rtree_subkey(key, level); \
|
||||||
node = init_missing ? rtree_child_read(tsdn, rtree, \
|
if (level + 2 < RTREE_HEIGHT) { \
|
||||||
|
node = init_missing ? \
|
||||||
|
rtree_child_node_read(tsdn, rtree, \
|
||||||
&node[subkey], level, dependent) : \
|
&node[subkey], level, dependent) : \
|
||||||
rtree_child_tryread(&node[subkey], dependent); \
|
rtree_child_node_tryread(&node[subkey], \
|
||||||
|
dependent); \
|
||||||
|
} else { \
|
||||||
|
leaf = init_missing ? \
|
||||||
|
rtree_child_leaf_read(tsdn, rtree, \
|
||||||
|
&node[subkey], level, dependent) : \
|
||||||
|
rtree_child_leaf_tryread(&node[subkey], \
|
||||||
|
dependent); \
|
||||||
|
} \
|
||||||
}
|
}
|
||||||
#define RTREE_GET_LEAF(level) { \
|
#define RTREE_GET_LEAF(level) { \
|
||||||
assert(level == RTREE_HEIGHT-1); \
|
assert(level == RTREE_HEIGHT-1); \
|
||||||
if (!dependent && unlikely(!rtree_node_valid(node))) { \
|
if (!dependent && unlikely(!rtree_leaf_valid(leaf))) { \
|
||||||
return NULL; \
|
return NULL; \
|
||||||
} \
|
} \
|
||||||
/* \
|
|
||||||
* node is a leaf, so it contains values rather than \
|
|
||||||
* child pointers. \
|
|
||||||
*/ \
|
|
||||||
if (RTREE_CTX_NCACHE > 1) { \
|
if (RTREE_CTX_NCACHE > 1) { \
|
||||||
memmove(&rtree_ctx->cache[1], \
|
memmove(&rtree_ctx->cache[1], \
|
||||||
&rtree_ctx->cache[0], \
|
&rtree_ctx->cache[0], \
|
||||||
@ -196,29 +338,29 @@ rtree_elm_lookup_hard(tsdn_t *tsdn, rtree_t *rtree, rtree_ctx_t *rtree_ctx,
|
|||||||
} \
|
} \
|
||||||
uintptr_t leafkey = rtree_leafkey(key); \
|
uintptr_t leafkey = rtree_leafkey(key); \
|
||||||
rtree_ctx->cache[0].leafkey = leafkey; \
|
rtree_ctx->cache[0].leafkey = leafkey; \
|
||||||
rtree_ctx->cache[0].leaf = node; \
|
rtree_ctx->cache[0].leaf = leaf; \
|
||||||
uintptr_t subkey = rtree_subkey(key, level); \
|
uintptr_t subkey = rtree_subkey(key, level); \
|
||||||
return &node[subkey]; \
|
return &leaf[subkey]; \
|
||||||
}
|
}
|
||||||
if (RTREE_HEIGHT > 1) {
|
if (RTREE_HEIGHT > 1) {
|
||||||
RTREE_GET_SUBTREE(0)
|
RTREE_GET_CHILD(0)
|
||||||
}
|
}
|
||||||
if (RTREE_HEIGHT > 2) {
|
if (RTREE_HEIGHT > 2) {
|
||||||
RTREE_GET_SUBTREE(1)
|
RTREE_GET_CHILD(1)
|
||||||
}
|
}
|
||||||
if (RTREE_HEIGHT > 3) {
|
if (RTREE_HEIGHT > 3) {
|
||||||
for (unsigned i = 2; i < RTREE_HEIGHT-1; i++) {
|
for (unsigned i = 2; i < RTREE_HEIGHT-1; i++) {
|
||||||
RTREE_GET_SUBTREE(i)
|
RTREE_GET_CHILD(i)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
RTREE_GET_LEAF(RTREE_HEIGHT-1)
|
RTREE_GET_LEAF(RTREE_HEIGHT-1)
|
||||||
#undef RTREE_GET_SUBTREE
|
#undef RTREE_GET_CHILD
|
||||||
#undef RTREE_GET_LEAF
|
#undef RTREE_GET_LEAF
|
||||||
not_reached();
|
not_reached();
|
||||||
}
|
}
|
||||||
|
|
||||||
static int
|
static int
|
||||||
rtree_elm_witness_comp(const witness_t *a, void *oa, const witness_t *b,
|
rtree_leaf_elm_witness_comp(const witness_t *a, void *oa, const witness_t *b,
|
||||||
void *ob) {
|
void *ob) {
|
||||||
uintptr_t ka = (uintptr_t)oa;
|
uintptr_t ka = (uintptr_t)oa;
|
||||||
uintptr_t kb = (uintptr_t)ob;
|
uintptr_t kb = (uintptr_t)ob;
|
||||||
@ -230,23 +372,24 @@ rtree_elm_witness_comp(const witness_t *a, void *oa, const witness_t *b,
|
|||||||
}
|
}
|
||||||
|
|
||||||
static witness_t *
|
static witness_t *
|
||||||
rtree_elm_witness_alloc(tsd_t *tsd, uintptr_t key, const rtree_elm_t *elm) {
|
rtree_leaf_elm_witness_alloc(tsd_t *tsd, uintptr_t key,
|
||||||
|
const rtree_leaf_elm_t *elm) {
|
||||||
witness_t *witness;
|
witness_t *witness;
|
||||||
size_t i;
|
size_t i;
|
||||||
rtree_elm_witness_tsd_t *witnesses = tsd_rtree_elm_witnessesp_get(tsd);
|
rtree_leaf_elm_witness_tsd_t *witnesses =
|
||||||
|
tsd_rtree_leaf_elm_witnessesp_get(tsd);
|
||||||
|
|
||||||
/* Iterate over entire array to detect double allocation attempts. */
|
/* Iterate over entire array to detect double allocation attempts. */
|
||||||
witness = NULL;
|
witness = NULL;
|
||||||
for (i = 0; i < sizeof(rtree_elm_witness_tsd_t) / sizeof(witness_t);
|
for (i = 0; i < RTREE_ELM_ACQUIRE_MAX; i++) {
|
||||||
i++) {
|
rtree_leaf_elm_witness_t *rew = &witnesses->witnesses[i];
|
||||||
rtree_elm_witness_t *rew = &witnesses->witnesses[i];
|
|
||||||
|
|
||||||
assert(rew->elm != elm);
|
assert(rew->elm != elm);
|
||||||
if (rew->elm == NULL && witness == NULL) {
|
if (rew->elm == NULL && witness == NULL) {
|
||||||
rew->elm = elm;
|
rew->elm = elm;
|
||||||
witness = &rew->witness;
|
witness = &rew->witness;
|
||||||
witness_init(witness, "rtree_elm",
|
witness_init(witness, "rtree_leaf_elm",
|
||||||
WITNESS_RANK_RTREE_ELM, rtree_elm_witness_comp,
|
WITNESS_RANK_RTREE_ELM, rtree_leaf_elm_witness_comp,
|
||||||
(void *)key);
|
(void *)key);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -255,13 +398,13 @@ rtree_elm_witness_alloc(tsd_t *tsd, uintptr_t key, const rtree_elm_t *elm) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
static witness_t *
|
static witness_t *
|
||||||
rtree_elm_witness_find(tsd_t *tsd, const rtree_elm_t *elm) {
|
rtree_leaf_elm_witness_find(tsd_t *tsd, const rtree_leaf_elm_t *elm) {
|
||||||
size_t i;
|
size_t i;
|
||||||
rtree_elm_witness_tsd_t *witnesses = tsd_rtree_elm_witnessesp_get(tsd);
|
rtree_leaf_elm_witness_tsd_t *witnesses =
|
||||||
|
tsd_rtree_leaf_elm_witnessesp_get(tsd);
|
||||||
|
|
||||||
for (i = 0; i < sizeof(rtree_elm_witness_tsd_t) / sizeof(witness_t);
|
for (i = 0; i < RTREE_ELM_ACQUIRE_MAX; i++) {
|
||||||
i++) {
|
rtree_leaf_elm_witness_t *rew = &witnesses->witnesses[i];
|
||||||
rtree_elm_witness_t *rew = &witnesses->witnesses[i];
|
|
||||||
|
|
||||||
if (rew->elm == elm) {
|
if (rew->elm == elm) {
|
||||||
return &rew->witness;
|
return &rew->witness;
|
||||||
@ -271,19 +414,19 @@ rtree_elm_witness_find(tsd_t *tsd, const rtree_elm_t *elm) {
 }

 static void
-rtree_elm_witness_dalloc(tsd_t *tsd, witness_t *witness,
-    const rtree_elm_t *elm) {
+rtree_leaf_elm_witness_dalloc(tsd_t *tsd, witness_t *witness,
+    const rtree_leaf_elm_t *elm) {
 	size_t i;
-	rtree_elm_witness_tsd_t *witnesses = tsd_rtree_elm_witnessesp_get(tsd);
+	rtree_leaf_elm_witness_tsd_t *witnesses =
+	    tsd_rtree_leaf_elm_witnessesp_get(tsd);

-	for (i = 0; i < sizeof(rtree_elm_witness_tsd_t) / sizeof(witness_t);
-	    i++) {
-		rtree_elm_witness_t *rew = &witnesses->witnesses[i];
+	for (i = 0; i < RTREE_ELM_ACQUIRE_MAX; i++) {
+		rtree_leaf_elm_witness_t *rew = &witnesses->witnesses[i];

 		if (rew->elm == elm) {
 			rew->elm = NULL;
-			witness_init(&rew->witness, "rtree_elm",
-			    WITNESS_RANK_RTREE_ELM, rtree_elm_witness_comp,
+			witness_init(&rew->witness, "rtree_leaf_elm",
+			    WITNESS_RANK_RTREE_ELM, rtree_leaf_elm_witness_comp,
 			    NULL);
 			return;
 		}
@ -292,41 +435,41 @@ rtree_elm_witness_dalloc(tsd_t *tsd, witness_t *witness,
 }

 void
-rtree_elm_witness_acquire(tsdn_t *tsdn, const rtree_t *rtree, uintptr_t key,
-    const rtree_elm_t *elm) {
+rtree_leaf_elm_witness_acquire(tsdn_t *tsdn, const rtree_t *rtree,
+    uintptr_t key, const rtree_leaf_elm_t *elm) {
 	witness_t *witness;

 	if (tsdn_null(tsdn)) {
 		return;
 	}

-	witness = rtree_elm_witness_alloc(tsdn_tsd(tsdn), key, elm);
+	witness = rtree_leaf_elm_witness_alloc(tsdn_tsd(tsdn), key, elm);
 	witness_lock(tsdn, witness);
 }

 void
-rtree_elm_witness_access(tsdn_t *tsdn, const rtree_t *rtree,
-    const rtree_elm_t *elm) {
+rtree_leaf_elm_witness_access(tsdn_t *tsdn, const rtree_t *rtree,
+    const rtree_leaf_elm_t *elm) {
 	witness_t *witness;

 	if (tsdn_null(tsdn)) {
 		return;
 	}

-	witness = rtree_elm_witness_find(tsdn_tsd(tsdn), elm);
+	witness = rtree_leaf_elm_witness_find(tsdn_tsd(tsdn), elm);
 	witness_assert_owner(tsdn, witness);
 }

 void
-rtree_elm_witness_release(tsdn_t *tsdn, const rtree_t *rtree,
-    const rtree_elm_t *elm) {
+rtree_leaf_elm_witness_release(tsdn_t *tsdn, const rtree_t *rtree,
+    const rtree_leaf_elm_t *elm) {
 	witness_t *witness;

 	if (tsdn_null(tsdn)) {
 		return;
 	}

-	witness = rtree_elm_witness_find(tsdn_tsd(tsdn), elm);
+	witness = rtree_leaf_elm_witness_find(tsdn_tsd(tsdn), elm);
 	witness_unlock(tsdn, witness);
-	rtree_elm_witness_dalloc(tsdn_tsd(tsdn), witness, elm);
+	rtree_leaf_elm_witness_dalloc(tsdn_tsd(tsdn), witness, elm);
 }
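Taken together, the witness hunks above maintain a small per-thread pool of witness slots, one per concurrently acquired leaf element: allocation scans the entire array so that acquiring the same element twice trips an assertion, and deallocation re-initializes the slot for reuse. For reference, a minimal self-contained sketch of that slot-pool pattern follows; slot_t, pool_t, MAX_ACQUIRED, and the int stand-in for witness_t are illustrative names, not jemalloc identifiers.

#include <assert.h>
#include <stddef.h>
#include <string.h>

#define MAX_ACQUIRED 4	/* illustrative bound on concurrently held elements */

typedef struct {
	const void	*elm;		/* NULL means the slot is free */
	int		witness;	/* stand-in for the embedded witness_t */
} slot_t;

typedef struct {
	slot_t	slots[MAX_ACQUIRED];
} pool_t;

/* Scan the whole pool: assert elm is not already tracked, claim the first free slot. */
static slot_t *
slot_alloc(pool_t *pool, const void *elm) {
	slot_t *claimed = NULL;
	for (size_t i = 0; i < MAX_ACQUIRED; i++) {
		slot_t *s = &pool->slots[i];
		assert(s->elm != elm);	/* double-acquisition check */
		if (s->elm == NULL && claimed == NULL) {
			s->elm = elm;
			claimed = s;
		}
	}
	assert(claimed != NULL);	/* pool exhaustion would be a bug */
	return claimed;
}

/* Find the slot tracking elm and mark it free again. */
static void
slot_dalloc(pool_t *pool, const void *elm) {
	for (size_t i = 0; i < MAX_ACQUIRED; i++) {
		slot_t *s = &pool->slots[i];
		if (s->elm == elm) {
			s->elm = NULL;
			return;
		}
	}
	assert(0);	/* released an element that was never acquired */
}

int
main(void) {
	pool_t pool;
	int dummy;

	memset(&pool, 0, sizeof(pool));
	slot_t *s = slot_alloc(&pool, &dummy);
	(void)s;
	slot_dalloc(&pool, &dummy);
	return 0;
}

The real code differs in that each slot embeds a witness_t that participates in the lock-order checker, and the bound comes from RTREE_ELM_ACQUIRE_MAX rather than an ad hoc constant.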
@ -2,19 +2,21 @@

 rtree_node_alloc_t *rtree_node_alloc_orig;
 rtree_node_dalloc_t *rtree_node_dalloc_orig;
+rtree_leaf_alloc_t *rtree_leaf_alloc_orig;
+rtree_leaf_dalloc_t *rtree_leaf_dalloc_orig;

 rtree_t *test_rtree;

-static rtree_elm_t *
+static rtree_node_elm_t *
 rtree_node_alloc_intercept(tsdn_t *tsdn, rtree_t *rtree, size_t nelms) {
-	rtree_elm_t *node;
+	rtree_node_elm_t *node;

 	if (rtree != test_rtree) {
 		return rtree_node_alloc_orig(tsdn, rtree, nelms);
 	}

 	malloc_mutex_unlock(tsdn, &rtree->init_lock);
-	node = (rtree_elm_t *)calloc(nelms, sizeof(rtree_elm_t));
+	node = (rtree_node_elm_t *)calloc(nelms, sizeof(rtree_node_elm_t));
 	assert_ptr_not_null(node, "Unexpected calloc() failure");
 	malloc_mutex_lock(tsdn, &rtree->init_lock);

@ -22,7 +24,8 @@ rtree_node_alloc_intercept(tsdn_t *tsdn, rtree_t *rtree, size_t nelms) {
 }

 static void
-rtree_node_dalloc_intercept(tsdn_t *tsdn, rtree_t *rtree, rtree_elm_t *node) {
+rtree_node_dalloc_intercept(tsdn_t *tsdn, rtree_t *rtree,
+    rtree_node_elm_t *node) {
 	if (rtree != test_rtree) {
 		rtree_node_dalloc_orig(tsdn, rtree, node);
 		return;
@ -31,6 +34,33 @@ rtree_node_dalloc_intercept(tsdn_t *tsdn, rtree_t *rtree, rtree_elm_t *node) {
 	free(node);
 }

+static rtree_leaf_elm_t *
+rtree_leaf_alloc_intercept(tsdn_t *tsdn, rtree_t *rtree, size_t nelms) {
+	rtree_leaf_elm_t *leaf;
+
+	if (rtree != test_rtree) {
+		return rtree_leaf_alloc_orig(tsdn, rtree, nelms);
+	}
+
+	malloc_mutex_unlock(tsdn, &rtree->init_lock);
+	leaf = (rtree_leaf_elm_t *)calloc(nelms, sizeof(rtree_leaf_elm_t));
+	assert_ptr_not_null(leaf, "Unexpected calloc() failure");
+	malloc_mutex_lock(tsdn, &rtree->init_lock);
+
+	return leaf;
+}
+
+static void
+rtree_leaf_dalloc_intercept(tsdn_t *tsdn, rtree_t *rtree,
+    rtree_leaf_elm_t *leaf) {
+	if (rtree != test_rtree) {
+		rtree_leaf_dalloc_orig(tsdn, rtree, leaf);
+		return;
+	}
+
+	free(leaf);
+}
+
 TEST_BEGIN(test_rtree_read_empty) {
 	tsdn_t *tsdn;

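The new leaf intercepts in the test harness mirror the existing node intercepts line for line: under JEMALLOC_JET the allocator entry points are writable function pointers, so the test stashes the originals (rtree_leaf_alloc_orig, rtree_leaf_dalloc_orig) and substitutes calloc()/free(), temporarily dropping init_lock around the allocation just as the node intercept already does. A minimal sketch of that hook-pointer interception pattern, using hypothetical names (leaf_alloc_t, leaf_alloc, leaf_alloc_impl, leaf_alloc_intercept) rather than the real jemalloc symbols:

#include <stdio.h>
#include <stdlib.h>

/* Library side: in a testing build, the entry point is a writable function pointer. */
typedef void *(leaf_alloc_t)(size_t nelms);

static void *
leaf_alloc_impl(size_t nelms) {
	/* Placeholder for the real allocator. */
	return calloc(nelms, 16);
}

leaf_alloc_t *leaf_alloc = leaf_alloc_impl;

/* Test side: stash the original and install an intercept. */
static leaf_alloc_t *leaf_alloc_orig;

static void *
leaf_alloc_intercept(size_t nelms) {
	printf("intercepted allocation of %zu elements\n", nelms);
	return leaf_alloc_orig(nelms);	/* or substitute a different allocator */
}

int
main(void) {
	leaf_alloc_orig = leaf_alloc;
	leaf_alloc = leaf_alloc_intercept;

	free(leaf_alloc(3));	/* routed through the intercept */
	return 0;
}

The typedef-of-a-function-type style (leaf_alloc_t followed by a leaf_alloc_t * variable) matches how the JEMALLOC_JET hooks are declared, which is why main() below can simply reassign the pointers.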
@ -75,22 +105,20 @@ thd_start(void *varg) {
 		uintptr_t key = (uintptr_t)(gen_rand64(sfmt) & ((ZU(1) <<
 		    MAX_NBITS) - ZU(1)));
 		if (i % 2 == 0) {
-			rtree_elm_t *elm;
-
-			elm = rtree_elm_acquire(tsdn, &arg->rtree, &rtree_ctx,
-			    key, false, true);
+			rtree_leaf_elm_t *elm = rtree_leaf_elm_acquire(tsdn,
+			    &arg->rtree, &rtree_ctx, key, false, true);
 			assert_ptr_not_null(elm,
-			    "Unexpected rtree_elm_acquire() failure");
-			rtree_elm_write_acquired(tsdn, &arg->rtree, elm,
+			    "Unexpected rtree_leaf_elm_acquire() failure");
+			rtree_leaf_elm_write_acquired(tsdn, &arg->rtree, elm,
 			    extent);
-			rtree_elm_release(tsdn, &arg->rtree, elm);
+			rtree_leaf_elm_release(tsdn, &arg->rtree, elm);

-			elm = rtree_elm_acquire(tsdn, &arg->rtree, &rtree_ctx,
-			    key, true, false);
+			elm = rtree_leaf_elm_acquire(tsdn, &arg->rtree,
+			    &rtree_ctx, key, true, false);
 			assert_ptr_not_null(elm,
-			    "Unexpected rtree_elm_acquire() failure");
-			rtree_elm_read_acquired(tsdn, &arg->rtree, elm);
-			rtree_elm_release(tsdn, &arg->rtree, elm);
+			    "Unexpected rtree_leaf_elm_acquire() failure");
+			rtree_leaf_elm_read_acquired(tsdn, &arg->rtree, elm);
+			rtree_leaf_elm_release(tsdn, &arg->rtree, elm);
 		} else {
 			rtree_read(tsdn, &arg->rtree, &rtree_ctx, key, false);
 		}
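The test's usage discipline is unchanged by the rename: acquire the leaf element for a key, write or read the extent pointer while it is held, then release it, while rtree_read() looks the element up without acquiring it. As a rough illustration of what the acquire/write_acquired/release sequence amounts to, here is a self-contained sketch that locks an element by tagging the low bit of its contents; the types (leaf_elm_t, extent_t) and the locking scheme are simplified stand-ins, not jemalloc's actual implementation.

#include <assert.h>
#include <stdatomic.h>
#include <stdint.h>

/* Made-up stand-ins for rtree_leaf_elm_t and extent_t. */
typedef struct { int dummy; } extent_t;
typedef struct { _Atomic uintptr_t bits; } leaf_elm_t;

/* Acquire: spin until the low "locked" bit can be set on the element. */
static uintptr_t
leaf_elm_acquire(leaf_elm_t *elm) {
	uintptr_t unlocked;
	do {
		unlocked = atomic_load_explicit(&elm->bits,
		    memory_order_relaxed) & ~(uintptr_t)1;
	} while (!atomic_compare_exchange_weak_explicit(&elm->bits, &unlocked,
	    unlocked | 1, memory_order_acquire, memory_order_relaxed));
	return unlocked;	/* previous, unlocked contents */
}

/* Write the extent pointer while the lock bit is held. */
static void
leaf_elm_write_acquired(leaf_elm_t *elm, extent_t *extent) {
	atomic_store_explicit(&elm->bits, (uintptr_t)extent | 1,
	    memory_order_relaxed);
}

/* Release: store the contents back with the lock bit cleared. */
static void
leaf_elm_release(leaf_elm_t *elm) {
	uintptr_t cur = atomic_load_explicit(&elm->bits, memory_order_relaxed);
	atomic_store_explicit(&elm->bits, cur & ~(uintptr_t)1,
	    memory_order_release);
}

int
main(void) {
	leaf_elm_t elm = {0};
	extent_t extent = {0};

	(void)leaf_elm_acquire(&elm);		/* lock the element */
	leaf_elm_write_acquired(&elm, &extent);	/* publish the extent */
	leaf_elm_release(&elm);			/* drop the lock bit */

	assert(atomic_load(&elm.bits) == (uintptr_t)&extent);
	return 0;
}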
@ -201,19 +229,18 @@ TEST_BEGIN(test_rtree_random) {
 	extent_t extent;
 	rtree_t rtree;
 	rtree_ctx_t rtree_ctx = RTREE_CTX_INITIALIZER;
-	rtree_elm_t *elm;

 	test_rtree = &rtree;
 	assert_false(rtree_new(&rtree), "Unexpected rtree_new() failure");

 	for (unsigned i = 0; i < NSET; i++) {
 		keys[i] = (uintptr_t)gen_rand64(sfmt);
-		elm = rtree_elm_acquire(tsdn, &rtree, &rtree_ctx, keys[i],
-		    false, true);
+		rtree_leaf_elm_t *elm = rtree_leaf_elm_acquire(tsdn, &rtree,
+		    &rtree_ctx, keys[i], false, true);
 		assert_ptr_not_null(elm,
-		    "Unexpected rtree_elm_acquire() failure");
-		rtree_elm_write_acquired(tsdn, &rtree, elm, &extent);
-		rtree_elm_release(tsdn, &rtree, elm);
+		    "Unexpected rtree_leaf_elm_acquire() failure");
+		rtree_leaf_elm_write_acquired(tsdn, &rtree, elm, &extent);
+		rtree_leaf_elm_release(tsdn, &rtree, elm);
 		assert_ptr_eq(rtree_read(tsdn, &rtree, &rtree_ctx, keys[i],
 		    true), &extent,
 		    "rtree_read() should return previously set value");
@ -248,6 +275,10 @@ main(void) {
 	rtree_node_alloc = rtree_node_alloc_intercept;
 	rtree_node_dalloc_orig = rtree_node_dalloc;
 	rtree_node_dalloc = rtree_node_dalloc_intercept;
+	rtree_leaf_alloc_orig = rtree_leaf_alloc;
+	rtree_leaf_alloc = rtree_leaf_alloc_intercept;
+	rtree_leaf_dalloc_orig = rtree_leaf_dalloc;
+	rtree_leaf_dalloc = rtree_leaf_dalloc_intercept;
 	test_rtree = NULL;

 	return test(