Add rtree unit tests.
parent 5aeeda6f92
commit b980cc774a
Makefile.in
@@ -111,8 +111,8 @@ TESTS_UNIT := $(srcroot)test/unit/bitmap.c $(srcroot)test/unit/ckh.c \
 	$(srcroot)test/unit/hash.c $(srcroot)test/unit/mallctl.c \
 	$(srcroot)test/unit/math.c $(srcroot)test/unit/mq.c \
 	$(srcroot)test/unit/mtx.c $(srcroot)test/unit/quarantine.c \
-	$(srcroot)test/unit/SFMT.c $(srcroot)test/unit/stats.c \
-	$(srcroot)test/unit/tsd.c
+	$(srcroot)test/unit/rtree.c $(srcroot)test/unit/SFMT.c \
+	$(srcroot)test/unit/stats.c $(srcroot)test/unit/tsd.c
 TESTS_INTEGRATION := $(srcroot)test/integration/aligned_alloc.c \
 	$(srcroot)test/integration/allocated.c \
 	$(srcroot)test/integration/mallocx.c \
@@ -327,6 +327,7 @@ quarantine_tsd_get_wrapper
 quarantine_tsd_init_head
 quarantine_tsd_set
 register_zone
+rtree_delete
 rtree_get
 rtree_get_locked
 rtree_new
include/jemalloc/internal/rtree.h
@@ -20,11 +20,16 @@ typedef struct rtree_s rtree_t;
 #  define RTREE_NODESIZE CACHELINE
 #endif
 
+typedef void *(rtree_alloc_t)(size_t);
+typedef void (rtree_dalloc_t)(void *);
+
 #endif /* JEMALLOC_H_TYPES */
 /******************************************************************************/
 #ifdef JEMALLOC_H_STRUCTS
 
 struct rtree_s {
+	rtree_alloc_t	*alloc;
+	rtree_dalloc_t	*dalloc;
 	malloc_mutex_t	mutex;
 	void		**root;
 	unsigned	height;
@@ -35,7 +40,8 @@ struct rtree_s {
 /******************************************************************************/
 #ifdef JEMALLOC_H_EXTERNS
 
-rtree_t	*rtree_new(unsigned bits);
+rtree_t	*rtree_new(unsigned bits, rtree_alloc_t *alloc, rtree_dalloc_t *dalloc);
+void	rtree_delete(rtree_t *rtree);
 void	rtree_prefork(rtree_t *rtree);
 void	rtree_postfork_parent(rtree_t *rtree);
 void	rtree_postfork_child(rtree_t *rtree);
@@ -45,7 +51,7 @@ void	rtree_postfork_child(rtree_t *rtree);
 #ifdef JEMALLOC_H_INLINES
 
 #ifndef JEMALLOC_ENABLE_INLINE
-#ifndef JEMALLOC_DEBUG
+#ifdef JEMALLOC_DEBUG
 void	*rtree_get_locked(rtree_t *rtree, uintptr_t key);
 #endif
 void	*rtree_get(rtree_t *rtree, uintptr_t key);
@@ -68,7 +74,7 @@ f(rtree_t *rtree, uintptr_t key) \
 	    i < height - 1; \
 	    i++, lshift += bits, node = child) { \
 		bits = rtree->level2bits[i]; \
-		subkey = (key << lshift) >> ((ZU(1) << (LG_SIZEOF_PTR + \
+		subkey = (key << lshift) >> ((ZU(1) << (LG_SIZEOF_PTR + \
 		    3)) - bits); \
 		child = (void**)node[subkey]; \
 		if (child == NULL) { \
@@ -138,7 +144,7 @@ rtree_set(rtree_t *rtree, uintptr_t key, void *val)
 		    bits);
 		child = (void**)node[subkey];
 		if (child == NULL) {
-			child = (void**)base_alloc(sizeof(void *) <<
+			child = (void**)rtree->alloc(sizeof(void *) <<
 			    rtree->level2bits[i+1]);
 			if (child == NULL) {
 				malloc_mutex_unlock(&rtree->mutex);
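Aside, not part of the commit: the generated lookup above consumes rtree->level2bits[i] key bits per level, most significant bits first, and uses them to index the current node. A minimal standalone sketch of that subkey computation, assuming 64-bit pointers and a hypothetical three-level tree with 4 bits per level:

#include <stdint.h>
#include <stdio.h>

int
main(void)
{
	uintptr_t key = (uintptr_t)0xdeadbeef12345678ULL;	/* Arbitrary key. */
	unsigned level2bits[] = {4, 4, 4};	/* Hypothetical; real trees size this from RTREE_NODESIZE. */
	unsigned i, lshift = 0;

	for (i = 0; i < 3; i++) {
		unsigned bits = level2bits[i];
		/* Same expression as the macro:
		 * (key << lshift) >> ((ZU(1) << (LG_SIZEOF_PTR+3)) - bits). */
		uintptr_t subkey = (key << lshift) >>
		    ((sizeof(uintptr_t) << 3) - bits);

		printf("level %u: subkey = %#x\n", i, (unsigned)subkey);
		lshift += bits;
	}
	/* Prints 0xd, 0xe, 0xa: the top three 4-bit groups of the key. */
	return (0);
}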
src/chunk.c
@@ -356,7 +356,7 @@ chunk_boot(void)
 	extent_tree_ad_new(&chunks_ad_dss);
 	if (config_ivsalloc) {
 		chunks_rtree = rtree_new((ZU(1) << (LG_SIZEOF_PTR+3)) -
-		    opt_lg_chunk);
+		    opt_lg_chunk, base_alloc, NULL);
 		if (chunks_rtree == NULL)
 			return (true);
 	}
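Aside, not part of the commit: the extra rtree_new() arguments let each caller choose how nodes are allocated. chunk_boot() above keeps using base_alloc and passes a NULL dalloc, since base allocations are never freed and chunks_rtree is never deleted, whereas the unit tests below pass imalloc/idalloc so each tree can be torn down with rtree_delete(). A hypothetical in-tree caller (the function name and key are illustrative only) might look like:

#include "jemalloc/internal/jemalloc_internal.h"

static bool
rtree_roundtrip_example(void)
{
	/* Full-width keys; imalloc/idalloc back the nodes, as in the tests. */
	rtree_t *rtree = rtree_new(sizeof(uintptr_t) << 3, imalloc, idalloc);

	if (rtree == NULL)
		return (true);
	/* Error handling for rtree_set() elided, as in the unit tests. */
	rtree_set(rtree, (uintptr_t)0x12345, (void *)0x1);
	assert(rtree_get(rtree, (uintptr_t)0x12345) == (void *)0x1);
	rtree_delete(rtree);	/* Possible only because a dalloc hook was given. */
	return (false);
}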
src/rtree.c
@@ -2,24 +2,28 @@
 #include "jemalloc/internal/jemalloc_internal.h"
 
 rtree_t *
-rtree_new(unsigned bits)
+rtree_new(unsigned bits, rtree_alloc_t *alloc, rtree_dalloc_t *dalloc)
 {
 	rtree_t *ret;
 	unsigned bits_per_level, height, i;
 
 	assert(bits > 0 && bits <= (sizeof(uintptr_t) << 3));
 
 	bits_per_level = ffs(pow2_ceil((RTREE_NODESIZE / sizeof(void *)))) - 1;
 	height = bits / bits_per_level;
 	if (height * bits_per_level != bits)
 		height++;
 	assert(height * bits_per_level >= bits);
 
-	ret = (rtree_t*)base_alloc(offsetof(rtree_t, level2bits) +
+	ret = (rtree_t*)alloc(offsetof(rtree_t, level2bits) +
 	    (sizeof(unsigned) * height));
 	if (ret == NULL)
 		return (NULL);
 	memset(ret, 0, offsetof(rtree_t, level2bits) + (sizeof(unsigned) *
 	    height));
 
+	ret->alloc = alloc;
+	ret->dalloc = dalloc;
 	if (malloc_mutex_init(&ret->mutex)) {
 		/* Leak the rtree. */
 		return (NULL);
@@ -32,7 +36,7 @@ rtree_new(unsigned bits)
 	for (i = 1; i < height; i++)
 		ret->level2bits[i] = bits_per_level;
 
-	ret->root = (void**)base_alloc(sizeof(void *) << ret->level2bits[0]);
+	ret->root = (void**)alloc(sizeof(void *) << ret->level2bits[0]);
 	if (ret->root == NULL) {
 		/*
 		 * We leak the rtree here, since there's no generic base
@@ -45,6 +49,31 @@ rtree_new(unsigned bits)
 	return (ret);
 }
 
+static void
+rtree_delete_subtree(rtree_t *rtree, void **node, unsigned level)
+{
+
+	if (level < rtree->height - 1) {
+		size_t nchildren, i;
+
+		nchildren = ZU(1) << rtree->level2bits[level];
+		for (i = 0; i < nchildren; i++) {
+			void **child = (void **)node[i];
+			if (child != NULL)
+				rtree_delete_subtree(rtree, child, level + 1);
+		}
+	}
+	rtree->dalloc(node);
+}
+
+void
+rtree_delete(rtree_t *rtree)
+{
+
+	rtree_delete_subtree(rtree, rtree->root, 0);
+	rtree->dalloc(rtree);
+}
+
 void
 rtree_prefork(rtree_t *rtree)
 {
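Aside, not part of the commit: a standalone sketch of the node-sizing arithmetic in rtree_new() above, assuming a hypothetical 64-byte RTREE_NODESIZE and 8-byte pointers, so each node holds 8 children and each level consumes 3 key bits (the pow2_ceil() step is omitted here because 8 is already a power of two):

#include <assert.h>
#include <strings.h>	/* ffs() */

static unsigned
rtree_height_example(unsigned bits, unsigned nodesize, unsigned ptrsize)
{
	unsigned bits_per_level = (unsigned)ffs((int)(nodesize / ptrsize)) - 1;
	unsigned height = bits / bits_per_level;

	if (height * bits_per_level != bits)
		height++;	/* Round up so every key bit is covered. */
	return (height);
}

int
main(void)
{
	/* 42 significant key bits (e.g. 64-bit pointers minus 22 bits of
	 * 4 MiB chunks, as chunk_boot() computes): exactly 14 levels. */
	assert(rtree_height_example(42, 64, 8) == 14);
	/* 13 bits do not divide evenly by 3, so the height rounds up to 5. */
	assert(rtree_height_example(13, 64, 8) == 5);
	return (0);
}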
test/unit/rtree.c (new file)
@@ -0,0 +1,119 @@
#include "test/jemalloc_test.h"

TEST_BEGIN(test_rtree_get_empty)
{
	unsigned i;

	for (i = 1; i <= (sizeof(uintptr_t) << 3); i++) {
		rtree_t *rtree = rtree_new(i, imalloc, idalloc);
		assert_ptr_null(rtree_get(rtree, 0),
		    "rtree_get() should return NULL for empty tree");
		rtree_delete(rtree);
	}
}
TEST_END

TEST_BEGIN(test_rtree_extrema)
{
	unsigned i;

	for (i = 1; i <= (sizeof(uintptr_t) << 3); i++) {
		rtree_t *rtree = rtree_new(i, imalloc, idalloc);

		rtree_set(rtree, 0, (void *)1);
		assert_ptr_eq(rtree_get(rtree, 0), (void *)1,
		    "rtree_get() should return previously set value");

		rtree_set(rtree, ~((uintptr_t)0), (void *)1);
		assert_ptr_eq(rtree_get(rtree, ~((uintptr_t)0)), (void *)1,
		    "rtree_get() should return previously set value");

		rtree_delete(rtree);
	}
}
TEST_END

TEST_BEGIN(test_rtree_bits)
{
	unsigned i, j, k;

	for (i = 1; i < (sizeof(uintptr_t) << 3); i++) {
		uintptr_t keys[] = {0, 1,
		    (((uintptr_t)1) << (sizeof(uintptr_t)*8-i)) - 1};
		rtree_t *rtree = rtree_new(i, imalloc, idalloc);

		for (j = 0; j < sizeof(keys)/sizeof(uintptr_t); j++) {
			rtree_set(rtree, keys[j], (void *)1);
			for (k = 0; k < sizeof(keys)/sizeof(uintptr_t); k++) {
				assert_ptr_eq(rtree_get(rtree, keys[k]),
				    (void *)1,
				    "rtree_get() should return previously set "
				    "value and ignore insignificant key bits; "
				    "i=%u, j=%u, k=%u, set key=%#x, "
				    "get key=%#x", i, j, k, keys[j], keys[k]);
			}
			assert_ptr_eq(rtree_get(rtree,
			    (((uintptr_t)1) << (sizeof(uintptr_t)*8-i))),
			    (void *)0,
			    "Only leftmost rtree leaf should be set; "
			    "i=%u, j=%u", i, j);
			rtree_set(rtree, keys[j], (void *)0);
		}

		rtree_delete(rtree);
	}
}
TEST_END

TEST_BEGIN(test_rtree_random)
{
	unsigned i;
	sfmt_t *sfmt;
#define	NSET 100
#define	SEED 42

	sfmt = init_gen_rand(SEED);
	for (i = 1; i <= (sizeof(uintptr_t) << 3); i++) {
		rtree_t *rtree = rtree_new(i, imalloc, idalloc);
		uintptr_t keys[NSET];
		unsigned j;

		for (j = 0; j < NSET; j++) {
			keys[j] = (uintptr_t)gen_rand64(sfmt);
			rtree_set(rtree, keys[j], (void *)1);
			assert_ptr_eq(rtree_get(rtree, keys[j]), (void *)1,
			    "rtree_get() should return previously set value");
		}
		for (j = 0; j < NSET; j++) {
			assert_ptr_eq(rtree_get(rtree, keys[j]), (void *)1,
			    "rtree_get() should return previously set value");
		}

		for (j = 0; j < NSET; j++) {
			rtree_set(rtree, keys[j], (void *)0);
			assert_ptr_eq(rtree_get(rtree, keys[j]), (void *)0,
			    "rtree_get() should return previously set value");
		}
		for (j = 0; j < NSET; j++) {
			assert_ptr_eq(rtree_get(rtree, keys[j]), (void *)0,
			    "rtree_get() should return previously set value");
		}

		rtree_delete(rtree);
	}
	fini_gen_rand(sfmt);
#undef NSET
#undef SEED
}
TEST_END

int
main(void)
{

	return (test(
	    test_rtree_get_empty,
	    test_rtree_extrema,
	    test_rtree_bits,
	    test_rtree_random));
}