#include "test/jemalloc_test.h"

rtree_node_alloc_t *rtree_node_alloc_orig;
rtree_node_dalloc_t *rtree_node_dalloc_orig;

rtree_t *test_rtree;
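
/*
 * Node (de)allocation interception hooks.  While test_rtree points at the
 * rtree under test, nodes are allocated with plain calloc() and released with
 * free() so the test can assert on allocation failures; any other rtree falls
 * through to the original hooks.  The rtree's init_lock is released around
 * the calloc() call and reacquired afterward.
 */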
static rtree_elm_t *
rtree_node_alloc_intercept(tsdn_t *tsdn, rtree_t *rtree, size_t nelms) {
    rtree_elm_t *node;

    if (rtree != test_rtree) {
        return rtree_node_alloc_orig(tsdn, rtree, nelms);
    }

    malloc_mutex_unlock(tsdn, &rtree->init_lock);
    node = (rtree_elm_t *)calloc(nelms, sizeof(rtree_elm_t));
    assert_ptr_not_null(node, "Unexpected calloc() failure");
    malloc_mutex_lock(tsdn, &rtree->init_lock);

    return node;
}

static void
rtree_node_dalloc_intercept(tsdn_t *tsdn, rtree_t *rtree, rtree_elm_t *node) {
    if (rtree != test_rtree) {
        rtree_node_dalloc_orig(tsdn, rtree, node);
        return;
    }

    free(node);
}
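
/*
 * test_rtree_read_empty: for every supported key width, a freshly created
 * rtree must return NULL for a read of key 0.
 */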
TEST_BEGIN(test_rtree_read_empty) {
    tsdn_t *tsdn;
    unsigned i;

    tsdn = tsdn_fetch();

    for (i = 1; i <= (sizeof(uintptr_t) << 3); i++) {
        rtree_t rtree;
        rtree_ctx_t rtree_ctx = RTREE_CTX_INITIALIZER;

        test_rtree = &rtree;
        assert_false(rtree_new(&rtree, i),
            "Unexpected rtree_new() failure");
        assert_ptr_null(rtree_read(tsdn, &rtree, &rtree_ctx, 0, false),
            "rtree_read() should return NULL for empty tree");
        rtree_delete(tsdn, &rtree);
        test_rtree = NULL;
    }
}
TEST_END
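
/*
 * Parameters for the concurrency test: NTHREADS threads each perform NITERS
 * operations on a shared rtree with up to MAX_NBITS significant key bits.
 */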
#define NTHREADS 8
#define MAX_NBITS 18
#define NITERS 1000
#define SEED 42

typedef struct {
    unsigned nbits;
    rtree_t rtree;
    uint32_t seed;
} thd_start_arg_t;
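
/*
 * Worker thread body: on even iterations, acquire the element for a random
 * key, write an extent pointer, release it, then re-acquire it for reading;
 * on odd iterations, perform a plain rtree_read() of a random key.
 */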
static void *
thd_start(void *varg) {
    thd_start_arg_t *arg = (thd_start_arg_t *)varg;
    rtree_ctx_t rtree_ctx = RTREE_CTX_INITIALIZER;
    sfmt_t *sfmt;
    extent_t *extent;
    tsdn_t *tsdn;
    unsigned i;

    sfmt = init_gen_rand(arg->seed);
    extent = (extent_t *)malloc(sizeof(extent_t));
    assert_ptr_not_null(extent, "Unexpected malloc() failure");
    tsdn = tsdn_fetch();

    for (i = 0; i < NITERS; i++) {
        uintptr_t key = (uintptr_t)gen_rand64(sfmt);
        if (i % 2 == 0) {
            rtree_elm_t *elm;

            elm = rtree_elm_acquire(tsdn, &arg->rtree, &rtree_ctx,
                key, false, true);
            assert_ptr_not_null(elm,
                "Unexpected rtree_elm_acquire() failure");
            rtree_elm_write_acquired(tsdn, &arg->rtree, elm,
                extent);
            rtree_elm_release(tsdn, &arg->rtree, elm);

            elm = rtree_elm_acquire(tsdn, &arg->rtree, &rtree_ctx,
                key, true, false);
            assert_ptr_not_null(elm,
                "Unexpected rtree_elm_acquire() failure");
            rtree_elm_read_acquired(tsdn, &arg->rtree, elm);
            rtree_elm_release(tsdn, &arg->rtree, elm);
        } else {
            rtree_read(tsdn, &arg->rtree, &rtree_ctx, key, false);
        }
    }

    free(extent);
    fini_gen_rand(sfmt);
    return NULL;
}
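
/*
 * test_rtree_concurrent: for each tree size, spawn NTHREADS threads that
 * concurrently mutate and read a shared rtree, then tear it down.
 */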
TEST_BEGIN(test_rtree_concurrent) {
    thd_start_arg_t arg;
    thd_t thds[NTHREADS];
    sfmt_t *sfmt;
    tsdn_t *tsdn;
    unsigned i, j;

    sfmt = init_gen_rand(SEED);
    tsdn = tsdn_fetch();
    for (i = 1; i < MAX_NBITS; i++) {
        arg.nbits = i;
        test_rtree = &arg.rtree;
        assert_false(rtree_new(&arg.rtree, arg.nbits),
            "Unexpected rtree_new() failure");
        arg.seed = gen_rand32(sfmt);
        for (j = 0; j < NTHREADS; j++) {
            thd_create(&thds[j], thd_start, (void *)&arg);
        }
        for (j = 0; j < NTHREADS; j++) {
            thd_join(thds[j], NULL);
        }
        rtree_delete(tsdn, &arg.rtree);
        test_rtree = NULL;
    }
    fini_gen_rand(sfmt);
}
TEST_END

#undef NTHREADS
#undef MAX_NBITS
#undef NITERS
#undef SEED
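
/*
 * test_rtree_extrema: verify writes and reads at the minimum (0) and maximum
 * (~0) keys for every supported key width.
 */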
TEST_BEGIN(test_rtree_extrema) {
    unsigned i;
    extent_t extent_a, extent_b;
    tsdn_t *tsdn;

    tsdn = tsdn_fetch();

    for (i = 1; i <= (sizeof(uintptr_t) << 3); i++) {
        rtree_t rtree;
        rtree_ctx_t rtree_ctx = RTREE_CTX_INITIALIZER;

        test_rtree = &rtree;
        assert_false(rtree_new(&rtree, i),
            "Unexpected rtree_new() failure");

        assert_false(rtree_write(tsdn, &rtree, &rtree_ctx, 0,
            &extent_a), "Unexpected rtree_write() failure, i=%u", i);
        assert_ptr_eq(rtree_read(tsdn, &rtree, &rtree_ctx, 0, true),
            &extent_a,
            "rtree_read() should return previously set value, i=%u", i);

        assert_false(rtree_write(tsdn, &rtree, &rtree_ctx,
            ~((uintptr_t)0), &extent_b),
            "Unexpected rtree_write() failure, i=%u", i);
        assert_ptr_eq(rtree_read(tsdn, &rtree, &rtree_ctx,
            ~((uintptr_t)0), true), &extent_b,
            "rtree_read() should return previously set value, i=%u", i);

        rtree_delete(tsdn, &rtree);
        test_rtree = NULL;
    }
}
TEST_END
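
/*
 * test_rtree_bits: keys that differ only in insignificant low-order key bits
 * must map to the same element, while the next key that differs in a
 * significant bit must remain unset.
 */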
TEST_BEGIN(test_rtree_bits) {
    tsdn_t *tsdn;
    unsigned i, j, k;

    tsdn = tsdn_fetch();

    for (i = 1; i < (sizeof(uintptr_t) << 3); i++) {
        uintptr_t keys[] = {0, 1,
            (((uintptr_t)1) << (sizeof(uintptr_t)*8-i)) - 1};
        extent_t extent;
        rtree_t rtree;
        rtree_ctx_t rtree_ctx = RTREE_CTX_INITIALIZER;

        test_rtree = &rtree;
        assert_false(rtree_new(&rtree, i),
            "Unexpected rtree_new() failure");

        for (j = 0; j < sizeof(keys)/sizeof(uintptr_t); j++) {
            assert_false(rtree_write(tsdn, &rtree, &rtree_ctx,
                keys[j], &extent),
                "Unexpected rtree_write() failure");
            for (k = 0; k < sizeof(keys)/sizeof(uintptr_t); k++) {
                assert_ptr_eq(rtree_read(tsdn, &rtree,
                    &rtree_ctx, keys[k], true), &extent,
                    "rtree_read() should return previously set "
                    "value and ignore insignificant key bits; "
                    "i=%u, j=%u, k=%u, set key=%#"FMTxPTR", "
                    "get key=%#"FMTxPTR, i, j, k, keys[j],
                    keys[k]);
            }
            assert_ptr_null(rtree_read(tsdn, &rtree, &rtree_ctx,
                (((uintptr_t)1) << (sizeof(uintptr_t)*8-i)), false),
                "Only leftmost rtree leaf should be set; "
                "i=%u, j=%u", i, j);
            rtree_clear(tsdn, &rtree, &rtree_ctx, keys[j]);
        }

        rtree_delete(tsdn, &rtree);
        test_rtree = NULL;
    }
}
TEST_END
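
/*
 * test_rtree_random: insert NSET random keys, verify that they all read back,
 * then clear them and verify that the cleared keys read as NULL.
 */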
TEST_BEGIN(test_rtree_random) {
    unsigned i;
    sfmt_t *sfmt;
    tsdn_t *tsdn;
#define NSET 16
#define SEED 42

    sfmt = init_gen_rand(SEED);
    tsdn = tsdn_fetch();
    for (i = 1; i <= (sizeof(uintptr_t) << 3); i++) {
        uintptr_t keys[NSET];
        extent_t extent;
        unsigned j;
        rtree_t rtree;
        rtree_ctx_t rtree_ctx = RTREE_CTX_INITIALIZER;
        rtree_elm_t *elm;

        test_rtree = &rtree;
        assert_false(rtree_new(&rtree, i),
            "Unexpected rtree_new() failure");

        for (j = 0; j < NSET; j++) {
            keys[j] = (uintptr_t)gen_rand64(sfmt);
            elm = rtree_elm_acquire(tsdn, &rtree, &rtree_ctx,
                keys[j], false, true);
            assert_ptr_not_null(elm,
                "Unexpected rtree_elm_acquire() failure");
            rtree_elm_write_acquired(tsdn, &rtree, elm, &extent);
            rtree_elm_release(tsdn, &rtree, elm);
            assert_ptr_eq(rtree_read(tsdn, &rtree, &rtree_ctx,
                keys[j], true), &extent,
                "rtree_read() should return previously set value");
        }
        for (j = 0; j < NSET; j++) {
            assert_ptr_eq(rtree_read(tsdn, &rtree, &rtree_ctx,
                keys[j], true), &extent,
                "rtree_read() should return previously set value, "
                "j=%u", j);
        }

        for (j = 0; j < NSET; j++) {
            rtree_clear(tsdn, &rtree, &rtree_ctx, keys[j]);
            assert_ptr_null(rtree_read(tsdn, &rtree, &rtree_ctx,
                keys[j], true),
                "rtree_read() should return NULL for cleared key");
        }
        for (j = 0; j < NSET; j++) {
            assert_ptr_null(rtree_read(tsdn, &rtree, &rtree_ctx,
                keys[j], true),
                "rtree_read() should return NULL for cleared key");
        }

        rtree_delete(tsdn, &rtree);
        test_rtree = NULL;
    }
    fini_gen_rand(sfmt);
#undef NSET
#undef SEED
}
TEST_END
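
/*
 * Install the node (de)allocation interceptors before running the tests; each
 * test sets test_rtree so that its rtree's node allocations are redirected.
 */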
int
main(void) {
    rtree_node_alloc_orig = rtree_node_alloc;
    rtree_node_alloc = rtree_node_alloc_intercept;
    rtree_node_dalloc_orig = rtree_node_dalloc;
    rtree_node_dalloc = rtree_node_dalloc_intercept;
    test_rtree = NULL;

    return test(
        test_rtree_read_empty,
        test_rtree_concurrent,
        test_rtree_extrema,
        test_rtree_bits,
        test_rtree_random);
}