#include "test/jemalloc_test.h"

#ifdef JEMALLOC_FILL
#  ifndef JEMALLOC_TEST_JUNK_OPT
#    define JEMALLOC_TEST_JUNK_OPT "junk:true"
#  endif
const char *malloc_conf =
    "abort:false,zero:false," JEMALLOC_TEST_JUNK_OPT;
#endif
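
/*
 * Saved pointers to the real junk-fill deallocation hooks, so the
 * interceptors installed by test_junk() can delegate to them and restore
 * them once the test finishes.
 */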
static arena_dalloc_junk_small_t *arena_dalloc_junk_small_orig;
static large_dalloc_junk_t *large_dalloc_junk_orig;
static void *watch_for_junking;
static bool saw_junking;
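
/*
 * Record a region to watch; the deallocation interceptors below set
 * saw_junking once that exact pointer passes through a junk-fill hook.
 */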
static void
watch_junking(void *p)
{
	watch_for_junking = p;
	saw_junking = false;
}
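
/*
 * Wrap the real small-region junk hook and additionally verify that every
 * byte of the deallocated region was overwritten with JEMALLOC_FREE_JUNK.
 */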
static void
arena_dalloc_junk_small_intercept(void *ptr, const arena_bin_info_t *bin_info)
{
	size_t i;

	arena_dalloc_junk_small_orig(ptr, bin_info);
	for (i = 0; i < bin_info->reg_size; i++) {
		assert_u_eq(((uint8_t *)ptr)[i], JEMALLOC_FREE_JUNK,
		    "Missing junk fill for byte %zu/%zu of deallocated region",
		    i, bin_info->reg_size);
	}
	if (ptr == watch_for_junking)
		saw_junking = true;
}
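
/*
 * Wrap the real large-region junk hook; only record that it ran for the
 * watched pointer (see the comment in the body for why the bytes are not
 * verified here).
 */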
static void
large_dalloc_junk_intercept(void *ptr, size_t usize)
{
	large_dalloc_junk_orig(ptr, usize);
	/*
	 * The conditions under which junk filling actually occurs are nuanced
	 * enough that it doesn't make sense to duplicate the decision logic in
	 * test code, so don't actually check that the region is junk-filled.
	 */
	if (ptr == watch_for_junking)
		saw_junking = true;
}
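
/*
 * Core test body: optionally install the deallocation interceptors, then
 * grow a single allocation from sz_min toward sz_max, checking that newly
 * exposed bytes are junk-filled on allocation, that bytes written by the
 * test survive resizing, and that regions are junk-filled when freed.
 */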
static void
test_junk(size_t sz_min, size_t sz_max)
{
	uint8_t *s;
	size_t sz_prev, sz, i;

	if (opt_junk_free) {
		arena_dalloc_junk_small_orig = arena_dalloc_junk_small;
		arena_dalloc_junk_small = arena_dalloc_junk_small_intercept;
		large_dalloc_junk_orig = large_dalloc_junk;
		large_dalloc_junk = large_dalloc_junk_intercept;
	}

	sz_prev = 0;
	s = (uint8_t *)mallocx(sz_min, 0);
	assert_ptr_not_null((void *)s, "Unexpected mallocx() failure");
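
	/* Walk the allocation up through successive size classes. */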
	for (sz = sallocx(s, 0); sz <= sz_max;
	    sz_prev = sz, sz = sallocx(s, 0)) {
		if (sz_prev > 0) {
			assert_u_eq(s[0], 'a',
			    "Previously allocated byte %zu/%zu is corrupted",
			    ZU(0), sz_prev);
			assert_u_eq(s[sz_prev-1], 'a',
			    "Previously allocated byte %zu/%zu is corrupted",
			    sz_prev-1, sz_prev);
		}
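
		/*
		 * Newly exposed bytes should carry the allocation junk
		 * pattern (when junk-on-alloc is enabled); overwrite them
		 * with a known value so later iterations can detect
		 * corruption.
		 */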
		for (i = sz_prev; i < sz; i++) {
			if (opt_junk_alloc) {
				assert_u_eq(s[i], JEMALLOC_ALLOC_JUNK,
				    "Newly allocated byte %zu/%zu isn't "
				    "junk-filled", i, sz);
			}
			s[i] = 'a';
		}
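
		/*
		 * If xallocx() cannot grow the region in place (usable size
		 * stays sz), rallocx(sz+1) is forced to move it, so the old
		 * region is freed and should be observed being junk-filled.
		 */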
		if (xallocx(s, sz+1, 0, 0) == sz) {
			uint8_t *t;

			watch_junking(s);
			t = (uint8_t *)rallocx(s, sz+1, 0);
			assert_ptr_not_null((void *)t,
			    "Unexpected rallocx() failure");
			assert_ptr_ne(s, t, "Unexpected in-place rallocx()");
			assert_zu_ge(sallocx(t, 0), sz+1,
			    "Unexpectedly small rallocx() result");
			assert_true(!opt_junk_free || saw_junking,
			    "Expected region of size %zu to be junk-filled",
			    sz);
			s = t;
		}
	}
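
	/* The final explicit deallocation should junk the region as well. */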
	watch_junking(s);
	dallocx(s, 0);
	assert_true(!opt_junk_free || saw_junking,
	    "Expected region of size %zu to be junk-filled", sz);

	if (opt_junk_free) {
		arena_dalloc_junk_small = arena_dalloc_junk_small_orig;
		large_dalloc_junk = large_dalloc_junk_orig;
	}
}
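
/* Junk filling across the small size classes. */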
TEST_BEGIN(test_junk_small)
{
	test_skip_if(!config_fill);
	test_junk(1, SMALL_MAXCLASS-1);
}
TEST_END
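
/* Junk filling for large allocations, up to twice the chunk size. */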
TEST_BEGIN(test_junk_large)
{
	test_skip_if(!config_fill);
	test_junk(SMALL_MAXCLASS+1, chunksize*2);
}
TEST_END
int
main(void)
{
	return (test(
	    test_junk_small,
	    test_junk_large));
}