#include "test/jemalloc_test.h"

#ifdef JEMALLOC_FILL
#  ifndef JEMALLOC_TEST_JUNK_OPT
#    define JEMALLOC_TEST_JUNK_OPT "junk:true"
#  endif
const char *malloc_conf =
    "abort:false,zero:false," JEMALLOC_TEST_JUNK_OPT;
#endif
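
/*
 * Originals of the junk-fill hooks, saved so the intercepting wrappers below
 * can delegate to them and so test_junk() can restore them afterwards.
 * watch_for_junking/saw_junking record whether the watched region was
 * observed being junk-filled on deallocation.
 */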
static arena_dalloc_junk_small_t *arena_dalloc_junk_small_orig;
static large_dalloc_junk_t *large_dalloc_junk_orig;
static large_dalloc_maybe_junk_t *large_dalloc_maybe_junk_orig;
static void *watch_for_junking;
static bool saw_junking;

static void
watch_junking(void *p)
{
    watch_for_junking = p;
    saw_junking = false;
}
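
/*
 * The intercepts below wrap the real deallocation junk-fill hooks: each calls
 * through to the original, verifies the JEMALLOC_FREE_JUNK pattern where it
 * is guaranteed to be written, and records whether the watched region was
 * seen being junked.
 */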
static void
arena_dalloc_junk_small_intercept(void *ptr, const arena_bin_info_t *bin_info)
{
    size_t i;

    arena_dalloc_junk_small_orig(ptr, bin_info);
    for (i = 0; i < bin_info->reg_size; i++) {
        assert_u_eq(((uint8_t *)ptr)[i], JEMALLOC_FREE_JUNK,
            "Missing junk fill for byte %zu/%zu of deallocated region",
            i, bin_info->reg_size);
    }
    if (ptr == watch_for_junking)
        saw_junking = true;
}

static void
large_dalloc_junk_intercept(void *ptr, size_t usize)
{
    size_t i;

    large_dalloc_junk_orig(ptr, usize);
    for (i = 0; i < usize; i++) {
        assert_u_eq(((uint8_t *)ptr)[i], JEMALLOC_FREE_JUNK,
            "Missing junk fill for byte %zu/%zu of deallocated region",
            i, usize);
    }
    if (ptr == watch_for_junking)
        saw_junking = true;
}

static void
large_dalloc_maybe_junk_intercept(void *ptr, size_t usize)
{
    large_dalloc_maybe_junk_orig(ptr, usize);
    if (ptr == watch_for_junking)
        saw_junking = true;
}
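
/*
 * Walk the size classes from sz_min up to sz_max by repeatedly reallocating a
 * single region, checking that newly allocated bytes are junk-filled (when
 * opt_junk_alloc), that previously written bytes survive reallocation, and
 * that discarded regions are junk-filled (when opt_junk_free).
 */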
static void
test_junk(size_t sz_min, size_t sz_max)
{
    uint8_t *s;
    size_t sz_prev, sz, i;

    if (opt_junk_free) {
        arena_dalloc_junk_small_orig = arena_dalloc_junk_small;
        arena_dalloc_junk_small = arena_dalloc_junk_small_intercept;
        large_dalloc_junk_orig = large_dalloc_junk;
        large_dalloc_junk = large_dalloc_junk_intercept;
        large_dalloc_maybe_junk_orig = large_dalloc_maybe_junk;
        large_dalloc_maybe_junk = large_dalloc_maybe_junk_intercept;
    }

    sz_prev = 0;
    s = (uint8_t *)mallocx(sz_min, 0);
    assert_ptr_not_null((void *)s, "Unexpected mallocx() failure");

    for (sz = sallocx(s, 0); sz <= sz_max;
        sz_prev = sz, sz = sallocx(s, 0)) {
        if (sz_prev > 0) {
            assert_u_eq(s[0], 'a',
                "Previously allocated byte %zu/%zu is corrupted",
                ZU(0), sz_prev);
            assert_u_eq(s[sz_prev-1], 'a',
                "Previously allocated byte %zu/%zu is corrupted",
                sz_prev-1, sz_prev);
        }

        for (i = sz_prev; i < sz; i++) {
            if (opt_junk_alloc) {
                assert_u_eq(s[i], JEMALLOC_ALLOC_JUNK,
                    "Newly allocated byte %zu/%zu isn't junk-filled",
                    i, sz);
            }
            s[i] = 'a';
        }
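
        /*
         * If the region cannot be grown in place, force a move via rallocx()
         * and check that the old (watched) region was junk-filled when it was
         * discarded.
         */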
        if (xallocx(s, sz+1, 0, 0) == sz) {
            uint8_t *t;
            watch_junking(s);
            t = (uint8_t *)rallocx(s, sz+1, 0);
            assert_ptr_not_null((void *)t,
                "Unexpected rallocx() failure");
            assert_ptr_ne(s, t, "Unexpected in-place rallocx()");
            assert_zu_ge(sallocx(t, 0), sz+1,
                "Unexpectedly small rallocx() result");
            assert_true(!opt_junk_free || saw_junking,
                "Expected region of size %zu to be junk-filled", sz);
            s = t;
        }
    }
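
    /* Free the final region and verify that it, too, gets junk-filled. */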
    watch_junking(s);
    dallocx(s, 0);
    assert_true(!opt_junk_free || saw_junking,
        "Expected region of size %zu to be junk-filled", sz);

    if (opt_junk_free) {
        arena_dalloc_junk_small = arena_dalloc_junk_small_orig;
        large_dalloc_junk = large_dalloc_junk_orig;
        large_dalloc_maybe_junk = large_dalloc_maybe_junk_orig;
    }
}

TEST_BEGIN(test_junk_small)
{
    test_skip_if(!config_fill);
    test_junk(1, SMALL_MAXCLASS-1);
}
TEST_END

TEST_BEGIN(test_junk_large)
{
    test_skip_if(!config_fill);
    test_junk(SMALL_MAXCLASS+1, (1U << (LG_LARGE_MINCLASS+1)));
}
TEST_END

int
main(void)
{
    return (test(
        test_junk_small,
        test_junk_large));
}