Introduce two new modes of junk filling: "alloc" and "free".

In addition to "true"/"false", opt.junk can now be set to "alloc" or "free",
allowing applications to junk-fill memory only on allocation or only on
deallocation.

This resolves #172.
Author: Guilherme Goncalves, 2014-12-08 19:12:41 -02:00 (committed by Jason Evans)
Parent: b74041fb6e
Commit: 2c5cb613df
14 changed files with 140 additions and 72 deletions
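For context, a minimal usage sketch (not part of this commit's diff): an application can select one of the new modes either through the MALLOC_CONF environment variable (e.g. MALLOC_CONF=junk:alloc) or by defining the malloc_conf symbol, assuming jemalloc is the active allocator and was built with --enable-fill.

#include <stdlib.h>

/* Junk-fill on allocation only: new bytes become 0xa5, freed bytes are
 * left untouched.  "junk:free" would do the opposite. */
const char *malloc_conf = "junk:alloc";

int
main(void)
{
	void *p = malloc(64);	/* contents are 0xa5 under --enable-fill */

	free(p);		/* no 0x5a fill in "alloc" mode */
	return (0);
}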


@@ -118,6 +118,8 @@ TESTS_UNIT := $(srcroot)test/unit/atomic.c \
 	$(srcroot)test/unit/ckh.c \
 	$(srcroot)test/unit/hash.c \
 	$(srcroot)test/unit/junk.c \
+	$(srcroot)test/unit/junk_alloc.c \
+	$(srcroot)test/unit/junk_free.c \
 	$(srcroot)test/unit/lg_chunk.c \
 	$(srcroot)test/unit/mallctl.c \
 	$(srcroot)test/unit/math.c \


@@ -930,18 +930,20 @@ for (i = 0; i < nbins; i++) {
       <varlistentry id="opt.junk">
         <term>
           <mallctl>opt.junk</mallctl>
-          (<type>bool</type>)
+          (<type>const char *</type>)
           <literal>r-</literal>
           [<option>--enable-fill</option>]
         </term>
-        <listitem><para>Junk filling enabled/disabled.  If enabled, each byte
-        of uninitialized allocated memory will be initialized to
-        <literal>0xa5</literal>.  All deallocated memory will be initialized to
-        <literal>0x5a</literal>.  This is intended for debugging and will
-        impact performance negatively.  This option is disabled by default
-        unless <option>--enable-debug</option> is specified during
-        configuration, in which case it is enabled by default unless running
-        inside <ulink
+        <listitem><para>Junk filling.  If set to "alloc", each byte of
+        uninitialized allocated memory will be initialized to
+        <literal>0xa5</literal>.  If set to "free", all deallocated memory will
+        be initialized to <literal>0x5a</literal>.  If set to "true", both
+        allocated and deallocated memory will be initialized, and if set to
+        "false", junk filling will be disabled entirely.  This is intended for
+        debugging and will impact performance negatively.  This option is
+        "false" by default unless <option>--enable-debug</option> is specified
+        during configuration, in which case it is "true" by default unless
+        running inside <ulink
         url="http://valgrind.org/">Valgrind</ulink>.</para></listitem>
       </varlistentry>
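Because opt.junk is now string-valued, a mallctl read returns a const char * rather than a bool. A small illustrative sketch of reading it (not part of this commit; print_junk_mode() is a hypothetical helper, and the failure branch covers builds without --enable-fill):

#include <stdio.h>
#include <jemalloc/jemalloc.h>

static void
print_junk_mode(void)
{
	const char *junk;
	size_t sz = sizeof(junk);

	/* Reads one of "true", "false", "alloc", or "free". */
	if (mallctl("opt.junk", &junk, &sz, NULL, 0) == 0)
		printf("opt.junk: %s\n", junk);
	else
		printf("opt.junk unavailable (built without --enable-fill?)\n");
}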


@@ -376,7 +376,9 @@ typedef unsigned index_t;
 #define JEMALLOC_H_EXTERNS

 extern bool opt_abort;
-extern bool opt_junk;
+extern const char *opt_junk;
+extern bool opt_junk_alloc;
+extern bool opt_junk_free;
 extern size_t opt_quarantine;
 extern bool opt_redzone;
 extern bool opt_utrace;


@@ -274,6 +274,8 @@ nhbins
 opt_abort
 opt_dss
 opt_junk
+opt_junk_alloc
+opt_junk_free
 opt_lg_chunk
 opt_lg_dirty_mult
 opt_lg_prof_interval


@@ -252,14 +252,14 @@ tcache_alloc_small(tcache_t *tcache, size_t size, bool zero)
     if (likely(!zero)) {
         if (config_fill) {
-            if (unlikely(opt_junk)) {
+            if (unlikely(opt_junk_alloc)) {
                 arena_alloc_junk_small(ret,
                     &arena_bin_info[binind], false);
             } else if (unlikely(opt_zero))
                 memset(ret, 0, usize);
         }
     } else {
-        if (config_fill && unlikely(opt_junk)) {
+        if (config_fill && unlikely(opt_junk_alloc)) {
             arena_alloc_junk_small(ret, &arena_bin_info[binind],
                 true);
         }
@@ -307,7 +307,7 @@ tcache_alloc_large(tcache_t *tcache, size_t size, bool zero)
         }
         if (likely(!zero)) {
             if (config_fill) {
-                if (unlikely(opt_junk))
+                if (unlikely(opt_junk_alloc))
                     memset(ret, 0xa5, usize);
                 else if (unlikely(opt_zero))
                     memset(ret, 0, usize);
@@ -333,7 +333,7 @@ tcache_dalloc_small(tcache_t *tcache, void *ptr, index_t binind)
     assert(tcache_salloc(ptr) <= SMALL_MAXCLASS);

-    if (config_fill && unlikely(opt_junk))
+    if (config_fill && unlikely(opt_junk_free))
         arena_dalloc_junk_small(ptr, &arena_bin_info[binind]);

     tbin = &tcache->tbins[binind];
@@ -362,7 +362,7 @@ tcache_dalloc_large(tcache_t *tcache, void *ptr, size_t size)
     binind = size2index(size);

-    if (config_fill && unlikely(opt_junk))
+    if (config_fill && unlikely(opt_junk_free))
         arena_dalloc_junk_large(ptr, size);

     tbin = &tcache->tbins[binind];
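The convention the fast paths follow after this split, restated as a simplified sketch (not part of this commit; the flag names match the commit, while junk_on_alloc()/junk_on_free() are illustrative helpers rather than jemalloc functions): allocation-time junking writes 0xa5 and is gated by opt_junk_alloc, deallocation-time junking writes 0x5a and is gated by opt_junk_free.

#include <stdbool.h>
#include <stddef.h>
#include <string.h>

static bool opt_junk_alloc;	/* set for "alloc" and "true" */
static bool opt_junk_free;	/* set for "free" and "true" */

static void
junk_on_alloc(void *ptr, size_t usize)
{
	/* Mark freshly allocated bytes so reads of uninitialized data stand out. */
	if (opt_junk_alloc)
		memset(ptr, 0xa5, usize);
}

static void
junk_on_free(void *ptr, size_t usize)
{
	/* Mark freed bytes so use-after-free is recognizable. */
	if (opt_junk_free)
		memset(ptr, 0x5a, usize);
}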


@@ -1450,7 +1450,7 @@ arena_tcache_fill_small(arena_t *arena, tcache_bin_t *tbin, index_t binind,
             }
             break;
         }
-        if (config_fill && unlikely(opt_junk)) {
+        if (config_fill && unlikely(opt_junk_alloc)) {
             arena_alloc_junk_small(ptr, &arena_bin_info[binind],
                 true);
         }
@@ -1512,6 +1512,7 @@ arena_redzones_validate(void *ptr, arena_bin_info_t *bin_info, bool reset)
     size_t i;
     bool error = false;

+    if (opt_junk_alloc) {
     for (i = 1; i <= redzone_size; i++) {
         uint8_t *byte = (uint8_t *)((uintptr_t)ptr - i);
         if (*byte != 0xa5) {
@@ -1530,6 +1531,8 @@ arena_redzones_validate(void *ptr, arena_bin_info_t *bin_info, bool reset)
             *byte = 0xa5;
         }
     }
+    }
+
     if (opt_abort && error)
         abort();
 }
@@ -1560,7 +1563,7 @@ arena_quarantine_junk_small(void *ptr, size_t usize)
     index_t binind;
     arena_bin_info_t *bin_info;

     cassert(config_fill);
-    assert(opt_junk);
+    assert(opt_junk_free);
     assert(opt_quarantine);
     assert(usize <= SMALL_MAXCLASS);
@@ -1604,7 +1607,7 @@ arena_malloc_small(arena_t *arena, size_t size, bool zero)
     if (!zero) {
         if (config_fill) {
-            if (unlikely(opt_junk)) {
+            if (unlikely(opt_junk_alloc)) {
                 arena_alloc_junk_small(ret,
                     &arena_bin_info[binind], false);
             } else if (unlikely(opt_zero))
@@ -1612,7 +1615,7 @@ arena_malloc_small(arena_t *arena, size_t size, bool zero)
         }
         JEMALLOC_VALGRIND_MAKE_MEM_UNDEFINED(ret, size);
     } else {
-        if (config_fill && unlikely(opt_junk)) {
+        if (config_fill && unlikely(opt_junk_alloc)) {
             arena_alloc_junk_small(ret, &arena_bin_info[binind],
                 true);
         }
@@ -1660,7 +1663,7 @@ arena_malloc_large(arena_t *arena, size_t size, bool zero)
     if (!zero) {
         if (config_fill) {
-            if (unlikely(opt_junk))
+            if (unlikely(opt_junk_alloc))
                 memset(ret, 0xa5, usize);
             else if (unlikely(opt_zero))
                 memset(ret, 0, usize);
@@ -1732,7 +1735,7 @@ arena_palloc(arena_t *arena, size_t size, size_t alignment, bool zero)
     malloc_mutex_unlock(&arena->lock);
     if (config_fill && !zero) {
-        if (unlikely(opt_junk))
+        if (unlikely(opt_junk_alloc))
             memset(ret, 0xa5, size);
         else if (unlikely(opt_zero))
             memset(ret, 0, size);
@@ -1845,7 +1848,7 @@ arena_dalloc_bin_locked_impl(arena_t *arena, arena_chunk_t *chunk, void *ptr,
     bin = &arena->bins[binind];
     bin_info = &arena_bin_info[binind];
-    if (!junked && config_fill && unlikely(opt_junk))
+    if (!junked && config_fill && unlikely(opt_junk_free))
         arena_dalloc_junk_small(ptr, bin_info);

     arena_run_reg_dalloc(run, ptr);
@@ -1908,7 +1911,7 @@ void
 arena_dalloc_junk_large(void *ptr, size_t usize)
 {

-    if (config_fill && unlikely(opt_junk))
+    if (config_fill && unlikely(opt_junk_free))
         memset(ptr, 0x5a, usize);
 }
 #ifdef JEMALLOC_JET
@@ -2079,7 +2082,7 @@ static void
 arena_ralloc_junk_large(void *ptr, size_t old_usize, size_t usize)
 {

-    if (config_fill && unlikely(opt_junk)) {
+    if (config_fill && unlikely(opt_junk_free)) {
         memset((void *)((uintptr_t)ptr + usize), 0x5a,
             old_usize - usize);
     }
@@ -2126,7 +2129,7 @@ arena_ralloc_large(void *ptr, size_t oldsize, size_t size, size_t extra,
             bool ret = arena_ralloc_large_grow(arena, chunk, ptr,
                 oldsize, size, extra, zero);
             if (config_fill && !ret && !zero) {
-                if (unlikely(opt_junk)) {
+                if (unlikely(opt_junk_alloc)) {
                     memset((void *)((uintptr_t)ptr +
                         oldsize), 0xa5, isalloc(ptr,
                         config_prof) - oldsize);


@@ -1234,7 +1234,7 @@ CTL_RO_NL_GEN(opt_lg_chunk, opt_lg_chunk, size_t)
 CTL_RO_NL_GEN(opt_narenas, opt_narenas, size_t)
 CTL_RO_NL_GEN(opt_lg_dirty_mult, opt_lg_dirty_mult, ssize_t)
 CTL_RO_NL_GEN(opt_stats_print, opt_stats_print, bool)
-CTL_RO_NL_CGEN(config_fill, opt_junk, opt_junk, bool)
+CTL_RO_NL_CGEN(config_fill, opt_junk, opt_junk, const char *)
 CTL_RO_NL_CGEN(config_fill, opt_quarantine, opt_quarantine, size_t)
 CTL_RO_NL_CGEN(config_fill, opt_redzone, opt_redzone, bool)
 CTL_RO_NL_CGEN(config_fill, opt_zero, opt_zero, bool)


@@ -67,7 +67,7 @@ huge_palloc(tsd_t *tsd, arena_t *arena, size_t usize, size_t alignment,
     if (zero || (config_fill && unlikely(opt_zero))) {
         if (!is_zeroed)
             memset(ret, 0, usize);
-    } else if (config_fill && unlikely(opt_junk))
+    } else if (config_fill && unlikely(opt_junk_alloc))
         memset(ret, 0xa5, usize);

     return (ret);
@@ -81,7 +81,7 @@ static void
 huge_dalloc_junk(void *ptr, size_t usize)
 {

-    if (config_fill && have_dss && unlikely(opt_junk)) {
+    if (config_fill && have_dss && unlikely(opt_junk_free)) {
         /*
          * Only bother junk filling if the chunk isn't about to be
          * unmapped.
@@ -117,7 +117,7 @@ huge_ralloc_no_move_similar(void *ptr, size_t oldsize, size_t usize,
         size_t sdiff = CHUNK_CEILING(usize) - usize;
         zeroed = (sdiff != 0) ? !pages_purge((void *)((uintptr_t)ptr +
             usize), sdiff) : true;
-        if (config_fill && unlikely(opt_junk)) {
+        if (config_fill && unlikely(opt_junk_free)) {
             memset((void *)((uintptr_t)ptr + usize), 0x5a, oldsize -
                 usize);
             zeroed = false;
@@ -147,7 +147,7 @@ huge_ralloc_no_move_similar(void *ptr, size_t oldsize, size_t usize,
                 memset((void *)((uintptr_t)ptr + oldsize), 0,
                     usize - oldsize);
             }
-        } else if (config_fill && unlikely(opt_junk)) {
+        } else if (config_fill && unlikely(opt_junk_alloc)) {
             memset((void *)((uintptr_t)ptr + oldsize), 0xa5, usize -
                 oldsize);
         }
@@ -165,7 +165,7 @@ huge_ralloc_no_move_shrink(void *ptr, size_t oldsize, size_t usize)
     sdiff = CHUNK_CEILING(usize) - usize;
     zeroed = (sdiff != 0) ? !pages_purge((void *)((uintptr_t)ptr + usize),
         sdiff) : true;
-    if (config_fill && unlikely(opt_junk)) {
+    if (config_fill && unlikely(opt_junk_free)) {
         huge_dalloc_junk((void *)((uintptr_t)ptr + usize), oldsize -
             usize);
         zeroed = false;
@@ -234,7 +234,7 @@ huge_ralloc_no_move_expand(void *ptr, size_t oldsize, size_t size, bool zero) {
                 CHUNK_CEILING(oldsize)), 0, usize -
                 CHUNK_CEILING(oldsize));
         }
-    } else if (config_fill && unlikely(opt_junk)) {
+    } else if (config_fill && unlikely(opt_junk_alloc)) {
         memset((void *)((uintptr_t)ptr + oldsize), 0xa5, usize -
             oldsize);
     }


@@ -13,13 +13,28 @@ bool opt_abort =
     false
 #endif
     ;
-bool opt_junk =
+const char *opt_junk =
+#if (defined(JEMALLOC_DEBUG) && defined(JEMALLOC_FILL))
+    "true"
+#else
+    "false"
+#endif
+    ;
+bool opt_junk_alloc =
 #if (defined(JEMALLOC_DEBUG) && defined(JEMALLOC_FILL))
     true
 #else
     false
 #endif
     ;
+bool opt_junk_free =
+#if (defined(JEMALLOC_DEBUG) && defined(JEMALLOC_FILL))
+    true
+#else
+    false
+#endif
+    ;
 size_t opt_quarantine = ZU(0);
 bool opt_redzone = false;
 bool opt_utrace = false;
@@ -784,7 +799,9 @@ malloc_conf_init(void)
     if (config_valgrind) {
         in_valgrind = (RUNNING_ON_VALGRIND != 0) ? true : false;
         if (config_fill && unlikely(in_valgrind)) {
-            opt_junk = false;
+            opt_junk = "false";
+            opt_junk_alloc = false;
+            opt_junk_free = false;
             assert(!opt_zero);
             opt_quarantine = JEMALLOC_VALGRIND_QUARANTINE_DEFAULT;
             opt_redzone = true;
@@ -867,13 +884,13 @@ malloc_conf_init(void)
             &vlen)) {
 #define CONF_MATCH(n) \
     (sizeof(n)-1 == klen && strncmp(n, k, klen) == 0)
+#define CONF_MATCH_VALUE(n) \
+    (sizeof(n)-1 == vlen && strncmp(n, v, vlen) == 0)
 #define CONF_HANDLE_BOOL(o, n, cont) \
             if (CONF_MATCH(n)) { \
-                if (strncmp("true", v, vlen) == 0 && \
-                    vlen == sizeof("true")-1) \
+                if (CONF_MATCH_VALUE("true")) \
                     o = true; \
-                else if (strncmp("false", v, vlen) == \
-                    0 && vlen == sizeof("false")-1) \
+                else if (CONF_MATCH_VALUE("false")) \
                     o = false; \
                 else { \
                     malloc_conf_error( \
@@ -987,7 +1004,30 @@ malloc_conf_init(void)
                 -1, (sizeof(size_t) << 3) - 1)
             CONF_HANDLE_BOOL(opt_stats_print, "stats_print", true)
             if (config_fill) {
-                CONF_HANDLE_BOOL(opt_junk, "junk", true)
+                if (CONF_MATCH("junk")) {
+                    if (CONF_MATCH_VALUE("true")) {
+                        opt_junk = "true";
+                        opt_junk_alloc = opt_junk_free =
+                            true;
+                    } else if (CONF_MATCH_VALUE("false")) {
+                        opt_junk = "false";
+                        opt_junk_alloc = opt_junk_free =
+                            false;
+                    } else if (CONF_MATCH_VALUE("alloc")) {
+                        opt_junk = "alloc";
+                        opt_junk_alloc = true;
+                        opt_junk_free = false;
+                    } else if (CONF_MATCH_VALUE("free")) {
+                        opt_junk = "free";
+                        opt_junk_alloc = false;
+                        opt_junk_free = true;
+                    } else {
+                        malloc_conf_error(
+                            "Invalid conf value", k,
+                            klen, v, vlen);
+                    }
+                    continue;
+                }
                 CONF_HANDLE_SIZE_T(opt_quarantine, "quarantine",
                     0, SIZE_T_MAX, false)
                 CONF_HANDLE_BOOL(opt_redzone, "redzone", true)
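The value mapping implemented above, restated as a self-contained sketch (not part of the commit; parse_junk() is an illustrative helper, whereas the real code inlines this logic in malloc_conf_init() via CONF_MATCH_VALUE):

#include <stdbool.h>
#include <string.h>

static bool
parse_junk(const char *v, const char **junk, bool *junk_alloc, bool *junk_free)
{
	if (strcmp(v, "true") == 0) {
		*junk = "true";  *junk_alloc = true;  *junk_free = true;
	} else if (strcmp(v, "false") == 0) {
		*junk = "false"; *junk_alloc = false; *junk_free = false;
	} else if (strcmp(v, "alloc") == 0) {
		*junk = "alloc"; *junk_alloc = true;  *junk_free = false;
	} else if (strcmp(v, "free") == 0) {
		*junk = "free";  *junk_alloc = false; *junk_free = true;
	} else
		return (false);	/* unrecognized conf value */
	return (true);
}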


@@ -148,7 +148,7 @@ quarantine(tsd_t *tsd, void *ptr)
         obj->usize = usize;
         quarantine->curbytes += usize;
         quarantine->curobjs++;
-        if (config_fill && unlikely(opt_junk)) {
+        if (config_fill && unlikely(opt_junk_free)) {
             /*
              * Only do redzone validation if Valgrind isn't in
              * operation.


@@ -1,8 +1,11 @@
 #include "test/jemalloc_test.h"

 #ifdef JEMALLOC_FILL
+#  ifndef JEMALLOC_TEST_JUNK_OPT
+#  define JEMALLOC_TEST_JUNK_OPT "junk:true"
+#  endif
 const char *malloc_conf =
-    "abort:false,junk:true,zero:false,redzone:true,quarantine:0";
+    "abort:false,zero:false,redzone:true,quarantine:0," JEMALLOC_TEST_JUNK_OPT;
 #endif

 static arena_dalloc_junk_small_t *arena_dalloc_junk_small_orig;
@@ -69,12 +72,14 @@ test_junk(size_t sz_min, size_t sz_max)
     char *s;
     size_t sz_prev, sz, i;

+    if (opt_junk_free) {
     arena_dalloc_junk_small_orig = arena_dalloc_junk_small;
     arena_dalloc_junk_small = arena_dalloc_junk_small_intercept;
     arena_dalloc_junk_large_orig = arena_dalloc_junk_large;
     arena_dalloc_junk_large = arena_dalloc_junk_large_intercept;
     huge_dalloc_junk_orig = huge_dalloc_junk;
     huge_dalloc_junk = huge_dalloc_junk_intercept;
+    }

     sz_prev = 0;
     s = (char *)mallocx(sz_min, 0);
@@ -92,9 +97,11 @@ test_junk(size_t sz_min, size_t sz_max)
         }

         for (i = sz_prev; i < sz; i++) {
+            if (opt_junk_alloc) {
             assert_c_eq(s[i], 0xa5,
-                "Newly allocated byte %zu/%zu isn't junk-filled",
-                i, sz);
+                "Newly allocated byte %zu/%zu isn't "
+                "junk-filled", i, sz);
+            }
             s[i] = 'a';
         }
@@ -103,7 +110,7 @@ test_junk(size_t sz_min, size_t sz_max)
             s = (char *)rallocx(s, sz+1, 0);
             assert_ptr_not_null((void *)s,
                 "Unexpected rallocx() failure");
-            assert_true(saw_junking,
+            assert_true(!opt_junk_free || saw_junking,
                 "Expected region of size %zu to be junk-filled",
                 sz);
         }
@@ -111,13 +118,15 @@ test_junk(size_t sz_min, size_t sz_max)
     watch_junking(s);
     dallocx(s, 0);
-    assert_true(saw_junking,
+    assert_true(!opt_junk_free || saw_junking,
         "Expected region of size %zu to be junk-filled", sz);

+    if (opt_junk_free) {
     arena_dalloc_junk_small = arena_dalloc_junk_small_orig;
     arena_dalloc_junk_large = arena_dalloc_junk_large_orig;
     huge_dalloc_junk = huge_dalloc_junk_orig;
     }
+}

 TEST_BEGIN(test_junk_small)
 {
@@ -204,6 +213,7 @@ TEST_BEGIN(test_junk_redzone)
     arena_redzone_corruption_t *arena_redzone_corruption_orig;

     test_skip_if(!config_fill);
+    test_skip_if(!opt_junk_alloc || !opt_junk_free);

     arena_redzone_corruption_orig = arena_redzone_corruption;
     arena_redzone_corruption = arena_redzone_corruption_replacement;
@@ -234,6 +244,7 @@ int
 main(void)
 {

+    assert(opt_junk_alloc || opt_junk_free);
     return (test(
         test_junk_small,
         test_junk_large,

test/unit/junk_alloc.c (new file)

@@ -0,0 +1,3 @@
+#define JEMALLOC_TEST_JUNK_OPT "junk:alloc"
+#include "junk.c"
+#undef JEMALLOC_TEST_JUNK_OPT

test/unit/junk_free.c (new file)

@@ -0,0 +1,3 @@
+#define JEMALLOC_TEST_JUNK_OPT "junk:free"
+#include "junk.c"
+#undef JEMALLOC_TEST_JUNK_OPT


@@ -164,7 +164,7 @@ TEST_BEGIN(test_mallctl_opt)
     TEST_MALLCTL_OPT(size_t, narenas, always);
     TEST_MALLCTL_OPT(ssize_t, lg_dirty_mult, always);
     TEST_MALLCTL_OPT(bool, stats_print, always);
-    TEST_MALLCTL_OPT(bool, junk, fill);
+    TEST_MALLCTL_OPT(const char *, junk, fill);
     TEST_MALLCTL_OPT(size_t, quarantine, fill);
     TEST_MALLCTL_OPT(bool, redzone, fill);
     TEST_MALLCTL_OPT(bool, zero, fill);