Introduce two new modes of junk filling: "alloc" and "free".
In addition to "true" and "false", opt.junk can now be set to "alloc" or "free", allowing applications to junk memory only on allocation or only on deallocation. This resolves #172.
commit 2c5cb613df (parent b74041fb6e)
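For example, an application that only wants to catch reads of uninitialized memory, without paying for junk-on-free, can now select allocation-time junking through jemalloc's usual configuration channel (ordinary jemalloc usage, shown here for illustration; it is not part of this diff):

    MALLOC_CONF="junk:alloc" ./app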
@@ -118,6 +118,8 @@ TESTS_UNIT := $(srcroot)test/unit/atomic.c \
 	$(srcroot)test/unit/ckh.c \
 	$(srcroot)test/unit/hash.c \
 	$(srcroot)test/unit/junk.c \
+	$(srcroot)test/unit/junk_alloc.c \
+	$(srcroot)test/unit/junk_free.c \
 	$(srcroot)test/unit/lg_chunk.c \
 	$(srcroot)test/unit/mallctl.c \
 	$(srcroot)test/unit/math.c \
@@ -930,18 +930,20 @@ for (i = 0; i < nbins; i++) {
       <varlistentry id="opt.junk">
         <term>
           <mallctl>opt.junk</mallctl>
-          (<type>bool</type>)
+          (<type>const char *</type>)
           <literal>r-</literal>
           [<option>--enable-fill</option>]
         </term>
-        <listitem><para>Junk filling enabled/disabled. If enabled, each byte
-        of uninitialized allocated memory will be initialized to
-        <literal>0xa5</literal>. All deallocated memory will be initialized to
-        <literal>0x5a</literal>. This is intended for debugging and will
-        impact performance negatively. This option is disabled by default
-        unless <option>--enable-debug</option> is specified during
-        configuration, in which case it is enabled by default unless running
-        inside <ulink
+        <listitem><para>Junk filling. If set to "alloc", each byte of
+        uninitialized allocated memory will be initialized to
+        <literal>0xa5</literal>. If set to "free", all deallocated memory will
+        be initialized to <literal>0x5a</literal>. If set to "true", both
+        allocated and deallocated memory will be initialized, and if set to
+        "false", junk filling will be disabled entirely. This is intended for
+        debugging and will impact performance negatively. This option is
+        "false" by default unless <option>--enable-debug</option> is specified
+        during configuration, in which case it is "true" by default unless
+        running inside <ulink
         url="http://valgrind.org/">Valgrind</ulink>.</para></listitem>
       </varlistentry>
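To make the documented semantics concrete, here is a hypothetical demo (not part of this patch) for a program linked against a jemalloc built with --enable-fill; with "junk:alloc" a fresh allocation comes back filled with 0xa5, while freed memory is left untouched:

    #include <stdio.h>
    #include <stdlib.h>

    /*
     * Hypothetical demo program; "malloc_conf" is jemalloc's compiled-in
     * configuration hook, the same one the unit tests below use.
     */
    const char *malloc_conf = "junk:alloc";

    int
    main(void)
    {
    	unsigned char *p = malloc(64);
    	/* Junk-on-alloc: every uninitialized byte reads back as 0xa5. */
    	printf("p[0] after malloc: 0x%02x\n", p[0]);
    	free(p);	/* With "junk:alloc", no 0x5a fill happens here. */
    	return (0);
    }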
@@ -376,7 +376,9 @@ typedef unsigned index_t;
 #define JEMALLOC_H_EXTERNS
 
 extern bool opt_abort;
-extern bool opt_junk;
+extern const char *opt_junk;
+extern bool opt_junk_alloc;
+extern bool opt_junk_free;
 extern size_t opt_quarantine;
 extern bool opt_redzone;
 extern bool opt_utrace;
@@ -274,6 +274,8 @@ nhbins
 opt_abort
 opt_dss
 opt_junk
+opt_junk_alloc
+opt_junk_free
 opt_lg_chunk
 opt_lg_dirty_mult
 opt_lg_prof_interval
@@ -252,14 +252,14 @@ tcache_alloc_small(tcache_t *tcache, size_t size, bool zero)
 
 	if (likely(!zero)) {
 		if (config_fill) {
-			if (unlikely(opt_junk)) {
+			if (unlikely(opt_junk_alloc)) {
 				arena_alloc_junk_small(ret,
 				    &arena_bin_info[binind], false);
 			} else if (unlikely(opt_zero))
 				memset(ret, 0, usize);
 		}
 	} else {
-		if (config_fill && unlikely(opt_junk)) {
+		if (config_fill && unlikely(opt_junk_alloc)) {
 			arena_alloc_junk_small(ret, &arena_bin_info[binind],
 			    true);
 		}
@@ -307,7 +307,7 @@ tcache_alloc_large(tcache_t *tcache, size_t size, bool zero)
 		}
 		if (likely(!zero)) {
 			if (config_fill) {
-				if (unlikely(opt_junk))
+				if (unlikely(opt_junk_alloc))
 					memset(ret, 0xa5, usize);
 				else if (unlikely(opt_zero))
 					memset(ret, 0, usize);
@@ -333,7 +333,7 @@ tcache_dalloc_small(tcache_t *tcache, void *ptr, index_t binind)
 
 	assert(tcache_salloc(ptr) <= SMALL_MAXCLASS);
 
-	if (config_fill && unlikely(opt_junk))
+	if (config_fill && unlikely(opt_junk_free))
 		arena_dalloc_junk_small(ptr, &arena_bin_info[binind]);
 
 	tbin = &tcache->tbins[binind];
@@ -362,7 +362,7 @@ tcache_dalloc_large(tcache_t *tcache, void *ptr, size_t size)
 
 	binind = size2index(size);
 
-	if (config_fill && unlikely(opt_junk))
+	if (config_fill && unlikely(opt_junk_free))
 		arena_dalloc_junk_large(ptr, size);
 
 	tbin = &tcache->tbins[binind];
src/arena.c
@@ -1450,7 +1450,7 @@ arena_tcache_fill_small(arena_t *arena, tcache_bin_t *tbin, index_t binind,
 			}
 			break;
 		}
-		if (config_fill && unlikely(opt_junk)) {
+		if (config_fill && unlikely(opt_junk_alloc)) {
 			arena_alloc_junk_small(ptr, &arena_bin_info[binind],
 			    true);
 		}
@@ -1512,6 +1512,7 @@ arena_redzones_validate(void *ptr, arena_bin_info_t *bin_info, bool reset)
 	size_t i;
 	bool error = false;
 
+	if (opt_junk_alloc) {
 	for (i = 1; i <= redzone_size; i++) {
 		uint8_t *byte = (uint8_t *)((uintptr_t)ptr - i);
 		if (*byte != 0xa5) {
@@ -1530,6 +1531,8 @@ arena_redzones_validate(void *ptr, arena_bin_info_t *bin_info, bool reset)
 				*byte = 0xa5;
 			}
 		}
+	}
+
 	if (opt_abort && error)
 		abort();
 }
@@ -1560,7 +1563,7 @@ arena_quarantine_junk_small(void *ptr, size_t usize)
 	index_t binind;
 	arena_bin_info_t *bin_info;
 	cassert(config_fill);
-	assert(opt_junk);
+	assert(opt_junk_free);
 	assert(opt_quarantine);
 	assert(usize <= SMALL_MAXCLASS);
 
@@ -1604,7 +1607,7 @@ arena_malloc_small(arena_t *arena, size_t size, bool zero)
 
 	if (!zero) {
 		if (config_fill) {
-			if (unlikely(opt_junk)) {
+			if (unlikely(opt_junk_alloc)) {
 				arena_alloc_junk_small(ret,
 				    &arena_bin_info[binind], false);
 			} else if (unlikely(opt_zero))
@@ -1612,7 +1615,7 @@ arena_malloc_small(arena_t *arena, size_t size, bool zero)
 		}
 		JEMALLOC_VALGRIND_MAKE_MEM_UNDEFINED(ret, size);
 	} else {
-		if (config_fill && unlikely(opt_junk)) {
+		if (config_fill && unlikely(opt_junk_alloc)) {
 			arena_alloc_junk_small(ret, &arena_bin_info[binind],
 			    true);
 		}
@@ -1660,7 +1663,7 @@ arena_malloc_large(arena_t *arena, size_t size, bool zero)
 
 	if (!zero) {
 		if (config_fill) {
-			if (unlikely(opt_junk))
+			if (unlikely(opt_junk_alloc))
 				memset(ret, 0xa5, usize);
 			else if (unlikely(opt_zero))
 				memset(ret, 0, usize);
@@ -1732,7 +1735,7 @@ arena_palloc(arena_t *arena, size_t size, size_t alignment, bool zero)
 	malloc_mutex_unlock(&arena->lock);
 
 	if (config_fill && !zero) {
-		if (unlikely(opt_junk))
+		if (unlikely(opt_junk_alloc))
 			memset(ret, 0xa5, size);
 		else if (unlikely(opt_zero))
 			memset(ret, 0, size);
@@ -1845,7 +1848,7 @@ arena_dalloc_bin_locked_impl(arena_t *arena, arena_chunk_t *chunk, void *ptr,
 	bin = &arena->bins[binind];
 	bin_info = &arena_bin_info[binind];
 
-	if (!junked && config_fill && unlikely(opt_junk))
+	if (!junked && config_fill && unlikely(opt_junk_free))
 		arena_dalloc_junk_small(ptr, bin_info);
 
 	arena_run_reg_dalloc(run, ptr);
@@ -1908,7 +1911,7 @@ void
 arena_dalloc_junk_large(void *ptr, size_t usize)
 {
 
-	if (config_fill && unlikely(opt_junk))
+	if (config_fill && unlikely(opt_junk_free))
 		memset(ptr, 0x5a, usize);
 }
 #ifdef JEMALLOC_JET
@@ -2079,7 +2082,7 @@ static void
 arena_ralloc_junk_large(void *ptr, size_t old_usize, size_t usize)
 {
 
-	if (config_fill && unlikely(opt_junk)) {
+	if (config_fill && unlikely(opt_junk_free)) {
 		memset((void *)((uintptr_t)ptr + usize), 0x5a,
 		    old_usize - usize);
 	}
@@ -2126,7 +2129,7 @@ arena_ralloc_large(void *ptr, size_t oldsize, size_t size, size_t extra,
 			bool ret = arena_ralloc_large_grow(arena, chunk, ptr,
 			    oldsize, size, extra, zero);
 			if (config_fill && !ret && !zero) {
-				if (unlikely(opt_junk)) {
+				if (unlikely(opt_junk_alloc)) {
 					memset((void *)((uintptr_t)ptr +
 					    oldsize), 0xa5, isalloc(ptr,
 					    config_prof) - oldsize);
@@ -1234,7 +1234,7 @@ CTL_RO_NL_GEN(opt_lg_chunk, opt_lg_chunk, size_t)
 CTL_RO_NL_GEN(opt_narenas, opt_narenas, size_t)
 CTL_RO_NL_GEN(opt_lg_dirty_mult, opt_lg_dirty_mult, ssize_t)
 CTL_RO_NL_GEN(opt_stats_print, opt_stats_print, bool)
-CTL_RO_NL_CGEN(config_fill, opt_junk, opt_junk, bool)
+CTL_RO_NL_CGEN(config_fill, opt_junk, opt_junk, const char *)
 CTL_RO_NL_CGEN(config_fill, opt_quarantine, opt_quarantine, size_t)
 CTL_RO_NL_CGEN(config_fill, opt_redzone, opt_redzone, bool)
 CTL_RO_NL_CGEN(config_fill, opt_zero, opt_zero, bool)
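Because the mallctl's type changes from bool to const char *, callers reading "opt.junk" must be updated too. A minimal sketch using jemalloc's public mallctl() API (assuming a build with --enable-fill, since the mallctl is only generated under config_fill):

    #include <stdio.h>
    #include <jemalloc/jemalloc.h>

    /*
     * Sketch: query the effective junk mode. After this commit the value
     * is one of "true", "false", "alloc", or "free".
     */
    void
    print_junk_mode(void)
    {
    	const char *junk;
    	size_t sz = sizeof(junk);

    	if (mallctl("opt.junk", &junk, &sz, NULL, 0) == 0)
    		printf("opt.junk: %s\n", junk);
    }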
src/huge.c
@@ -67,7 +67,7 @@ huge_palloc(tsd_t *tsd, arena_t *arena, size_t usize, size_t alignment,
 	if (zero || (config_fill && unlikely(opt_zero))) {
 		if (!is_zeroed)
 			memset(ret, 0, usize);
-	} else if (config_fill && unlikely(opt_junk))
+	} else if (config_fill && unlikely(opt_junk_alloc))
 		memset(ret, 0xa5, usize);
 
 	return (ret);
@@ -81,7 +81,7 @@ static void
 huge_dalloc_junk(void *ptr, size_t usize)
 {
 
-	if (config_fill && have_dss && unlikely(opt_junk)) {
+	if (config_fill && have_dss && unlikely(opt_junk_free)) {
 		/*
 		 * Only bother junk filling if the chunk isn't about to be
 		 * unmapped.
@@ -117,7 +117,7 @@ huge_ralloc_no_move_similar(void *ptr, size_t oldsize, size_t usize,
 		size_t sdiff = CHUNK_CEILING(usize) - usize;
 		zeroed = (sdiff != 0) ? !pages_purge((void *)((uintptr_t)ptr +
 		    usize), sdiff) : true;
-		if (config_fill && unlikely(opt_junk)) {
+		if (config_fill && unlikely(opt_junk_free)) {
 			memset((void *)((uintptr_t)ptr + usize), 0x5a, oldsize -
 			    usize);
 			zeroed = false;
@@ -147,7 +147,7 @@ huge_ralloc_no_move_similar(void *ptr, size_t oldsize, size_t usize,
 			memset((void *)((uintptr_t)ptr + oldsize), 0,
 			    usize - oldsize);
 		}
-	} else if (config_fill && unlikely(opt_junk)) {
+	} else if (config_fill && unlikely(opt_junk_alloc)) {
 		memset((void *)((uintptr_t)ptr + oldsize), 0xa5, usize -
 		    oldsize);
 	}
@@ -165,7 +165,7 @@ huge_ralloc_no_move_shrink(void *ptr, size_t oldsize, size_t usize)
 	sdiff = CHUNK_CEILING(usize) - usize;
 	zeroed = (sdiff != 0) ? !pages_purge((void *)((uintptr_t)ptr + usize),
 	    sdiff) : true;
-	if (config_fill && unlikely(opt_junk)) {
+	if (config_fill && unlikely(opt_junk_free)) {
 		huge_dalloc_junk((void *)((uintptr_t)ptr + usize), oldsize -
 		    usize);
 		zeroed = false;
@@ -234,7 +234,7 @@ huge_ralloc_no_move_expand(void *ptr, size_t oldsize, size_t size, bool zero) {
 			    CHUNK_CEILING(oldsize)), 0, usize -
 			    CHUNK_CEILING(oldsize));
 		}
-	} else if (config_fill && unlikely(opt_junk)) {
+	} else if (config_fill && unlikely(opt_junk_alloc)) {
 		memset((void *)((uintptr_t)ptr + oldsize), 0xa5, usize -
 		    oldsize);
 	}
@@ -13,13 +13,28 @@ bool opt_abort =
     false
 #endif
     ;
-bool opt_junk =
+const char *opt_junk =
 #if (defined(JEMALLOC_DEBUG) && defined(JEMALLOC_FILL))
-    true
+    "true"
 #else
-    false
+    "false"
 #endif
     ;
+bool opt_junk_alloc =
+#if (defined(JEMALLOC_DEBUG) && defined(JEMALLOC_FILL))
+    true
+#else
+    false
+#endif
+    ;
+bool opt_junk_free =
+#if (defined(JEMALLOC_DEBUG) && defined(JEMALLOC_FILL))
+    true
+#else
+    false
+#endif
+    ;
+
 size_t opt_quarantine = ZU(0);
 bool opt_redzone = false;
 bool opt_utrace = false;
@@ -784,7 +799,9 @@ malloc_conf_init(void)
 	if (config_valgrind) {
 		in_valgrind = (RUNNING_ON_VALGRIND != 0) ? true : false;
 		if (config_fill && unlikely(in_valgrind)) {
-			opt_junk = false;
+			opt_junk = "false";
+			opt_junk_alloc = false;
+			opt_junk_free = false;
 			assert(!opt_zero);
 			opt_quarantine = JEMALLOC_VALGRIND_QUARANTINE_DEFAULT;
 			opt_redzone = true;
@@ -867,13 +884,13 @@ malloc_conf_init(void)
 		    &vlen)) {
 #define CONF_MATCH(n)						\
 	(sizeof(n)-1 == klen && strncmp(n, k, klen) == 0)
+#define CONF_MATCH_VALUE(n)					\
+	(sizeof(n)-1 == vlen && strncmp(n, v, vlen) == 0)
 #define CONF_HANDLE_BOOL(o, n, cont)				\
 			if (CONF_MATCH(n)) {			\
-				if (strncmp("true", v, vlen) == 0 && \
-				    vlen == sizeof("true")-1)	\
+				if (CONF_MATCH_VALUE("true"))	\
 					o = true;		\
-				else if (strncmp("false", v, vlen) == \
-				    0 && vlen == sizeof("false")-1) \
+				else if (CONF_MATCH_VALUE("false")) \
 					o = false;		\
 				else {				\
 					malloc_conf_error(	\
@@ -987,7 +1004,30 @@ malloc_conf_init(void)
 			    -1, (sizeof(size_t) << 3) - 1)
 			CONF_HANDLE_BOOL(opt_stats_print, "stats_print", true)
 			if (config_fill) {
-				CONF_HANDLE_BOOL(opt_junk, "junk", true)
+				if (CONF_MATCH("junk")) {
+					if (CONF_MATCH_VALUE("true")) {
+						opt_junk = "true";
+						opt_junk_alloc = opt_junk_free =
+						    true;
+					} else if (CONF_MATCH_VALUE("false")) {
+						opt_junk = "false";
+						opt_junk_alloc = opt_junk_free =
+						    false;
+					} else if (CONF_MATCH_VALUE("alloc")) {
+						opt_junk = "alloc";
+						opt_junk_alloc = true;
+						opt_junk_free = false;
+					} else if (CONF_MATCH_VALUE("free")) {
+						opt_junk = "free";
+						opt_junk_alloc = false;
+						opt_junk_free = true;
+					} else {
+						malloc_conf_error(
+						    "Invalid conf value", k,
+						    klen, v, vlen);
+					}
+					continue;
+				}
 				CONF_HANDLE_SIZE_T(opt_quarantine, "quarantine",
 				    0, SIZE_T_MAX, false)
 				CONF_HANDLE_BOOL(opt_redzone, "redzone", true)
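In summary, the branch above maps the four accepted "junk" values onto the two new booleans as follows:

    "junk" value   opt_junk_alloc   opt_junk_free
    "true"         true             true
    "false"        false            false
    "alloc"        true             false
    "free"         false            true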
@@ -148,7 +148,7 @@ quarantine(tsd_t *tsd, void *ptr)
 		obj->usize = usize;
 		quarantine->curbytes += usize;
 		quarantine->curobjs++;
-		if (config_fill && unlikely(opt_junk)) {
+		if (config_fill && unlikely(opt_junk_free)) {
 			/*
 			 * Only do redzone validation if Valgrind isn't in
 			 * operation.
@@ -1,8 +1,11 @@
 #include "test/jemalloc_test.h"
 
 #ifdef JEMALLOC_FILL
+# ifndef JEMALLOC_TEST_JUNK_OPT
+#  define JEMALLOC_TEST_JUNK_OPT "junk:true"
+# endif
 const char *malloc_conf =
-    "abort:false,junk:true,zero:false,redzone:true,quarantine:0";
+    "abort:false,zero:false,redzone:true,quarantine:0," JEMALLOC_TEST_JUNK_OPT;
 #endif
 
 static arena_dalloc_junk_small_t *arena_dalloc_junk_small_orig;
@@ -69,12 +72,14 @@ test_junk(size_t sz_min, size_t sz_max)
 	char *s;
 	size_t sz_prev, sz, i;
 
+	if (opt_junk_free) {
 		arena_dalloc_junk_small_orig = arena_dalloc_junk_small;
 		arena_dalloc_junk_small = arena_dalloc_junk_small_intercept;
 		arena_dalloc_junk_large_orig = arena_dalloc_junk_large;
 		arena_dalloc_junk_large = arena_dalloc_junk_large_intercept;
 		huge_dalloc_junk_orig = huge_dalloc_junk;
 		huge_dalloc_junk = huge_dalloc_junk_intercept;
+	}
 
 	sz_prev = 0;
 	s = (char *)mallocx(sz_min, 0);
@@ -92,9 +97,11 @@ test_junk(size_t sz_min, size_t sz_max)
 		}
 
 		for (i = sz_prev; i < sz; i++) {
+			if (opt_junk_alloc) {
 				assert_c_eq(s[i], 0xa5,
-				    "Newly allocated byte %zu/%zu isn't junk-filled",
-				    i, sz);
+				    "Newly allocated byte %zu/%zu isn't "
+				    "junk-filled", i, sz);
+			}
 			s[i] = 'a';
 		}
@@ -103,7 +110,7 @@ test_junk(size_t sz_min, size_t sz_max)
 			s = (char *)rallocx(s, sz+1, 0);
 			assert_ptr_not_null((void *)s,
 			    "Unexpected rallocx() failure");
-			assert_true(saw_junking,
+			assert_true(!opt_junk_free || saw_junking,
 			    "Expected region of size %zu to be junk-filled",
 			    sz);
 		}
@@ -111,12 +118,14 @@ test_junk(size_t sz_min, size_t sz_max)
 
 	watch_junking(s);
 	dallocx(s, 0);
-	assert_true(saw_junking,
+	assert_true(!opt_junk_free || saw_junking,
 	    "Expected region of size %zu to be junk-filled", sz);
 
+	if (opt_junk_free) {
 		arena_dalloc_junk_small = arena_dalloc_junk_small_orig;
 		arena_dalloc_junk_large = arena_dalloc_junk_large_orig;
 		huge_dalloc_junk = huge_dalloc_junk_orig;
+	}
 }
 
 TEST_BEGIN(test_junk_small)
@@ -204,6 +213,7 @@ TEST_BEGIN(test_junk_redzone)
 	arena_redzone_corruption_t *arena_redzone_corruption_orig;
 
 	test_skip_if(!config_fill);
+	test_skip_if(!opt_junk_alloc || !opt_junk_free);
 
 	arena_redzone_corruption_orig = arena_redzone_corruption;
 	arena_redzone_corruption = arena_redzone_corruption_replacement;
@@ -234,6 +244,7 @@ int
 main(void)
 {
 
+	assert(opt_junk_alloc || opt_junk_free);
 	return (test(
 	    test_junk_small,
 	    test_junk_large,
test/unit/junk_alloc.c (new file)
@@ -0,0 +1,3 @@
+#define JEMALLOC_TEST_JUNK_OPT "junk:alloc"
+#include "junk.c"
+#undef JEMALLOC_TEST_JUNK_OPT
test/unit/junk_free.c (new file)
@@ -0,0 +1,3 @@
+#define JEMALLOC_TEST_JUNK_OPT "junk:free"
+#include "junk.c"
+#undef JEMALLOC_TEST_JUNK_OPT
@@ -164,7 +164,7 @@ TEST_BEGIN(test_mallctl_opt)
 	TEST_MALLCTL_OPT(size_t, narenas, always);
 	TEST_MALLCTL_OPT(ssize_t, lg_dirty_mult, always);
 	TEST_MALLCTL_OPT(bool, stats_print, always);
-	TEST_MALLCTL_OPT(bool, junk, fill);
+	TEST_MALLCTL_OPT(const char *, junk, fill);
 	TEST_MALLCTL_OPT(size_t, quarantine, fill);
 	TEST_MALLCTL_OPT(bool, redzone, fill);
 	TEST_MALLCTL_OPT(bool, zero, fill);