Introduce two new modes of junk filling: "alloc" and "free".

In addition to true/false, opt.junk can now be set to "alloc" or "free",
allowing applications to junk-fill memory only on allocation or only on
deallocation.
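
For example, the mode can be selected at startup through the usual
MALLOC_CONF mechanism (a sketch; "./app" stands in for any program linked
against jemalloc):

    MALLOC_CONF="junk:alloc" ./app    # fill with 0xa5 on allocation only
    MALLOC_CONF="junk:free" ./app     # fill with 0x5a on deallocation only
    MALLOC_CONF="junk:true" ./app     # fill on both, as before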

This resolves #172.
commit 2c5cb613df
parent b74041fb6e
Author:     Guilherme Goncalves
AuthorDate: 2014-12-08 19:12:41 -02:00
Commit:     Jason Evans

14 changed files with 140 additions and 72 deletions

diff --git a/src/arena.c b/src/arena.c
--- a/src/arena.c
+++ b/src/arena.c

@@ -1450,7 +1450,7 @@ arena_tcache_fill_small(arena_t *arena, tcache_bin_t *tbin, index_t binind,
 			}
 			break;
 		}
-		if (config_fill && unlikely(opt_junk)) {
+		if (config_fill && unlikely(opt_junk_alloc)) {
 			arena_alloc_junk_small(ptr, &arena_bin_info[binind],
 			    true);
 		}
@@ -1512,24 +1512,27 @@ arena_redzones_validate(void *ptr, arena_bin_info_t *bin_info, bool reset)
 	size_t i;
 	bool error = false;
 
-	for (i = 1; i <= redzone_size; i++) {
-		uint8_t *byte = (uint8_t *)((uintptr_t)ptr - i);
-		if (*byte != 0xa5) {
-			error = true;
-			arena_redzone_corruption(ptr, size, false, i, *byte);
-			if (reset)
-				*byte = 0xa5;
-		}
-	}
-	for (i = 0; i < redzone_size; i++) {
-		uint8_t *byte = (uint8_t *)((uintptr_t)ptr + size + i);
-		if (*byte != 0xa5) {
-			error = true;
-			arena_redzone_corruption(ptr, size, true, i, *byte);
-			if (reset)
-				*byte = 0xa5;
-		}
-	}
+	if (opt_junk_alloc) {
+		for (i = 1; i <= redzone_size; i++) {
+			uint8_t *byte = (uint8_t *)((uintptr_t)ptr - i);
+			if (*byte != 0xa5) {
+				error = true;
+				arena_redzone_corruption(ptr, size, false, i, *byte);
+				if (reset)
+					*byte = 0xa5;
+			}
+		}
+		for (i = 0; i < redzone_size; i++) {
+			uint8_t *byte = (uint8_t *)((uintptr_t)ptr + size + i);
+			if (*byte != 0xa5) {
+				error = true;
+				arena_redzone_corruption(ptr, size, true, i, *byte);
+				if (reset)
+					*byte = 0xa5;
+			}
+		}
+	}
+
 	if (opt_abort && error)
 		abort();
 }
@@ -1560,7 +1563,7 @@ arena_quarantine_junk_small(void *ptr, size_t usize)
 	index_t binind;
 	arena_bin_info_t *bin_info;
 
 	cassert(config_fill);
-	assert(opt_junk);
+	assert(opt_junk_free);
 	assert(opt_quarantine);
 	assert(usize <= SMALL_MAXCLASS);
@@ -1604,7 +1607,7 @@ arena_malloc_small(arena_t *arena, size_t size, bool zero)
 
 	if (!zero) {
 		if (config_fill) {
-			if (unlikely(opt_junk)) {
+			if (unlikely(opt_junk_alloc)) {
 				arena_alloc_junk_small(ret,
 				    &arena_bin_info[binind], false);
 			} else if (unlikely(opt_zero))
@@ -1612,7 +1615,7 @@ arena_malloc_small(arena_t *arena, size_t size, bool zero)
 		}
 		JEMALLOC_VALGRIND_MAKE_MEM_UNDEFINED(ret, size);
 	} else {
-		if (config_fill && unlikely(opt_junk)) {
+		if (config_fill && unlikely(opt_junk_alloc)) {
 			arena_alloc_junk_small(ret, &arena_bin_info[binind],
 			    true);
 		}
@@ -1660,7 +1663,7 @@ arena_malloc_large(arena_t *arena, size_t size, bool zero)
 
 	if (!zero) {
 		if (config_fill) {
-			if (unlikely(opt_junk))
+			if (unlikely(opt_junk_alloc))
 				memset(ret, 0xa5, usize);
 			else if (unlikely(opt_zero))
 				memset(ret, 0, usize);
@@ -1732,7 +1735,7 @@ arena_palloc(arena_t *arena, size_t size, size_t alignment, bool zero)
 	malloc_mutex_unlock(&arena->lock);
 
 	if (config_fill && !zero) {
-		if (unlikely(opt_junk))
+		if (unlikely(opt_junk_alloc))
 			memset(ret, 0xa5, size);
 		else if (unlikely(opt_zero))
 			memset(ret, 0, size);
@@ -1845,7 +1848,7 @@ arena_dalloc_bin_locked_impl(arena_t *arena, arena_chunk_t *chunk, void *ptr,
 	bin = &arena->bins[binind];
 	bin_info = &arena_bin_info[binind];
 
-	if (!junked && config_fill && unlikely(opt_junk))
+	if (!junked && config_fill && unlikely(opt_junk_free))
 		arena_dalloc_junk_small(ptr, bin_info);
 
 	arena_run_reg_dalloc(run, ptr);
@@ -1908,7 +1911,7 @@ void
 arena_dalloc_junk_large(void *ptr, size_t usize)
 {
 
-	if (config_fill && unlikely(opt_junk))
+	if (config_fill && unlikely(opt_junk_free))
 		memset(ptr, 0x5a, usize);
 }
 #ifdef JEMALLOC_JET
@@ -2079,7 +2082,7 @@ static void
 arena_ralloc_junk_large(void *ptr, size_t old_usize, size_t usize)
 {
 
-	if (config_fill && unlikely(opt_junk)) {
+	if (config_fill && unlikely(opt_junk_free)) {
 		memset((void *)((uintptr_t)ptr + usize), 0x5a,
 		    old_usize - usize);
 	}
@@ -2126,7 +2129,7 @@ arena_ralloc_large(void *ptr, size_t oldsize, size_t size, size_t extra,
 			bool ret = arena_ralloc_large_grow(arena, chunk, ptr,
 			    oldsize, size, extra, zero);
 			if (config_fill && !ret && !zero) {
-				if (unlikely(opt_junk)) {
+				if (unlikely(opt_junk_alloc)) {
 					memset((void *)((uintptr_t)ptr +
 					    oldsize), 0xa5, isalloc(ptr,
 					    config_prof) - oldsize);

diff --git a/src/ctl.c b/src/ctl.c
--- a/src/ctl.c
+++ b/src/ctl.c

@@ -1234,7 +1234,7 @@ CTL_RO_NL_GEN(opt_lg_chunk, opt_lg_chunk, size_t)
 CTL_RO_NL_GEN(opt_narenas, opt_narenas, size_t)
 CTL_RO_NL_GEN(opt_lg_dirty_mult, opt_lg_dirty_mult, ssize_t)
 CTL_RO_NL_GEN(opt_stats_print, opt_stats_print, bool)
-CTL_RO_NL_CGEN(config_fill, opt_junk, opt_junk, bool)
+CTL_RO_NL_CGEN(config_fill, opt_junk, opt_junk, const char *)
 CTL_RO_NL_CGEN(config_fill, opt_quarantine, opt_quarantine, size_t)
 CTL_RO_NL_CGEN(config_fill, opt_redzone, opt_redzone, bool)
 CTL_RO_NL_CGEN(config_fill, opt_zero, opt_zero, bool)
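
Since "opt.junk" is now a string-valued mallctl, callers read it as a
const char * rather than a bool. A minimal sketch using the standard
mallctl() interface (error handling elided):

    #include <stdio.h>
    #include <jemalloc/jemalloc.h>

    int
    main(void)
    {
        const char *junk;
        size_t sz = sizeof(junk);

        /* Reports "true", "false", "alloc", or "free". */
        if (mallctl("opt.junk", &junk, &sz, NULL, 0) == 0)
            printf("opt.junk: %s\n", junk);
        return (0);
    }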

diff --git a/src/huge.c b/src/huge.c
--- a/src/huge.c
+++ b/src/huge.c

@@ -67,7 +67,7 @@ huge_palloc(tsd_t *tsd, arena_t *arena, size_t usize, size_t alignment,
 	if (zero || (config_fill && unlikely(opt_zero))) {
 		if (!is_zeroed)
 			memset(ret, 0, usize);
-	} else if (config_fill && unlikely(opt_junk))
+	} else if (config_fill && unlikely(opt_junk_alloc))
 		memset(ret, 0xa5, usize);
 
 	return (ret);
@@ -81,7 +81,7 @@ static void
 huge_dalloc_junk(void *ptr, size_t usize)
 {
 
-	if (config_fill && have_dss && unlikely(opt_junk)) {
+	if (config_fill && have_dss && unlikely(opt_junk_free)) {
 		/*
 		 * Only bother junk filling if the chunk isn't about to be
 		 * unmapped.
@@ -117,7 +117,7 @@ huge_ralloc_no_move_similar(void *ptr, size_t oldsize, size_t usize,
 		size_t sdiff = CHUNK_CEILING(usize) - usize;
 		zeroed = (sdiff != 0) ? !pages_purge((void *)((uintptr_t)ptr +
 		    usize), sdiff) : true;
-		if (config_fill && unlikely(opt_junk)) {
+		if (config_fill && unlikely(opt_junk_free)) {
 			memset((void *)((uintptr_t)ptr + usize), 0x5a, oldsize -
 			    usize);
 			zeroed = false;
@@ -147,7 +147,7 @@ huge_ralloc_no_move_similar(void *ptr, size_t oldsize, size_t usize,
 			memset((void *)((uintptr_t)ptr + oldsize), 0,
 			    usize - oldsize);
 		}
-	} else if (config_fill && unlikely(opt_junk)) {
+	} else if (config_fill && unlikely(opt_junk_alloc)) {
 		memset((void *)((uintptr_t)ptr + oldsize), 0xa5, usize -
 		    oldsize);
 	}
@@ -165,7 +165,7 @@ huge_ralloc_no_move_shrink(void *ptr, size_t oldsize, size_t usize)
 	sdiff = CHUNK_CEILING(usize) - usize;
 	zeroed = (sdiff != 0) ? !pages_purge((void *)((uintptr_t)ptr + usize),
 	    sdiff) : true;
-	if (config_fill && unlikely(opt_junk)) {
+	if (config_fill && unlikely(opt_junk_free)) {
 		huge_dalloc_junk((void *)((uintptr_t)ptr + usize), oldsize -
 		    usize);
 		zeroed = false;
@@ -234,7 +234,7 @@ huge_ralloc_no_move_expand(void *ptr, size_t oldsize, size_t size, bool zero) {
 			    CHUNK_CEILING(oldsize)), 0, usize -
 			    CHUNK_CEILING(oldsize));
 		}
-	} else if (config_fill && unlikely(opt_junk)) {
+	} else if (config_fill && unlikely(opt_junk_alloc)) {
 		memset((void *)((uintptr_t)ptr + oldsize), 0xa5, usize -
 		    oldsize);
 	}

diff --git a/src/jemalloc.c b/src/jemalloc.c
--- a/src/jemalloc.c
+++ b/src/jemalloc.c

@@ -13,13 +13,28 @@ bool	opt_abort =
     false
 #endif
     ;
-bool	opt_junk =
+const char	*opt_junk =
 #if (defined(JEMALLOC_DEBUG) && defined(JEMALLOC_FILL))
-    true
+    "true"
 #else
-    false
+    "false"
 #endif
     ;
+bool	opt_junk_alloc =
+#if (defined(JEMALLOC_DEBUG) && defined(JEMALLOC_FILL))
+    true
+#else
+    false
+#endif
+    ;
+bool	opt_junk_free =
+#if (defined(JEMALLOC_DEBUG) && defined(JEMALLOC_FILL))
+    true
+#else
+    false
+#endif
+    ;
 size_t	opt_quarantine = ZU(0);
 bool	opt_redzone = false;
 bool	opt_utrace = false;
@@ -784,7 +799,9 @@ malloc_conf_init(void)
 	if (config_valgrind) {
 		in_valgrind = (RUNNING_ON_VALGRIND != 0) ? true : false;
 		if (config_fill && unlikely(in_valgrind)) {
-			opt_junk = false;
+			opt_junk = "false";
+			opt_junk_alloc = false;
+			opt_junk_free = false;
 			assert(!opt_zero);
 			opt_quarantine = JEMALLOC_VALGRIND_QUARANTINE_DEFAULT;
 			opt_redzone = true;
@@ -867,13 +884,13 @@ malloc_conf_init(void)
 		    &vlen)) {
 #define	CONF_MATCH(n)						\
 	(sizeof(n)-1 == klen && strncmp(n, k, klen) == 0)
+#define	CONF_MATCH_VALUE(n)					\
+	(sizeof(n)-1 == vlen && strncmp(n, v, vlen) == 0)
 #define	CONF_HANDLE_BOOL(o, n, cont)				\
 			if (CONF_MATCH(n)) {			\
-				if (strncmp("true", v, vlen) == 0 &&	\
-				    vlen == sizeof("true")-1)		\
+				if (CONF_MATCH_VALUE("true"))	\
 					o = true;		\
-				else if (strncmp("false", v, vlen) ==	\
-				    0 && vlen == sizeof("false")-1)	\
+				else if (CONF_MATCH_VALUE("false"))	\
 					o = false;		\
 				else {				\
 					malloc_conf_error(	\
@@ -987,7 +1004,30 @@ malloc_conf_init(void)
 			    -1, (sizeof(size_t) << 3) - 1)
 			CONF_HANDLE_BOOL(opt_stats_print, "stats_print", true)
 			if (config_fill) {
-				CONF_HANDLE_BOOL(opt_junk, "junk", true)
+				if (CONF_MATCH("junk")) {
+					if (CONF_MATCH_VALUE("true")) {
+						opt_junk = "true";
+						opt_junk_alloc = opt_junk_free =
+						    true;
+					} else if (CONF_MATCH_VALUE("false")) {
+						opt_junk = "false";
+						opt_junk_alloc = opt_junk_free =
+						    false;
+					} else if (CONF_MATCH_VALUE("alloc")) {
+						opt_junk = "alloc";
+						opt_junk_alloc = true;
+						opt_junk_free = false;
+					} else if (CONF_MATCH_VALUE("free")) {
+						opt_junk = "free";
+						opt_junk_alloc = false;
+						opt_junk_free = true;
+					} else {
+						malloc_conf_error(
+						    "Invalid conf value", k,
+						    klen, v, vlen);
+					}
+					continue;
+				}
 				CONF_HANDLE_SIZE_T(opt_quarantine, "quarantine",
 				    0, SIZE_T_MAX, false)
 				CONF_HANDLE_BOOL(opt_redzone, "redzone", true)
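
The same parsing also covers a config string compiled into the application
through jemalloc's malloc_conf symbol; a sketch (assuming an unprefixed
build):

    /* Read once by jemalloc during option initialization. */
    const char *malloc_conf = "junk:free";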

diff --git a/src/quarantine.c b/src/quarantine.c
--- a/src/quarantine.c
+++ b/src/quarantine.c

@@ -148,7 +148,7 @@ quarantine(tsd_t *tsd, void *ptr)
 		obj->usize = usize;
 		quarantine->curbytes += usize;
 		quarantine->curobjs++;
-		if (config_fill && unlikely(opt_junk)) {
+		if (config_fill && unlikely(opt_junk_free)) {
 			/*
 			 * Only do redzone validation if Valgrind isn't in
 			 * operation.