From 2c5cb613dfbdf58f88152321b63e60c58cd23972 Mon Sep 17 00:00:00 2001 From: Guilherme Goncalves Date: Mon, 8 Dec 2014 19:12:41 -0200 Subject: [PATCH] Introduce two new modes of junk filling: "alloc" and "free". In addition to true/false, opt.junk can now be either "alloc" or "free", giving applications the possibility of junking memory only on allocation or deallocation. This resolves #172. --- Makefile.in | 2 + doc/jemalloc.xml.in | 20 ++++--- .../jemalloc/internal/jemalloc_internal.h.in | 4 +- include/jemalloc/internal/private_symbols.txt | 2 + include/jemalloc/internal/tcache.h | 10 ++-- src/arena.c | 55 ++++++++++--------- src/ctl.c | 2 +- src/huge.c | 12 ++-- src/jemalloc.c | 54 +++++++++++++++--- src/quarantine.c | 2 +- test/unit/junk.c | 41 +++++++++----- test/unit/junk_alloc.c | 3 + test/unit/junk_free.c | 3 + test/unit/mallctl.c | 2 +- 14 files changed, 140 insertions(+), 72 deletions(-) create mode 100644 test/unit/junk_alloc.c create mode 100644 test/unit/junk_free.c diff --git a/Makefile.in b/Makefile.in index 40644ce8..c268d002 100644 --- a/Makefile.in +++ b/Makefile.in @@ -118,6 +118,8 @@ TESTS_UNIT := $(srcroot)test/unit/atomic.c \ $(srcroot)test/unit/ckh.c \ $(srcroot)test/unit/hash.c \ $(srcroot)test/unit/junk.c \ + $(srcroot)test/unit/junk_alloc.c \ + $(srcroot)test/unit/junk_free.c \ $(srcroot)test/unit/lg_chunk.c \ $(srcroot)test/unit/mallctl.c \ $(srcroot)test/unit/math.c \ diff --git a/doc/jemalloc.xml.in b/doc/jemalloc.xml.in index 71b4cd19..0148f038 100644 --- a/doc/jemalloc.xml.in +++ b/doc/jemalloc.xml.in @@ -930,18 +930,20 @@ for (i = 0; i < nbins; i++) { opt.junk - (bool) + (const char *) r- [] - Junk filling enabled/disabled. If enabled, each byte - of uninitialized allocated memory will be initialized to - 0xa5. All deallocated memory will be initialized to - 0x5a. This is intended for debugging and will - impact performance negatively. 
This option is disabled by default - unless is specified during - configuration, in which case it is enabled by default unless running - inside Junk filling. If set to "alloc", each byte of + uninitialized allocated memory will be initialized to + 0xa5. If set to "free", all deallocated memory will + be initialized to 0x5a. If set to "true", both + allocated and deallocated memory will be initialized, and if set to + "false", junk filling will be disabled entirely. This is intended for + debugging and will impact performance negatively. This option is + "false" by default unless is specified + during configuration, in which case it is "true" by default unless + running inside Valgrind. diff --git a/include/jemalloc/internal/jemalloc_internal.h.in b/include/jemalloc/internal/jemalloc_internal.h.in index 9bd501c0..b7617dfd 100644 --- a/include/jemalloc/internal/jemalloc_internal.h.in +++ b/include/jemalloc/internal/jemalloc_internal.h.in @@ -376,7 +376,9 @@ typedef unsigned index_t; #define JEMALLOC_H_EXTERNS extern bool opt_abort; -extern bool opt_junk; +extern const char *opt_junk; +extern bool opt_junk_alloc; +extern bool opt_junk_free; extern size_t opt_quarantine; extern bool opt_redzone; extern bool opt_utrace; diff --git a/include/jemalloc/internal/private_symbols.txt b/include/jemalloc/internal/private_symbols.txt index ee973c9f..7e339152 100644 --- a/include/jemalloc/internal/private_symbols.txt +++ b/include/jemalloc/internal/private_symbols.txt @@ -274,6 +274,8 @@ nhbins opt_abort opt_dss opt_junk +opt_junk_alloc +opt_junk_free opt_lg_chunk opt_lg_dirty_mult opt_lg_prof_interval diff --git a/include/jemalloc/internal/tcache.h b/include/jemalloc/internal/tcache.h index 3a3fd496..6e97b3dd 100644 --- a/include/jemalloc/internal/tcache.h +++ b/include/jemalloc/internal/tcache.h @@ -252,14 +252,14 @@ tcache_alloc_small(tcache_t *tcache, size_t size, bool zero) if (likely(!zero)) { if (config_fill) { - if (unlikely(opt_junk)) { + if (unlikely(opt_junk_alloc)) { 
arena_alloc_junk_small(ret, &arena_bin_info[binind], false); } else if (unlikely(opt_zero)) memset(ret, 0, usize); } } else { - if (config_fill && unlikely(opt_junk)) { + if (config_fill && unlikely(opt_junk_alloc)) { arena_alloc_junk_small(ret, &arena_bin_info[binind], true); } @@ -307,7 +307,7 @@ tcache_alloc_large(tcache_t *tcache, size_t size, bool zero) } if (likely(!zero)) { if (config_fill) { - if (unlikely(opt_junk)) + if (unlikely(opt_junk_alloc)) memset(ret, 0xa5, usize); else if (unlikely(opt_zero)) memset(ret, 0, usize); @@ -333,7 +333,7 @@ tcache_dalloc_small(tcache_t *tcache, void *ptr, index_t binind) assert(tcache_salloc(ptr) <= SMALL_MAXCLASS); - if (config_fill && unlikely(opt_junk)) + if (config_fill && unlikely(opt_junk_free)) arena_dalloc_junk_small(ptr, &arena_bin_info[binind]); tbin = &tcache->tbins[binind]; @@ -362,7 +362,7 @@ tcache_dalloc_large(tcache_t *tcache, void *ptr, size_t size) binind = size2index(size); - if (config_fill && unlikely(opt_junk)) + if (config_fill && unlikely(opt_junk_free)) arena_dalloc_junk_large(ptr, size); tbin = &tcache->tbins[binind]; diff --git a/src/arena.c b/src/arena.c index 6f2410ac..bf789950 100644 --- a/src/arena.c +++ b/src/arena.c @@ -1450,7 +1450,7 @@ arena_tcache_fill_small(arena_t *arena, tcache_bin_t *tbin, index_t binind, } break; } - if (config_fill && unlikely(opt_junk)) { + if (config_fill && unlikely(opt_junk_alloc)) { arena_alloc_junk_small(ptr, &arena_bin_info[binind], true); } @@ -1512,24 +1512,27 @@ arena_redzones_validate(void *ptr, arena_bin_info_t *bin_info, bool reset) size_t i; bool error = false; - for (i = 1; i <= redzone_size; i++) { - uint8_t *byte = (uint8_t *)((uintptr_t)ptr - i); - if (*byte != 0xa5) { - error = true; - arena_redzone_corruption(ptr, size, false, i, *byte); - if (reset) - *byte = 0xa5; - } - } - for (i = 0; i < redzone_size; i++) { - uint8_t *byte = (uint8_t *)((uintptr_t)ptr + size + i); - if (*byte != 0xa5) { - error = true; - arena_redzone_corruption(ptr, 
size, true, i, *byte); - if (reset) - *byte = 0xa5; + if (opt_junk_alloc) { + for (i = 1; i <= redzone_size; i++) { + uint8_t *byte = (uint8_t *)((uintptr_t)ptr - i); + if (*byte != 0xa5) { + error = true; + arena_redzone_corruption(ptr, size, false, i, *byte); + if (reset) + *byte = 0xa5; + } + } + for (i = 0; i < redzone_size; i++) { + uint8_t *byte = (uint8_t *)((uintptr_t)ptr + size + i); + if (*byte != 0xa5) { + error = true; + arena_redzone_corruption(ptr, size, true, i, *byte); + if (reset) + *byte = 0xa5; + } } } + if (opt_abort && error) abort(); } @@ -1560,7 +1563,7 @@ arena_quarantine_junk_small(void *ptr, size_t usize) index_t binind; arena_bin_info_t *bin_info; cassert(config_fill); - assert(opt_junk); + assert(opt_junk_free); assert(opt_quarantine); assert(usize <= SMALL_MAXCLASS); @@ -1604,7 +1607,7 @@ arena_malloc_small(arena_t *arena, size_t size, bool zero) if (!zero) { if (config_fill) { - if (unlikely(opt_junk)) { + if (unlikely(opt_junk_alloc)) { arena_alloc_junk_small(ret, &arena_bin_info[binind], false); } else if (unlikely(opt_zero)) @@ -1612,7 +1615,7 @@ arena_malloc_small(arena_t *arena, size_t size, bool zero) } JEMALLOC_VALGRIND_MAKE_MEM_UNDEFINED(ret, size); } else { - if (config_fill && unlikely(opt_junk)) { + if (config_fill && unlikely(opt_junk_alloc)) { arena_alloc_junk_small(ret, &arena_bin_info[binind], true); } @@ -1660,7 +1663,7 @@ arena_malloc_large(arena_t *arena, size_t size, bool zero) if (!zero) { if (config_fill) { - if (unlikely(opt_junk)) + if (unlikely(opt_junk_alloc)) memset(ret, 0xa5, usize); else if (unlikely(opt_zero)) memset(ret, 0, usize); @@ -1732,7 +1735,7 @@ arena_palloc(arena_t *arena, size_t size, size_t alignment, bool zero) malloc_mutex_unlock(&arena->lock); if (config_fill && !zero) { - if (unlikely(opt_junk)) + if (unlikely(opt_junk_alloc)) memset(ret, 0xa5, size); else if (unlikely(opt_zero)) memset(ret, 0, size); @@ -1845,7 +1848,7 @@ arena_dalloc_bin_locked_impl(arena_t *arena, arena_chunk_t *chunk, 
void *ptr, bin = &arena->bins[binind]; bin_info = &arena_bin_info[binind]; - if (!junked && config_fill && unlikely(opt_junk)) + if (!junked && config_fill && unlikely(opt_junk_free)) arena_dalloc_junk_small(ptr, bin_info); arena_run_reg_dalloc(run, ptr); @@ -1908,7 +1911,7 @@ void arena_dalloc_junk_large(void *ptr, size_t usize) { - if (config_fill && unlikely(opt_junk)) + if (config_fill && unlikely(opt_junk_free)) memset(ptr, 0x5a, usize); } #ifdef JEMALLOC_JET @@ -2079,7 +2082,7 @@ static void arena_ralloc_junk_large(void *ptr, size_t old_usize, size_t usize) { - if (config_fill && unlikely(opt_junk)) { + if (config_fill && unlikely(opt_junk_free)) { memset((void *)((uintptr_t)ptr + usize), 0x5a, old_usize - usize); } @@ -2126,7 +2129,7 @@ arena_ralloc_large(void *ptr, size_t oldsize, size_t size, size_t extra, bool ret = arena_ralloc_large_grow(arena, chunk, ptr, oldsize, size, extra, zero); if (config_fill && !ret && !zero) { - if (unlikely(opt_junk)) { + if (unlikely(opt_junk_alloc)) { memset((void *)((uintptr_t)ptr + oldsize), 0xa5, isalloc(ptr, config_prof) - oldsize); diff --git a/src/ctl.c b/src/ctl.c index b367c9f6..90bad7ee 100644 --- a/src/ctl.c +++ b/src/ctl.c @@ -1234,7 +1234,7 @@ CTL_RO_NL_GEN(opt_lg_chunk, opt_lg_chunk, size_t) CTL_RO_NL_GEN(opt_narenas, opt_narenas, size_t) CTL_RO_NL_GEN(opt_lg_dirty_mult, opt_lg_dirty_mult, ssize_t) CTL_RO_NL_GEN(opt_stats_print, opt_stats_print, bool) -CTL_RO_NL_CGEN(config_fill, opt_junk, opt_junk, bool) +CTL_RO_NL_CGEN(config_fill, opt_junk, opt_junk, const char *) CTL_RO_NL_CGEN(config_fill, opt_quarantine, opt_quarantine, size_t) CTL_RO_NL_CGEN(config_fill, opt_redzone, opt_redzone, bool) CTL_RO_NL_CGEN(config_fill, opt_zero, opt_zero, bool) diff --git a/src/huge.c b/src/huge.c index 68839037..416cb172 100644 --- a/src/huge.c +++ b/src/huge.c @@ -67,7 +67,7 @@ huge_palloc(tsd_t *tsd, arena_t *arena, size_t usize, size_t alignment, if (zero || (config_fill && unlikely(opt_zero))) { if (!is_zeroed) 
memset(ret, 0, usize); - } else if (config_fill && unlikely(opt_junk)) + } else if (config_fill && unlikely(opt_junk_alloc)) memset(ret, 0xa5, usize); return (ret); @@ -81,7 +81,7 @@ static void huge_dalloc_junk(void *ptr, size_t usize) { - if (config_fill && have_dss && unlikely(opt_junk)) { + if (config_fill && have_dss && unlikely(opt_junk_free)) { /* * Only bother junk filling if the chunk isn't about to be * unmapped. @@ -117,7 +117,7 @@ huge_ralloc_no_move_similar(void *ptr, size_t oldsize, size_t usize, size_t sdiff = CHUNK_CEILING(usize) - usize; zeroed = (sdiff != 0) ? !pages_purge((void *)((uintptr_t)ptr + usize), sdiff) : true; - if (config_fill && unlikely(opt_junk)) { + if (config_fill && unlikely(opt_junk_free)) { memset((void *)((uintptr_t)ptr + usize), 0x5a, oldsize - usize); zeroed = false; @@ -147,7 +147,7 @@ huge_ralloc_no_move_similar(void *ptr, size_t oldsize, size_t usize, memset((void *)((uintptr_t)ptr + oldsize), 0, usize - oldsize); } - } else if (config_fill && unlikely(opt_junk)) { + } else if (config_fill && unlikely(opt_junk_alloc)) { memset((void *)((uintptr_t)ptr + oldsize), 0xa5, usize - oldsize); } @@ -165,7 +165,7 @@ huge_ralloc_no_move_shrink(void *ptr, size_t oldsize, size_t usize) sdiff = CHUNK_CEILING(usize) - usize; zeroed = (sdiff != 0) ? 
!pages_purge((void *)((uintptr_t)ptr + usize), sdiff) : true; - if (config_fill && unlikely(opt_junk)) { + if (config_fill && unlikely(opt_junk_free)) { huge_dalloc_junk((void *)((uintptr_t)ptr + usize), oldsize - usize); zeroed = false; @@ -234,7 +234,7 @@ huge_ralloc_no_move_expand(void *ptr, size_t oldsize, size_t size, bool zero) { CHUNK_CEILING(oldsize)), 0, usize - CHUNK_CEILING(oldsize)); } - } else if (config_fill && unlikely(opt_junk)) { + } else if (config_fill && unlikely(opt_junk_alloc)) { memset((void *)((uintptr_t)ptr + oldsize), 0xa5, usize - oldsize); } diff --git a/src/jemalloc.c b/src/jemalloc.c index 48de0da0..e63dab3e 100644 --- a/src/jemalloc.c +++ b/src/jemalloc.c @@ -13,13 +13,28 @@ bool opt_abort = false #endif ; -bool opt_junk = +const char *opt_junk = +#if (defined(JEMALLOC_DEBUG) && defined(JEMALLOC_FILL)) + "true" +#else + "false" +#endif + ; +bool opt_junk_alloc = #if (defined(JEMALLOC_DEBUG) && defined(JEMALLOC_FILL)) true #else false #endif ; +bool opt_junk_free = +#if (defined(JEMALLOC_DEBUG) && defined(JEMALLOC_FILL)) + true +#else + false +#endif + ; + size_t opt_quarantine = ZU(0); bool opt_redzone = false; bool opt_utrace = false; @@ -784,7 +799,9 @@ malloc_conf_init(void) if (config_valgrind) { in_valgrind = (RUNNING_ON_VALGRIND != 0) ? 
true : false; if (config_fill && unlikely(in_valgrind)) { - opt_junk = false; + opt_junk = "false"; + opt_junk_alloc = false; + opt_junk_free = false; assert(!opt_zero); opt_quarantine = JEMALLOC_VALGRIND_QUARANTINE_DEFAULT; opt_redzone = true; @@ -867,13 +884,13 @@ malloc_conf_init(void) &vlen)) { #define CONF_MATCH(n) \ (sizeof(n)-1 == klen && strncmp(n, k, klen) == 0) +#define CONF_MATCH_VALUE(n) \ + (sizeof(n)-1 == vlen && strncmp(n, v, vlen) == 0) #define CONF_HANDLE_BOOL(o, n, cont) \ if (CONF_MATCH(n)) { \ - if (strncmp("true", v, vlen) == 0 && \ - vlen == sizeof("true")-1) \ + if (CONF_MATCH_VALUE("true")) \ o = true; \ - else if (strncmp("false", v, vlen) == \ - 0 && vlen == sizeof("false")-1) \ + else if (CONF_MATCH_VALUE("false")) \ o = false; \ else { \ malloc_conf_error( \ @@ -987,7 +1004,30 @@ malloc_conf_init(void) -1, (sizeof(size_t) << 3) - 1) CONF_HANDLE_BOOL(opt_stats_print, "stats_print", true) if (config_fill) { - CONF_HANDLE_BOOL(opt_junk, "junk", true) + if (CONF_MATCH("junk")) { + if (CONF_MATCH_VALUE("true")) { + opt_junk = "true"; + opt_junk_alloc = opt_junk_free = + true; + } else if (CONF_MATCH_VALUE("false")) { + opt_junk = "false"; + opt_junk_alloc = opt_junk_free = + false; + } else if (CONF_MATCH_VALUE("alloc")) { + opt_junk = "alloc"; + opt_junk_alloc = true; + opt_junk_free = false; + } else if (CONF_MATCH_VALUE("free")) { + opt_junk = "free"; + opt_junk_alloc = false; + opt_junk_free = true; + } else { + malloc_conf_error( + "Invalid conf value", k, + klen, v, vlen); + } + continue; + } CONF_HANDLE_SIZE_T(opt_quarantine, "quarantine", 0, SIZE_T_MAX, false) CONF_HANDLE_BOOL(opt_redzone, "redzone", true) diff --git a/src/quarantine.c b/src/quarantine.c index c5fa566b..12c37e0a 100644 --- a/src/quarantine.c +++ b/src/quarantine.c @@ -148,7 +148,7 @@ quarantine(tsd_t *tsd, void *ptr) obj->usize = usize; quarantine->curbytes += usize; quarantine->curobjs++; - if (config_fill && unlikely(opt_junk)) { + if (config_fill && 
unlikely(opt_junk_free)) { /* * Only do redzone validation if Valgrind isn't in * operation. diff --git a/test/unit/junk.c b/test/unit/junk.c index 1522a610..733f661e 100644 --- a/test/unit/junk.c +++ b/test/unit/junk.c @@ -1,8 +1,11 @@ #include "test/jemalloc_test.h" #ifdef JEMALLOC_FILL +# ifndef JEMALLOC_TEST_JUNK_OPT +# define JEMALLOC_TEST_JUNK_OPT "junk:true" +# endif const char *malloc_conf = - "abort:false,junk:true,zero:false,redzone:true,quarantine:0"; + "abort:false,zero:false,redzone:true,quarantine:0," JEMALLOC_TEST_JUNK_OPT; #endif static arena_dalloc_junk_small_t *arena_dalloc_junk_small_orig; @@ -69,12 +72,14 @@ test_junk(size_t sz_min, size_t sz_max) char *s; size_t sz_prev, sz, i; - arena_dalloc_junk_small_orig = arena_dalloc_junk_small; - arena_dalloc_junk_small = arena_dalloc_junk_small_intercept; - arena_dalloc_junk_large_orig = arena_dalloc_junk_large; - arena_dalloc_junk_large = arena_dalloc_junk_large_intercept; - huge_dalloc_junk_orig = huge_dalloc_junk; - huge_dalloc_junk = huge_dalloc_junk_intercept; + if (opt_junk_free) { + arena_dalloc_junk_small_orig = arena_dalloc_junk_small; + arena_dalloc_junk_small = arena_dalloc_junk_small_intercept; + arena_dalloc_junk_large_orig = arena_dalloc_junk_large; + arena_dalloc_junk_large = arena_dalloc_junk_large_intercept; + huge_dalloc_junk_orig = huge_dalloc_junk; + huge_dalloc_junk = huge_dalloc_junk_intercept; + } sz_prev = 0; s = (char *)mallocx(sz_min, 0); @@ -92,9 +97,11 @@ test_junk(size_t sz_min, size_t sz_max) } for (i = sz_prev; i < sz; i++) { - assert_c_eq(s[i], 0xa5, - "Newly allocated byte %zu/%zu isn't junk-filled", - i, sz); + if (opt_junk_alloc) { + assert_c_eq(s[i], 0xa5, + "Newly allocated byte %zu/%zu isn't " + "junk-filled", i, sz); + } s[i] = 'a'; } @@ -103,7 +110,7 @@ test_junk(size_t sz_min, size_t sz_max) s = (char *)rallocx(s, sz+1, 0); assert_ptr_not_null((void *)s, "Unexpected rallocx() failure"); - assert_true(saw_junking, + assert_true(!opt_junk_free || saw_junking, 
"Expected region of size %zu to be junk-filled", sz); } @@ -111,12 +118,14 @@ test_junk(size_t sz_min, size_t sz_max) watch_junking(s); dallocx(s, 0); - assert_true(saw_junking, + assert_true(!opt_junk_free || saw_junking, "Expected region of size %zu to be junk-filled", sz); - arena_dalloc_junk_small = arena_dalloc_junk_small_orig; - arena_dalloc_junk_large = arena_dalloc_junk_large_orig; - huge_dalloc_junk = huge_dalloc_junk_orig; + if (opt_junk_free) { + arena_dalloc_junk_small = arena_dalloc_junk_small_orig; + arena_dalloc_junk_large = arena_dalloc_junk_large_orig; + huge_dalloc_junk = huge_dalloc_junk_orig; + } } TEST_BEGIN(test_junk_small) @@ -204,6 +213,7 @@ TEST_BEGIN(test_junk_redzone) arena_redzone_corruption_t *arena_redzone_corruption_orig; test_skip_if(!config_fill); + test_skip_if(!opt_junk_alloc || !opt_junk_free); arena_redzone_corruption_orig = arena_redzone_corruption; arena_redzone_corruption = arena_redzone_corruption_replacement; @@ -234,6 +244,7 @@ int main(void) { + assert(opt_junk_alloc || opt_junk_free); return (test( test_junk_small, test_junk_large, diff --git a/test/unit/junk_alloc.c b/test/unit/junk_alloc.c new file mode 100644 index 00000000..8db3331d --- /dev/null +++ b/test/unit/junk_alloc.c @@ -0,0 +1,3 @@ +#define JEMALLOC_TEST_JUNK_OPT "junk:alloc" +#include "junk.c" +#undef JEMALLOC_TEST_JUNK_OPT diff --git a/test/unit/junk_free.c b/test/unit/junk_free.c new file mode 100644 index 00000000..482a61d0 --- /dev/null +++ b/test/unit/junk_free.c @@ -0,0 +1,3 @@ +#define JEMALLOC_TEST_JUNK_OPT "junk:free" +#include "junk.c" +#undef JEMALLOC_TEST_JUNK_OPT diff --git a/test/unit/mallctl.c b/test/unit/mallctl.c index 028a9710..f4b7d1ab 100644 --- a/test/unit/mallctl.c +++ b/test/unit/mallctl.c @@ -164,7 +164,7 @@ TEST_BEGIN(test_mallctl_opt) TEST_MALLCTL_OPT(size_t, narenas, always); TEST_MALLCTL_OPT(ssize_t, lg_dirty_mult, always); TEST_MALLCTL_OPT(bool, stats_print, always); - TEST_MALLCTL_OPT(bool, junk, fill); + TEST_MALLCTL_OPT(const 
char *, junk, fill); TEST_MALLCTL_OPT(size_t, quarantine, fill); TEST_MALLCTL_OPT(bool, redzone, fill); TEST_MALLCTL_OPT(bool, zero, fill);