diff --git a/Makefile.in b/Makefile.in
index 506d9da3..7d147583 100644
--- a/Makefile.in
+++ b/Makefile.in
@@ -294,7 +294,8 @@ CPP_SRCS :=
 TESTS_INTEGRATION_CPP :=
 endif
 TESTS_ANALYZE := $(srcroot)test/analyze/rand.c \
-	$(srcroot)test/analyze/sizes.c
+	$(srcroot)test/analyze/sizes.c \
+	$(srcroot)test/analyze/prof_bias.c
 TESTS_STRESS := $(srcroot)test/stress/microbench.c \
 	$(srcroot)test/stress/fill_flush.c \
 	$(srcroot)test/stress/large_microbench.c \
diff --git a/include/jemalloc/internal/prof_externs.h b/include/jemalloc/internal/prof_externs.h
index a4a4aa61..4579ab02 100644
--- a/include/jemalloc/internal/prof_externs.h
+++ b/include/jemalloc/internal/prof_externs.h
@@ -43,6 +43,13 @@ extern size_t lg_prof_sample;
 
 extern bool prof_booted;
 
+/*
+ * A hook to mock out backtrace functionality. This can be handy, since it's
+ * otherwise difficult to guarantee that two allocations are reported as coming
+ * from the exact same stack trace in the presence of an optimizing compiler.
+ */
+extern void (* JET_MUTABLE prof_backtrace_hook)(prof_bt_t *bt);
+
 /* Functions only accessed in prof_inlines.h */
 prof_tdata_t *prof_tdata_init(tsd_t *tsd);
 prof_tdata_t *prof_tdata_reinit(tsd_t *tsd, prof_tdata_t *tdata);
diff --git a/src/prof_sys.c b/src/prof_sys.c
index 4897988d..dddba4b6 100644
--- a/src/prof_sys.c
+++ b/src/prof_sys.c
@@ -27,6 +27,8 @@
 
 malloc_mutex_t prof_dump_filename_mtx;
 
+bool prof_do_mock = false;
+
 static uint64_t prof_dump_seq;
 static uint64_t prof_dump_iseq;
 static uint64_t prof_dump_mseq;
@@ -267,11 +269,14 @@ prof_backtrace_impl(prof_bt_t *bt) {
 }
 #endif
 
+
+void (* JET_MUTABLE prof_backtrace_hook)(prof_bt_t *bt) = &prof_backtrace_impl;
+
 void
 prof_backtrace(tsd_t *tsd, prof_bt_t *bt) {
 	cassert(config_prof);
 	pre_reentrancy(tsd, NULL);
-	prof_backtrace_impl(bt);
+	prof_backtrace_hook(bt);
 	post_reentrancy(tsd);
 }
 
diff --git a/test/analyze/prof_bias.c b/test/analyze/prof_bias.c
new file mode 100644
index 00000000..0aae766b
--- /dev/null
+++ b/test/analyze/prof_bias.c
@@ -0,0 +1,60 @@
+#include "test/jemalloc_test.h"
+
+/*
+ * This is a helper utility, only meant to be run manually (and, for example,
+ * doesn't check for failures, try to skip execution in non-prof modes, etc.).
+ * It runs, allocates objects of two different sizes from the same stack trace,
+ * and exits.
+ *
+ * The idea is that some human operator will run it like:
+ *     MALLOC_CONF="prof:true,prof_final:true" test/analyze/prof_bias
+ * and manually inspect the results.
+ *
+ * The results should be:
+ * jeprof --text test/analyze/prof_bias --inuse_space jeprof..0.f.heap:
+ *     around 1024 MB
+ * jeprof --text test/analyze/prof_bias --inuse_objects jeprof..0.f.heap:
+ *     around 33554448 = 16 + 32 * 1024 * 1024
+ *
+ * And, if prof_accum is on:
+ * jeprof --text test/analyze/prof_bias --alloc_space jeprof..0.f.heap:
+ *     around 2048 MB
+ * jeprof --text test/analyze/prof_bias --alloc_objects jeprof..0.f.heap:
+ *     around 67108896 = 2 * (16 + 32 * 1024 * 1024)
+ */
+
+static void
+mock_backtrace(prof_bt_t *bt) {
+	bt->len = 4;
+	bt->vec[0] = (void *)0x111;
+	bt->vec[1] = (void *)0x222;
+	bt->vec[2] = (void *)0x333;
+	bt->vec[3] = (void *)0x444;
+}
+
+static void
+do_allocs(size_t sz, size_t cnt, bool do_frees) {
+	for (size_t i = 0; i < cnt; i++) {
+		void *ptr = mallocx(sz, 0);
+		assert_ptr_not_null(ptr, "Unexpected mallocx failure");
+		if (do_frees) {
+			dallocx(ptr, 0);
+		}
+	}
+}
+
+int
+main(void) {
+	size_t lg_prof_sample = 19;
+	int err = mallctl("prof.reset", NULL, NULL, (void *)&lg_prof_sample,
+	    sizeof(lg_prof_sample));
+	assert(err == 0);
+
+	prof_backtrace_hook = &mock_backtrace;
+	do_allocs(16, 32 * 1024 * 1024, /* do_frees */ true);
+	do_allocs(32 * 1024 * 1024, 16, /* do_frees */ true);
+	do_allocs(16, 32 * 1024 * 1024, /* do_frees */ false);
+	do_allocs(32 * 1024 * 1024, 16, /* do_frees */ false);
+
+	return 0;
+}
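
Side note on the mechanism (not part of the patch): JET_MUTABLE expands to const in production builds and to nothing under JEMALLOC_JET, so in testing builds prof_backtrace_hook is an ordinary writable function pointer that prof_bias.c can repoint at mock_backtrace. The standalone C sketch below illustrates that hook pattern in miniature; every name in it (bt_t, backtrace_hook, backtrace_real, backtrace_mock) is invented for illustration and is not a jemalloc API.

#include <stdio.h>

typedef struct {
	void *vec[4];
	unsigned len;
} bt_t;

/* Stand-in for the real, platform-specific unwinder. */
static void
backtrace_real(bt_t *bt) {
	bt->len = 0;
}

/*
 * Modeled here as a plain writable global; in jemalloc the JET_MUTABLE
 * qualifier decides whether this pointer is const or overridable.
 */
void (*backtrace_hook)(bt_t *bt) = &backtrace_real;

/* What a test installs: a fixed, fake stack trace. */
static void
backtrace_mock(bt_t *bt) {
	bt->len = 2;
	bt->vec[0] = (void *)0x111;
	bt->vec[1] = (void *)0x222;
}

int
main(void) {
	bt_t bt;

	backtrace_hook = &backtrace_mock;	/* the override a test performs */
	backtrace_hook(&bt);			/* call sites stay unchanged */
	printf("frames recorded: %u\n", bt.len);
	return 0;
}

This is the same indirection that makes prof_backtrace() call prof_backtrace_hook(bt) instead of prof_backtrace_impl(bt) directly: the default still points at the real unwinder, so production behavior is unchanged, while the prof_bias utility can force both allocation sizes onto one identical trace, which the header comment notes is otherwise hard to guarantee under an optimizing compiler.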