From 241abc601b947c5e0e56791bd73a924ce872b4a1 Mon Sep 17 00:00:00 2001
From: Jason Evans
Date: Tue, 23 Jun 2015 18:47:07 -0700
Subject: [PATCH] Fix size class overflow handling when profiling is enabled.

Fix size class overflow handling for malloc(), posix_memalign(),
memalign(), calloc(), and realloc() when profiling is enabled.

Remove an assertion that erroneously caused arena_sdalloc() to fail when
profiling was enabled.

This resolves #232.
---
 ChangeLog                                     |  2 +
 Makefile.in                                   |  9 +++-
 configure.ac                                  |  1 +
 include/jemalloc/internal/arena.h             |  1 -
 .../jemalloc/internal/jemalloc_internal.h.in  | 11 ++---
 src/jemalloc.c                                | 16 ++++--
 test/include/test/jemalloc_test.h.in          |  4 ++
 test/integration/mallocx.c                    | 11 +++--
 test/integration/overflow.c                   | 49 +++++++++++++++++++
 9 files changed, 86 insertions(+), 18 deletions(-)
 create mode 100644 test/integration/overflow.c

diff --git a/ChangeLog b/ChangeLog
index b6fa3661..156d3c0c 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -151,6 +151,8 @@ found in the git revision history:
     memory decreases.  This regression was first released in 3.5.0.
   - Fix OOM handling in memalign() and valloc().  A variant of this bug existed
     in all releases since 2.0.0, which introduced these functions.
+  - Fix size class overflow handling for malloc(), posix_memalign(), memalign(),
+    calloc(), and realloc() when profiling is enabled.
   - Fix the "arena.<i>.dss" mallctl to return an error if "primary" or
     "secondary" precedence is specified, but sbrk(2) is not supported.
   - Fix fallback lg_floor() implementations to handle extremely large inputs.
diff --git a/Makefile.in b/Makefile.in
index 7f5ac763..0dcdb5f9 100644
--- a/Makefile.in
+++ b/Makefile.in
@@ -48,8 +48,10 @@ cfgoutputs_in := $(addprefix $(srcroot),@cfgoutputs_in@)
 cfgoutputs_out := @cfgoutputs_out@
 enable_autogen := @enable_autogen@
 enable_code_coverage := @enable_code_coverage@
+enable_prof := @enable_prof@
 enable_valgrind := @enable_valgrind@
 enable_zone_allocator := @enable_zone_allocator@
+MALLOC_CONF := @JEMALLOC_CPREFIX@MALLOC_CONF
 DSO_LDFLAGS = @DSO_LDFLAGS@
 SOREV = @SOREV@
 PIC_CFLAGS = @PIC_CFLAGS@
@@ -146,6 +148,7 @@ TESTS_INTEGRATION := $(srcroot)test/integration/aligned_alloc.c \
 	$(srcroot)test/integration/sdallocx.c \
 	$(srcroot)test/integration/mallocx.c \
 	$(srcroot)test/integration/MALLOCX_ARENA.c \
+	$(srcroot)test/integration/overflow.c \
 	$(srcroot)test/integration/posix_memalign.c \
 	$(srcroot)test/integration/rallocx.c \
 	$(srcroot)test/integration/thread_arena.c \
@@ -344,11 +347,15 @@ check_dir: check_unit_dir check_integration_dir check_stress_dir
 
 check_unit: tests_unit check_unit_dir
 	$(SHELL) $(objroot)test/test.sh $(TESTS_UNIT:$(srcroot)%.c=$(objroot)%)
+check_integration_prof: tests_integration check_integration_dir
+ifeq ($(enable_prof), 1)
+	$(MALLOC_CONF)="prof:true" $(SHELL) $(objroot)test/test.sh $(TESTS_INTEGRATION:$(srcroot)%.c=$(objroot)%)
+endif
 check_integration: tests_integration check_integration_dir
 	$(SHELL) $(objroot)test/test.sh $(TESTS_INTEGRATION:$(srcroot)%.c=$(objroot)%)
 check_stress: tests_stress check_stress_dir
 	$(SHELL) $(objroot)test/test.sh $(TESTS_STRESS:$(srcroot)%.c=$(objroot)%)
-check: tests check_dir
+check: tests check_dir check_integration_prof
 	$(SHELL) $(objroot)test/test.sh $(TESTS:$(srcroot)%.c=$(objroot)%)
 
 ifeq ($(enable_code_coverage), 1)
diff --git a/configure.ac b/configure.ac
index bb6f3a32..61adc2a7 100644
--- a/configure.ac
+++ b/configure.ac
@@ -513,6 +513,7 @@ if test "x$JEMALLOC_PREFIX" != "x" ; then
   AC_DEFINE_UNQUOTED([JEMALLOC_PREFIX], ["$JEMALLOC_PREFIX"])
   AC_DEFINE_UNQUOTED([JEMALLOC_CPREFIX], ["$JEMALLOC_CPREFIX"])
 fi
+AC_SUBST([JEMALLOC_CPREFIX])
 
 AC_ARG_WITH([export],
   [AS_HELP_STRING([--without-export], [disable exporting jemalloc public APIs])],
diff --git a/include/jemalloc/internal/arena.h b/include/jemalloc/internal/arena.h
index 58d87cb3..9990e45b 100644
--- a/include/jemalloc/internal/arena.h
+++ b/include/jemalloc/internal/arena.h
@@ -1213,7 +1213,6 @@ arena_sdalloc(tsd_t *tsd, void *ptr, size_t size, tcache_t *tcache)
 			 * Make sure to use promoted size, not request
 			 * size.
 			 */
-			assert(((uintptr_t)ptr & PAGE_MASK) == 0);
 			size = arena_mapbits_large_size_get(chunk,
 			    pageind) - large_pad;
 		}
diff --git a/include/jemalloc/internal/jemalloc_internal.h.in b/include/jemalloc/internal/jemalloc_internal.h.in
index 0268245b..ff9412a3 100644
--- a/include/jemalloc/internal/jemalloc_internal.h.in
+++ b/include/jemalloc/internal/jemalloc_internal.h.in
@@ -525,7 +525,7 @@ size2index_compute(size_t size)
 		size_t lg_tmin = LG_TINY_MAXCLASS - NTBINS + 1;
 		size_t lg_ceil = lg_floor(pow2_ceil(size));
 		return (lg_ceil < lg_tmin ? 0 : lg_ceil - lg_tmin);
-	} else
+	}
 #endif
 	{
 		size_t x = lg_floor((size<<1)-1);
@@ -565,8 +565,7 @@ size2index(size_t size)
 	assert(size > 0);
 	if (likely(size <= LOOKUP_MAXCLASS))
 		return (size2index_lookup(size));
-	else
-		return (size2index_compute(size));
+	return (size2index_compute(size));
 }
 
 JEMALLOC_INLINE size_t
@@ -576,7 +575,6 @@ index2size_compute(index_t index)
 #if (NTBINS > 0)
 	if (index < NTBINS)
 		return (ZU(1) << (LG_TINY_MAXCLASS - NTBINS + 1 + index));
-	else
 #endif
 	{
 		size_t reduced_index = index - NTBINS;
@@ -623,7 +621,7 @@ s2u_compute(size_t size)
 		size_t lg_ceil = lg_floor(pow2_ceil(size));
 		return (lg_ceil < lg_tmin ? (ZU(1) << lg_tmin) :
 		    (ZU(1) << lg_ceil));
-	} else
+	}
 #endif
 	{
 		size_t x = lg_floor((size<<1)-1);
@@ -656,8 +654,7 @@ s2u(size_t size)
 	assert(size > 0);
 	if (likely(size <= LOOKUP_MAXCLASS))
 		return (s2u_lookup(size));
-	else
-		return (s2u_compute(size));
+	return (s2u_compute(size));
 }
 
 /*
diff --git a/src/jemalloc.c b/src/jemalloc.c
index 094a247f..01cb394a 100644
--- a/src/jemalloc.c
+++ b/src/jemalloc.c
@@ -1382,6 +1382,8 @@ imalloc_body(size_t size, tsd_t **tsd, size_t *usize)
 
 	if (config_prof && opt_prof) {
 		*usize = s2u(size);
+		if (unlikely(*usize == 0))
+			return (NULL);
 		return (imalloc_prof(*tsd, *usize));
 	}
 
@@ -1428,7 +1430,7 @@ imemalign_prof_sample(tsd_t *tsd, size_t alignment, size_t usize,
 		return (NULL);
 	if (usize <= SMALL_MAXCLASS) {
 		assert(sa2u(LARGE_MINCLASS, alignment) == LARGE_MINCLASS);
-		p = imalloc(tsd, LARGE_MINCLASS);
+		p = ipalloc(tsd, LARGE_MINCLASS, alignment, false);
 		if (p == NULL)
 			return (NULL);
 		arena_prof_promoted(p, usize);
@@ -1623,6 +1625,10 @@ je_calloc(size_t num, size_t size)
 
 	if (config_prof && opt_prof) {
 		usize = s2u(num_size);
+		if (unlikely(usize == 0)) {
+			ret = NULL;
+			goto label_return;
+		}
 		ret = icalloc_prof(tsd, usize);
 	} else {
 		if (config_stats || (config_valgrind && unlikely(in_valgrind)))
@@ -1757,7 +1763,8 @@ je_realloc(void *ptr, size_t size)
 
 		if (config_prof && opt_prof) {
 			usize = s2u(size);
-			ret = irealloc_prof(tsd, ptr, old_usize, usize);
+			ret = unlikely(usize == 0) ? NULL : irealloc_prof(tsd,
+			    ptr, old_usize, usize);
 		} else {
 			if (config_stats || (config_valgrind &&
 			    unlikely(in_valgrind)))
@@ -1903,7 +1910,7 @@ imallocx_flags_decode(tsd_t *tsd, size_t size, int flags, size_t *usize,
 
 	if (likely(flags == 0)) {
 		*usize = s2u(size);
-		assert(usize != 0);
+		assert(*usize != 0);
 		*alignment = 0;
 		*zero = false;
 		*tcache = tcache_get(tsd, true);
@@ -1946,7 +1953,8 @@ imallocx_prof_sample(tsd_t *tsd, size_t size, int flags, size_t usize,
 	if (usize <= SMALL_MAXCLASS) {
 		assert(((alignment == 0) ? s2u(LARGE_MINCLASS) :
 		    sa2u(LARGE_MINCLASS, alignment)) == LARGE_MINCLASS);
-		p = imalloct(tsd, LARGE_MINCLASS, tcache, arena);
+		p = imallocx_maybe_flags(tsd, LARGE_MINCLASS, flags,
+		    LARGE_MINCLASS, alignment, zero, tcache, arena);
 		if (p == NULL)
 			return (NULL);
 		arena_prof_promoted(p, usize);
diff --git a/test/include/test/jemalloc_test.h.in b/test/include/test/jemalloc_test.h.in
index 6018e58a..c72d09f1 100644
--- a/test/include/test/jemalloc_test.h.in
+++ b/test/include/test/jemalloc_test.h.in
@@ -1,3 +1,7 @@
+#include <limits.h>
+#ifndef SIZE_T_MAX
+#  define SIZE_T_MAX SIZE_MAX
+#endif
 #include <stdlib.h>
 #include <stdarg.h>
 #include <stdbool.h>
diff --git a/test/integration/mallocx.c b/test/integration/mallocx.c
index 23129c20..4b0e33f0 100644
--- a/test/integration/mallocx.c
+++ b/test/integration/mallocx.c
@@ -1,12 +1,8 @@
 #include "test/jemalloc_test.h"
 
-#define CHUNK 0x400000
-#define MAXALIGN (((size_t)1) << 25)
-#define MAXSZ (((size_t)1) << 26)
-#define NITER 4
-
 TEST_BEGIN(test_basic)
 {
+#define MAXSZ (((size_t)1) << 26)
 	size_t sz;
 
 	for (sz = 1; sz < MAXSZ; sz = nallocx(sz, 0) + 1) {
@@ -33,11 +29,14 @@ TEST_BEGIN(test_basic)
 		assert_zu_eq(nsz, rsz, "nallocx()/sallocx() rsize mismatch");
 		dallocx(p, 0);
 	}
+#undef MAXSZ
 }
 TEST_END
 
 TEST_BEGIN(test_alignment_and_size)
 {
+#define MAXALIGN (((size_t)1) << 25)
+#define NITER 4
 	size_t nsz, rsz, sz, alignment, total;
 	unsigned i;
 	void *ps[NITER];
@@ -87,6 +86,8 @@ TEST_BEGIN(test_alignment_and_size)
 			}
 		}
 	}
+#undef MAXALIGN
+#undef NITER
 }
 TEST_END
 
diff --git a/test/integration/overflow.c b/test/integration/overflow.c
new file mode 100644
index 00000000..303d9b2d
--- /dev/null
+++ b/test/integration/overflow.c
@@ -0,0 +1,49 @@
+#include "test/jemalloc_test.h"
+
+TEST_BEGIN(test_overflow)
+{
+	unsigned nhchunks;
+	size_t mib[4];
+	size_t sz, miblen, max_size_class;
+	void *p;
+
+	sz = sizeof(unsigned);
+	assert_d_eq(mallctl("arenas.nhchunks", &nhchunks, &sz, NULL, 0), 0,
+	    "Unexpected mallctl() error");
+
+	miblen = sizeof(mib) / sizeof(size_t);
+	assert_d_eq(mallctlnametomib("arenas.hchunk.0.size", mib, &miblen), 0,
+	    "Unexpected mallctlnametomib() error");
+	mib[2] = nhchunks - 1;
+
+	sz = sizeof(size_t);
+	assert_d_eq(mallctlbymib(mib, miblen, &max_size_class, &sz, NULL, 0), 0,
+	    "Unexpected mallctlbymib() error");
+
+	assert_ptr_null(malloc(max_size_class + 1),
+	    "Expected OOM due to over-sized allocation request");
+	assert_ptr_null(malloc(SIZE_T_MAX),
+	    "Expected OOM due to over-sized allocation request");
+
+	assert_ptr_null(calloc(1, max_size_class + 1),
+	    "Expected OOM due to over-sized allocation request");
+	assert_ptr_null(calloc(1, SIZE_T_MAX),
+	    "Expected OOM due to over-sized allocation request");
+
+	p = malloc(1);
+	assert_ptr_not_null(p, "Unexpected malloc() OOM");
+	assert_ptr_null(realloc(p, max_size_class + 1),
+	    "Expected OOM due to over-sized allocation request");
+	assert_ptr_null(realloc(p, SIZE_T_MAX),
+	    "Expected OOM due to over-sized allocation request");
+	free(p);
+}
+TEST_END
+
+int
+main(void)
+{
+
+	return (test(
+	    test_overflow));
+}
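
Editor's note (not part of the patch): a minimal standalone sketch of the check the hunks above add to the profiling paths in imalloc_body(), je_calloc(), and je_realloc(). When profiling is enabled, s2u() returns 0 for a request larger than the largest size class, and that zero must be treated as an out-of-memory failure instead of being passed on. The names s2u_stub(), MAX_SIZE_CLASS, and prof_malloc_sketch() below are hypothetical stand-ins for jemalloc internals, used only to illustrate the pattern.

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

/*
 * Hypothetical stand-in for jemalloc's internal s2u(): map a request size to
 * its usable size class, returning 0 when the request exceeds the largest
 * size class (the overflow case the patch handles).
 */
#define MAX_SIZE_CLASS	(SIZE_MAX / 2)	/* assumed largest size class */

static size_t
s2u_stub(size_t size)
{

	if (size > MAX_SIZE_CLASS)
		return (0);	/* no representable size class */
	return (size);		/* the real s2u() rounds up to a size class */
}

/* The added pattern: a zero usable size means an over-sized request. */
static void *
prof_malloc_sketch(size_t size)
{
	size_t usize = s2u_stub(size);

	if (usize == 0)
		return (NULL);		/* fail instead of using usize == 0 */
	return (malloc(size));		/* stand-in for imalloc_prof() */
}

int
main(void)
{

	/* An over-sized request now fails cleanly; expect a null pointer. */
	printf("%p\n", prof_malloc_sketch(SIZE_MAX));
	return (0);
}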