Fix size class overflow handling when profiling is enabled.

Fix size class overflow handling for malloc(), posix_memalign(),
memalign(), calloc(), and realloc() when profiling is enabled.

Remove an assertion that erroneously caused arena_sdalloc() to fail when
profiling was enabled.

This resolves #232.
This commit is contained in:
Jason Evans 2015-06-23 18:47:07 -07:00
parent 0a9f9a4d51
commit 241abc601b
9 changed files with 86 additions and 18 deletions

View File

@ -151,6 +151,8 @@ found in the git revision history:
memory decreases. This regression was first released in 3.5.0. memory decreases. This regression was first released in 3.5.0.
- Fix OOM handling in memalign() and valloc(). A variant of this bug existed - Fix OOM handling in memalign() and valloc(). A variant of this bug existed
in all releases since 2.0.0, which introduced these functions. in all releases since 2.0.0, which introduced these functions.
- Fix size class overflow handling for malloc(), posix_memalign(), memalign(),
calloc(), and realloc() when profiling is enabled.
- Fix the "arena.<i>.dss" mallctl to return an error if "primary" or - Fix the "arena.<i>.dss" mallctl to return an error if "primary" or
"secondary" precedence is specified, but sbrk(2) is not supported. "secondary" precedence is specified, but sbrk(2) is not supported.
- Fix fallback lg_floor() implementations to handle extremely large inputs. - Fix fallback lg_floor() implementations to handle extremely large inputs.

View File

@ -48,8 +48,10 @@ cfgoutputs_in := $(addprefix $(srcroot),@cfgoutputs_in@)
cfgoutputs_out := @cfgoutputs_out@ cfgoutputs_out := @cfgoutputs_out@
enable_autogen := @enable_autogen@ enable_autogen := @enable_autogen@
enable_code_coverage := @enable_code_coverage@ enable_code_coverage := @enable_code_coverage@
enable_prof := @enable_prof@
enable_valgrind := @enable_valgrind@ enable_valgrind := @enable_valgrind@
enable_zone_allocator := @enable_zone_allocator@ enable_zone_allocator := @enable_zone_allocator@
MALLOC_CONF := @JEMALLOC_CPREFIX@MALLOC_CONF
DSO_LDFLAGS = @DSO_LDFLAGS@ DSO_LDFLAGS = @DSO_LDFLAGS@
SOREV = @SOREV@ SOREV = @SOREV@
PIC_CFLAGS = @PIC_CFLAGS@ PIC_CFLAGS = @PIC_CFLAGS@
@ -146,6 +148,7 @@ TESTS_INTEGRATION := $(srcroot)test/integration/aligned_alloc.c \
$(srcroot)test/integration/sdallocx.c \ $(srcroot)test/integration/sdallocx.c \
$(srcroot)test/integration/mallocx.c \ $(srcroot)test/integration/mallocx.c \
$(srcroot)test/integration/MALLOCX_ARENA.c \ $(srcroot)test/integration/MALLOCX_ARENA.c \
$(srcroot)test/integration/overflow.c \
$(srcroot)test/integration/posix_memalign.c \ $(srcroot)test/integration/posix_memalign.c \
$(srcroot)test/integration/rallocx.c \ $(srcroot)test/integration/rallocx.c \
$(srcroot)test/integration/thread_arena.c \ $(srcroot)test/integration/thread_arena.c \
@ -344,11 +347,15 @@ check_dir: check_unit_dir check_integration_dir check_stress_dir
check_unit: tests_unit check_unit_dir check_unit: tests_unit check_unit_dir
$(SHELL) $(objroot)test/test.sh $(TESTS_UNIT:$(srcroot)%.c=$(objroot)%) $(SHELL) $(objroot)test/test.sh $(TESTS_UNIT:$(srcroot)%.c=$(objroot)%)
check_integration_prof: tests_integration check_integration_dir
ifeq ($(enable_prof), 1)
$(MALLOC_CONF)="prof:true" $(SHELL) $(objroot)test/test.sh $(TESTS_INTEGRATION:$(srcroot)%.c=$(objroot)%)
endif
check_integration: tests_integration check_integration_dir check_integration: tests_integration check_integration_dir
$(SHELL) $(objroot)test/test.sh $(TESTS_INTEGRATION:$(srcroot)%.c=$(objroot)%) $(SHELL) $(objroot)test/test.sh $(TESTS_INTEGRATION:$(srcroot)%.c=$(objroot)%)
check_stress: tests_stress check_stress_dir check_stress: tests_stress check_stress_dir
$(SHELL) $(objroot)test/test.sh $(TESTS_STRESS:$(srcroot)%.c=$(objroot)%) $(SHELL) $(objroot)test/test.sh $(TESTS_STRESS:$(srcroot)%.c=$(objroot)%)
check: tests check_dir check: tests check_dir check_integration_prof
$(SHELL) $(objroot)test/test.sh $(TESTS:$(srcroot)%.c=$(objroot)%) $(SHELL) $(objroot)test/test.sh $(TESTS:$(srcroot)%.c=$(objroot)%)
ifeq ($(enable_code_coverage), 1) ifeq ($(enable_code_coverage), 1)

View File

@ -513,6 +513,7 @@ if test "x$JEMALLOC_PREFIX" != "x" ; then
AC_DEFINE_UNQUOTED([JEMALLOC_PREFIX], ["$JEMALLOC_PREFIX"]) AC_DEFINE_UNQUOTED([JEMALLOC_PREFIX], ["$JEMALLOC_PREFIX"])
AC_DEFINE_UNQUOTED([JEMALLOC_CPREFIX], ["$JEMALLOC_CPREFIX"]) AC_DEFINE_UNQUOTED([JEMALLOC_CPREFIX], ["$JEMALLOC_CPREFIX"])
fi fi
AC_SUBST([JEMALLOC_CPREFIX])
AC_ARG_WITH([export], AC_ARG_WITH([export],
[AS_HELP_STRING([--without-export], [disable exporting jemalloc public APIs])], [AS_HELP_STRING([--without-export], [disable exporting jemalloc public APIs])],

View File

@ -1213,7 +1213,6 @@ arena_sdalloc(tsd_t *tsd, void *ptr, size_t size, tcache_t *tcache)
* Make sure to use promoted size, not request * Make sure to use promoted size, not request
* size. * size.
*/ */
assert(((uintptr_t)ptr & PAGE_MASK) == 0);
size = arena_mapbits_large_size_get(chunk, size = arena_mapbits_large_size_get(chunk,
pageind) - large_pad; pageind) - large_pad;
} }

View File

@ -525,7 +525,7 @@ size2index_compute(size_t size)
size_t lg_tmin = LG_TINY_MAXCLASS - NTBINS + 1; size_t lg_tmin = LG_TINY_MAXCLASS - NTBINS + 1;
size_t lg_ceil = lg_floor(pow2_ceil(size)); size_t lg_ceil = lg_floor(pow2_ceil(size));
return (lg_ceil < lg_tmin ? 0 : lg_ceil - lg_tmin); return (lg_ceil < lg_tmin ? 0 : lg_ceil - lg_tmin);
} else }
#endif #endif
{ {
size_t x = lg_floor((size<<1)-1); size_t x = lg_floor((size<<1)-1);
@ -565,8 +565,7 @@ size2index(size_t size)
assert(size > 0); assert(size > 0);
if (likely(size <= LOOKUP_MAXCLASS)) if (likely(size <= LOOKUP_MAXCLASS))
return (size2index_lookup(size)); return (size2index_lookup(size));
else return (size2index_compute(size));
return (size2index_compute(size));
} }
JEMALLOC_INLINE size_t JEMALLOC_INLINE size_t
@ -576,7 +575,6 @@ index2size_compute(index_t index)
#if (NTBINS > 0) #if (NTBINS > 0)
if (index < NTBINS) if (index < NTBINS)
return (ZU(1) << (LG_TINY_MAXCLASS - NTBINS + 1 + index)); return (ZU(1) << (LG_TINY_MAXCLASS - NTBINS + 1 + index));
else
#endif #endif
{ {
size_t reduced_index = index - NTBINS; size_t reduced_index = index - NTBINS;
@ -623,7 +621,7 @@ s2u_compute(size_t size)
size_t lg_ceil = lg_floor(pow2_ceil(size)); size_t lg_ceil = lg_floor(pow2_ceil(size));
return (lg_ceil < lg_tmin ? (ZU(1) << lg_tmin) : return (lg_ceil < lg_tmin ? (ZU(1) << lg_tmin) :
(ZU(1) << lg_ceil)); (ZU(1) << lg_ceil));
} else }
#endif #endif
{ {
size_t x = lg_floor((size<<1)-1); size_t x = lg_floor((size<<1)-1);
@ -656,8 +654,7 @@ s2u(size_t size)
assert(size > 0); assert(size > 0);
if (likely(size <= LOOKUP_MAXCLASS)) if (likely(size <= LOOKUP_MAXCLASS))
return (s2u_lookup(size)); return (s2u_lookup(size));
else return (s2u_compute(size));
return (s2u_compute(size));
} }
/* /*

View File

@ -1382,6 +1382,8 @@ imalloc_body(size_t size, tsd_t **tsd, size_t *usize)
if (config_prof && opt_prof) { if (config_prof && opt_prof) {
*usize = s2u(size); *usize = s2u(size);
if (unlikely(*usize == 0))
return (NULL);
return (imalloc_prof(*tsd, *usize)); return (imalloc_prof(*tsd, *usize));
} }
@ -1428,7 +1430,7 @@ imemalign_prof_sample(tsd_t *tsd, size_t alignment, size_t usize,
return (NULL); return (NULL);
if (usize <= SMALL_MAXCLASS) { if (usize <= SMALL_MAXCLASS) {
assert(sa2u(LARGE_MINCLASS, alignment) == LARGE_MINCLASS); assert(sa2u(LARGE_MINCLASS, alignment) == LARGE_MINCLASS);
p = imalloc(tsd, LARGE_MINCLASS); p = ipalloc(tsd, LARGE_MINCLASS, alignment, false);
if (p == NULL) if (p == NULL)
return (NULL); return (NULL);
arena_prof_promoted(p, usize); arena_prof_promoted(p, usize);
@ -1623,6 +1625,10 @@ je_calloc(size_t num, size_t size)
if (config_prof && opt_prof) { if (config_prof && opt_prof) {
usize = s2u(num_size); usize = s2u(num_size);
if (unlikely(usize == 0)) {
ret = NULL;
goto label_return;
}
ret = icalloc_prof(tsd, usize); ret = icalloc_prof(tsd, usize);
} else { } else {
if (config_stats || (config_valgrind && unlikely(in_valgrind))) if (config_stats || (config_valgrind && unlikely(in_valgrind)))
@ -1757,7 +1763,8 @@ je_realloc(void *ptr, size_t size)
if (config_prof && opt_prof) { if (config_prof && opt_prof) {
usize = s2u(size); usize = s2u(size);
ret = irealloc_prof(tsd, ptr, old_usize, usize); ret = unlikely(usize == 0) ? NULL : irealloc_prof(tsd,
ptr, old_usize, usize);
} else { } else {
if (config_stats || (config_valgrind && if (config_stats || (config_valgrind &&
unlikely(in_valgrind))) unlikely(in_valgrind)))
@ -1903,7 +1910,7 @@ imallocx_flags_decode(tsd_t *tsd, size_t size, int flags, size_t *usize,
if (likely(flags == 0)) { if (likely(flags == 0)) {
*usize = s2u(size); *usize = s2u(size);
assert(usize != 0); assert(*usize != 0);
*alignment = 0; *alignment = 0;
*zero = false; *zero = false;
*tcache = tcache_get(tsd, true); *tcache = tcache_get(tsd, true);
@ -1946,7 +1953,8 @@ imallocx_prof_sample(tsd_t *tsd, size_t size, int flags, size_t usize,
if (usize <= SMALL_MAXCLASS) { if (usize <= SMALL_MAXCLASS) {
assert(((alignment == 0) ? s2u(LARGE_MINCLASS) : assert(((alignment == 0) ? s2u(LARGE_MINCLASS) :
sa2u(LARGE_MINCLASS, alignment)) == LARGE_MINCLASS); sa2u(LARGE_MINCLASS, alignment)) == LARGE_MINCLASS);
p = imalloct(tsd, LARGE_MINCLASS, tcache, arena); p = imallocx_maybe_flags(tsd, LARGE_MINCLASS, flags,
LARGE_MINCLASS, alignment, zero, tcache, arena);
if (p == NULL) if (p == NULL)
return (NULL); return (NULL);
arena_prof_promoted(p, usize); arena_prof_promoted(p, usize);

View File

@ -1,3 +1,7 @@
#include <limits.h>
#ifndef SIZE_T_MAX
# define SIZE_T_MAX SIZE_MAX
#endif
#include <stdlib.h> #include <stdlib.h>
#include <stdarg.h> #include <stdarg.h>
#include <stdbool.h> #include <stdbool.h>

View File

@ -1,12 +1,8 @@
#include "test/jemalloc_test.h" #include "test/jemalloc_test.h"
#define CHUNK 0x400000
#define MAXALIGN (((size_t)1) << 25)
#define MAXSZ (((size_t)1) << 26)
#define NITER 4
TEST_BEGIN(test_basic) TEST_BEGIN(test_basic)
{ {
#define MAXSZ (((size_t)1) << 26)
size_t sz; size_t sz;
for (sz = 1; sz < MAXSZ; sz = nallocx(sz, 0) + 1) { for (sz = 1; sz < MAXSZ; sz = nallocx(sz, 0) + 1) {
@ -33,11 +29,14 @@ TEST_BEGIN(test_basic)
assert_zu_eq(nsz, rsz, "nallocx()/sallocx() rsize mismatch"); assert_zu_eq(nsz, rsz, "nallocx()/sallocx() rsize mismatch");
dallocx(p, 0); dallocx(p, 0);
} }
#undef MAXSZ
} }
TEST_END TEST_END
TEST_BEGIN(test_alignment_and_size) TEST_BEGIN(test_alignment_and_size)
{ {
#define MAXALIGN (((size_t)1) << 25)
#define NITER 4
size_t nsz, rsz, sz, alignment, total; size_t nsz, rsz, sz, alignment, total;
unsigned i; unsigned i;
void *ps[NITER]; void *ps[NITER];
@ -87,6 +86,8 @@ TEST_BEGIN(test_alignment_and_size)
} }
} }
} }
#undef MAXALIGN
#undef NITER
} }
TEST_END TEST_END

View File

@ -0,0 +1,49 @@
#include "test/jemalloc_test.h"
/*
 * Verify that requests larger than the maximum supported size class fail
 * cleanly (return NULL) instead of wrapping around, for malloc(), calloc(),
 * and realloc().  Exercises the size-class overflow paths fixed for the
 * profiling-enabled case.
 */
TEST_BEGIN(test_overflow)
{
	unsigned num_huge;
	size_t size_mib[4];
	size_t size_miblen, len, max_class;
	void *ptr;

	/* Query how many huge size classes exist. */
	len = sizeof(unsigned);
	assert_d_eq(mallctl("arenas.nhchunks", &num_huge, &len, NULL, 0), 0,
	    "Unexpected mallctl() error");

	/* Read the size of the largest huge class via its mib. */
	size_miblen = sizeof(size_mib) / sizeof(size_t);
	assert_d_eq(mallctlnametomib("arenas.hchunk.0.size", size_mib,
	    &size_miblen), 0, "Unexpected mallctlnametomib() error");
	size_mib[2] = num_huge - 1;
	len = sizeof(size_t);
	assert_d_eq(mallctlbymib(size_mib, size_miblen, &max_class, &len, NULL,
	    0), 0, "Unexpected mallctlbymib() error");

	/* Anything beyond the largest class must fail as OOM. */
	assert_ptr_null(malloc(max_class + 1),
	    "Expected OOM due to over-sized allocation request");
	assert_ptr_null(malloc(SIZE_T_MAX),
	    "Expected OOM due to over-sized allocation request");

	assert_ptr_null(calloc(1, max_class + 1),
	    "Expected OOM due to over-sized allocation request");
	assert_ptr_null(calloc(1, SIZE_T_MAX),
	    "Expected OOM due to over-sized allocation request");

	/* realloc() of a live allocation must fail the same way. */
	ptr = malloc(1);
	assert_ptr_not_null(ptr, "Unexpected malloc() OOM");
	assert_ptr_null(realloc(ptr, max_class + 1),
	    "Expected OOM due to over-sized allocation request");
	assert_ptr_null(realloc(ptr, SIZE_T_MAX),
	    "Expected OOM due to over-sized allocation request");
	free(ptr);
}
TEST_END
/* Test-harness entry point: run the single overflow test case. */
int
main(void)
{
return (test(
test_overflow));
}