Make *allocx() size class overflow behavior defined.

Limit supported size and alignment to HUGE_MAXCLASS, which in turn is
now limited to be less than PTRDIFF_MAX.

This resolves #278 and #295.
Jason Evans, 2016-02-25 15:29:49 -08:00
parent 767d85061a
commit 0c516a00c4
14 changed files with 247 additions and 89 deletions
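For context on the new limit: any object larger than PTRDIFF_MAX bytes makes pointer subtraction within that object overflow ptrdiff_t, which is undefined behavior in C, so capping HUGE_MAXCLASS below PTRDIFF_MAX keeps all intra-allocation pointer arithmetic well defined. A minimal sketch (not part of the commit):

    #include <stddef.h>
    #include <stdint.h>

    /*
     * If an allocation could exceed PTRDIFF_MAX bytes, subtracting
     * pointers to its first and last bytes would overflow ptrdiff_t,
     * which is undefined behavior.  Keeping HUGE_MAXCLASS below
     * PTRDIFF_MAX makes such subtractions well defined.
     */
    static ptrdiff_t
    span(char *begin, char *end)
    {

        /* Well defined only while end - begin <= PTRDIFF_MAX. */
        return (end - begin);
    }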

View File

@@ -310,16 +310,14 @@
     <para>The <function>mallocx<parameter/></function> function allocates at
     least <parameter>size</parameter> bytes of memory, and returns a pointer
     to the base address of the allocation.  Behavior is undefined if
-    <parameter>size</parameter> is <constant>0</constant>, or if request size
-    overflows due to size class and/or alignment constraints.</para>
+    <parameter>size</parameter> is <constant>0</constant>.</para>

     <para>The <function>rallocx<parameter/></function> function resizes the
     allocation at <parameter>ptr</parameter> to be at least
     <parameter>size</parameter> bytes, and returns a pointer to the base
     address of the resulting allocation, which may or may not have moved from
     its original location.  Behavior is undefined if
-    <parameter>size</parameter> is <constant>0</constant>, or if request size
-    overflows due to size class and/or alignment constraints.</para>
+    <parameter>size</parameter> is <constant>0</constant>.</para>

     <para>The <function>xallocx<parameter/></function> function resizes the
     allocation at <parameter>ptr</parameter> in place to be at least
@@ -354,10 +352,10 @@
     memory, but it performs the same size computation as the
     <function>mallocx<parameter/></function> function, and returns the real
     size of the allocation that would result from the equivalent
-    <function>mallocx<parameter/></function> function call.  Behavior is
-    undefined if <parameter>size</parameter> is <constant>0</constant>, or if
-    request size overflows due to size class and/or alignment
-    constraints.</para>
+    <function>mallocx<parameter/></function> function call, or
+    <constant>0</constant> if the inputs exceed the maximum supported size
+    class and/or alignment.  Behavior is undefined if
+    <parameter>size</parameter> is <constant>0</constant>.</para>

     <para>The <function>mallctl<parameter/></function> function provides a
     general interface for introspecting the memory allocator, as well as
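A usage sketch of the documented behavior (sizes illustrative; assumes the standard <jemalloc/jemalloc.h> prototypes): nallocx() now reports an unsupported request by returning 0, and mallocx() reports it as an allocation failure rather than undefined behavior:

    #include <stdint.h>
    #include <stdio.h>
    #include <jemalloc/jemalloc.h>

    int
    main(void)
    {
        size_t huge_request = SIZE_MAX; /* exceeds every size class */

        /* nallocx() returns 0 for an unsupported size/alignment. */
        if (nallocx(huge_request, 0) == 0)
            printf("request exceeds the maximum size class\n");

        /* mallocx() treats the same request as an ordinary OOM. */
        if (mallocx(huge_request, 0) == NULL)
            printf("mallocx() failed, as documented\n");
        return (0);
    }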

View File

@@ -536,8 +536,7 @@ extern arena_dalloc_junk_small_t *arena_dalloc_junk_small;
 void arena_dalloc_junk_small(void *ptr, arena_bin_info_t *bin_info);
 #endif
 void arena_quarantine_junk_small(void *ptr, size_t usize);
-void *arena_malloc_large(tsd_t *tsd, arena_t *arena, size_t size,
-    szind_t ind, bool zero);
+void *arena_malloc_large(tsd_t *tsd, arena_t *arena, szind_t ind, bool zero);
 void *arena_malloc_hard(tsd_t *tsd, arena_t *arena, size_t size, szind_t ind,
     bool zero, tcache_t *tcache);
 void *arena_palloc(tsd_t *tsd, arena_t *arena, size_t usize,

View File

@@ -9,9 +9,9 @@
 /******************************************************************************/
 #ifdef JEMALLOC_H_EXTERNS

-void *huge_malloc(tsd_t *tsd, arena_t *arena, size_t size, bool zero,
+void *huge_malloc(tsd_t *tsd, arena_t *arena, size_t usize, bool zero,
     tcache_t *tcache);
-void *huge_palloc(tsd_t *tsd, arena_t *arena, size_t size, size_t alignment,
+void *huge_palloc(tsd_t *tsd, arena_t *arena, size_t usize, size_t alignment,
     bool zero, tcache_t *tcache);
 bool huge_ralloc_no_move(tsd_t *tsd, void *ptr, size_t oldsize,
     size_t usize_min, size_t usize_max, bool zero);

View File

@@ -642,7 +642,7 @@ JEMALLOC_ALWAYS_INLINE size_t
 index2size(szind_t index)
 {

-    assert(index <= NSIZES);
+    assert(index < NSIZES);
     return (index2size_lookup(index));
 }

@@ -745,17 +745,16 @@ sa2u(size_t size, size_t alignment)
         return (usize);
     }

-    /* Huge size class.  Beware of size_t overflow. */
+    /* Huge size class.  Beware of overflow. */
+    if (unlikely(alignment > HUGE_MAXCLASS))
+        return (0);

     /*
      * We can't achieve subchunk alignment, so round up alignment to the
      * minimum that can actually be supported.
      */
     alignment = CHUNK_CEILING(alignment);
-    if (alignment == 0) {
-        /* size_t overflow. */
-        return (0);
-    }

     /* Make sure result is a huge size class. */
     if (size <= chunksize)
@@ -1106,7 +1105,7 @@ iralloct_realign(tsd_t *tsd, void *ptr, size_t oldsize, size_t size,
     size_t usize, copysize;

     usize = sa2u(size + extra, alignment);
-    if (usize == 0)
+    if (unlikely(usize == 0 || usize > HUGE_MAXCLASS))
         return (NULL);
     p = ipalloct(tsd, usize, alignment, zero, tcache, arena);
     if (p == NULL) {
@@ -1114,7 +1113,7 @@ iralloct_realign(tsd_t *tsd, void *ptr, size_t oldsize, size_t size,
             return (NULL);
         /* Try again, without extra this time. */
         usize = sa2u(size, alignment);
-        if (usize == 0)
+        if (unlikely(usize == 0 || usize > HUGE_MAXCLASS))
             return (NULL);
         p = ipalloct(tsd, usize, alignment, zero, tcache, arena);
         if (p == NULL)
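Previously sa2u() could detect an oversized alignment only after CHUNK_CEILING() wrapped around to 0; the new up-front check makes the limit explicit. A standalone sketch of the wrap (the 2 MiB chunk size is illustrative, not jemalloc's actual configuration):

    #include <stdint.h>
    #include <stdio.h>

    #define CHUNK ((size_t)1 << 21)   /* illustrative 2 MiB chunk */
    /* Mirrors the shape of jemalloc's CHUNK_CEILING(): round up to a
     * chunk boundary; wraps to 0 for inputs near SIZE_MAX. */
    #define CHUNK_CEILING(s) (((s) + (CHUNK - 1)) & ~(CHUNK - 1))

    int
    main(void)
    {
        size_t alignment = SIZE_MAX - 100;

        /* Before this commit, overflow was visible only after the wrap. */
        printf("%zu\n", CHUNK_CEILING(alignment)); /* prints 0 */
        return (0);
    }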

View File

@@ -142,10 +142,10 @@ size_classes() {
     # All remaining groups.
     lg_grp=$((${lg_grp} + ${lg_g}))
-    while [ ${lg_grp} -lt ${ptr_bits} ] ; do
+    while [ ${lg_grp} -lt $((${ptr_bits} - 1)) ] ; do
         sep_line
         ndelta=1
-        if [ ${lg_grp} -eq $((${ptr_bits} - 1)) ] ; then
+        if [ ${lg_grp} -eq $((${ptr_bits} - 2)) ] ; then
             ndelta_limit=$((${g} - 1))
         else
             ndelta_limit=${g}
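The tightened loop bound stops size class generation one power of two early, so the largest generated class (HUGE_MAXCLASS) stays below 2^(ptr_bits-1), i.e. at most PTRDIFF_MAX. A sketch of the resulting invariant on an LP64 target (the HUGE_MAXCLASS stand-in value is illustrative, not the generated constant):

    #include <assert.h>
    #include <stdint.h>

    int
    main(void)
    {
        /* On LP64, ptr_bits is 64, so classes now stop below 2^63. */
        size_t bound = (size_t)1 << 63; /* == (size_t)PTRDIFF_MAX + 1 */

        /* Illustrative stand-in for the generated HUGE_MAXCLASS. */
        size_t huge_maxclass = bound - ((size_t)1 << 61);

        assert(huge_maxclass <= (size_t)PTRDIFF_MAX);
        return (0);
    }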

View File

@@ -344,7 +344,6 @@ tcache_alloc_large(tsd_t *tsd, arena_t *arena, tcache_t *tcache, size_t size,
     void *ret;
     tcache_bin_t *tbin;
     bool tcache_success;
-    size_t usize JEMALLOC_CC_SILENCE_INIT(0);

     assert(binind < nhbins);
     tbin = &tcache->tbins[binind];
@@ -359,14 +358,15 @@ tcache_alloc_large(tsd_t *tsd, arena_t *arena, tcache_t *tcache, size_t size,
         if (unlikely(arena == NULL))
             return (NULL);

-        usize = index2size(binind);
-        assert(usize <= tcache_maxclass);
-        ret = arena_malloc_large(tsd, arena, usize, binind, zero);
+        ret = arena_malloc_large(tsd, arena, binind, zero);
         if (ret == NULL)
             return (NULL);
     } else {
+        size_t usize JEMALLOC_CC_SILENCE_INIT(0);
+
         /* Only compute usize on demand */
-        if (config_prof || (slow_path && config_fill) || unlikely(zero)) {
+        if (config_prof || (slow_path && config_fill) ||
+            unlikely(zero)) {
             usize = index2size(binind);
             assert(usize <= tcache_maxclass);
         }

View File

@@ -16,7 +16,8 @@
 # define MALLOCX_ALIGN(a) ((int)(ffs(a)-1))
 # else
 # define MALLOCX_ALIGN(a) \
-    ((int)((a < (size_t)INT_MAX) ? ffs((int)a)-1 : ffs((int)(a>>32))+31))
+    ((int)(((a) < (size_t)INT_MAX) ? ffs((int)(a))-1 : \
+    ffs((int)((a)>>32))+31))
 # endif
 # define MALLOCX_ZERO ((int)0x40)
 /*
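The added parentheses matter once MALLOCX_ALIGN is handed an expression such as PTRDIFF_MAX+1 (as the new tests below do) rather than a plain identifier. A sketch of what the fixed fallback computes, assuming a 64-bit size_t:

    #include <limits.h>
    #include <stddef.h>
    #include <stdio.h>
    #include <strings.h>    /* ffs() */

    /* Same shape as the fixed macro: without the added parentheses,
     * an argument like x+1 would bind incorrectly to < and >>. */
    #define LG_ALIGN(a) \
        ((int)(((a) < (size_t)INT_MAX) ? ffs((int)(a))-1 : \
        ffs((int)((a)>>32))+31))

    int
    main(void)
    {
        printf("%d\n", LG_ALIGN((size_t)4096)); /* prints 12 */
        return (0);
    }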

View File

@@ -2364,16 +2364,16 @@ arena_quarantine_junk_small(void *ptr, size_t usize)
 }

 static void *
-arena_malloc_small(tsd_t *tsd, arena_t *arena, size_t size, szind_t binind,
-    bool zero)
+arena_malloc_small(tsd_t *tsd, arena_t *arena, szind_t binind, bool zero)
 {
     void *ret;
     arena_bin_t *bin;
+    size_t usize;
     arena_run_t *run;

     assert(binind < NBINS);
     bin = &arena->bins[binind];
-    size = index2size(binind);
+    usize = index2size(binind);

     malloc_mutex_lock(&bin->lock);
     if ((run = bin->runcur) != NULL && run->nfree > 0)
@@ -2392,7 +2392,7 @@ arena_malloc_small(tsd_t *tsd, arena_t *arena, size_t size, szind_t binind,
         bin->stats.curregs++;
     }
     malloc_mutex_unlock(&bin->lock);
-    if (config_prof && !isthreaded && arena_prof_accum(arena, size))
+    if (config_prof && !isthreaded && arena_prof_accum(arena, usize))
         prof_idump();

     if (!zero) {
@@ -2401,16 +2401,16 @@ arena_malloc_small(tsd_t *tsd, arena_t *arena, size_t size, szind_t binind,
             arena_alloc_junk_small(ret,
                 &arena_bin_info[binind], false);
         } else if (unlikely(opt_zero))
-            memset(ret, 0, size);
+            memset(ret, 0, usize);
         }
-        JEMALLOC_VALGRIND_MAKE_MEM_UNDEFINED(ret, size);
+        JEMALLOC_VALGRIND_MAKE_MEM_UNDEFINED(ret, usize);
     } else {
         if (config_fill && unlikely(opt_junk_alloc)) {
             arena_alloc_junk_small(ret, &arena_bin_info[binind],
                 true);
         }
-        JEMALLOC_VALGRIND_MAKE_MEM_UNDEFINED(ret, size);
-        memset(ret, 0, size);
+        JEMALLOC_VALGRIND_MAKE_MEM_UNDEFINED(ret, usize);
+        memset(ret, 0, usize);
     }

     arena_decay_tick(tsd, arena);
@@ -2418,8 +2418,7 @@ arena_malloc_small(tsd_t *tsd, arena_t *arena, size_t size, szind_t binind,
 }

 void *
-arena_malloc_large(tsd_t *tsd, arena_t *arena, size_t size, szind_t binind,
-    bool zero)
+arena_malloc_large(tsd_t *tsd, arena_t *arena, szind_t binind, bool zero)
 {
     void *ret;
     size_t usize;
@@ -2490,10 +2489,10 @@ arena_malloc_hard(tsd_t *tsd, arena_t *arena, size_t size, szind_t ind,
         return (NULL);

     if (likely(size <= SMALL_MAXCLASS))
-        return (arena_malloc_small(tsd, arena, size, ind, zero));
+        return (arena_malloc_small(tsd, arena, ind, zero));
     if (likely(size <= large_maxclass))
-        return (arena_malloc_large(tsd, arena, size, ind, zero));
-    return (huge_malloc(tsd, arena, size, zero, tcache));
+        return (arena_malloc_large(tsd, arena, ind, zero));
+    return (huge_malloc(tsd, arena, index2size(ind), zero, tcache));
 }

 /* Only handles large allocations that require more than page alignment. */
@@ -3047,6 +3046,13 @@ arena_ralloc_no_move(tsd_t *tsd, void *ptr, size_t oldsize, size_t size,
 {
     size_t usize_min, usize_max;

+    /* Calls with non-zero extra had to clamp extra. */
+    assert(extra == 0 || size + extra <= HUGE_MAXCLASS);
+
+    /* Prevent exceeding PTRDIFF_MAX. */
+    if (unlikely(size > HUGE_MAXCLASS))
+        return (true);
+
     usize_min = s2u(size);
     usize_max = s2u(size + extra);

     if (likely(oldsize <= large_maxclass && usize_min <= large_maxclass)) {
@@ -3089,7 +3095,7 @@ arena_ralloc_move_helper(tsd_t *tsd, arena_t *arena, size_t usize,
         return (arena_malloc(tsd, arena, usize, size2index(usize), zero,
             tcache, true));
     usize = sa2u(usize, alignment);
-    if (usize == 0)
+    if (unlikely(usize == 0 || usize > HUGE_MAXCLASS))
         return (NULL);
     return (ipalloct(tsd, usize, alignment, zero, tcache, arena));
 }
@@ -3102,7 +3108,7 @@ arena_ralloc(tsd_t *tsd, arena_t *arena, void *ptr, size_t oldsize, size_t size,
     size_t usize;

     usize = s2u(size);
-    if (usize == 0)
+    if (unlikely(usize == 0 || size > HUGE_MAXCLASS))
         return (NULL);

     if (likely(usize <= large_maxclass)) {

View File

@@ -266,7 +266,7 @@ ckh_grow(tsd_t *tsd, ckh_t *ckh)
         lg_curcells++;
         usize = sa2u(sizeof(ckhc_t) << lg_curcells, CACHELINE);
-        if (usize == 0) {
+        if (unlikely(usize == 0 || usize > HUGE_MAXCLASS)) {
             ret = true;
             goto label_return;
         }
@@ -312,7 +312,7 @@ ckh_shrink(tsd_t *tsd, ckh_t *ckh)
     lg_prevbuckets = ckh->lg_curbuckets;
     lg_curcells = ckh->lg_curbuckets + LG_CKH_BUCKET_CELLS - 1;
     usize = sa2u(sizeof(ckhc_t) << lg_curcells, CACHELINE);
-    if (usize == 0)
+    if (unlikely(usize == 0 || usize > HUGE_MAXCLASS))
         return;
     tab = (ckhc_t *)ipallocztm(tsd, usize, CACHELINE, true, NULL, true,
         NULL);
@@ -387,7 +387,7 @@ ckh_new(tsd_t *tsd, ckh_t *ckh, size_t minitems, ckh_hash_t *hash,
     ckh->keycomp = keycomp;

     usize = sa2u(sizeof(ckhc_t) << lg_mincells, CACHELINE);
-    if (usize == 0) {
+    if (unlikely(usize == 0 || usize > HUGE_MAXCLASS)) {
         ret = true;
         goto label_return;
     }

View File

@@ -31,35 +31,30 @@ huge_node_unset(const void *ptr, const extent_node_t *node)
 }

 void *
-huge_malloc(tsd_t *tsd, arena_t *arena, size_t size, bool zero,
+huge_malloc(tsd_t *tsd, arena_t *arena, size_t usize, bool zero,
     tcache_t *tcache)
 {
-    size_t usize;

-    usize = s2u(size);
-    if (usize == 0) {
-        /* size_t overflow. */
-        return (NULL);
-    }
+    assert(usize == s2u(usize));

     return (huge_palloc(tsd, arena, usize, chunksize, zero, tcache));
 }

 void *
-huge_palloc(tsd_t *tsd, arena_t *arena, size_t size, size_t alignment,
+huge_palloc(tsd_t *tsd, arena_t *arena, size_t usize, size_t alignment,
     bool zero, tcache_t *tcache)
 {
     void *ret;
-    size_t usize;
+    size_t ausize;
     extent_node_t *node;
     bool is_zeroed;

     /* Allocate one or more contiguous chunks for this request. */
-    usize = sa2u(size, alignment);
-    if (unlikely(usize == 0))
+    ausize = sa2u(usize, alignment);
+    if (unlikely(ausize == 0 || ausize > HUGE_MAXCLASS))
         return (NULL);
-    assert(usize >= chunksize);
+    assert(ausize >= chunksize);

     /* Allocate an extent node with which to track the chunk. */
     node = ipallocztm(tsd, CACHELINE_CEILING(sizeof(extent_node_t)),
@@ -74,15 +69,15 @@ huge_palloc(tsd_t *tsd, arena_t *arena, size_t size, size_t alignment,
     is_zeroed = zero;
     arena = arena_choose(tsd, arena);
     if (unlikely(arena == NULL) || (ret = arena_chunk_alloc_huge(arena,
-        size, alignment, &is_zeroed)) == NULL) {
+        usize, alignment, &is_zeroed)) == NULL) {
         idalloctm(tsd, node, tcache, true, true);
         return (NULL);
     }

-    extent_node_init(node, arena, ret, size, is_zeroed, true);
+    extent_node_init(node, arena, ret, usize, is_zeroed, true);

     if (huge_node_set(ret, node)) {
-        arena_chunk_dalloc_huge(arena, ret, size);
+        arena_chunk_dalloc_huge(arena, ret, usize);
         idalloctm(tsd, node, tcache, true, true);
         return (NULL);
     }
@@ -95,9 +90,9 @@ huge_palloc(tsd_t *tsd, arena_t *arena, size_t size, size_t alignment,
     if (zero || (config_fill && unlikely(opt_zero))) {
         if (!is_zeroed)
-            memset(ret, 0, size);
+            memset(ret, 0, usize);
     } else if (config_fill && unlikely(opt_junk_alloc))
-        memset(ret, 0xa5, size);
+        memset(ret, 0xa5, usize);

     arena_decay_tick(tsd, arena);
     return (ret);
@@ -286,6 +281,8 @@ huge_ralloc_no_move(tsd_t *tsd, void *ptr, size_t oldsize, size_t usize_min,
 {

     assert(s2u(oldsize) == oldsize);
+    /* The following should have been caught by callers. */
+    assert(usize_min > 0 && usize_max <= HUGE_MAXCLASS);

     /* Both allocations must be huge to avoid a move. */
     if (oldsize < chunksize || usize_max < chunksize)
@@ -346,6 +343,9 @@ huge_ralloc(tsd_t *tsd, arena_t *arena, void *ptr, size_t oldsize, size_t usize,
     void *ret;
     size_t copysize;

+    /* The following should have been caught by callers. */
+    assert(usize > 0 && usize <= HUGE_MAXCLASS);
+
     /* Try to avoid moving the allocation. */
     if (!huge_ralloc_no_move(tsd, ptr, oldsize, usize, usize, zero))
         return (ptr);
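The new asserts codify the contract this commit adopts throughout huge.c: public entry points reject unsupported sizes, and internal helpers only assert that callers did so. A minimal sketch of that division of labor (names and the cap value are hypothetical):

    #include <assert.h>
    #include <stddef.h>

    #define HUGE_MAXCLASS_DEMO ((size_t)1 << 40)  /* illustrative cap */

    /* Internal helper: assumes the caller already validated usize. */
    static void
    internal_alloc(size_t usize)
    {

        assert(usize > 0 && usize <= HUGE_MAXCLASS_DEMO);
        /* ... perform the allocation ... */
    }

    /* Public entry point: the only place the limit is enforced. */
    static int
    public_alloc(size_t size)
    {

        if (size == 0 || size > HUGE_MAXCLASS_DEMO)
            return (-1);    /* report OOM to the caller */
        internal_alloc(size);
        return (0);
    }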

View File

@@ -1449,19 +1449,18 @@ imalloc_body(size_t size, tsd_t **tsd, size_t *usize, bool slow_path)
         return (NULL);
     *tsd = tsd_fetch();
     ind = size2index(size);
+    if (unlikely(ind >= NSIZES))
+        return (NULL);

-    if (config_stats ||
-        (config_prof && opt_prof) ||
-        (slow_path && config_valgrind && unlikely(in_valgrind))) {
+    if (config_stats || (config_prof && opt_prof) || (slow_path &&
+        config_valgrind && unlikely(in_valgrind))) {
         *usize = index2size(ind);
+        assert(*usize > 0 && *usize <= HUGE_MAXCLASS);
     }

-    if (config_prof && opt_prof) {
-        if (unlikely(*usize == 0))
-            return (NULL);
+    if (config_prof && opt_prof)
         return (imalloc_prof(*tsd, *usize, ind, slow_path));
-    }

     return (imalloc(*tsd, size, ind, slow_path));
 }
@@ -1584,7 +1583,7 @@ imemalign(void **memptr, size_t alignment, size_t size, size_t min_alignment)
     }

     usize = sa2u(size, alignment);
-    if (unlikely(usize == 0)) {
+    if (unlikely(usize == 0 || usize > HUGE_MAXCLASS)) {
         result = NULL;
         goto label_oom;
     }
@@ -1722,12 +1721,12 @@ je_calloc(size_t num, size_t size)
     }

     ind = size2index(num_size);
-    if (config_prof && opt_prof) {
-        usize = index2size(ind);
-        if (unlikely(usize == 0)) {
-            ret = NULL;
-            goto label_return;
-        }
+    if (unlikely(ind >= NSIZES)) {
+        ret = NULL;
+        goto label_return;
+    }
+    if (config_prof && opt_prof) {
+        usize = index2size(ind);
         ret = icalloc_prof(tsd, usize, ind);
     } else {
         if (config_stats || (config_valgrind && unlikely(in_valgrind)))
@@ -1874,8 +1873,8 @@ je_realloc(void *ptr, size_t size)
         if (config_prof && opt_prof) {
             usize = s2u(size);
-            ret = unlikely(usize == 0) ? NULL : irealloc_prof(tsd,
-                ptr, old_usize, usize);
+            ret = unlikely(usize == 0 || usize > HUGE_MAXCLASS) ?
+                NULL : irealloc_prof(tsd, ptr, old_usize, usize);
         } else {
             if (config_stats || (config_valgrind &&
                 unlikely(in_valgrind)))
@@ -2006,7 +2005,8 @@ imallocx_flags_decode_hard(tsd_t *tsd, size_t size, int flags, size_t *usize,
         *alignment = MALLOCX_ALIGN_GET_SPECIFIED(flags);
         *usize = sa2u(size, *alignment);
     }
-    assert(*usize != 0);
+    if (unlikely(*usize == 0 || *usize > HUGE_MAXCLASS))
+        return (true);
     *zero = MALLOCX_ZERO_GET(flags);
     if ((flags & MALLOCX_TCACHE_MASK) != 0) {
         if ((flags & MALLOCX_TCACHE_MASK) == MALLOCX_TCACHE_NONE)
@@ -2032,7 +2032,6 @@ imallocx_flags_decode(tsd_t *tsd, size_t size, int flags, size_t *usize,
     if (likely(flags == 0)) {
         *usize = s2u(size);
-        assert(*usize != 0);
         *alignment = 0;
         *zero = false;
         *tcache = tcache_get(tsd, true);
@@ -2051,6 +2050,8 @@ imallocx_flags(tsd_t *tsd, size_t usize, size_t alignment, bool zero,
     szind_t ind;

     ind = size2index(usize);
+    if (unlikely(ind >= NSIZES))
+        return (NULL);
     if (unlikely(alignment != 0))
         return (ipalloct(tsd, usize, alignment, zero, tcache, arena));
     if (unlikely(zero))
@@ -2120,8 +2121,13 @@ imallocx_no_prof(tsd_t *tsd, size_t size, int flags, size_t *usize)
     if (likely(flags == 0)) {
         szind_t ind = size2index(size);
-        if (config_stats || (config_valgrind && unlikely(in_valgrind)))
+        if (unlikely(ind >= NSIZES))
+            return (NULL);
+        if (config_stats || (config_valgrind &&
+            unlikely(in_valgrind))) {
             *usize = index2size(ind);
+            assert(*usize > 0 && *usize <= HUGE_MAXCLASS);
+        }
         return (imalloc(tsd, size, ind, true));
     }
@@ -2278,7 +2284,8 @@ je_rallocx(void *ptr, size_t size, int flags)
     if (config_prof && opt_prof) {
         usize = (alignment == 0) ? s2u(size) : sa2u(size, alignment);
-        assert(usize != 0);
+        if (unlikely(usize == 0 || usize > HUGE_MAXCLASS))
+            goto label_oom;
         p = irallocx_prof(tsd, ptr, old_usize, size, alignment, &usize,
             zero, tcache, arena);
         if (unlikely(p == NULL))
@@ -2392,13 +2399,22 @@ je_xallocx(void *ptr, size_t size, size_t extra, int flags)
     old_usize = isalloc(ptr, config_prof);

-    /* Clamp extra if necessary to avoid (size + extra) overflow. */
-    if (unlikely(size + extra > HUGE_MAXCLASS)) {
-        /* Check for size overflow. */
+    if (unlikely(extra > 0)) {
+        /*
+         * The API explicitly absolves itself of protecting against
+         * (size + extra) numerical overflow, but we may need to clamp
+         * extra to avoid exceeding HUGE_MAXCLASS.
+         *
+         * Ordinarily, size limit checking is handled deeper down, but
+         * here we have to check as part of (size + extra) clamping,
+         * since we need the clamped value in the above helper
+         * functions.
+         */
         if (unlikely(size > HUGE_MAXCLASS)) {
             usize = old_usize;
             goto label_not_resized;
         }
-        extra = HUGE_MAXCLASS - size;
+        if (unlikely(HUGE_MAXCLASS - size < extra))
+            extra = HUGE_MAXCLASS - size;
     }
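A standalone sketch of the clamping arithmetic above (the HUGE_MAXCLASS value is illustrative): comparing HUGE_MAXCLASS - size against extra avoids ever computing the possibly overflowing sum size + extra:

    #include <stdint.h>

    #define HUGE_MAXCLASS_DEMO ((size_t)1 << 40)  /* illustrative cap */

    /* Returns the clamped extra, or (size_t)-1 if size alone is already
     * unsupported; mirrors the overflow-safe comparison used above. */
    static size_t
    clamp_extra(size_t size, size_t extra)
    {

        if (size > HUGE_MAXCLASS_DEMO)
            return ((size_t)-1);
        if (HUGE_MAXCLASS_DEMO - size < extra)
            extra = HUGE_MAXCLASS_DEMO - size;
        return (extra);
    }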
@@ -2474,7 +2490,6 @@ inallocx(size_t size, int flags)
         usize = s2u(size);
     else
         usize = sa2u(size, MALLOCX_ALIGN_GET_SPECIFIED(flags));
-    assert(usize != 0);
     return (usize);
 }
@@ -2507,13 +2522,18 @@
 JEMALLOC_EXPORT size_t JEMALLOC_NOTHROW
 JEMALLOC_ATTR(pure)
 je_nallocx(size_t size, int flags)
 {
+    size_t usize;

     assert(size != 0);

     if (unlikely(malloc_init()))
         return (0);

-    return (inallocx(size, flags));
+    usize = inallocx(size, flags);
+    if (unlikely(usize > HUGE_MAXCLASS))
+        return (0);
+    return (usize);
 }

 JEMALLOC_EXPORT int JEMALLOC_NOTHROW

View File

@@ -46,6 +46,35 @@ get_huge_size(size_t ind)
     return (get_size_impl("arenas.hchunk.0.size", ind));
 }

+TEST_BEGIN(test_overflow)
+{
+    size_t hugemax, size;
+
+    hugemax = get_huge_size(get_nhuge()-1);
+
+    assert_ptr_null(mallocx(hugemax+1, 0),
+        "Expected OOM for mallocx(size=%#zx, 0)", hugemax+1);
+
+    assert_ptr_null(mallocx(PTRDIFF_MAX+1, 0),
+        "Expected OOM for mallocx(size=%#zx, 0)", ZU(PTRDIFF_MAX+1));
+
+    assert_ptr_null(mallocx(SIZE_T_MAX, 0),
+        "Expected OOM for mallocx(size=%#zx, 0)", SIZE_T_MAX);
+
+#if LG_SIZEOF_PTR == 3
+    size = ZU(0x600000000000000);
+#else
+    size = ZU(0x6000000);
+#endif
+    assert_ptr_null(mallocx(size, 0),
+        "Expected OOM for mallocx(size=%#zx, 0)", size);
+
+    assert_ptr_null(mallocx(1, MALLOCX_ALIGN(PTRDIFF_MAX+1)),
+        "Expected OOM for mallocx(size=1, MALLOCX_ALIGN(%#zx))",
+        ZU(PTRDIFF_MAX+1));
+}
+TEST_END
+
 TEST_BEGIN(test_oom)
 {
     size_t hugemax, size, alignment;
@@ -176,6 +205,7 @@ main(void)
 {

     return (test(
+        test_overflow,
         test_oom,
         test_basic,
         test_alignment_and_size));

View File

@@ -1,5 +1,51 @@
 #include "test/jemalloc_test.h"

+static unsigned
+get_nsizes_impl(const char *cmd)
+{
+    unsigned ret;
+    size_t z;
+
+    z = sizeof(unsigned);
+    assert_d_eq(mallctl(cmd, &ret, &z, NULL, 0), 0,
+        "Unexpected mallctl(\"%s\", ...) failure", cmd);
+
+    return (ret);
+}
+
+static unsigned
+get_nhuge(void)
+{
+
+    return (get_nsizes_impl("arenas.nhchunks"));
+}
+
+static size_t
+get_size_impl(const char *cmd, size_t ind)
+{
+    size_t ret;
+    size_t z;
+    size_t mib[4];
+    size_t miblen = 4;
+
+    z = sizeof(size_t);
+    assert_d_eq(mallctlnametomib(cmd, mib, &miblen),
+        0, "Unexpected mallctlnametomib(\"%s\", ...) failure", cmd);
+    mib[2] = ind;
+    z = sizeof(size_t);
+    assert_d_eq(mallctlbymib(mib, miblen, &ret, &z, NULL, 0),
+        0, "Unexpected mallctlbymib([\"%s\", %zu], ...) failure", cmd, ind);
+
+    return (ret);
+}
+
+static size_t
+get_huge_size(size_t ind)
+{
+
+    return (get_size_impl("arenas.hchunk.0.size", ind));
+}
+
 TEST_BEGIN(test_grow_and_shrink)
 {
     void *p, *q;
@@ -173,6 +219,41 @@ TEST_BEGIN(test_lg_align_and_zero)
 }
 TEST_END

+TEST_BEGIN(test_overflow)
+{
+    size_t hugemax, size;
+    void *p;
+
+    hugemax = get_huge_size(get_nhuge()-1);
+
+    p = mallocx(1, 0);
+    assert_ptr_not_null(p, "Unexpected mallocx() failure");
+
+    assert_ptr_null(rallocx(p, hugemax+1, 0),
+        "Expected OOM for rallocx(p, size=%#zx, 0)", hugemax+1);
+
+    assert_ptr_null(rallocx(p, PTRDIFF_MAX+1, 0),
+        "Expected OOM for rallocx(p, size=%#zx, 0)", ZU(PTRDIFF_MAX+1));
+
+    assert_ptr_null(rallocx(p, SIZE_T_MAX, 0),
+        "Expected OOM for rallocx(p, size=%#zx, 0)", SIZE_T_MAX);
+
+#if LG_SIZEOF_PTR == 3
+    size = ZU(0x600000000000000);
+#else
+    size = ZU(0x6000000);
+#endif
+    assert_ptr_null(rallocx(p, size, 0),
+        "Expected OOM for rallocx(p, size=%#zx, 0)", size);
+
+    assert_ptr_null(rallocx(p, 1, MALLOCX_ALIGN(PTRDIFF_MAX+1)),
+        "Expected OOM for rallocx(p, size=1, MALLOCX_ALIGN(%#zx))",
+        ZU(PTRDIFF_MAX+1));
+
+    dallocx(p, 0);
+}
+TEST_END
+
 int
 main(void)
 {
@@ -181,5 +262,6 @@ main(void)
         test_grow_and_shrink,
         test_zero,
         test_align,
-        test_lg_align_and_zero));
+        test_lg_align_and_zero,
+        test_overflow));
 }

View File

@@ -80,10 +80,33 @@ TEST_BEGIN(test_size_classes)
 }
 TEST_END

+TEST_BEGIN(test_overflow)
+{
+    size_t max_size_class;
+
+    max_size_class = get_max_size_class();
+
+    assert_u_ge(size2index(max_size_class+1), NSIZES,
+        "size2index() should return >= NSIZES on overflow");
+    assert_u_ge(size2index(PTRDIFF_MAX+1), NSIZES,
+        "size2index() should return >= NSIZES on overflow");
+    assert_u_ge(size2index(SIZE_T_MAX), NSIZES,
+        "size2index() should return >= NSIZES on overflow");
+
+    assert_zu_gt(s2u(max_size_class+1), HUGE_MAXCLASS,
+        "s2u() should return > HUGE_MAXCLASS for unsupported size");
+    assert_zu_gt(s2u(PTRDIFF_MAX+1), HUGE_MAXCLASS,
+        "s2u() should return > HUGE_MAXCLASS for unsupported size");
+    assert_zu_eq(s2u(SIZE_T_MAX), 0,
+        "s2u() should return 0 on overflow");
+}
+TEST_END
+
 int
 main(void)
 {

     return (test(
-        test_size_classes));
+        test_size_classes,
+        test_overflow));
 }