Fix xallocx() bugs.

Fix xallocx() bugs related to the 'extra' parameter when specified as
non-zero.
Jason Evans 2015-09-11 16:18:53 -07:00
parent a00b10735a
commit 560a4e1e01
9 changed files with 399 additions and 184 deletions
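For context, xallocx(ptr, size, extra, flags) is the extended-API resize function this commit repairs: it tries to grow or shrink the allocation at ptr in place to at least size bytes, opportunistically consuming up to size+extra bytes, and returns the resulting usable size without ever moving or freeing ptr. A minimal usage sketch follows; the byte counts are arbitrary illustrations, not jemalloc size classes.

#include <stdio.h>
#include <jemalloc/jemalloc.h>

int
main(void)
{
    size_t old_usize, new_usize;
    void *p;

    p = mallocx(4096, 0);
    if (p == NULL)
        return (1);
    old_usize = sallocx(p, 0);
    /*
     * Require at least 5000 usable bytes, and take up to 5000+3000 if
     * the resize can happen in place; p is never moved or freed.  If no
     * in-place resize is possible, the old usable size is returned.
     */
    new_usize = xallocx(p, 5000, 3000, 0);
    printf("usable size: %zu -> %zu\n", old_usize, new_usize);
    dallocx(p, 0);
    return (0);
}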

ChangeLog

@@ -26,6 +26,8 @@ brevity. Much more detail can be found in the git revision history:
   with interposed resets (triggered via the "prof.reset" mallctl). This bug
   could cause data structure corruption that would most likely result in a
   segfault.
+- Fix xallocx() bugs related to the 'extra' parameter when specified as
+  non-zero.
 * 4.0.0 (August 17, 2015)

include/jemalloc/internal/arena.h

@@ -488,7 +488,7 @@ extern arena_ralloc_junk_large_t *arena_ralloc_junk_large;
 bool arena_ralloc_no_move(void *ptr, size_t oldsize, size_t size,
     size_t extra, bool zero);
 void *arena_ralloc(tsd_t *tsd, arena_t *arena, void *ptr, size_t oldsize,
-    size_t size, size_t extra, size_t alignment, bool zero, tcache_t *tcache);
+    size_t size, size_t alignment, bool zero, tcache_t *tcache);
 dss_prec_t arena_dss_prec_get(arena_t *arena);
 bool arena_dss_prec_set(arena_t *arena, dss_prec_t dss_prec);
 ssize_t arena_lg_dirty_mult_default_get(void);

include/jemalloc/internal/huge.h

@@ -13,11 +13,10 @@ void *huge_malloc(tsd_t *tsd, arena_t *arena, size_t size, bool zero,
     tcache_t *tcache);
 void *huge_palloc(tsd_t *tsd, arena_t *arena, size_t size, size_t alignment,
     bool zero, tcache_t *tcache);
-bool huge_ralloc_no_move(void *ptr, size_t oldsize, size_t size,
-    size_t extra, bool zero);
+bool huge_ralloc_no_move(void *ptr, size_t oldsize, size_t usize_min,
+    size_t usize_max, bool zero);
 void *huge_ralloc(tsd_t *tsd, arena_t *arena, void *ptr, size_t oldsize,
-    size_t size, size_t extra, size_t alignment, bool zero,
-    tcache_t *tcache);
+    size_t usize, size_t alignment, bool zero, tcache_t *tcache);
 #ifdef JEMALLOC_JET
 typedef void (huge_dalloc_junk_t)(void *, size_t);
 extern huge_dalloc_junk_t *huge_dalloc_junk;
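The renamed parameters reflect the heart of the fix: rather than threading the raw (size, extra) pair through every internal ralloc path, callers now convert it once into a [usize_min, usize_max] size-class range. A self-contained sketch of that conversion, mirroring the new arena_ralloc_no_move() in the src/arena.c hunk below; HUGE_MAXCLASS and s2u() are jemalloc internals, and the stand-in definitions here are for illustration only:

#include <stdbool.h>
#include <stddef.h>

#define HUGE_MAXCLASS ((size_t)1 << 62)     /* Stand-in value. */

static size_t
s2u(size_t size)
{
    return (size);  /* The real s2u() rounds up to a size class. */
}

/* Returns true on error, matching jemalloc's bool convention. */
static bool
compute_usize_range(size_t size, size_t extra, size_t *usize_min,
    size_t *usize_max)
{
    if (size > HUGE_MAXCLASS)
        return (true);                      /* Request overflows. */
    *usize_min = s2u(size);
    if (size + extra > HUGE_MAXCLASS)
        extra = HUGE_MAXCLASS - size;       /* Clamp to avoid overflow. */
    *usize_max = s2u(size + extra);
    return (false);
}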

include/jemalloc/internal/jemalloc_internal.h.in

@@ -1096,7 +1096,7 @@ iralloct(tsd_t *tsd, void *ptr, size_t oldsize, size_t size, size_t alignment,
             zero, tcache, arena));
     }

-    return (arena_ralloc(tsd, arena, ptr, oldsize, size, 0, alignment, zero,
+    return (arena_ralloc(tsd, arena, ptr, oldsize, size, alignment, zero,
         tcache));
 }

include/jemalloc/internal/size_classes.sh

@@ -167,6 +167,8 @@ size_classes() {
         lg_large_minclass=$((${lg_grp} + 2))
       fi
     fi
+    # Final written value is correct:
+    huge_maxclass="((((size_t)1) << ${lg_grp}) + (((size_t)${ndelta}) << ${lg_delta}))"
     index=$((${index} + 1))
     ndelta=$((${ndelta} + 1))
   done
@@ -185,6 +187,7 @@ size_classes() {
   # - lookup_maxclass
   # - small_maxclass
   # - lg_large_minclass
+  # - huge_maxclass
 }

 cat <<EOF
@@ -215,6 +218,7 @@ cat <<EOF
  * LOOKUP_MAXCLASS: Maximum size class included in lookup table.
  * SMALL_MAXCLASS: Maximum small size class.
  * LG_LARGE_MINCLASS: Lg of minimum large size class.
+ * HUGE_MAXCLASS: Maximum (huge) size class.
  */

 #define LG_SIZE_CLASS_GROUP ${lg_g}
@@ -238,6 +242,7 @@ for lg_z in ${lg_zarr} ; do
   echo "#define LOOKUP_MAXCLASS ${lookup_maxclass}"
   echo "#define SMALL_MAXCLASS ${small_maxclass}"
   echo "#define LG_LARGE_MINCLASS ${lg_large_minclass}"
+  echo "#define HUGE_MAXCLASS ${huge_maxclass}"
   echo "#endif"
   echo
 done
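The generated HUGE_MAXCLASS constant gives the overflow checks in the src/arena.c hunk below a precise upper bound. As a sanity check on the macro's shape, a small self-contained sketch; the lg_grp/ndelta/lg_delta values are assumptions for illustration, not what the script necessarily emits for any real configuration:

#include <assert.h>
#include <stddef.h>

/*
 * Illustrative expansion of the generated HUGE_MAXCLASS macro, with
 * assumed parameters lg_grp = 62, ndelta = 3, lg_delta = 60 for a
 * hypothetical LP64 configuration.
 */
#define EXAMPLE_HUGE_MAXCLASS \
    ((((size_t)1) << 62) + (((size_t)3) << 60))

int
main(void)
{
    /* 2^62 + 3*2^60 == 7*2^60: the largest class in this final group. */
    assert(EXAMPLE_HUGE_MAXCLASS == ((size_t)7 << 60));
    return (0);
}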

src/arena.c

@@ -2642,42 +2642,42 @@ arena_ralloc_large_shrink(arena_t *arena, arena_chunk_t *chunk, void *ptr,
 static bool
 arena_ralloc_large_grow(arena_t *arena, arena_chunk_t *chunk, void *ptr,
-    size_t oldsize, size_t size, size_t extra, bool zero)
+    size_t oldsize, size_t usize_min, size_t usize_max, bool zero)
 {
     size_t pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >> LG_PAGE;
     size_t npages = (oldsize + large_pad) >> LG_PAGE;
     size_t followsize;
-    size_t usize_min = s2u(size);

     assert(oldsize == arena_mapbits_large_size_get(chunk, pageind) -
         large_pad);

     /* Try to extend the run. */
+    assert(usize_min > oldsize);
     malloc_mutex_lock(&arena->lock);
-    if (pageind+npages < chunk_npages &&
-        arena_mapbits_allocated_get(chunk, pageind+npages) == 0 &&
-        (followsize = arena_mapbits_unallocated_size_get(chunk,
-        pageind+npages)) >= usize_min - oldsize) {
+    if (pageind+npages >= chunk_npages || arena_mapbits_allocated_get(chunk,
+        pageind+npages) != 0)
+        goto label_fail;
+    followsize = arena_mapbits_unallocated_size_get(chunk, pageind+npages);
+    if (oldsize + followsize >= usize_min) {
         /*
          * The next run is available and sufficiently large. Split the
          * following run, then merge the first part with the existing
          * allocation.
          */
         arena_run_t *run;
-        size_t flag_dirty, flag_unzeroed_mask, splitsize, usize;
+        size_t usize, splitsize, size, flag_dirty, flag_unzeroed_mask;

-        usize = s2u(size + extra);
+        usize = usize_max;
         while (oldsize + followsize < usize)
             usize = index2size(size2index(usize)-1);
         assert(usize >= usize_min);
+        assert(usize >= oldsize);
         splitsize = usize - oldsize;
+        if (splitsize == 0)
+            goto label_fail;

         run = &arena_miscelm_get(chunk, pageind+npages)->run;
-        if (arena_run_split_large(arena, run, splitsize, zero)) {
-            malloc_mutex_unlock(&arena->lock);
-            return (true);
-        }
+        if (arena_run_split_large(arena, run, splitsize, zero))
+            goto label_fail;

         size = oldsize + splitsize;
         npages = (size + large_pad) >> LG_PAGE;
@@ -2719,8 +2719,8 @@ arena_ralloc_large_grow(arena_t *arena, arena_chunk_t *chunk, void *ptr,
         malloc_mutex_unlock(&arena->lock);
         return (false);
     }
-
+label_fail:
     malloc_mutex_unlock(&arena->lock);
     return (true);
 }
@@ -2749,98 +2749,114 @@ arena_ralloc_junk_large_t *arena_ralloc_junk_large =
  * always fail if growing an object, and the following run is already in use.
  */
 static bool
-arena_ralloc_large(void *ptr, size_t oldsize, size_t size, size_t extra,
-    bool zero)
+arena_ralloc_large(void *ptr, size_t oldsize, size_t usize_min,
+    size_t usize_max, bool zero)
 {
-    size_t usize;
+    arena_chunk_t *chunk;
+    arena_t *arena;

-    /* Make sure extra can't cause size_t overflow. */
-    if (unlikely(extra >= arena_maxclass))
-        return (true);
-
-    usize = s2u(size + extra);
-    if (usize == oldsize) {
-        /* Same size class. */
+    if (oldsize == usize_max) {
+        /* Current size class is compatible and maximal. */
         return (false);
-    } else {
-        arena_chunk_t *chunk;
-        arena_t *arena;
-
-        chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
-        arena = extent_node_arena_get(&chunk->node);
-
-        if (usize < oldsize) {
-            /* Fill before shrinking in order avoid a race. */
-            arena_ralloc_junk_large(ptr, oldsize, usize);
-            arena_ralloc_large_shrink(arena, chunk, ptr, oldsize,
-                usize);
-            return (false);
-        } else {
-            bool ret = arena_ralloc_large_grow(arena, chunk, ptr,
-                oldsize, size, extra, zero);
-            if (config_fill && !ret && !zero) {
-                if (unlikely(opt_junk_alloc)) {
-                    memset((void *)((uintptr_t)ptr +
-                        oldsize), 0xa5, isalloc(ptr,
-                        config_prof) - oldsize);
-                } else if (unlikely(opt_zero)) {
-                    memset((void *)((uintptr_t)ptr +
-                        oldsize), 0, isalloc(ptr,
-                        config_prof) - oldsize);
-                }
-            }
-            return (ret);
-        }
     }
+
+    chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
+    arena = extent_node_arena_get(&chunk->node);
+
+    if (oldsize < usize_max) {
+        bool ret = arena_ralloc_large_grow(arena, chunk, ptr, oldsize,
+            usize_min, usize_max, zero);
+        if (config_fill && !ret && !zero) {
+            if (unlikely(opt_junk_alloc)) {
+                memset((void *)((uintptr_t)ptr + oldsize), 0xa5,
+                    isalloc(ptr, config_prof) - oldsize);
+            } else if (unlikely(opt_zero)) {
+                memset((void *)((uintptr_t)ptr + oldsize), 0,
+                    isalloc(ptr, config_prof) - oldsize);
+            }
+        }
+        return (ret);
+    }
+
+    assert(oldsize > usize_max);
+    /* Fill before shrinking in order avoid a race. */
+    arena_ralloc_junk_large(ptr, oldsize, usize_max);
+    arena_ralloc_large_shrink(arena, chunk, ptr, oldsize, usize_max);
+    return (false);
 }

 bool
 arena_ralloc_no_move(void *ptr, size_t oldsize, size_t size, size_t extra,
     bool zero)
 {
+    size_t usize_min, usize_max;

-    if (likely(size <= arena_maxclass)) {
+    /* Check for size overflow. */
+    if (unlikely(size > HUGE_MAXCLASS))
+        return (true);
+    usize_min = s2u(size);
+    /* Clamp extra if necessary to avoid (size + extra) overflow. */
+    if (unlikely(size + extra > HUGE_MAXCLASS))
+        extra = HUGE_MAXCLASS - size;
+    usize_max = s2u(size + extra);
+
+    if (likely(oldsize <= arena_maxclass && usize_min <= arena_maxclass)) {
         /*
          * Avoid moving the allocation if the size class can be left the
          * same.
          */
-        if (likely(oldsize <= arena_maxclass)) {
-            if (oldsize <= SMALL_MAXCLASS) {
-                assert(
-                    arena_bin_info[size2index(oldsize)].reg_size
-                    == oldsize);
-                if ((size + extra <= SMALL_MAXCLASS &&
-                    size2index(size + extra) ==
-                    size2index(oldsize)) || (size <= oldsize &&
-                    size + extra >= oldsize))
-                    return (false);
-            } else {
-                assert(size <= arena_maxclass);
-                if (size + extra > SMALL_MAXCLASS) {
-                    if (!arena_ralloc_large(ptr, oldsize,
-                        size, extra, zero))
-                        return (false);
-                }
-            }
+        if (oldsize <= SMALL_MAXCLASS) {
+            assert(arena_bin_info[size2index(oldsize)].reg_size ==
+                oldsize);
+            if ((usize_max <= SMALL_MAXCLASS &&
+                size2index(usize_max) == size2index(oldsize)) ||
+                (size <= oldsize && usize_max >= oldsize))
+                return (false);
+        } else {
+            if (usize_max > SMALL_MAXCLASS) {
+                if (!arena_ralloc_large(ptr, oldsize, usize_min,
+                    usize_max, zero))
+                    return (false);
+            }
         }

         /* Reallocation would require a move. */
         return (true);
-    } else
-        return (huge_ralloc_no_move(ptr, oldsize, size, extra, zero));
+    } else {
+        return (huge_ralloc_no_move(ptr, oldsize, usize_min, usize_max,
+            zero));
+    }
+}
+
+static void *
+arena_ralloc_move_helper(tsd_t *tsd, arena_t *arena, size_t usize,
+    size_t alignment, bool zero, tcache_t *tcache)
+{
+
+    if (alignment == 0)
+        return (arena_malloc(tsd, arena, usize, zero, tcache));
+    usize = sa2u(usize, alignment);
+    if (usize == 0)
+        return (NULL);
+    return (ipalloct(tsd, usize, alignment, zero, tcache, arena));
 }

 void *
 arena_ralloc(tsd_t *tsd, arena_t *arena, void *ptr, size_t oldsize, size_t size,
-    size_t extra, size_t alignment, bool zero, tcache_t *tcache)
+    size_t alignment, bool zero, tcache_t *tcache)
 {
     void *ret;
+    size_t usize;

-    if (likely(size <= arena_maxclass)) {
+    usize = s2u(size);
+    if (usize == 0)
+        return (NULL);
+
+    if (likely(usize <= arena_maxclass)) {
         size_t copysize;

         /* Try to avoid moving the allocation. */
-        if (!arena_ralloc_no_move(ptr, oldsize, size, extra, zero))
+        if (!arena_ralloc_no_move(ptr, oldsize, usize, 0, zero))
             return (ptr);

         /*
@@ -2848,53 +2864,23 @@ arena_ralloc(tsd_t *tsd, arena_t *arena, void *ptr, size_t oldsize, size_t size,
          * the object. In that case, fall back to allocating new space
          * and copying.
          */
-        if (alignment != 0) {
-            size_t usize = sa2u(size + extra, alignment);
-            if (usize == 0)
-                return (NULL);
-            ret = ipalloct(tsd, usize, alignment, zero, tcache,
-                arena);
-        } else {
-            ret = arena_malloc(tsd, arena, size + extra, zero,
-                tcache);
-        }
-        if (ret == NULL) {
-            if (extra == 0)
-                return (NULL);
-            /* Try again, this time without extra. */
-            if (alignment != 0) {
-                size_t usize = sa2u(size, alignment);
-                if (usize == 0)
-                    return (NULL);
-                ret = ipalloct(tsd, usize, alignment, zero,
-                    tcache, arena);
-            } else {
-                ret = arena_malloc(tsd, arena, size, zero,
-                    tcache);
-            }
-            if (ret == NULL)
-                return (NULL);
-        }
+        ret = arena_ralloc_move_helper(tsd, arena, usize, alignment,
+            zero, tcache);
+        if (ret == NULL)
+            return (NULL);

         /*
          * Junk/zero-filling were already done by
          * ipalloc()/arena_malloc().
          */

-        /*
-         * Copy at most size bytes (not size+extra), since the caller
-         * has no expectation that the extra bytes will be reliably
-         * preserved.
-         */
-        copysize = (size < oldsize) ? size : oldsize;
+        copysize = (usize < oldsize) ? usize : oldsize;
         JEMALLOC_VALGRIND_MAKE_MEM_UNDEFINED(ret, copysize);
         memcpy(ret, ptr, copysize);
         isqalloc(tsd, ptr, oldsize, tcache);
     } else {
-        ret = huge_ralloc(tsd, arena, ptr, oldsize, size, extra,
-            alignment, zero, tcache);
+        ret = huge_ralloc(tsd, arena, ptr, oldsize, usize, alignment,
+            zero, tcache);
     }
     return (ret);
 }

src/huge.c

@@ -126,18 +126,19 @@ huge_dalloc_junk_t *huge_dalloc_junk = JEMALLOC_N(huge_dalloc_junk_impl);
 #endif

 static void
-huge_ralloc_no_move_similar(void *ptr, size_t oldsize, size_t usize,
-    size_t size, size_t extra, bool zero)
+huge_ralloc_no_move_similar(void *ptr, size_t oldsize, size_t usize_min,
+    size_t usize_max, bool zero)
 {
-    size_t usize_next;
+    size_t usize, usize_next;
     extent_node_t *node;
     arena_t *arena;
     chunk_hooks_t chunk_hooks = CHUNK_HOOKS_INITIALIZER;
     bool zeroed;

     /* Increase usize to incorporate extra. */
-    while (usize < s2u(size+extra) && (usize_next = s2u(usize+1)) < oldsize)
-        usize = usize_next;
+    for (usize = usize_min; usize < usize_max && (usize_next = s2u(usize+1))
+        <= oldsize; usize = usize_next)
+        ; /* Do nothing. */

     if (oldsize == usize)
         return;
@@ -195,6 +196,8 @@ huge_ralloc_no_move_shrink(void *ptr, size_t oldsize, size_t usize)
     arena = extent_node_arena_get(node);
     chunk_hooks = chunk_hooks_get(arena);

+    assert(oldsize > usize);
+
     /* Split excess chunks. */
     cdiff = CHUNK_CEILING(oldsize) - CHUNK_CEILING(usize);
     if (cdiff != 0 && chunk_hooks.split(ptr, CHUNK_CEILING(oldsize),
@@ -230,18 +233,11 @@ huge_ralloc_no_move_shrink(void *ptr, size_t oldsize, size_t usize)
 }

 static bool
-huge_ralloc_no_move_expand(void *ptr, size_t oldsize, size_t size, bool zero) {
-    size_t usize;
+huge_ralloc_no_move_expand(void *ptr, size_t oldsize, size_t usize, bool zero) {
     extent_node_t *node;
     arena_t *arena;
     bool is_zeroed_subchunk, is_zeroed_chunk;

-    usize = s2u(size);
-    if (usize == 0) {
-        /* size_t overflow. */
-        return (true);
-    }
-
     node = huge_node_get(ptr);
     arena = extent_node_arena_get(node);
     malloc_mutex_lock(&arena->huge_mtx);
@@ -282,89 +278,76 @@ huge_ralloc_no_move_expand(void *ptr, size_t oldsize, size_t usize, bool zero) {
 }

 bool
-huge_ralloc_no_move(void *ptr, size_t oldsize, size_t size, size_t extra,
-    bool zero)
+huge_ralloc_no_move(void *ptr, size_t oldsize, size_t usize_min,
+    size_t usize_max, bool zero)
 {
-    size_t usize;
-
-    /* Both allocations must be huge to avoid a move. */
-    if (oldsize < chunksize)
-        return (true);

     assert(s2u(oldsize) == oldsize);
-    usize = s2u(size);
-    if (usize == 0) {
-        /* size_t overflow. */
-        return (true);
+
+    /* Both allocations must be huge to avoid a move. */
+    if (oldsize < chunksize || usize_max < chunksize)
+        return (true);
+
+    if (CHUNK_CEILING(usize_max) > CHUNK_CEILING(oldsize)) {
+        /* Attempt to expand the allocation in-place. */
+        if (!huge_ralloc_no_move_expand(ptr, oldsize, usize_max, zero))
+            return (false);
+        /* Try again, this time with usize_min. */
+        if (usize_min < usize_max && CHUNK_CEILING(usize_min) >
+            CHUNK_CEILING(oldsize) && huge_ralloc_no_move_expand(ptr,
+            oldsize, usize_min, zero))
+            return (false);
     }

     /*
      * Avoid moving the allocation if the existing chunk size accommodates
      * the new size.
      */
-    if (CHUNK_CEILING(oldsize) >= CHUNK_CEILING(usize)
-        && CHUNK_CEILING(oldsize) <= CHUNK_CEILING(s2u(size+extra))) {
-        huge_ralloc_no_move_similar(ptr, oldsize, usize, size, extra,
-            zero);
+    if (CHUNK_CEILING(oldsize) >= CHUNK_CEILING(usize_min)
+        && CHUNK_CEILING(oldsize) <= CHUNK_CEILING(usize_max)) {
+        huge_ralloc_no_move_similar(ptr, oldsize, usize_min, usize_max,
+            zero);
         return (false);
     }

     /* Attempt to shrink the allocation in-place. */
-    if (CHUNK_CEILING(oldsize) >= CHUNK_CEILING(usize))
-        return (huge_ralloc_no_move_shrink(ptr, oldsize, usize));
-
-    /* Attempt to expand the allocation in-place. */
-    if (huge_ralloc_no_move_expand(ptr, oldsize, size + extra, zero)) {
-        if (extra == 0)
-            return (true);
-
-        /* Try again, this time without extra. */
-        return (huge_ralloc_no_move_expand(ptr, oldsize, size, zero));
-    }
-    return (false);
+    if (CHUNK_CEILING(oldsize) > CHUNK_CEILING(usize_max))
+        return (huge_ralloc_no_move_shrink(ptr, oldsize, usize_max));
+    return (true);
+}
+
+static void *
+huge_ralloc_move_helper(tsd_t *tsd, arena_t *arena, size_t usize,
+    size_t alignment, bool zero, tcache_t *tcache)
+{
+
+    if (alignment <= chunksize)
+        return (huge_malloc(tsd, arena, usize, zero, tcache));
+    return (huge_palloc(tsd, arena, usize, alignment, zero, tcache));
 }

 void *
-huge_ralloc(tsd_t *tsd, arena_t *arena, void *ptr, size_t oldsize, size_t size,
-    size_t extra, size_t alignment, bool zero, tcache_t *tcache)
+huge_ralloc(tsd_t *tsd, arena_t *arena, void *ptr, size_t oldsize, size_t usize,
+    size_t alignment, bool zero, tcache_t *tcache)
 {
     void *ret;
     size_t copysize;

     /* Try to avoid moving the allocation. */
-    if (!huge_ralloc_no_move(ptr, oldsize, size, extra, zero))
+    if (!huge_ralloc_no_move(ptr, oldsize, usize, usize, zero))
         return (ptr);

     /*
-     * size and oldsize are different enough that we need to use a
+     * usize and oldsize are different enough that we need to use a
      * different size class. In that case, fall back to allocating new
      * space and copying.
      */
-    if (alignment > chunksize) {
-        ret = huge_palloc(tsd, arena, size + extra, alignment, zero,
-            tcache);
-    } else
-        ret = huge_malloc(tsd, arena, size + extra, zero, tcache);
-
-    if (ret == NULL) {
-        if (extra == 0)
-            return (NULL);
-        /* Try again, this time without extra. */
-        if (alignment > chunksize) {
-            ret = huge_palloc(tsd, arena, size, alignment, zero,
-                tcache);
-        } else
-            ret = huge_malloc(tsd, arena, size, zero, tcache);
-        if (ret == NULL)
-            return (NULL);
-    }
-
-    /*
-     * Copy at most size bytes (not size+extra), since the caller has no
-     * expectation that the extra bytes will be reliably preserved.
-     */
-    copysize = (size < oldsize) ? size : oldsize;
+    ret = huge_ralloc_move_helper(tsd, arena, usize, alignment, zero,
+        tcache);
+    if (ret == NULL)
+        return (NULL);
+
+    copysize = (usize < oldsize) ? usize : oldsize;
     memcpy(ret, ptr, copysize);
     isqalloc(tsd, ptr, oldsize, tcache);
     return (ret);

test/integration/rallocx.c

@ -22,7 +22,7 @@ TEST_BEGIN(test_grow_and_shrink)
szs[j-1], szs[j-1]+1); szs[j-1], szs[j-1]+1);
szs[j] = sallocx(q, 0); szs[j] = sallocx(q, 0);
assert_zu_ne(szs[j], szs[j-1]+1, assert_zu_ne(szs[j], szs[j-1]+1,
"Expected size to at least: %zu", szs[j-1]+1); "Expected size to be at least: %zu", szs[j-1]+1);
p = q; p = q;
} }

test/integration/xallocx.c

@@ -48,6 +48,243 @@ TEST_BEGIN(test_no_move_fail)
 }
 TEST_END

+static unsigned
+get_nsizes_impl(const char *cmd)
+{
+    unsigned ret;
+    size_t z;
+
+    z = sizeof(unsigned);
+    assert_d_eq(mallctl(cmd, &ret, &z, NULL, 0), 0,
+        "Unexpected mallctl(\"%s\", ...) failure", cmd);
+
+    return (ret);
+}
+
+static unsigned
+get_nsmall(void)
+{
+
+    return (get_nsizes_impl("arenas.nbins"));
+}
+
+static unsigned
+get_nlarge(void)
+{
+
+    return (get_nsizes_impl("arenas.nlruns"));
+}
+
+static unsigned
+get_nhuge(void)
+{
+
+    return (get_nsizes_impl("arenas.nhchunks"));
+}
+
+static size_t
+get_size_impl(const char *cmd, size_t ind)
+{
+    size_t ret;
+    size_t z;
+    size_t mib[4];
+    size_t miblen = 4;
+
+    z = sizeof(size_t);
+    assert_d_eq(mallctlnametomib(cmd, mib, &miblen),
+        0, "Unexpected mallctlnametomib(\"%s\", ...) failure", cmd);
+    mib[2] = ind;
+    z = sizeof(size_t);
+    assert_d_eq(mallctlbymib(mib, miblen, &ret, &z, NULL, 0),
+        0, "Unexpected mallctlbymib([\"%s\", %zu], ...) failure", cmd, ind);
+
+    return (ret);
+}
+
+static size_t
+get_small_size(size_t ind)
+{
+
+    return (get_size_impl("arenas.bin.0.size", ind));
+}
+
+static size_t
+get_large_size(size_t ind)
+{
+
+    return (get_size_impl("arenas.lrun.0.size", ind));
+}
+
+static size_t
+get_huge_size(size_t ind)
+{
+
+    return (get_size_impl("arenas.hchunk.0.size", ind));
+}
+
+TEST_BEGIN(test_extra_small)
+{
+    size_t small0, small1, hugemax;
+    void *p;
+
+    /* Get size classes. */
+    small0 = get_small_size(0);
+    small1 = get_small_size(1);
+    hugemax = get_huge_size(get_nhuge()-1);
+
+    p = mallocx(small0, 0);
+    assert_ptr_not_null(p, "Unexpected mallocx() error");
+
+    assert_zu_eq(xallocx(p, small1, 0, 0), small0,
+        "Unexpected xallocx() behavior");
+
+    assert_zu_eq(xallocx(p, small1, 0, 0), small0,
+        "Unexpected xallocx() behavior");
+
+    assert_zu_eq(xallocx(p, small0, small1 - small0, 0), small0,
+        "Unexpected xallocx() behavior");
+
+    /* Test size+extra overflow. */
+    assert_zu_eq(xallocx(p, small0, hugemax - small0 + 1, 0), small0,
+        "Unexpected xallocx() behavior");
+    assert_zu_eq(xallocx(p, small0, SIZE_T_MAX - small0, 0), small0,
+        "Unexpected xallocx() behavior");
+
+    dallocx(p, 0);
+}
+TEST_END
+
+TEST_BEGIN(test_extra_large)
+{
+    size_t smallmax, large0, large1, large2, huge0, hugemax;
+    void *p;
+
+    /* Get size classes. */
+    smallmax = get_small_size(get_nsmall()-1);
+    large0 = get_large_size(0);
+    large1 = get_large_size(1);
+    large2 = get_large_size(2);
+    huge0 = get_huge_size(0);
+    hugemax = get_huge_size(get_nhuge()-1);
+
+    p = mallocx(large2, 0);
+    assert_ptr_not_null(p, "Unexpected mallocx() error");
+
+    assert_zu_eq(xallocx(p, large2, 0, 0), large2,
+        "Unexpected xallocx() behavior");
+
+    /* Test size decrease with zero extra. */
+    assert_zu_eq(xallocx(p, large0, 0, 0), large0,
+        "Unexpected xallocx() behavior");
+    assert_zu_eq(xallocx(p, smallmax, 0, 0), large0,
+        "Unexpected xallocx() behavior");
+
+    assert_zu_eq(xallocx(p, large2, 0, 0), large2,
+        "Unexpected xallocx() behavior");
+
+    /* Test size decrease with non-zero extra. */
+    assert_zu_eq(xallocx(p, large0, large2 - large0, 0), large2,
+        "Unexpected xallocx() behavior");
+    assert_zu_eq(xallocx(p, large1, large2 - large1, 0), large2,
+        "Unexpected xallocx() behavior");
+    assert_zu_eq(xallocx(p, large0, large1 - large0, 0), large1,
+        "Unexpected xallocx() behavior");
+    assert_zu_eq(xallocx(p, smallmax, large0 - smallmax, 0), large0,
+        "Unexpected xallocx() behavior");
+
+    assert_zu_eq(xallocx(p, large0, 0, 0), large0,
+        "Unexpected xallocx() behavior");
+
+    /* Test size increase with zero extra. */
+    assert_zu_eq(xallocx(p, large2, 0, 0), large2,
+        "Unexpected xallocx() behavior");
+    assert_zu_eq(xallocx(p, huge0, 0, 0), large2,
+        "Unexpected xallocx() behavior");
+
+    assert_zu_eq(xallocx(p, large0, 0, 0), large0,
+        "Unexpected xallocx() behavior");
+
+    /* Test size increase with non-zero extra. */
+    assert_zu_lt(xallocx(p, large0, huge0 - large0, 0), huge0,
+        "Unexpected xallocx() behavior");
+
+    assert_zu_eq(xallocx(p, large0, 0, 0), large0,
+        "Unexpected xallocx() behavior");
+
+    /* Test size increase with non-zero extra. */
+    assert_zu_eq(xallocx(p, large0, large2 - large0, 0), large2,
+        "Unexpected xallocx() behavior");
+
+    assert_zu_eq(xallocx(p, large2, 0, 0), large2,
+        "Unexpected xallocx() behavior");
+
+    /* Test size+extra overflow. */
+    assert_zu_lt(xallocx(p, large2, hugemax - large2 + 1, 0), huge0,
+        "Unexpected xallocx() behavior");
+
+    dallocx(p, 0);
+}
+TEST_END
+
+TEST_BEGIN(test_extra_huge)
+{
+    size_t largemax, huge0, huge1, huge2, hugemax;
+    void *p;
+
+    /* Get size classes. */
+    largemax = get_large_size(get_nlarge()-1);
+    huge0 = get_huge_size(0);
+    huge1 = get_huge_size(1);
+    huge2 = get_huge_size(2);
+    hugemax = get_huge_size(get_nhuge()-1);
+
+    p = mallocx(huge2, 0);
+    assert_ptr_not_null(p, "Unexpected mallocx() error");
+
+    assert_zu_eq(xallocx(p, huge2, 0, 0), huge2,
+        "Unexpected xallocx() behavior");
+
+    /* Test size decrease with zero extra. */
+    assert_zu_eq(xallocx(p, huge0, 0, 0), huge0,
+        "Unexpected xallocx() behavior");
+    assert_zu_eq(xallocx(p, largemax, 0, 0), huge0,
+        "Unexpected xallocx() behavior");
+
+    assert_zu_eq(xallocx(p, huge2, 0, 0), huge2,
+        "Unexpected xallocx() behavior");
+
+    /* Test size decrease with non-zero extra. */
+    assert_zu_eq(xallocx(p, huge0, huge2 - huge0, 0), huge2,
+        "Unexpected xallocx() behavior");
+    assert_zu_eq(xallocx(p, huge1, huge2 - huge1, 0), huge2,
+        "Unexpected xallocx() behavior");
+    assert_zu_eq(xallocx(p, huge0, huge1 - huge0, 0), huge1,
+        "Unexpected xallocx() behavior");
+    assert_zu_eq(xallocx(p, largemax, huge0 - largemax, 0), huge0,
+        "Unexpected xallocx() behavior");
+
+    assert_zu_eq(xallocx(p, huge0, 0, 0), huge0,
+        "Unexpected xallocx() behavior");
+
+    /* Test size increase with zero extra. */
+    assert_zu_eq(xallocx(p, huge2, 0, 0), huge2,
+        "Unexpected xallocx() behavior");
+    assert_zu_eq(xallocx(p, hugemax+1, 0, 0), huge2,
+        "Unexpected xallocx() behavior");
+
+    assert_zu_eq(xallocx(p, huge0, 0, 0), huge0,
+        "Unexpected xallocx() behavior");
+
+    /* Test size increase with non-zero extra. */
+    assert_zu_le(xallocx(p, huge0, SIZE_T_MAX - huge0, 0), hugemax,
+        "Unexpected xallocx() behavior");
+
+    assert_zu_eq(xallocx(p, huge0, 0, 0), huge0,
+        "Unexpected xallocx() behavior");
+
+    /* Test size increase with non-zero extra. */
+    assert_zu_eq(xallocx(p, huge0, huge2 - huge0, 0), huge2,
+        "Unexpected xallocx() behavior");
+
+    assert_zu_eq(xallocx(p, huge2, 0, 0), huge2,
+        "Unexpected xallocx() behavior");
+
+    /* Test size+extra overflow. */
+    assert_zu_le(xallocx(p, huge2, hugemax - huge2 + 1, 0), hugemax,
+        "Unexpected xallocx() behavior");
+
+    dallocx(p, 0);
+}
+TEST_END
+
 int
 main(void)
 {
@@ -55,5 +292,8 @@ main(void)
     return (test(
         test_same_size,
         test_extra_no_move,
-        test_no_move_fail));
+        test_no_move_fail,
+        test_extra_small,
+        test_extra_large,
+        test_extra_huge));
 }