Don't junk-fill reallocations unless usize changes.
Don't junk-fill reallocations for which the requested size is smaller than the current usable size, but not enough smaller to cause a size class change.

Unlike malloc()/calloc()/realloc(), the *allocx() functions contractually treat the full usize as the allocation. A caller can therefore ask for zeroed memory via mallocx() and a series of rallocx() calls that all specify MALLOCX_ZERO, and be assured that all newly allocated bytes will be zeroed and made available to the application without danger of allocator mutation, until the size class decreases enough to reduce usize.
parent 665769357c
commit 6e62984ef6
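Before the diff, a minimal sketch of the contract the message describes, using jemalloc's non-standard API (mallocx(), rallocx(), sallocx(), dallocx(), MALLOCX_ZERO) from <jemalloc/jemalloc.h>; the sizes are illustrative:

#include <jemalloc/jemalloc.h>
#include <assert.h>
#include <stddef.h>

int
main(void)
{
	/* MALLOCX_ZERO zeroes the full usize, not just the 100
	 * requested bytes. */
	unsigned char *p = mallocx(100, MALLOCX_ZERO), *q;
	if (p == NULL)
		return (1);

	/* Grow the allocation.  Every byte beyond the old usize must
	 * also be zeroed, which is only sound if intermediate
	 * same-size-class reallocations never junk the tail of the
	 * old usize. */
	q = rallocx(p, 2 * sallocx(p, 0), MALLOCX_ZERO);
	if (q == NULL) {
		dallocx(p, 0);
		return (1);
	}
	p = q;
	for (size_t i = 0; i < sallocx(p, 0); i++)
		assert(p[i] == 0);

	dallocx(p, 0);
	return (0);
}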
@@ -297,6 +297,7 @@ tcache_alloc_small(tcache_t *tcache, size_t size, bool zero)
 	binind = SMALL_SIZE2BIN(size);
 	assert(binind < NBINS);
 	tbin = &tcache->tbins[binind];
+	size = arena_bin_info[binind].reg_size;
 	ret = tcache_alloc_easy(tbin);
 	if (ret == NULL) {
 		ret = tcache_alloc_small_hard(tcache, tbin, binind);
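The hunk above makes tcache_alloc_small() fill the bin's full reg_size (the usable size) instead of just the requested size. A toy sketch of the idea, with a hypothetical bin table standing in for arena_bin_info[]:

#include <stdbool.h>
#include <stddef.h>
#include <string.h>

/* Hypothetical stand-in for arena_bin_info[].reg_size; not jemalloc's
 * real table. */
static const size_t reg_size[] = {8, 16, 32, 48, 64};

/* Rounding the request up to the bin's reg_size before filling lets a
 * zero (or junk) fill cover every usable byte, which is what extends
 * the MALLOCX_ZERO guarantee to the full usize. */
static void
fill_alloc(void *ret, size_t size, size_t binind, bool zero)
{
	size = reg_size[binind];	/* as the hunk above now does */
	memset(ret, zero ? 0 : 0x5a, size);
}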
src/arena.c | 15
@@ -1938,10 +1938,6 @@ arena_ralloc_large(void *ptr, size_t oldsize, size_t size, size_t extra,
 	psize = PAGE_CEILING(size + extra);
 	if (psize == oldsize) {
 		/* Same size class. */
-		if (config_fill && opt_junk && size < oldsize) {
-			memset((void *)((uintptr_t)ptr + size), 0x5a, oldsize -
-			    size);
-		}
 		return (false);
 	} else {
 		arena_chunk_t *chunk;
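The memset removed above junked the bytes between the new request and oldsize even though psize == oldsize means the usable size has not changed, so those bytes are still part of the live allocation. A small arithmetic sketch of the size-class test, assuming 4 KiB pages and jemalloc-style rounding:

#include <assert.h>
#include <stddef.h>

#define PAGE		((size_t)4096)	/* assumption: 4 KiB pages */
#define PAGE_CEILING(s)	(((s) + PAGE - 1) & ~(PAGE - 1))

int
main(void)
{
	/* Shrinking a large request from 6000 to 5000 bytes leaves the
	 * page-rounded size at 8192 either way, so psize == oldsize
	 * and, after this commit, bytes 5000..8191 are left alone. */
	assert(PAGE_CEILING(6000) == (size_t)8192);
	assert(PAGE_CEILING(5000) == (size_t)8192);
	return (0);
}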
@@ -1953,8 +1949,8 @@ arena_ralloc_large(void *ptr, size_t oldsize, size_t size, size_t extra,
 		if (psize < oldsize) {
 			/* Fill before shrinking in order avoid a race. */
 			if (config_fill && opt_junk) {
-				memset((void *)((uintptr_t)ptr + size), 0x5a,
-				    oldsize - size);
+				memset((void *)((uintptr_t)ptr + psize), 0x5a,
+				    oldsize - psize);
 			}
 			arena_ralloc_large_shrink(arena, chunk, ptr, oldsize,
 			    psize);
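When the allocation really does shrink to a smaller large size class, junk filling is still wanted for the pages being given back, but only from psize (the new usable size) onward; filling from size would scribble over the bytes in [size, psize), which remain inside the new usize. An illustrative restatement of the corrected fill range:

#include <stddef.h>
#include <stdint.h>
#include <string.h>

/* Illustrative only: junk exactly the bytes being returned to the
 * allocator, i.e. [psize, oldsize).  Bytes in [size, psize) are still
 * usable and must not be mutated. */
static void
junk_shrunk_tail(void *ptr, size_t oldsize, size_t psize)
{
	memset((void *)((uintptr_t)ptr + psize), 0x5a, oldsize - psize);
}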
@@ -1988,13 +1984,8 @@ arena_ralloc_no_move(void *ptr, size_t oldsize, size_t size, size_t extra,
 			if ((size + extra <= SMALL_MAXCLASS &&
 			    SMALL_SIZE2BIN(size + extra) ==
 			    SMALL_SIZE2BIN(oldsize)) || (size <= oldsize &&
-			    size + extra >= oldsize)) {
-				if (config_fill && opt_junk && size < oldsize) {
-					memset((void *)((uintptr_t)ptr + size),
-					    0x5a, oldsize - size);
-				}
+			    size + extra >= oldsize))
 				return (ptr);
-			}
 		} else {
 			assert(size <= arena_maxclass);
 			if (size + extra > SMALL_MAXCLASS) {
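For small allocations the same-class test is SMALL_SIZE2BIN(): if the old and new requests map to the same bin, the allocation neither moves nor, after this commit, gets junked. A hypothetical stand-in for the bin lookup shows why nearby requests collapse to one class:

#include <assert.h>
#include <stddef.h>

/* Hypothetical SMALL_SIZE2BIN(): smallest bin in {8, 16, 32, 48, 64}
 * that fits the request; requests above 64 are out of scope here. */
static size_t
small_size2bin(size_t size)
{
	static const size_t classes[] = {8, 16, 32, 48, 64};
	size_t i;

	for (i = 0; classes[i] < size; i++)
		;
	return (i);
}

int
main(void)
{
	/* 36 and 40 both land in the 48-byte bin: same class, so a
	 * shrink from 40 to 36 triggers no move and no junk fill. */
	assert(small_size2bin(36) == small_size2bin(40));
	return (0);
}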
@@ -89,10 +89,6 @@ huge_ralloc_no_move(void *ptr, size_t oldsize, size_t size, size_t extra)
 	    && CHUNK_CEILING(oldsize) >= CHUNK_CEILING(size)
 	    && CHUNK_CEILING(oldsize) <= CHUNK_CEILING(size+extra)) {
 		assert(CHUNK_CEILING(oldsize) == oldsize);
-		if (config_fill && opt_junk && size < oldsize) {
-			memset((void *)((uintptr_t)ptr + size), 0x5a,
-			    oldsize - size);
-		}
 		return (ptr);
 	}
 
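Taken together, the user-visible effect is that a reallocation that stays within its size class no longer scribbles junk over the tail of the allocation. A hedged end-to-end sketch, again via jemalloc's non-standard API and assuming 4096 is a size class that a 4000-byte request rounds into:

#include <jemalloc/jemalloc.h>
#include <assert.h>

int
main(void)
{
	unsigned char *p = mallocx(4096, MALLOCX_ZERO), *q;
	if (p == NULL)
		return (1);

	/* Shrink within the (assumed) 4096-byte class: no move and,
	 * after this commit, no junk fill of bytes 4000..4095. */
	q = rallocx(p, 4000, MALLOCX_ZERO);
	if (q != NULL)
		p = q;	/* rallocx() leaves ptr intact on failure */
	assert(p[4095] == 0);

	dallocx(p, 0);
	return (0);
}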