Add JEMALLOC_ALLOC_JUNK and JEMALLOC_FREE_JUNK macros
Replace hardcoded 0xa5 and 0x5a junk values with JEMALLOC_ALLOC_JUNK and JEMALLOC_FREE_JUNK macros, respectively.
Commit: a82070ef5f
Parent: f86bc081d6
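For orientation before the diff, here is a minimal standalone sketch of the pattern this commit introduces. The demo_* helpers below are hypothetical illustrations, not jemalloc functions; only the macro names and byte values come from the commit itself.

#include <stddef.h>
#include <string.h>

/* Junk fill patterns, as added to jemalloc's internal headers by this commit. */
#define JEMALLOC_ALLOC_JUNK 0xa5
#define JEMALLOC_FREE_JUNK  0x5a

/* Hypothetical helpers for illustration only: after this change, junk-filling
 * memset() calls name the fill pattern instead of hardcoding the byte value. */
static void
demo_junk_on_alloc(void *ptr, size_t usize)
{
    /* Previously: memset(ptr, 0xa5, usize); */
    memset(ptr, JEMALLOC_ALLOC_JUNK, usize);
}

static void
demo_junk_on_free(void *ptr, size_t usize)
{
    /* Previously: memset(ptr, 0x5a, usize); */
    memset(ptr, JEMALLOC_FREE_JUNK, usize);
}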
@@ -381,9 +381,10 @@ tcache_alloc_large(tsd_t *tsd, arena_t *arena, tcache_t *tcache, size_t size,
         }
         if (likely(!zero)) {
             if (slow_path && config_fill) {
-                if (unlikely(opt_junk_alloc))
-                    memset(ret, 0xa5, usize);
-                else if (unlikely(opt_zero))
+                if (unlikely(opt_junk_alloc)) {
+                    memset(ret, JEMALLOC_ALLOC_JUNK,
+                        usize);
+                } else if (unlikely(opt_zero))
                     memset(ret, 0, usize);
             }
         } else
@@ -40,6 +40,10 @@
  */
 #define MALLOC_PRINTF_BUFSIZE 4096
 
+/* Junk fill patterns. */
+#define JEMALLOC_ALLOC_JUNK 0xa5
+#define JEMALLOC_FREE_JUNK 0x5a
+
 /*
  * Wrap a cpp argument that contains commas such that it isn't broken up into
  * multiple arguments.
src/arena.c (36 changed lines)
@@ -2249,15 +2249,16 @@ void
 arena_alloc_junk_small(void *ptr, arena_bin_info_t *bin_info, bool zero)
 {
 
+    size_t redzone_size = bin_info->redzone_size;
+
     if (zero) {
-        size_t redzone_size = bin_info->redzone_size;
-        memset((void *)((uintptr_t)ptr - redzone_size), 0xa5,
-            redzone_size);
-        memset((void *)((uintptr_t)ptr + bin_info->reg_size), 0xa5,
-            redzone_size);
+        memset((void *)((uintptr_t)ptr - redzone_size),
+            JEMALLOC_ALLOC_JUNK, redzone_size);
+        memset((void *)((uintptr_t)ptr + bin_info->reg_size),
+            JEMALLOC_ALLOC_JUNK, redzone_size);
     } else {
-        memset((void *)((uintptr_t)ptr - bin_info->redzone_size), 0xa5,
-            bin_info->reg_interval);
+        memset((void *)((uintptr_t)ptr - redzone_size),
+            JEMALLOC_ALLOC_JUNK, bin_info->reg_interval);
     }
 }
 
@@ -2293,22 +2294,22 @@ arena_redzones_validate(void *ptr, arena_bin_info_t *bin_info, bool reset)
 
     for (i = 1; i <= redzone_size; i++) {
         uint8_t *byte = (uint8_t *)((uintptr_t)ptr - i);
-        if (*byte != 0xa5) {
+        if (*byte != JEMALLOC_ALLOC_JUNK) {
             error = true;
             arena_redzone_corruption(ptr, size, false, i,
                 *byte);
             if (reset)
-                *byte = 0xa5;
+                *byte = JEMALLOC_ALLOC_JUNK;
         }
     }
     for (i = 0; i < redzone_size; i++) {
         uint8_t *byte = (uint8_t *)((uintptr_t)ptr + size + i);
-        if (*byte != 0xa5) {
+        if (*byte != JEMALLOC_ALLOC_JUNK) {
             error = true;
             arena_redzone_corruption(ptr, size, true, i,
                 *byte);
             if (reset)
-                *byte = 0xa5;
+                *byte = JEMALLOC_ALLOC_JUNK;
         }
     }
 }
@@ -2327,7 +2328,7 @@ arena_dalloc_junk_small(void *ptr, arena_bin_info_t *bin_info)
     size_t redzone_size = bin_info->redzone_size;
 
     arena_redzones_validate(ptr, bin_info, false);
-    memset((void *)((uintptr_t)ptr - redzone_size), 0x5a,
+    memset((void *)((uintptr_t)ptr - redzone_size), JEMALLOC_FREE_JUNK,
         bin_info->reg_interval);
 }
 #ifdef JEMALLOC_JET
@@ -2458,7 +2459,7 @@ arena_malloc_large(tsd_t *tsd, arena_t *arena, szind_t binind, bool zero)
     if (!zero) {
         if (config_fill) {
             if (unlikely(opt_junk_alloc))
-                memset(ret, 0xa5, usize);
+                memset(ret, JEMALLOC_ALLOC_JUNK, usize);
             else if (unlikely(opt_zero))
                 memset(ret, 0, usize);
         }
@@ -2563,7 +2564,7 @@ arena_palloc_large(tsd_t *tsd, arena_t *arena, size_t usize, size_t alignment,
 
     if (config_fill && !zero) {
         if (unlikely(opt_junk_alloc))
-            memset(ret, 0xa5, usize);
+            memset(ret, JEMALLOC_ALLOC_JUNK, usize);
         else if (unlikely(opt_zero))
             memset(ret, 0, usize);
     }
@@ -2776,7 +2777,7 @@ arena_dalloc_junk_large(void *ptr, size_t usize)
 {
 
     if (config_fill && unlikely(opt_junk_free))
-        memset(ptr, 0x5a, usize);
+        memset(ptr, JEMALLOC_FREE_JUNK, usize);
 }
 #ifdef JEMALLOC_JET
 #undef arena_dalloc_junk_large
@@ -2977,7 +2978,7 @@ arena_ralloc_junk_large(void *ptr, size_t old_usize, size_t usize)
 {
 
     if (config_fill && unlikely(opt_junk_free)) {
-        memset((void *)((uintptr_t)ptr + usize), 0x5a,
+        memset((void *)((uintptr_t)ptr + usize), JEMALLOC_FREE_JUNK,
             old_usize - usize);
     }
 }
@@ -3012,7 +3013,8 @@ arena_ralloc_large(void *ptr, size_t oldsize, size_t usize_min,
             usize_min, usize_max, zero);
         if (config_fill && !ret && !zero) {
             if (unlikely(opt_junk_alloc)) {
-                memset((void *)((uintptr_t)ptr + oldsize), 0xa5,
+                memset((void *)((uintptr_t)ptr + oldsize),
+                    JEMALLOC_ALLOC_JUNK,
                     isalloc(ptr, config_prof) - oldsize);
             } else if (unlikely(opt_zero)) {
                 memset((void *)((uintptr_t)ptr + oldsize), 0,
@@ -423,7 +423,7 @@ ckh_delete(tsd_t *tsd, ckh_t *ckh)
 
     idalloctm(tsd, ckh->tab, tcache_get(tsd, false), true, true);
     if (config_debug)
-        memset(ckh, 0x5a, sizeof(ckh_t));
+        memset(ckh, JEMALLOC_FREE_JUNK, sizeof(ckh_t));
 }
 
 size_t
src/huge.c (15 changed lines)
@@ -92,7 +92,7 @@ huge_palloc(tsd_t *tsd, arena_t *arena, size_t usize, size_t alignment,
         if (!is_zeroed)
             memset(ret, 0, usize);
     } else if (config_fill && unlikely(opt_junk_alloc))
-        memset(ret, 0xa5, usize);
+        memset(ret, JEMALLOC_ALLOC_JUNK, usize);
 
     arena_decay_tick(tsd, arena);
     return (ret);
@@ -112,7 +112,7 @@ huge_dalloc_junk(void *ptr, size_t usize)
          * unmapped.
          */
         if (!config_munmap || (have_dss && chunk_in_dss(ptr)))
-            memset(ptr, 0x5a, usize);
+            memset(ptr, JEMALLOC_FREE_JUNK, usize);
     }
 }
 #ifdef JEMALLOC_JET
@@ -147,7 +147,8 @@ huge_ralloc_no_move_similar(void *ptr, size_t oldsize, size_t usize_min,
     if (oldsize > usize) {
         size_t sdiff = oldsize - usize;
         if (config_fill && unlikely(opt_junk_free)) {
-            memset((void *)((uintptr_t)ptr + usize), 0x5a, sdiff);
+            memset((void *)((uintptr_t)ptr + usize),
+                JEMALLOC_FREE_JUNK, sdiff);
             post_zeroed = false;
         } else {
             post_zeroed = !chunk_purge_wrapper(arena, &chunk_hooks,
@@ -174,8 +175,8 @@ huge_ralloc_no_move_similar(void *ptr, size_t oldsize, size_t usize_min,
                     usize - oldsize);
             }
         } else if (config_fill && unlikely(opt_junk_alloc)) {
-            memset((void *)((uintptr_t)ptr + oldsize), 0xa5, usize -
-                oldsize);
+            memset((void *)((uintptr_t)ptr + oldsize),
+                JEMALLOC_ALLOC_JUNK, usize - oldsize);
         }
     }
 }
@@ -268,8 +269,8 @@ huge_ralloc_no_move_expand(void *ptr, size_t oldsize, size_t usize, bool zero) {
             CHUNK_CEILING(oldsize));
         }
     } else if (config_fill && unlikely(opt_junk_alloc)) {
-        memset((void *)((uintptr_t)ptr + oldsize), 0xa5, usize -
-            oldsize);
+        memset((void *)((uintptr_t)ptr + oldsize), JEMALLOC_ALLOC_JUNK,
+            usize - oldsize);
     }
 
     return (false);
@@ -160,7 +160,7 @@ quarantine(tsd_t *tsd, void *ptr)
                 && usize <= SMALL_MAXCLASS)
                 arena_quarantine_junk_small(ptr, usize);
             else
-                memset(ptr, 0x5a, usize);
+                memset(ptr, JEMALLOC_FREE_JUNK, usize);
         }
     } else {
         assert(quarantine->curbytes == 0);
@@ -29,7 +29,7 @@ arena_dalloc_junk_small_intercept(void *ptr, arena_bin_info_t *bin_info)
 
     arena_dalloc_junk_small_orig(ptr, bin_info);
     for (i = 0; i < bin_info->reg_size; i++) {
-        assert_c_eq(((char *)ptr)[i], 0x5a,
+        assert_c_eq(((char *)ptr)[i], JEMALLOC_FREE_JUNK,
             "Missing junk fill for byte %zu/%zu of deallocated region",
             i, bin_info->reg_size);
     }
@@ -44,7 +44,7 @@ arena_dalloc_junk_large_intercept(void *ptr, size_t usize)
 
     arena_dalloc_junk_large_orig(ptr, usize);
     for (i = 0; i < usize; i++) {
-        assert_c_eq(((char *)ptr)[i], 0x5a,
+        assert_c_eq(((char *)ptr)[i], JEMALLOC_FREE_JUNK,
             "Missing junk fill for byte %zu/%zu of deallocated region",
             i, usize);
     }
@@ -98,7 +98,7 @@ test_junk(size_t sz_min, size_t sz_max)
 
         for (i = sz_prev; i < sz; i++) {
             if (opt_junk_alloc) {
-                assert_c_eq(s[i], 0xa5,
+                assert_c_eq(s[i], JEMALLOC_ALLOC_JUNK,
                     "Newly allocated byte %zu/%zu isn't "
                     "junk-filled", i, sz);
             }