Add junk/zero filling unit tests, and fix discovered bugs.
Fix growing large reallocation to junk fill new space. Fix huge deallocation to junk fill when munmap is disabled.
parent e18c25d23d
commit 6b694c4d47
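The junk-fill paths touched here are routed through dedicated functions (arena_dalloc_junk_large, arena_ralloc_junk_large, huge_dalloc_junk) that, when built with JEMALLOC_JET, are exposed as writable function pointers so the new unit tests can intercept them. Below is a minimal, self-contained sketch of that hook pattern; the names dalloc_junk_t, dalloc_junk_impl, and dalloc_junk_intercept are illustrative stand-ins rather than jemalloc symbols, and the real build hides the indirection behind the JEMALLOC_N()/#define machinery visible in the diff.

#include <stdio.h>
#include <string.h>

/*
 * Hypothetical stand-ins for the JEMALLOC_JET hook pattern: the real
 * implementation lives under an *_impl-style name, and the public symbol
 * is a function pointer that a test may temporarily replace.
 */
typedef void (dalloc_junk_t)(void *, size_t);

static void
dalloc_junk_impl(void *ptr, size_t usize)
{
    /* Default behavior: junk fill the region being deallocated. */
    memset(ptr, 0x5a, usize);
}

/* Tests save this pointer, install an interceptor, and restore it later. */
dalloc_junk_t *dalloc_junk = dalloc_junk_impl;

static void
dalloc_junk_intercept(void *ptr, size_t usize)
{
    dalloc_junk_impl(ptr, usize);              /* keep the normal behavior */
    printf("junk filled %zu bytes\n", usize);  /* then observe that it ran */
}

int
main(void)
{
    char buf[16];

    dalloc_junk = dalloc_junk_intercept;       /* as a unit test would */
    dalloc_junk(buf, sizeof(buf));
    dalloc_junk = dalloc_junk_impl;            /* restore the default */
    return (0);
}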
Makefile.in (15 changed lines)

@@ -108,13 +108,14 @@ C_TESTLIB_SRCS := $(srcroot)test/src/math.c $(srcroot)test/src/mtx.c \
 	$(srcroot)test/src/thd.c
 C_UTIL_INTEGRATION_SRCS := $(srcroot)src/util.c
 TESTS_UNIT := $(srcroot)test/unit/bitmap.c $(srcroot)test/unit/ckh.c \
-	$(srcroot)test/unit/hash.c $(srcroot)test/unit/mallctl.c \
-	$(srcroot)test/unit/math.c $(srcroot)test/unit/mq.c \
-	$(srcroot)test/unit/mtx.c $(srcroot)test/unit/ql.c \
-	$(srcroot)test/unit/qr.c $(srcroot)test/unit/quarantine.c \
-	$(srcroot)test/unit/rb.c $(srcroot)test/unit/rtree.c \
-	$(srcroot)test/unit/SFMT.c $(srcroot)test/unit/stats.c \
-	$(srcroot)test/unit/tsd.c $(srcroot)test/unit/util.c
+	$(srcroot)test/unit/hash.c $(srcroot)test/unit/junk.c \
+	$(srcroot)test/unit/mallctl.c $(srcroot)test/unit/math.c \
+	$(srcroot)test/unit/mq.c $(srcroot)test/unit/mtx.c \
+	$(srcroot)test/unit/ql.c $(srcroot)test/unit/qr.c \
+	$(srcroot)test/unit/quarantine.c $(srcroot)test/unit/rb.c \
+	$(srcroot)test/unit/rtree.c $(srcroot)test/unit/SFMT.c \
+	$(srcroot)test/unit/stats.c $(srcroot)test/unit/tsd.c \
+	$(srcroot)test/unit/util.c $(srcroot)test/unit/zero.c
 TESTS_INTEGRATION := $(srcroot)test/integration/aligned_alloc.c \
 	$(srcroot)test/integration/allocated.c \
 	$(srcroot)test/integration/mallocx.c \
include/jemalloc/internal/arena.h

@@ -408,9 +408,12 @@ void arena_alloc_junk_small(void *ptr, arena_bin_info_t *bin_info,
 #ifdef JEMALLOC_JET
 typedef void (arena_redzone_corruption_t)(void *, size_t, bool, size_t,
     uint8_t);
-extern arena_redzone_corruption_t *arena_redzone_corruption_fptr;
-#endif
+extern arena_redzone_corruption_t *arena_redzone_corruption;
+typedef void (arena_dalloc_junk_small_t)(void *, arena_bin_info_t *);
+extern arena_dalloc_junk_small_t *arena_dalloc_junk_small;
+#else
 void arena_dalloc_junk_small(void *ptr, arena_bin_info_t *bin_info);
+#endif
 void arena_quarantine_junk_small(void *ptr, size_t usize);
 void *arena_malloc_small(arena_t *arena, size_t size, bool zero);
 void *arena_malloc_large(arena_t *arena, size_t size, bool zero);
@@ -422,9 +425,17 @@ void arena_dalloc_bin(arena_t *arena, arena_chunk_t *chunk, void *ptr,
     size_t pageind, arena_chunk_map_t *mapelm);
 void arena_dalloc_small(arena_t *arena, arena_chunk_t *chunk, void *ptr,
     size_t pageind);
+#ifdef JEMALLOC_JET
+typedef void (arena_dalloc_junk_large_t)(void *, size_t);
+extern arena_dalloc_junk_large_t *arena_dalloc_junk_large;
+#endif
 void arena_dalloc_large_locked(arena_t *arena, arena_chunk_t *chunk,
     void *ptr);
 void arena_dalloc_large(arena_t *arena, arena_chunk_t *chunk, void *ptr);
+#ifdef JEMALLOC_JET
+typedef void (arena_ralloc_junk_large_t)(void *, size_t, size_t);
+extern arena_ralloc_junk_large_t *arena_ralloc_junk_large;
+#endif
 void *arena_ralloc_no_move(void *ptr, size_t oldsize, size_t size,
     size_t extra, bool zero);
 void *arena_ralloc(arena_t *arena, void *ptr, size_t oldsize, size_t size,
include/jemalloc/internal/huge.h

@@ -23,6 +23,10 @@ void *huge_ralloc_no_move(void *ptr, size_t oldsize, size_t size,
     size_t extra);
 void *huge_ralloc(void *ptr, size_t oldsize, size_t size, size_t extra,
     size_t alignment, bool zero, bool try_tcache_dalloc);
+#ifdef JEMALLOC_JET
+typedef void (huge_dalloc_junk_t)(void *, size_t);
+extern huge_dalloc_junk_t *huge_dalloc_junk;
+#endif
 void huge_dalloc(void *ptr, bool unmap);
 size_t huge_salloc(const void *ptr);
 prof_ctx_t *huge_prof_ctx_get(const void *ptr);
include/jemalloc/internal/private_symbols.txt

@@ -8,6 +8,7 @@ arena_boot
 arena_dalloc
 arena_dalloc_bin
 arena_dalloc_bin_locked
+arena_dalloc_junk_large
 arena_dalloc_junk_small
 arena_dalloc_large
 arena_dalloc_large_locked
@@ -52,6 +53,7 @@ arena_ptr_small_binind_get
 arena_purge_all
 arena_quarantine_junk_small
 arena_ralloc
+arena_ralloc_junk_large
 arena_ralloc_no_move
 arena_redzone_corruption
 arena_run_regind
@@ -194,6 +196,7 @@ hash_x86_32
 huge_allocated
 huge_boot
 huge_dalloc
+huge_dalloc_junk
 huge_malloc
 huge_mtx
 huge_ndalloc
src/arena.c (84 changed lines)

@@ -1446,10 +1446,10 @@ arena_redzone_corruption(void *ptr, size_t usize, bool after,
     after ? "after" : "before", ptr, usize, byte);
 }
 #ifdef JEMALLOC_JET
-arena_redzone_corruption_t *arena_redzone_corruption_fptr =
-    arena_redzone_corruption;
 #undef arena_redzone_corruption
-#define arena_redzone_corruption arena_redzone_corruption_fptr
+#define arena_redzone_corruption JEMALLOC_N(arena_redzone_corruption)
+arena_redzone_corruption_t *arena_redzone_corruption =
+    JEMALLOC_N(arena_redzone_corruption_impl);
 #endif
 
 static void
@@ -1482,6 +1482,10 @@ arena_redzones_validate(void *ptr, arena_bin_info_t *bin_info, bool reset)
         abort();
 }
 
+#ifdef JEMALLOC_JET
+#undef arena_dalloc_junk_small
+#define arena_dalloc_junk_small JEMALLOC_N(arena_dalloc_junk_small_impl)
+#endif
 void
 arena_dalloc_junk_small(void *ptr, arena_bin_info_t *bin_info)
 {
@@ -1491,6 +1495,12 @@ arena_dalloc_junk_small(void *ptr, arena_bin_info_t *bin_info)
     memset((void *)((uintptr_t)ptr - redzone_size), 0x5a,
         bin_info->reg_interval);
 }
+#ifdef JEMALLOC_JET
+#undef arena_dalloc_junk_small
+#define arena_dalloc_junk_small JEMALLOC_N(arena_dalloc_junk_small)
+arena_dalloc_junk_small_t *arena_dalloc_junk_small =
+    JEMALLOC_N(arena_dalloc_junk_small_impl);
+#endif
 
 void
 arena_quarantine_junk_small(void *ptr, size_t usize)
@@ -1841,21 +1851,38 @@ arena_dalloc_small(arena_t *arena, arena_chunk_t *chunk, void *ptr,
     arena_dalloc_bin(arena, chunk, ptr, pageind, mapelm);
 }
 
+#ifdef JEMALLOC_JET
+#undef arena_dalloc_junk_large
+#define arena_dalloc_junk_large JEMALLOC_N(arena_dalloc_junk_large_impl)
+#endif
+static void
+arena_dalloc_junk_large(void *ptr, size_t usize)
+{
+
+    if (config_fill && opt_junk)
+        memset(ptr, 0x5a, usize);
+}
+#ifdef JEMALLOC_JET
+#undef arena_dalloc_junk_large
+#define arena_dalloc_junk_large JEMALLOC_N(arena_dalloc_junk_large)
+arena_dalloc_junk_large_t *arena_dalloc_junk_large =
+    JEMALLOC_N(arena_dalloc_junk_large_impl);
+#endif
+
 void
 arena_dalloc_large_locked(arena_t *arena, arena_chunk_t *chunk, void *ptr)
 {
 
     if (config_fill || config_stats) {
         size_t pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >> LG_PAGE;
-        size_t size = arena_mapbits_large_size_get(chunk, pageind);
+        size_t usize = arena_mapbits_large_size_get(chunk, pageind);
 
-        if (config_fill && config_stats && opt_junk)
-            memset(ptr, 0x5a, size);
+        arena_dalloc_junk_large(ptr, usize);
         if (config_stats) {
             arena->stats.ndalloc_large++;
-            arena->stats.allocated_large -= size;
-            arena->stats.lstats[(size >> LG_PAGE) - 1].ndalloc++;
-            arena->stats.lstats[(size >> LG_PAGE) - 1].curruns--;
+            arena->stats.allocated_large -= usize;
+            arena->stats.lstats[(usize >> LG_PAGE) - 1].ndalloc++;
+            arena->stats.lstats[(usize >> LG_PAGE) - 1].curruns--;
         }
     }
 
@@ -1967,6 +1994,26 @@ arena_ralloc_large_grow(arena_t *arena, arena_chunk_t *chunk, void *ptr,
     return (true);
 }
 
+#ifdef JEMALLOC_JET
+#undef arena_ralloc_junk_large
+#define arena_ralloc_junk_large JEMALLOC_N(arena_ralloc_junk_large_impl)
+#endif
+static void
+arena_ralloc_junk_large(void *ptr, size_t old_usize, size_t usize)
+{
+
+    if (config_fill && opt_junk) {
+        memset((void *)((uintptr_t)ptr + usize), 0x5a,
+            old_usize - usize);
+    }
+}
+#ifdef JEMALLOC_JET
+#undef arena_ralloc_junk_large
+#define arena_ralloc_junk_large JEMALLOC_N(arena_ralloc_junk_large)
+arena_ralloc_junk_large_t *arena_ralloc_junk_large =
+    JEMALLOC_N(arena_ralloc_junk_large_impl);
+#endif
+
 /*
  * Try to resize a large allocation, in order to avoid copying.  This will
  * always fail if growing an object, and the following run is already in use.
@@ -1990,10 +2037,7 @@ arena_ralloc_large(void *ptr, size_t oldsize, size_t size, size_t extra,
 
     if (psize < oldsize) {
         /* Fill before shrinking in order avoid a race. */
-        if (config_fill && opt_junk) {
-            memset((void *)((uintptr_t)ptr + psize), 0x5a,
-                oldsize - psize);
-        }
+        arena_ralloc_junk_large(ptr, oldsize, psize);
         arena_ralloc_large_shrink(arena, chunk, ptr, oldsize,
             psize);
         return (false);
@@ -2001,10 +2045,16 @@ arena_ralloc_large(void *ptr, size_t oldsize, size_t size, size_t extra,
         bool ret = arena_ralloc_large_grow(arena, chunk, ptr,
             oldsize, PAGE_CEILING(size),
             psize - PAGE_CEILING(size), zero);
-        if (config_fill && ret == false && zero == false &&
-            opt_zero) {
-            memset((void *)((uintptr_t)ptr + oldsize), 0,
-                size - oldsize);
+        if (config_fill && ret == false && zero == false) {
+            if (opt_junk) {
+                memset((void *)((uintptr_t)ptr +
+                    oldsize), 0xa5, isalloc(ptr,
+                    config_prof) - oldsize);
+            } else if (opt_zero) {
+                memset((void *)((uintptr_t)ptr +
+                    oldsize), 0, isalloc(ptr,
+                    config_prof) - oldsize);
+            }
         }
         return (ret);
     }
src/huge.c (27 changed lines)

@@ -182,6 +182,29 @@ huge_ralloc(void *ptr, size_t oldsize, size_t size, size_t extra,
     return (ret);
 }
 
+#ifdef JEMALLOC_JET
+#undef huge_dalloc_junk
+#define huge_dalloc_junk JEMALLOC_N(huge_dalloc_junk_impl)
+#endif
+static void
+huge_dalloc_junk(void *ptr, size_t usize)
+{
+
+    if (config_fill && config_dss && opt_junk) {
+        /*
+         * Only bother junk filling if the chunk isn't about to be
+         * unmapped.
+         */
+        if (config_munmap == false || (config_dss && chunk_in_dss(ptr)))
+            memset(ptr, 0x5a, usize);
+    }
+}
+#ifdef JEMALLOC_JET
+#undef huge_dalloc_junk
+#define huge_dalloc_junk JEMALLOC_N(huge_dalloc_junk)
+huge_dalloc_junk_t *huge_dalloc_junk = JEMALLOC_N(huge_dalloc_junk_impl);
+#endif
+
 void
 huge_dalloc(void *ptr, bool unmap)
 {
@@ -204,8 +227,8 @@ huge_dalloc(void *ptr, bool unmap)
 
     malloc_mutex_unlock(&huge_mtx);
 
-    if (unmap && config_fill && config_dss && opt_junk)
-        memset(node->addr, 0x5a, node->size);
+    if (unmap)
+        huge_dalloc_junk(node->addr, node->size);
 
     chunk_dealloc(node->addr, node->size, unmap);
 
test/unit/junk.c (new file, 219 lines)

#include "test/jemalloc_test.h"

#ifdef JEMALLOC_FILL
const char *malloc_conf =
    "abort:false,junk:true,zero:false,redzone:true,quarantine:0";
#endif

static arena_dalloc_junk_small_t *arena_dalloc_junk_small_orig;
static arena_dalloc_junk_large_t *arena_dalloc_junk_large_orig;
static huge_dalloc_junk_t *huge_dalloc_junk_orig;
static void *most_recently_junked;

static void
arena_dalloc_junk_small_intercept(void *ptr, arena_bin_info_t *bin_info)
{
    size_t i;

    arena_dalloc_junk_small_orig(ptr, bin_info);
    for (i = 0; i < bin_info->reg_size; i++) {
        assert_c_eq(((char *)ptr)[i], 0x5a,
            "Missing junk fill for byte %zu/%zu of deallocated region",
            i, bin_info->reg_size);
    }
    most_recently_junked = ptr;
}

static void
arena_dalloc_junk_large_intercept(void *ptr, size_t usize)
{
    size_t i;

    arena_dalloc_junk_large_orig(ptr, usize);
    for (i = 0; i < usize; i++) {
        assert_c_eq(((char *)ptr)[i], 0x5a,
            "Missing junk fill for byte %zu/%zu of deallocated region",
            i, usize);
    }
    most_recently_junked = ptr;
}

static void
huge_dalloc_junk_intercept(void *ptr, size_t usize)
{

    huge_dalloc_junk_orig(ptr, usize);
    /*
     * The conditions under which junk filling actually occurs are nuanced
     * enough that it doesn't make sense to duplicate the decision logic in
     * test code, so don't actually check that the region is junk-filled.
     */
    most_recently_junked = ptr;
}

static void
test_junk(size_t sz_min, size_t sz_max)
{
    char *s;
    size_t sz_prev, sz, i;

    arena_dalloc_junk_small_orig = arena_dalloc_junk_small;
    arena_dalloc_junk_small = arena_dalloc_junk_small_intercept;
    arena_dalloc_junk_large_orig = arena_dalloc_junk_large;
    arena_dalloc_junk_large = arena_dalloc_junk_large_intercept;
    huge_dalloc_junk_orig = huge_dalloc_junk;
    huge_dalloc_junk = huge_dalloc_junk_intercept;

    sz_prev = 0;
    s = (char *)mallocx(sz_min, 0);
    assert_ptr_not_null((void *)s, "Unexpected mallocx() failure");

    for (sz = sallocx(s, 0); sz <= sz_max;
        sz_prev = sz, sz = sallocx(s, 0)) {
        if (sz_prev > 0) {
            assert_c_eq(s[0], 'a',
                "Previously allocated byte %zu/%zu is corrupted",
                0, sz_prev);
            assert_c_eq(s[sz_prev-1], 'a',
                "Previously allocated byte %zu/%zu is corrupted",
                sz_prev-1, sz_prev);
        }

        for (i = sz_prev; i < sz; i++) {
            assert_c_eq(s[i], 0xa5,
                "Newly allocated byte %zu/%zu isn't junk-filled",
                i, sz);
            s[i] = 'a';
        }

        if (xallocx(s, sz+1, 0, 0) == sz) {
            void *junked = (void *)s;

            s = (char *)rallocx(s, sz+1, 0);
            assert_ptr_not_null((void *)s,
                "Unexpected rallocx() failure");
            assert_ptr_eq(most_recently_junked, junked,
                "Expected region of size %zu to be junk-filled",
                sz);
        }
    }

    dallocx(s, 0);
    assert_ptr_eq(most_recently_junked, (void *)s,
        "Expected region of size %zu to be junk-filled", sz);

    arena_dalloc_junk_small = arena_dalloc_junk_small_orig;
    arena_dalloc_junk_large = arena_dalloc_junk_large_orig;
    huge_dalloc_junk = huge_dalloc_junk_orig;
}

TEST_BEGIN(test_junk_small)
{

    test_skip_if(!config_fill);
    test_junk(1, SMALL_MAXCLASS-1);
}
TEST_END

TEST_BEGIN(test_junk_large)
{

    test_skip_if(!config_fill);
    test_junk(SMALL_MAXCLASS+1, arena_maxclass);
}
TEST_END

TEST_BEGIN(test_junk_huge)
{

    test_skip_if(!config_fill);
    test_junk(arena_maxclass+1, chunksize*2);
}
TEST_END

arena_ralloc_junk_large_t *arena_ralloc_junk_large_orig;
static void *most_recently_trimmed;

static void
arena_ralloc_junk_large_intercept(void *ptr, size_t old_usize, size_t usize)
{

    arena_ralloc_junk_large_orig(ptr, old_usize, usize);
    assert_zu_eq(old_usize, arena_maxclass, "Unexpected old_usize");
    assert_zu_eq(usize, arena_maxclass-PAGE, "Unexpected usize");
    most_recently_trimmed = ptr;
}

TEST_BEGIN(test_junk_large_ralloc_shrink)
{
    void *p1, *p2;

    p1 = mallocx(arena_maxclass, 0);
    assert_ptr_not_null(p1, "Unexpected mallocx() failure");

    arena_ralloc_junk_large_orig = arena_ralloc_junk_large;
    arena_ralloc_junk_large = arena_ralloc_junk_large_intercept;

    p2 = rallocx(p1, arena_maxclass-PAGE, 0);
    assert_ptr_eq(p1, p2, "Unexpected move during shrink");

    arena_ralloc_junk_large = arena_ralloc_junk_large_orig;

    assert_ptr_eq(most_recently_trimmed, p1,
        "Expected trimmed portion of region to be junk-filled");
}
TEST_END

static bool detected_redzone_corruption;

static void
arena_redzone_corruption_replacement(void *ptr, size_t usize, bool after,
    size_t offset, uint8_t byte)
{

    detected_redzone_corruption = true;
}

TEST_BEGIN(test_junk_redzone)
{
    char *s;
    arena_redzone_corruption_t *arena_redzone_corruption_orig;

    test_skip_if(!config_fill);

    arena_redzone_corruption_orig = arena_redzone_corruption;
    arena_redzone_corruption = arena_redzone_corruption_replacement;

    /* Test underflow. */
    detected_redzone_corruption = false;
    s = (char *)mallocx(1, 0);
    assert_ptr_not_null((void *)s, "Unexpected mallocx() failure");
    s[-1] = 0xbb;
    dallocx(s, 0);
    assert_true(detected_redzone_corruption,
        "Did not detect redzone corruption");

    /* Test overflow. */
    detected_redzone_corruption = false;
    s = (char *)mallocx(1, 0);
    assert_ptr_not_null((void *)s, "Unexpected mallocx() failure");
    s[sallocx(s, 0)] = 0xbb;
    dallocx(s, 0);
    assert_true(detected_redzone_corruption,
        "Did not detect redzone corruption");

    arena_redzone_corruption = arena_redzone_corruption_orig;
}
TEST_END

int
main(void)
{

    return (test(
        test_junk_small,
        test_junk_large,
        test_junk_huge,
        test_junk_large_ralloc_shrink,
        test_junk_redzone));
}
test/unit/quarantine.c

@@ -73,8 +73,8 @@ TEST_BEGIN(test_quarantine_redzone)
 
     test_skip_if(!config_fill);
 
-    arena_redzone_corruption_orig = arena_redzone_corruption_fptr;
-    arena_redzone_corruption_fptr = arena_redzone_corruption_replacement;
+    arena_redzone_corruption_orig = arena_redzone_corruption;
+    arena_redzone_corruption = arena_redzone_corruption_replacement;
 
     /* Test underflow. */
     detected_redzone_corruption = false;
@@ -94,7 +94,7 @@ TEST_BEGIN(test_quarantine_redzone)
     assert_true(detected_redzone_corruption,
         "Did not detect redzone corruption");
 
-    arena_redzone_corruption_fptr = arena_redzone_corruption_orig;
+    arena_redzone_corruption = arena_redzone_corruption_orig;
 }
 TEST_END
 
test/unit/zero.c (new file, 78 lines)

#include "test/jemalloc_test.h"

#ifdef JEMALLOC_FILL
const char *malloc_conf =
    "abort:false,junk:false,zero:true,redzone:false,quarantine:0";
#endif

static void
test_zero(size_t sz_min, size_t sz_max)
{
    char *s;
    size_t sz_prev, sz, i;

    sz_prev = 0;
    s = (char *)mallocx(sz_min, 0);
    assert_ptr_not_null((void *)s, "Unexpected mallocx() failure");

    for (sz = sallocx(s, 0); sz <= sz_max;
        sz_prev = sz, sz = sallocx(s, 0)) {
        if (sz_prev > 0) {
            assert_c_eq(s[0], 'a',
                "Previously allocated byte %zu/%zu is corrupted",
                0, sz_prev);
            assert_c_eq(s[sz_prev-1], 'a',
                "Previously allocated byte %zu/%zu is corrupted",
                sz_prev-1, sz_prev);
        }

        for (i = sz_prev; i < sz; i++) {
            assert_c_eq(s[i], 0x0,
                "Newly allocated byte %zu/%zu isn't zero-filled",
                i, sz);
            s[i] = 'a';
        }

        if (xallocx(s, sz+1, 0, 0) == sz) {
            s = (char *)rallocx(s, sz+1, 0);
            assert_ptr_not_null((void *)s,
                "Unexpected rallocx() failure");
        }
    }

    dallocx(s, 0);
}

TEST_BEGIN(test_zero_small)
{

    test_skip_if(!config_fill);
    test_zero(1, SMALL_MAXCLASS-1);
}
TEST_END

TEST_BEGIN(test_zero_large)
{

    test_skip_if(!config_fill);
    test_zero(SMALL_MAXCLASS+1, arena_maxclass);
}
TEST_END

TEST_BEGIN(test_zero_huge)
{

    test_skip_if(!config_fill);
    test_zero(arena_maxclass+1, chunksize*2);
}
TEST_END

int
main(void)
{

    return (test(
        test_zero_small,
        test_zero_large,
        test_zero_huge));
}