Tighten valgrind integration.
Tighten valgrind integration such that immediately after memory is validated or zeroed, valgrind is told to forget the memory's 'defined' state. The only place newly allocated memory should be left marked as 'defined' is in the public functions (e.g. calloc() and realloc()).
parent 14a2c6a698
commit 38067483c5
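Every hunk below follows the same pattern, sketched here in standalone form (a minimal illustration, not jemalloc code: the helper name zero_region() and the bare memset() are assumptions made for the example). Under memcheck, a write marks the written bytes 'defined', so an internal helper that zeroes or validates memory immediately re-marks the region 'undefined' afterward; otherwise the application could later read memory it never initialized without valgrind noticing.

/* Illustrative sketch only; not part of the commit. */
#include <string.h>
#include <valgrind/memcheck.h>

static void
zero_region(void *addr, size_t len)
{

	/*
	 * Make the region addressable before writing; it may have been
	 * marked noaccess when it was recycled.
	 */
	VALGRIND_MAKE_MEM_UNDEFINED(addr, len);
	memset(addr, 0, len);
	/*
	 * memset() left the bytes marked defined.  Forget that state so
	 * that valgrind still reports application reads of memory the
	 * application itself never initialized.
	 */
	VALGRIND_MAKE_MEM_UNDEFINED(addr, len);
}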
@@ -320,6 +320,7 @@ tcache_alloc_small(tcache_t *tcache, size_t size, bool zero)
 		}
 		VALGRIND_MAKE_MEM_UNDEFINED(ret, size);
 		memset(ret, 0, size);
+		VALGRIND_MAKE_MEM_UNDEFINED(ret, size);
 	}
 
 	if (config_stats)
@@ -370,6 +371,7 @@ tcache_alloc_large(tcache_t *tcache, size_t size, bool zero)
 		} else {
 			VALGRIND_MAKE_MEM_UNDEFINED(ret, size);
 			memset(ret, 0, size);
+			VALGRIND_MAKE_MEM_UNDEFINED(ret, size);
 		}
 
 		if (config_stats)
src/arena.c (50 changed lines)
@@ -359,13 +359,29 @@ arena_run_reg_dalloc(arena_run_t *run, void *ptr)
 }
 
 static inline void
-arena_chunk_validate_zeroed(arena_chunk_t *chunk, size_t run_ind)
+arena_run_zero(arena_chunk_t *chunk, size_t run_ind, size_t npages)
+{
+
+	VALGRIND_MAKE_MEM_UNDEFINED((void *)((uintptr_t)chunk + (run_ind <<
+	    LG_PAGE)), (npages << LG_PAGE));
+	memset((void *)((uintptr_t)chunk + (run_ind << LG_PAGE)), 0,
+	    (npages << LG_PAGE));
+	VALGRIND_MAKE_MEM_UNDEFINED((void *)((uintptr_t)chunk + (run_ind <<
+	    LG_PAGE)), (npages << LG_PAGE));
+}
+
+static inline void
+arena_run_page_validate_zeroed(arena_chunk_t *chunk, size_t run_ind)
 {
 	size_t i;
 	UNUSED size_t *p = (size_t *)((uintptr_t)chunk + (run_ind << LG_PAGE));
 
+	VALGRIND_MAKE_MEM_DEFINED((void *)((uintptr_t)chunk + (run_ind <<
+	    LG_PAGE)), PAGE);
 	for (i = 0; i < PAGE / sizeof(size_t); i++)
 		assert(p[i] == 0);
+	VALGRIND_MAKE_MEM_UNDEFINED((void *)((uintptr_t)chunk + (run_ind <<
+	    LG_PAGE)), PAGE);
 }
 
 static void
@@ -441,19 +457,10 @@ arena_run_split(arena_t *arena, arena_run_t *run, size_t size, bool large,
 			for (i = 0; i < need_pages; i++) {
 				if (arena_mapbits_unzeroed_get(chunk,
 				    run_ind+i) != 0) {
-					VALGRIND_MAKE_MEM_UNDEFINED(
-					    (void *)((uintptr_t)
-					    chunk + ((run_ind+i) <<
-					    LG_PAGE)), PAGE);
-					memset((void *)((uintptr_t)
-					    chunk + ((run_ind+i) <<
-					    LG_PAGE)), 0, PAGE);
+					arena_run_zero(chunk, run_ind+i,
+					    1);
 				} else if (config_debug) {
-					VALGRIND_MAKE_MEM_DEFINED(
-					    (void *)((uintptr_t)
-					    chunk + ((run_ind+i) <<
-					    LG_PAGE)), PAGE);
-					arena_chunk_validate_zeroed(
+					arena_run_page_validate_zeroed(
 					    chunk, run_ind+i);
 				}
 			}
@@ -462,11 +469,7 @@ arena_run_split(arena_t *arena, arena_run_t *run, size_t size, bool large,
 			 * The run is dirty, so all pages must be
 			 * zeroed.
 			 */
-			VALGRIND_MAKE_MEM_UNDEFINED((void
-			    *)((uintptr_t)chunk + (run_ind <<
-			    LG_PAGE)), (need_pages << LG_PAGE));
-			memset((void *)((uintptr_t)chunk + (run_ind <<
-			    LG_PAGE)), 0, (need_pages << LG_PAGE));
+			arena_run_zero(chunk, run_ind, need_pages);
 		}
 	}
 
@@ -492,19 +495,21 @@ arena_run_split(arena_t *arena, arena_run_t *run, size_t size, bool large,
 		 */
 		if (config_debug && flag_dirty == 0 &&
 		    arena_mapbits_unzeroed_get(chunk, run_ind) == 0)
-			arena_chunk_validate_zeroed(chunk, run_ind);
+			arena_run_page_validate_zeroed(chunk, run_ind);
 		for (i = 1; i < need_pages - 1; i++) {
 			arena_mapbits_small_set(chunk, run_ind+i, i, binind, 0);
 			if (config_debug && flag_dirty == 0 &&
-			    arena_mapbits_unzeroed_get(chunk, run_ind+i) == 0)
-				arena_chunk_validate_zeroed(chunk, run_ind+i);
+			    arena_mapbits_unzeroed_get(chunk, run_ind+i) == 0) {
+				arena_run_page_validate_zeroed(chunk,
+				    run_ind+i);
+			}
 		}
 		arena_mapbits_small_set(chunk, run_ind+need_pages-1,
 		    need_pages-1, binind, flag_dirty);
 		if (config_debug && flag_dirty == 0 &&
 		    arena_mapbits_unzeroed_get(chunk, run_ind+need_pages-1) ==
 		    0) {
-			arena_chunk_validate_zeroed(chunk,
+			arena_run_page_validate_zeroed(chunk,
 			    run_ind+need_pages-1);
 		}
 	}
@@ -1459,6 +1464,7 @@ arena_malloc_small(arena_t *arena, size_t size, bool zero)
 		}
 		VALGRIND_MAKE_MEM_UNDEFINED(ret, size);
 		memset(ret, 0, size);
+		VALGRIND_MAKE_MEM_UNDEFINED(ret, size);
 	}
 
 	return (ret);
src/chunk_dss.c

@@ -127,6 +127,7 @@ chunk_alloc_dss(size_t size, size_t alignment, bool *zero)
 			if (*zero) {
 				VALGRIND_MAKE_MEM_UNDEFINED(ret, size);
 				memset(ret, 0, size);
+				VALGRIND_MAKE_MEM_UNDEFINED(ret, size);
 			}
 			return (ret);
 		}