Fix Valgrind integration.

Fix Valgrind integration to annotate all internally allocated memory in
a way that keeps Valgrind happy about internal data structure access.

commit 06912756cc (parent a7a28c334e)
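The whole commit is built on memcheck's client-request macros from
<valgrind/memcheck.h>: memory the allocator parks internally is marked
NOACCESS, memory handed out is marked UNDEFINED, and known-initialized
memory is marked DEFINED. A minimal, hedged sketch of that idiom (the
pool allocator below is invented for illustration; it is not jemalloc
code, only the VALGRIND_MAKE_MEM_* macros are the real API):

/* Sketch: an allocator keeping memcheck's view of a private pool
 * accurate. Build on a system with the Valgrind headers installed. */
#include <stddef.h>
#include <string.h>
#include <valgrind/memcheck.h>

static char pool[4096];		/* backing store carved up internally */
static size_t pool_used;

static void
pool_init(void)
{
	/* Parked memory: any touch is an error until carved out. */
	VALGRIND_MAKE_MEM_NOACCESS(pool, sizeof(pool));
}

static void *
pool_alloc(size_t size)
{
	void *ret;

	if (pool_used + size > sizeof(pool))
		return (NULL);
	ret = pool + pool_used;
	pool_used += size;
	/* Handed out: writable but uninitialized. */
	VALGRIND_MAKE_MEM_UNDEFINED(ret, size);
	return (ret);
}

int
main(void)
{
	char *p;

	pool_init();
	p = pool_alloc(64);
	memset(p, 0, 64);	/* now defined, as far as memcheck cares */
	return (p[0]);		/* no "uninitialised value" complaint */
}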
@@ -25,6 +25,8 @@ found in the git revision history:
       array.
     + Reallocation failure for internal reallocation of the quarantined object
       array (very unlikely) resulted in memory corruption.
+  - Fix Valgrind integration to annotate all internally allocated memory in a
+    way that keeps Valgrind happy about internal data structure access.
 
 * 3.3.0 (January 23, 2013)
 
@@ -443,6 +443,7 @@ static const bool config_ivsalloc =
 #define	VALGRIND_MALLOCLIKE_BLOCK(addr, sizeB, rzB, is_zeroed)
 #define	VALGRIND_RESIZEINPLACE_BLOCK(addr, oldSizeB, newSizeB, rzB)
 #define	VALGRIND_FREELIKE_BLOCK(addr, rzB)
+#define	VALGRIND_MAKE_MEM_NOACCESS(_qzz_addr, _qzz_len)
 #define	VALGRIND_MAKE_MEM_UNDEFINED(_qzz_addr, _qzz_len)
 #define	VALGRIND_MAKE_MEM_DEFINED(_qzz_addr, _qzz_len)
 #define	JEMALLOC_VALGRIND_MALLOC(cond, ptr, usize, zero)
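This hunk extends the fallback block compiled when jemalloc is built
without Valgrind support: each client-request macro is defined to expand
to nothing, so every annotation call site stays unconditional instead of
needing its own #ifdef. A hedged sketch of the pattern (demo() is an
invented stand-in for any annotation call site):

#include <stddef.h>

#ifdef JEMALLOC_VALGRIND
#include <valgrind/memcheck.h>
#else
/* Expand to nothing: call sites compile to empty statements. */
#define	VALGRIND_MAKE_MEM_NOACCESS(_qzz_addr, _qzz_len)
#define	VALGRIND_MAKE_MEM_UNDEFINED(_qzz_addr, _qzz_len)
#define	VALGRIND_MAKE_MEM_DEFINED(_qzz_addr, _qzz_len)
#endif

static void
demo(void *p, size_t len)
{
	/* A no-op without Valgrind; a client request with it. */
	VALGRIND_MAKE_MEM_UNDEFINED(p, len);
}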
@@ -320,8 +320,8 @@ tcache_alloc_small(tcache_t *tcache, size_t size, bool zero)
 		}
 		VALGRIND_MAKE_MEM_UNDEFINED(ret, size);
 		memset(ret, 0, size);
-		VALGRIND_MAKE_MEM_UNDEFINED(ret, size);
 	}
+	VALGRIND_MAKE_MEM_UNDEFINED(ret, size);
 
 	if (config_stats)
 		tbin->tstats.nrequests++;
@@ -371,8 +371,8 @@ tcache_alloc_large(tcache_t *tcache, size_t size, bool zero)
 		} else {
 			VALGRIND_MAKE_MEM_UNDEFINED(ret, size);
 			memset(ret, 0, size);
-			VALGRIND_MAKE_MEM_UNDEFINED(ret, size);
 		}
+		VALGRIND_MAKE_MEM_UNDEFINED(ret, size);
 
 		if (config_stats)
 			tbin->tstats.nrequests++;
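Both tcache hunks make the same change: the redundant annotation that
re-marked just-zeroed memory as undefined is dropped, and a single
annotation after the if/else now covers the non-zeroing path as well. A
compilable sketch of the resulting shape (the run argument and the
config_fill/opt_junk flags below are simplified stand-ins for the
tcache internals, not the real jemalloc declarations):

#include <stdbool.h>
#include <string.h>
#include <valgrind/memcheck.h>

static const bool config_fill = true;
static bool opt_junk = true;

static void *
alloc_annotated(void *run, size_t size, bool zero)
{
	void *ret = run;	/* stand-in for the bin/run lookup */

	if (zero == false) {
		if (config_fill && opt_junk)
			memset(ret, 0xa5, size);	/* junk fill */
	} else {
		/* The region must be addressable before the internal
		 * memset, or memcheck flags the write. */
		VALGRIND_MAKE_MEM_UNDEFINED(ret, size);
		memset(ret, 0, size);
	}
	/* One annotation on the way out covers both paths; the
	 * caller-visible malloc/calloc state is applied later by the
	 * MALLOCLIKE machinery and its is_zeroed flag. */
	VALGRIND_MAKE_MEM_UNDEFINED(ret, size);
	return (ret);
}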
src/arena.c (15 changed lines)
@@ -366,8 +366,6 @@ arena_run_zero(arena_chunk_t *chunk, size_t run_ind, size_t npages)
 	    LG_PAGE)), (npages << LG_PAGE));
 	memset((void *)((uintptr_t)chunk + (run_ind << LG_PAGE)), 0,
 	    (npages << LG_PAGE));
-	VALGRIND_MAKE_MEM_UNDEFINED((void *)((uintptr_t)chunk + (run_ind <<
-	    LG_PAGE)), (npages << LG_PAGE));
 }
 
 static inline void
@@ -380,8 +378,6 @@ arena_run_page_validate_zeroed(arena_chunk_t *chunk, size_t run_ind)
 	    LG_PAGE)), PAGE);
 	for (i = 0; i < PAGE / sizeof(size_t); i++)
 		assert(p[i] == 0);
-	VALGRIND_MAKE_MEM_UNDEFINED((void *)((uintptr_t)chunk + (run_ind <<
-	    LG_PAGE)), PAGE);
 }
 
 static void
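Note what this validation helper keeps: the first context line is the
tail of a VALGRIND_MAKE_MEM_DEFINED call, needed because debug
assertions read pages memcheck may still track as undefined; only the
pointless re-poisoning afterwards is removed. A small hedged sketch of
that read-side pattern (validate_zeroed() is invented):

#include <assert.h>
#include <stddef.h>
#include <valgrind/memcheck.h>

/* Invented helper: a debug-only check that reads memory memcheck may
 * consider undefined, so the range is blessed as defined first. */
static void
validate_zeroed(void *page, size_t len)
{
	size_t *p = page;
	size_t i;

	VALGRIND_MAKE_MEM_DEFINED(page, len);
	for (i = 0; i < len / sizeof(size_t); i++)
		assert(p[i] == 0);
}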
@@ -513,6 +509,8 @@ arena_run_split(arena_t *arena, arena_run_t *run, size_t size, bool large,
 			    run_ind+need_pages-1);
 		}
 	}
+	VALGRIND_MAKE_MEM_UNDEFINED((void *)((uintptr_t)chunk + (run_ind <<
+	    LG_PAGE)), (need_pages << LG_PAGE));
 }
 
 static arena_chunk_t *
@@ -574,6 +572,11 @@ arena_chunk_alloc(arena_t *arena)
 			for (i = map_bias+1; i < chunk_npages-1; i++)
 				arena_mapbits_unzeroed_set(chunk, i, unzeroed);
 		} else if (config_debug) {
+			VALGRIND_MAKE_MEM_DEFINED(
+			    (void *)arena_mapp_get(chunk, map_bias+1),
+			    (void *)((uintptr_t)
+			    arena_mapp_get(chunk, chunk_npages-1)
+			    - (uintptr_t)arena_mapp_get(chunk, map_bias+1)));
 			for (i = map_bias+1; i < chunk_npages-1; i++) {
 				assert(arena_mapbits_unzeroed_get(chunk, i) ==
 				    unzeroed);
@@ -1246,8 +1249,6 @@ arena_bin_nonfull_run_get(arena_t *arena, arena_bin_t *bin)
 		    (uintptr_t)bin_info->bitmap_offset);
 
 		/* Initialize run internals. */
-		VALGRIND_MAKE_MEM_UNDEFINED(run, bin_info->reg0_offset -
-		    bin_info->redzone_size);
 		run->bin = bin;
 		run->nextind = 0;
 		run->nfree = bin_info->nregs;
@@ -1464,8 +1465,8 @@ arena_malloc_small(arena_t *arena, size_t size, bool zero)
 		}
 		VALGRIND_MAKE_MEM_UNDEFINED(ret, size);
 		memset(ret, 0, size);
-		VALGRIND_MAKE_MEM_UNDEFINED(ret, size);
 	}
+	VALGRIND_MAKE_MEM_UNDEFINED(ret, size);
 
 	return (ret);
 }
@@ -63,6 +63,7 @@ base_alloc(size_t size)
 	ret = base_next_addr;
 	base_next_addr = (void *)((uintptr_t)base_next_addr + csize);
 	malloc_mutex_unlock(&base_mtx);
+	VALGRIND_MAKE_MEM_UNDEFINED(ret, csize);
 
 	return (ret);
 }
@@ -88,6 +89,7 @@ base_node_alloc(void)
 		ret = base_nodes;
 		base_nodes = *(extent_node_t **)ret;
 		malloc_mutex_unlock(&base_mtx);
+		VALGRIND_MAKE_MEM_UNDEFINED(ret, sizeof(extent_node_t));
 	} else {
 		malloc_mutex_unlock(&base_mtx);
 		ret = (extent_node_t *)base_alloc(sizeof(extent_node_t));
@@ -100,6 +102,7 @@ void
 base_node_dealloc(extent_node_t *node)
 {
 
+	VALGRIND_MAKE_MEM_UNDEFINED(node, sizeof(extent_node_t));
 	malloc_mutex_lock(&base_mtx);
 	*(extent_node_t **)node = base_nodes;
 	base_nodes = node;
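base_node_dealloc() recycles extent_node_t structures by storing the
freelist link inside the dead node itself (*(extent_node_t **)node =
base_nodes), so the node must be re-marked undefined before that
internal write, and base_node_alloc() re-marks a popped node before
handing it out. A standalone sketch of the trick (simplified: no
locking, and the node type below is invented):

#include <stddef.h>
#include <valgrind/memcheck.h>

typedef struct node_s {
	struct node_s	*link;	/* overlays the node's first bytes */
	char		payload[56];
} node_t;

static node_t *free_nodes;

static void
node_dealloc(node_t *node)
{
	/* Dead contents: make writable again so threading the freelist
	 * link is not flagged as a write to freed memory. */
	VALGRIND_MAKE_MEM_UNDEFINED(node, sizeof(node_t));
	*(node_t **)node = free_nodes;
	free_nodes = node;
}

static node_t *
node_alloc(void)
{
	node_t *ret = free_nodes;

	if (ret == NULL)
		return (NULL);
	free_nodes = *(node_t **)ret;
	/* Uninitialized from the caller's viewpoint. */
	VALGRIND_MAKE_MEM_UNDEFINED(ret, sizeof(node_t));
	return (ret);
}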
src/chunk.c (48 changed lines)
@@ -120,7 +120,6 @@ chunk_recycle(extent_tree_t *chunks_szad, extent_tree_t *chunks_ad, size_t size,
 
 	if (node != NULL)
 		base_node_dealloc(node);
-	VALGRIND_MAKE_MEM_UNDEFINED(ret, size);
 	if (*zero) {
 		if (zeroed == false)
 			memset(ret, 0, size);
@@ -131,7 +130,6 @@ chunk_recycle(extent_tree_t *chunks_szad, extent_tree_t *chunks_ad, size_t size,
 			VALGRIND_MAKE_MEM_DEFINED(ret, size);
 			for (i = 0; i < size / sizeof(size_t); i++)
 				assert(p[i] == 0);
-			VALGRIND_MAKE_MEM_UNDEFINED(ret, size);
 		}
 	}
 	return (ret);
@@ -180,27 +178,32 @@ chunk_alloc(size_t size, size_t alignment, bool base, bool *zero,
 	/* All strategies for allocation failed. */
 	ret = NULL;
 label_return:
-	if (config_ivsalloc && base == false && ret != NULL) {
-		if (rtree_set(chunks_rtree, (uintptr_t)ret, ret)) {
-			chunk_dealloc(ret, size, true);
-			return (NULL);
+	if (ret != NULL) {
+		if (config_ivsalloc && base == false) {
+			if (rtree_set(chunks_rtree, (uintptr_t)ret, ret)) {
+				chunk_dealloc(ret, size, true);
+				return (NULL);
+			}
 		}
-	}
-	if ((config_stats || config_prof) && ret != NULL) {
-		bool gdump;
-		malloc_mutex_lock(&chunks_mtx);
-		if (config_stats)
-			stats_chunks.nchunks += (size / chunksize);
-		stats_chunks.curchunks += (size / chunksize);
-		if (stats_chunks.curchunks > stats_chunks.highchunks) {
-			stats_chunks.highchunks = stats_chunks.curchunks;
-			if (config_prof)
-				gdump = true;
-		} else if (config_prof)
-			gdump = false;
-		malloc_mutex_unlock(&chunks_mtx);
-		if (config_prof && opt_prof && opt_prof_gdump && gdump)
-			prof_gdump();
+		if (config_stats || config_prof) {
+			bool gdump;
+			malloc_mutex_lock(&chunks_mtx);
+			if (config_stats)
+				stats_chunks.nchunks += (size / chunksize);
+			stats_chunks.curchunks += (size / chunksize);
+			if (stats_chunks.curchunks > stats_chunks.highchunks) {
+				stats_chunks.highchunks =
+				    stats_chunks.curchunks;
+				if (config_prof)
+					gdump = true;
+			} else if (config_prof)
+				gdump = false;
+			malloc_mutex_unlock(&chunks_mtx);
+			if (config_prof && opt_prof && opt_prof_gdump && gdump)
+				prof_gdump();
+		}
+		if (config_valgrind)
+			VALGRIND_MAKE_MEM_UNDEFINED(ret, size);
 	}
 	assert(CHUNK_ADDR2BASE(ret) == ret);
 	return (ret);
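The rewritten epilogue hangs everything off a single if (ret != NULL)
and gates the new annotation behind if (config_valgrind). As with the
config_ivsalloc constant visible in an earlier hunk header, jemalloc's
config_* flags are compile-time constants, so the branch and the client
request fold away entirely in non-Valgrind builds. A sketch of the
pattern (JEMALLOC_VALGRIND mirrors the real build flag; the
annotate_chunk() helper is invented):

#include <stdbool.h>
#include <stddef.h>

#ifdef JEMALLOC_VALGRIND
#include <valgrind/memcheck.h>
static const bool config_valgrind = true;
#else
#define	VALGRIND_MAKE_MEM_UNDEFINED(_qzz_addr, _qzz_len)
static const bool config_valgrind = false;
#endif

static void
annotate_chunk(void *ret, size_t size)
{
	/* An ordinary branch, but on a const: the compiler eliminates
	 * it (and the client request) when config_valgrind is false. */
	if (config_valgrind)
		VALGRIND_MAKE_MEM_UNDEFINED(ret, size);
}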
@@ -214,6 +217,7 @@ chunk_record(extent_tree_t *chunks_szad, extent_tree_t *chunks_ad, void *chunk,
 	extent_node_t *xnode, *node, *prev, key;
 
 	unzeroed = pages_purge(chunk, size);
+	VALGRIND_MAKE_MEM_NOACCESS(chunk, size);
 
 	/*
 	 * Allocate a node before acquiring chunks_mtx even though it might not
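chunk_record() is the retirement side of chunk_alloc()'s new
annotation: a chunk is poisoned NOACCESS the moment it is parked for
recycling, and un-poisoned as UNDEFINED when it is handed back out, so
any stray touch of recycled address space is reported. A minimal sketch
of the pairing (invented helper names; the real code also purges pages
and tracks chunks in extent trees):

#include <stddef.h>
#include <valgrind/memcheck.h>

static void
chunk_retire(void *chunk, size_t size)
{
	/* After pages_purge() in the real code: */
	VALGRIND_MAKE_MEM_NOACCESS(chunk, size);
	/* ...then insert into the recycle trees. */
}

static void *
chunk_reuse(void *chunk, size_t size)
{
	/* ...after removal from the recycle trees: */
	VALGRIND_MAKE_MEM_UNDEFINED(chunk, size);
	return (chunk);
}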
@@ -127,7 +127,6 @@ chunk_alloc_dss(size_t size, size_t alignment, bool *zero)
 		if (*zero) {
 			VALGRIND_MAKE_MEM_UNDEFINED(ret, size);
 			memset(ret, 0, size);
-			VALGRIND_MAKE_MEM_UNDEFINED(ret, size);
 		}
 		return (ret);
 	}
@@ -41,6 +41,7 @@ je_thread_start(void *arg)
 		malloc_printf("Unexpected allocm() error\n");
 		abort();
 	}
+	dallocm(p, 0);
 
 	return (NULL);
 }
@@ -17,6 +17,7 @@ je_thread_start(void *arg)
 		malloc_printf("%s(): Error in malloc()\n", __func__);
 		return (void *)1;
 	}
+	free(p);
 
 	size = sizeof(arena_ind);
 	if ((err = mallctl("thread.arena", &arena_ind, &size, &main_arena_ind,