Fix a Valgrind integration flaw.
Fix a Valgrind integration flaw that caused Valgrind warnings about reads of uninitialized memory in arena chunk headers.
commit 87a02d2bb1
parent 543abf7e6c
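
The change does two things: it routes every read and write of an arena chunk map entry through a pair of inline accessors (arena_mapbitsp_read()/arena_mapbitsp_write()), and it annotates the chunk map header region with Valgrind memcheck client requests so memcheck's shadow state matches what the allocator actually initializes. The standalone sketch below is not part of the commit; it only illustrates the two client requests from <valgrind/memcheck.h> that the src/arena.c hunk relies on. The buffer here is a stand-in for the chunk map, and both requests compile to no-ops when the program is not running under Valgrind.

/*
 * Illustrative only.  Under memcheck, reading the buffer between the
 * UNDEFINED and DEFINED requests would be reported as a use of
 * uninitialized memory; after the DEFINED request it would not.
 */
#include <stdlib.h>
#include <string.h>
#include <valgrind/memcheck.h>

int
main(void)
{
    size_t len = 64 * sizeof(size_t);
    size_t *map = malloc(len);

    if (map == NULL)
        return (1);
    /* About to overwrite everything: old contents are meaningless. */
    VALGRIND_MAKE_MEM_UNDEFINED(map, len);
    memset(map, 0, len);
    /* The region now holds known (zero) values; reads are legitimate. */
    VALGRIND_MAKE_MEM_DEFINED(map, len);
    free(map);
    return (0);
}
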
include/jemalloc/internal/arena.h
@@ -441,6 +441,7 @@ void arena_postfork_child(arena_t *arena);
 #ifndef JEMALLOC_ENABLE_INLINE
 arena_chunk_map_t *arena_mapp_get(arena_chunk_t *chunk, size_t pageind);
 size_t *arena_mapbitsp_get(arena_chunk_t *chunk, size_t pageind);
+size_t arena_mapbitsp_read(size_t *mapbitsp);
 size_t arena_mapbits_get(arena_chunk_t *chunk, size_t pageind);
 size_t arena_mapbits_unallocated_size_get(arena_chunk_t *chunk,
     size_t pageind);
@@ -451,6 +452,7 @@ size_t arena_mapbits_dirty_get(arena_chunk_t *chunk, size_t pageind);
 size_t arena_mapbits_unzeroed_get(arena_chunk_t *chunk, size_t pageind);
 size_t arena_mapbits_large_get(arena_chunk_t *chunk, size_t pageind);
 size_t arena_mapbits_allocated_get(arena_chunk_t *chunk, size_t pageind);
+void arena_mapbitsp_write(size_t *mapbitsp, size_t mapbits);
 void arena_mapbits_unallocated_set(arena_chunk_t *chunk, size_t pageind,
     size_t size, size_t flags);
 void arena_mapbits_unallocated_size_set(arena_chunk_t *chunk, size_t pageind,
@@ -497,11 +499,18 @@ arena_mapbitsp_get(arena_chunk_t *chunk, size_t pageind)
     return (&arena_mapp_get(chunk, pageind)->bits);
 }
 
+JEMALLOC_ALWAYS_INLINE size_t
+arena_mapbitsp_read(size_t *mapbitsp)
+{
+
+    return (*mapbitsp);
+}
+
 JEMALLOC_ALWAYS_INLINE size_t
 arena_mapbits_get(arena_chunk_t *chunk, size_t pageind)
 {
 
-    return (*arena_mapbitsp_get(chunk, pageind));
+    return (arena_mapbitsp_read(arena_mapbitsp_get(chunk, pageind)));
 }
 
 JEMALLOC_ALWAYS_INLINE size_t
@@ -584,83 +593,90 @@ arena_mapbits_allocated_get(arena_chunk_t *chunk, size_t pageind)
     return (mapbits & CHUNK_MAP_ALLOCATED);
 }
 
+JEMALLOC_ALWAYS_INLINE void
+arena_mapbitsp_write(size_t *mapbitsp, size_t mapbits)
+{
+
+    *mapbitsp = mapbits;
+}
+
 JEMALLOC_ALWAYS_INLINE void
 arena_mapbits_unallocated_set(arena_chunk_t *chunk, size_t pageind, size_t size,
     size_t flags)
 {
-    size_t *mapbitsp;
+    size_t *mapbitsp = arena_mapbitsp_get(chunk, pageind);
 
-    mapbitsp = arena_mapbitsp_get(chunk, pageind);
     assert((size & PAGE_MASK) == 0);
     assert((flags & ~CHUNK_MAP_FLAGS_MASK) == 0);
     assert((flags & (CHUNK_MAP_DIRTY|CHUNK_MAP_UNZEROED)) == flags);
-    *mapbitsp = size | CHUNK_MAP_BININD_INVALID | flags;
+    arena_mapbitsp_write(mapbitsp, size | CHUNK_MAP_BININD_INVALID | flags);
 }
 
 JEMALLOC_ALWAYS_INLINE void
 arena_mapbits_unallocated_size_set(arena_chunk_t *chunk, size_t pageind,
     size_t size)
 {
-    size_t *mapbitsp;
+    size_t *mapbitsp = arena_mapbitsp_get(chunk, pageind);
+    size_t mapbits = arena_mapbitsp_read(mapbitsp);
 
-    mapbitsp = arena_mapbitsp_get(chunk, pageind);
     assert((size & PAGE_MASK) == 0);
-    assert((*mapbitsp & (CHUNK_MAP_LARGE|CHUNK_MAP_ALLOCATED)) == 0);
-    *mapbitsp = size | (*mapbitsp & PAGE_MASK);
+    assert((mapbits & (CHUNK_MAP_LARGE|CHUNK_MAP_ALLOCATED)) == 0);
+    arena_mapbitsp_write(mapbitsp, size | (mapbits & PAGE_MASK));
 }
 
 JEMALLOC_ALWAYS_INLINE void
 arena_mapbits_large_set(arena_chunk_t *chunk, size_t pageind, size_t size,
     size_t flags)
 {
-    size_t *mapbitsp;
+    size_t *mapbitsp = arena_mapbitsp_get(chunk, pageind);
+    size_t mapbits = arena_mapbitsp_read(mapbitsp);
     size_t unzeroed;
 
-    mapbitsp = arena_mapbitsp_get(chunk, pageind);
     assert((size & PAGE_MASK) == 0);
     assert((flags & CHUNK_MAP_DIRTY) == flags);
-    unzeroed = *mapbitsp & CHUNK_MAP_UNZEROED; /* Preserve unzeroed. */
-    *mapbitsp = size | CHUNK_MAP_BININD_INVALID | flags | unzeroed |
-        CHUNK_MAP_LARGE | CHUNK_MAP_ALLOCATED;
+    unzeroed = mapbits & CHUNK_MAP_UNZEROED; /* Preserve unzeroed. */
+    arena_mapbitsp_write(mapbitsp, size | CHUNK_MAP_BININD_INVALID | flags
+        | unzeroed | CHUNK_MAP_LARGE | CHUNK_MAP_ALLOCATED);
 }
 
 JEMALLOC_ALWAYS_INLINE void
 arena_mapbits_large_binind_set(arena_chunk_t *chunk, size_t pageind,
     size_t binind)
 {
-    size_t *mapbitsp;
+    size_t *mapbitsp = arena_mapbitsp_get(chunk, pageind);
+    size_t mapbits = arena_mapbitsp_read(mapbitsp);
 
     assert(binind <= BININD_INVALID);
-    mapbitsp = arena_mapbitsp_get(chunk, pageind);
     assert(arena_mapbits_large_size_get(chunk, pageind) == PAGE);
-    *mapbitsp = (*mapbitsp & ~CHUNK_MAP_BININD_MASK) | (binind <<
-        CHUNK_MAP_BININD_SHIFT);
+    arena_mapbitsp_write(mapbitsp, (mapbits & ~CHUNK_MAP_BININD_MASK) |
+        (binind << CHUNK_MAP_BININD_SHIFT));
 }
 
 JEMALLOC_ALWAYS_INLINE void
 arena_mapbits_small_set(arena_chunk_t *chunk, size_t pageind, size_t runind,
     size_t binind, size_t flags)
 {
-    size_t *mapbitsp;
+    size_t *mapbitsp = arena_mapbitsp_get(chunk, pageind);
+    size_t mapbits = arena_mapbitsp_read(mapbitsp);
     size_t unzeroed;
 
     assert(binind < BININD_INVALID);
-    mapbitsp = arena_mapbitsp_get(chunk, pageind);
     assert(pageind - runind >= map_bias);
     assert((flags & CHUNK_MAP_DIRTY) == flags);
-    unzeroed = *mapbitsp & CHUNK_MAP_UNZEROED; /* Preserve unzeroed. */
-    *mapbitsp = (runind << LG_PAGE) | (binind << CHUNK_MAP_BININD_SHIFT) |
-        flags | unzeroed | CHUNK_MAP_ALLOCATED;
+    unzeroed = mapbits & CHUNK_MAP_UNZEROED; /* Preserve unzeroed. */
+    arena_mapbitsp_write(mapbitsp, (runind << LG_PAGE) | (binind <<
+        CHUNK_MAP_BININD_SHIFT) | flags | unzeroed | CHUNK_MAP_ALLOCATED);
 }
 
 JEMALLOC_ALWAYS_INLINE void
 arena_mapbits_unzeroed_set(arena_chunk_t *chunk, size_t pageind,
     size_t unzeroed)
 {
-    size_t *mapbitsp;
+    size_t *mapbitsp = arena_mapbitsp_get(chunk, pageind);
+    size_t mapbits = arena_mapbitsp_read(mapbitsp);
 
-    mapbitsp = arena_mapbitsp_get(chunk, pageind);
-    *mapbitsp = (*mapbitsp & ~CHUNK_MAP_UNZEROED) | unzeroed;
+    arena_mapbitsp_write(mapbitsp, (mapbits & ~CHUNK_MAP_UNZEROED) |
+        unzeroed);
 }
 
 JEMALLOC_INLINE bool
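
The hunk above converts every setter from dereferencing mapbitsp directly to a read-modify-write through arena_mapbitsp_read() and arena_mapbitsp_write(), so all loads and stores of a map entry pass through one pair of inlines. A hedged illustration of that discipline follows; example_mapbits_flag_set() is a hypothetical helper written for this note, not part of jemalloc.

/*
 * Hypothetical helper (not in jemalloc): fetch the pointer once, read
 * through arena_mapbitsp_read(), and publish the new value through
 * arena_mapbitsp_write().  Keeping a single choke point for loads and
 * stores is what lets memory-checking annotations be applied in exactly
 * two places instead of at every dereference.
 */
JEMALLOC_ALWAYS_INLINE void
example_mapbits_flag_set(arena_chunk_t *chunk, size_t pageind, size_t flag)
{
    size_t *mapbitsp = arena_mapbitsp_get(chunk, pageind);
    size_t mapbits = arena_mapbitsp_read(mapbitsp);

    arena_mapbitsp_write(mapbitsp, mapbits | flag);
}
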
include/jemalloc/internal/private_namespace.h
@@ -33,6 +33,8 @@
 #define arena_mapbits_unzeroed_get JEMALLOC_N(arena_mapbits_unzeroed_get)
 #define arena_mapbits_unzeroed_set JEMALLOC_N(arena_mapbits_unzeroed_set)
 #define arena_mapbitsp_get JEMALLOC_N(arena_mapbitsp_get)
+#define arena_mapbitsp_read JEMALLOC_N(arena_mapbitsp_read)
+#define arena_mapbitsp_write JEMALLOC_N(arena_mapbitsp_write)
 #define arena_mapp_get JEMALLOC_N(arena_mapp_get)
 #define arena_maxclass JEMALLOC_N(arena_maxclass)
 #define arena_new JEMALLOC_N(arena_new)
src/arena.c
@@ -569,17 +569,24 @@ arena_chunk_alloc(arena_t *arena)
      * unless the chunk is not zeroed.
      */
     if (zero == false) {
+        VALGRIND_MAKE_MEM_UNDEFINED(
+            (void *)arena_mapp_get(chunk, map_bias+1),
+            (size_t)((uintptr_t) arena_mapp_get(chunk,
+            chunk_npages-1) - (uintptr_t)arena_mapp_get(chunk,
+            map_bias+1)));
         for (i = map_bias+1; i < chunk_npages-1; i++)
             arena_mapbits_unzeroed_set(chunk, i, unzeroed);
-    } else if (config_debug) {
+    } else {
         VALGRIND_MAKE_MEM_DEFINED(
             (void *)arena_mapp_get(chunk, map_bias+1),
-            (void *)((uintptr_t)
-            arena_mapp_get(chunk, chunk_npages-1)
-            - (uintptr_t)arena_mapp_get(chunk, map_bias+1)));
-        for (i = map_bias+1; i < chunk_npages-1; i++) {
-            assert(arena_mapbits_unzeroed_get(chunk, i) ==
-                unzeroed);
+            (size_t)((uintptr_t) arena_mapp_get(chunk,
+            chunk_npages-1) - (uintptr_t)arena_mapp_get(chunk,
+            map_bias+1)));
+        if (config_debug) {
+            for (i = map_bias+1; i < chunk_npages-1; i++) {
+                assert(arena_mapbits_unzeroed_get(chunk,
+                    i) == unzeroed);
+            }
         }
     }
     arena_mapbits_unallocated_set(chunk, chunk_npages-1,