Fix a race condition in the "arenas.extend" mallctl.
Fix a race condition in the "arenas.extend" mallctl that could lead to internal data structure corruption. The race could be hit if one thread called the "arenas.extend" mallctl while another thread concurrently triggered initialization of one of the lazily created arenas.
commit 7b65180b32
parent 0f1d8ec300
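For context, here is a minimal sketch of the interleaving the commit message describes: one thread repeatedly grows the arenas array through the "arenas.extend" mallctl while freshly spawned threads allocate, and a new thread's first allocation can trigger lazy initialization of an arena. The program is illustrative only (it is not part of the commit); it assumes an unprefixed jemalloc 3.x build that exposes mallctl() via <jemalloc/jemalloc.h>, and whether the race actually fires depends on timing.

/* Illustrative only -- not part of this commit. */
#include <jemalloc/jemalloc.h>
#include <pthread.h>
#include <stdlib.h>

static void *
allocating_thread(void *arg)
{
        /* A new thread's first allocation can lazily initialize an arena. */
        void *p = malloc(1);

        free(p);
        (void)arg;
        return (NULL);
}

int
main(void)
{
        for (unsigned i = 0; i < 1000; i++) {
                pthread_t thd;
                unsigned ind;
                size_t sz = sizeof(ind);

                pthread_create(&thd, NULL, allocating_thread, NULL);
                /* Concurrently extend the arenas array. */
                mallctl("arenas.extend", &ind, &sz, NULL, 0);
                pthread_join(thd, NULL);
        }
        return (0);
}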
ChangeLog

@@ -9,6 +9,8 @@ found in the git revision history:
 * 3.4.1 (XXX)
 
   Bug fixes:
+  - Fix a race in the "arenas.extend" mallctl that could cause memory corruption
+    of internal data structures and subsequent crashes.
   - Fix Valgrind integration flaws that caused Valgrind warnings about reads of
     uninitialized memory in:
     + arena chunk headers
src/ctl.c (84 lines changed)

@@ -546,43 +546,30 @@ ctl_arena_refresh(arena_t *arena, unsigned i)
 static bool
 ctl_grow(void)
 {
-        size_t astats_size;
         ctl_arena_stats_t *astats;
         arena_t **tarenas;
 
-        /* Extend arena stats and arenas arrays. */
-        astats_size = (ctl_stats.narenas + 2) * sizeof(ctl_arena_stats_t);
-        if (ctl_stats.narenas == narenas_auto) {
-                /* ctl_stats.arenas and arenas came from base_alloc(). */
-                astats = (ctl_arena_stats_t *)imalloc(astats_size);
-                if (astats == NULL)
-                        return (true);
-                memcpy(astats, ctl_stats.arenas, (ctl_stats.narenas + 1) *
-                    sizeof(ctl_arena_stats_t));
-
-                tarenas = (arena_t **)imalloc((ctl_stats.narenas + 1) *
-                    sizeof(arena_t *));
-                if (tarenas == NULL) {
-                        idalloc(astats);
-                        return (true);
-                }
-                memcpy(tarenas, arenas, ctl_stats.narenas * sizeof(arena_t *));
-        } else {
-                astats = (ctl_arena_stats_t *)iralloc(ctl_stats.arenas,
-                    astats_size, 0, 0, false, false);
-                if (astats == NULL)
-                        return (true);
-
-                tarenas = (arena_t **)iralloc(arenas, (ctl_stats.narenas + 1) *
-                    sizeof(arena_t *), 0, 0, false, false);
-                if (tarenas == NULL)
-                        return (true);
-        }
-        /* Initialize the new astats and arenas elements. */
-        memset(&astats[ctl_stats.narenas + 1], 0, sizeof(ctl_arena_stats_t));
-        if (ctl_arena_init(&astats[ctl_stats.narenas + 1]))
+        /* Allocate extended arena stats and arenas arrays. */
+        astats = (ctl_arena_stats_t *)imalloc((ctl_stats.narenas + 2) *
+            sizeof(ctl_arena_stats_t));
+        if (astats == NULL)
                 return (true);
-        tarenas[ctl_stats.narenas] = NULL;
+        tarenas = (arena_t **)imalloc((ctl_stats.narenas + 1) *
+            sizeof(arena_t *));
+        if (tarenas == NULL) {
+                idalloc(astats);
+                return (true);
+        }
+
+        /* Initialize the new astats element. */
+        memcpy(astats, ctl_stats.arenas, (ctl_stats.narenas + 1) *
+            sizeof(ctl_arena_stats_t));
+        memset(&astats[ctl_stats.narenas + 1], 0, sizeof(ctl_arena_stats_t));
+        if (ctl_arena_init(&astats[ctl_stats.narenas + 1])) {
+                idalloc(tarenas);
+                idalloc(astats);
+                return (true);
+        }
         /* Swap merged stats to their new location. */
         {
                 ctl_arena_stats_t tstats;
@@ -593,13 +580,34 @@ ctl_grow(void)
                 memcpy(&astats[ctl_stats.narenas + 1], &tstats,
                     sizeof(ctl_arena_stats_t));
         }
+        /* Initialize the new arenas element. */
+        tarenas[ctl_stats.narenas] = NULL;
+        {
+                arena_t **arenas_old = arenas;
+                /*
+                 * Swap extended arenas array into place.  Although ctl_mtx
+                 * protects this function from other threads extending the
+                 * array, it does not protect from other threads mutating it
+                 * (i.e. initializing arenas and setting array elements to
+                 * point to them).  Therefore, array copying must happen under
+                 * the protection of arenas_lock.
+                 */
+                malloc_mutex_lock(&arenas_lock);
+                arenas = tarenas;
+                memcpy(arenas, arenas_old, ctl_stats.narenas *
+                    sizeof(arena_t *));
+                narenas_total++;
+                arenas_extend(narenas_total - 1);
+                malloc_mutex_unlock(&arenas_lock);
+                /*
+                 * Deallocate arenas_old only if it came from imalloc() (not
+                 * base_alloc()).
+                 */
+                if (ctl_stats.narenas != narenas_auto)
+                        idalloc(arenas_old);
+        }
         ctl_stats.arenas = astats;
         ctl_stats.narenas++;
-        malloc_mutex_lock(&arenas_lock);
-        arenas = tarenas;
-        narenas_total++;
-        arenas_extend(narenas_total - 1);
-        malloc_mutex_unlock(&arenas_lock);
 
         return (false);
 }
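The essence of the fix visible above: previously ctl_grow() copied (or, for already-grown arrays, iralloc()ed) the live arenas array before taking arenas_lock, so a thread concurrently initializing a lazily created arena could write into memory that was about to be replaced or moved. After the fix, the replacement array is allocated up front, but the copy of the old contents and the pointer swap both happen while arenas_lock is held. The sketch below is a generic illustration of that pattern, not jemalloc code; the slots/slots_lock names are invented for the example.

/* Generic illustration of the locking pattern; not jemalloc code. */
#include <pthread.h>
#include <stdlib.h>
#include <string.h>

static pthread_mutex_t slots_lock = PTHREAD_MUTEX_INITIALIZER;
static void **slots;    /* plays the role of the arenas array */
static size_t nslots;

/* Element writers (cf. lazy arena initialization) hold the lock. */
static void
set_slot(size_t i, void *v)
{
        pthread_mutex_lock(&slots_lock);
        slots[i] = v;
        pthread_mutex_unlock(&slots_lock);
}

/* The grower (cf. ctl_grow()) copies and swaps under the same lock. */
static int
grow_slots(void)
{
        void **old_slots, **new_slots;

        /* Allocate outside the lock; failure leaves the array untouched. */
        new_slots = (void **)malloc((nslots + 1) * sizeof(void *));
        if (new_slots == NULL)
                return (1);

        pthread_mutex_lock(&slots_lock);
        old_slots = slots;
        if (nslots > 0)
                memcpy(new_slots, old_slots, nslots * sizeof(void *));
        new_slots[nslots] = NULL;
        slots = new_slots;
        nslots++;
        pthread_mutex_unlock(&slots_lock);

        free(old_slots);
        return (0);
}

Allocation of the replacement array stays outside the critical section, and the old array is released only after the lock is dropped, mirroring how ctl_grow() defers idalloc(arenas_old) until after malloc_mutex_unlock().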