Work around TLS deallocation via free().
glibc uses memalign()/free() to allocate/deallocate TLS, which means that it is unsafe to set TLS variables as a side effect of free() -- they may already be deallocated. Work around this by avoiding tcache_create() within free(). Reported by Mike Hommey.
parent 3c2ba0dcbc
commit 09a0769ba7
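The change below boils down to giving the thread-cache getter an explicit create flag: allocation paths may lazily build the cache, while the deallocation path must never allocate, so free() can never store into a TLS variable that glibc has already handed back to the allocator. A minimal standalone sketch of that pattern, using hypothetical names (cache_t, cache_tls, cache_create(); not jemalloc's actual internals):

#include <stdbool.h>
#include <stdlib.h>

/* Hypothetical thread cache; a stand-in for jemalloc's tcache_t. */
typedef struct { unsigned nallocs; } cache_t;

static __thread cache_t *cache_tls = NULL;

/* Allocates, so it must never run as a side effect of free(). */
static cache_t *
cache_create(void)
{

	return (calloc(1, sizeof(cache_t)));
}

/*
 * Thread-cache getter with an explicit create flag.  Allocation paths pass
 * true and may lazily build the cache; deallocation paths pass false, so
 * free() never allocates and never writes a TLS variable that glibc may
 * already have deallocated.
 */
static cache_t *
cache_get(bool create)
{

	if (cache_tls == NULL && create)
		cache_tls = cache_create();
	return (cache_tls);
}

In the diff itself, arena_malloc() plays the role of the create-allowed caller (tcache_get(true)) and arena_dalloc() the create-forbidden one (tcache_get(false)).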
@@ -556,7 +556,7 @@ arena_malloc(size_t size, bool zero)
 	assert(size <= arena_maxclass);
 
 	if (size <= SMALL_MAXCLASS) {
-		if ((tcache = tcache_get()) != NULL)
+		if ((tcache = tcache_get(true)) != NULL)
 			return (tcache_alloc_small(tcache, size, zero));
 		else
 			return (arena_malloc_small(choose_arena(), size, zero));
@@ -565,7 +565,8 @@ arena_malloc(size_t size, bool zero)
 		 * Initialize tcache after checking size in order to avoid
 		 * infinite recursion during tcache initialization.
 		 */
-		if (size <= tcache_maxclass && (tcache = tcache_get()) != NULL)
+		if (size <= tcache_maxclass && (tcache = tcache_get(true)) !=
+		    NULL)
 			return (tcache_alloc_large(tcache, size, zero));
 		else
 			return (arena_malloc_large(choose_arena(), size, zero));
@@ -590,7 +591,7 @@ arena_dalloc(arena_t *arena, arena_chunk_t *chunk, void *ptr)
 {
 	size_t pageind;
 	arena_chunk_map_t *mapelm;
-	tcache_t *tcache = tcache_get();
+	tcache_t *tcache = tcache_get(false);
 
 	assert(arena != NULL);
 	assert(chunk->arena == arena);
@@ -126,7 +126,7 @@ malloc_tsd_protos(JEMALLOC_ATTR(unused), tcache_enabled, tcache_enabled_t)
 void	tcache_event(tcache_t *tcache);
 void	tcache_flush(void);
 bool	tcache_enabled_get(void);
-tcache_t *tcache_get(void);
+tcache_t *tcache_get(bool create);
 void	tcache_enabled_set(bool enabled);
 void	*tcache_alloc_easy(tcache_bin_t *tbin);
 void	*tcache_alloc_small(tcache_t *tcache, size_t size, bool zero);
@@ -205,7 +205,7 @@ tcache_enabled_set(bool enabled)
 }
 
 JEMALLOC_INLINE tcache_t *
-tcache_get(void)
+tcache_get(bool create)
 {
 	tcache_t *tcache;
 
@@ -219,6 +219,18 @@ tcache_get(void)
 	if (tcache == TCACHE_STATE_DISABLED)
 		return (NULL);
 	if (tcache == NULL) {
+		if (create == false) {
+			/*
+			 * Creating a tcache here would cause
+			 * allocation as a side effect of free().
+			 * Ordinarily that would be okay since
+			 * tcache_create() failure is a soft failure
+			 * that doesn't propagate.  However, if TLS
+			 * data are freed via free() as in glibc,
+			 * subtle TLS corruption could result.
+			 */
+			return (NULL);
+		}
 		if (tcache_enabled_get() == false) {
 			tcache_enabled_set(false); /* Memoize. */
 			return (NULL);