Fix tsd cleanup regressions.
Fix tsd cleanup regressions that were introduced in 5460aa6f66 (Convert
all tsd variables to reside in a single tsd structure.). These
regressions were twofold:
1) tsd_tryget() should never (and need never) return NULL. Rename it to
tsd_fetch() and simplify all callers.
2) tsd_*_set() must only be called when tsd is in the nominal state,
because cleanup happens during the nominal-->purgatory transition,
and re-initialization must not happen while in the purgatory state.
Add tsd_nominal() and use it as needed. Note that tsd_*{p,}_get()
can still be used as long as no re-initialization that would require
cleanup occurs. This means that e.g. the thread_allocated counter
can be updated unconditionally.
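The resulting pattern, applied throughout the diff below, looks roughly like the following sketch. This is not code from the commit: the wrapper function is hypothetical, and it only strings together the internal tsd/arena calls that appear in the hunks (tsd_fetch, tsd_nominal, tsd_thread_allocatedp_get, tsd_arena_set, choose_arena).

/*
 * Hypothetical illustration of the tsd_fetch()/tsd_nominal() usage
 * pattern described above; not part of this commit.
 */
static void
tsd_usage_sketch(size_t usize)
{
	tsd_t *tsd = tsd_fetch();	/* Never returns NULL. */

	/* Reads and counter updates need no guard, even during cleanup. */
	*tsd_thread_allocatedp_get(tsd) += usize;

	/*
	 * Writes that would later require cleanup must only happen while
	 * tsd is in the nominal state, i.e. never after the
	 * nominal-->purgatory transition.
	 */
	if (tsd_nominal(tsd))
		tsd_arena_set(tsd, choose_arena(tsd, NULL));
}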
src/ctl.c (26 lines changed)
@@ -571,9 +571,7 @@ ctl_grow(void)
 	ctl_arena_stats_t *astats;
 	arena_t **tarenas;
 
-	tsd = tsd_tryget();
-	if (tsd == NULL)
-		return (true);
+	tsd = tsd_fetch();
 
 	/* Allocate extended arena stats and arenas arrays. */
 	astats = (ctl_arena_stats_t *)imalloc(tsd, (ctl_stats.narenas + 2) *
@@ -1132,11 +1130,7 @@ n##_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp, \
 	if (!(c)) \
 		return (ENOENT); \
 	READONLY(); \
-	tsd = tsd_tryget(); \
-	if (tsd == NULL) { \
-		ret = EAGAIN; \
-		goto label_return; \
-	} \
+	tsd = tsd_fetch(); \
 	oldval = (m(tsd)); \
 	READ(oldval, t); \
 	\
@@ -1239,9 +1233,7 @@ thread_arena_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
 	tsd_t *tsd;
 	unsigned newind, oldind;
 
-	tsd = tsd_tryget();
-	if (tsd == NULL)
-		return (EAGAIN);
+	tsd = tsd_fetch();
 
 	malloc_mutex_lock(&ctl_mtx);
 	newind = oldind = choose_arena(tsd, NULL)->ind;
@@ -1359,11 +1351,7 @@ thread_prof_name_ctl(const size_t *mib, size_t miblen, void *oldp,
 			goto label_return;
 		}
 
-		tsd = tsd_tryget();
-		if (tsd == NULL) {
-			ret = EAGAIN;
-			goto label_return;
-		}
+		tsd = tsd_fetch();
 
 		if ((ret = prof_thread_name_set(tsd, *(const char **)newp)) !=
 		    0)
@@ -1763,11 +1751,7 @@ prof_reset_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
 	if (lg_sample >= (sizeof(uint64_t) << 3))
 		lg_sample = (sizeof(uint64_t) << 3) - 1;
 
-	tsd = tsd_tryget();
-	if (tsd == NULL) {
-		ret = EAGAIN;
-		goto label_return;
-	}
+	tsd = tsd_fetch();
 
 	prof_reset(tsd, lg_sample);
 
src/jemalloc.c
@@ -194,7 +194,8 @@ choose_arena_hard(tsd_t *tsd)
 		malloc_mutex_unlock(&arenas_lock);
 	}
 
-	tsd_arena_set(tsd, ret);
+	if (tsd_nominal(tsd))
+		tsd_arena_set(tsd, ret);
 
 	return (ret);
 }
@@ -908,8 +909,9 @@ JEMALLOC_ALWAYS_INLINE_C void *
 imalloc_body(size_t size, tsd_t **tsd, size_t *usize)
 {
 
-	if (unlikely(malloc_init()) || unlikely((*tsd = tsd_tryget()) == NULL))
+	if (unlikely(malloc_init()))
 		return (NULL);
+	*tsd = tsd_fetch();
 
 	if (config_prof && opt_prof) {
 		*usize = s2u(size);
@@ -1000,10 +1002,11 @@ imemalign(void **memptr, size_t alignment, size_t size, size_t min_alignment)
 
 	assert(min_alignment != 0);
 
-	if (unlikely(malloc_init()) || unlikely((tsd = tsd_tryget()) == NULL)) {
+	if (unlikely(malloc_init())) {
 		result = NULL;
 		goto label_oom;
 	} else {
+		tsd = tsd_fetch();
 		if (size == 0)
 			size = 1;
 
@@ -1124,11 +1127,12 @@ je_calloc(size_t num, size_t size)
 	size_t num_size;
 	size_t usize JEMALLOC_CC_SILENCE_INIT(0);
 
-	if (unlikely(malloc_init()) || unlikely((tsd = tsd_tryget()) == NULL)) {
+	if (unlikely(malloc_init())) {
 		num_size = 0;
 		ret = NULL;
 		goto label_return;
 	}
+	tsd = tsd_fetch();
 
 	num_size = num * size;
 	if (unlikely(num_size == 0)) {
@@ -1228,7 +1232,7 @@ ifree(tsd_t *tsd, void *ptr, bool try_tcache)
 		prof_free(tsd, ptr, usize);
 	} else if (config_stats || config_valgrind)
 		usize = isalloc(ptr, config_prof);
-	if (config_stats && likely(tsd != NULL))
+	if (config_stats)
 		*tsd_thread_deallocatedp_get(tsd) += usize;
 	if (config_valgrind && unlikely(in_valgrind))
 		rzsize = p2rz(ptr);
@@ -1246,7 +1250,7 @@ isfree(tsd_t *tsd, void *ptr, size_t usize, bool try_tcache)
 
 	if (config_prof && opt_prof)
 		prof_free(tsd, ptr, usize);
-	if (config_stats && likely(tsd != NULL))
+	if (config_stats)
 		*tsd_thread_deallocatedp_get(tsd) += usize;
 	if (config_valgrind && unlikely(in_valgrind))
 		rzsize = p2rz(ptr);
@@ -1267,7 +1271,7 @@ je_realloc(void *ptr, size_t size)
 		if (ptr != NULL) {
 			/* realloc(ptr, 0) is equivalent to free(ptr). */
 			UTRACE(ptr, 0, 0);
-			tsd = tsd_tryget();
+			tsd = tsd_fetch();
 			ifree(tsd, ptr, true);
 			return (NULL);
 		}
@@ -1277,27 +1281,23 @@ je_realloc(void *ptr, size_t size)
 	if (likely(ptr != NULL)) {
 		assert(malloc_initialized || IS_INITIALIZER);
 		malloc_thread_init();
+		tsd = tsd_fetch();
 
-		if ((tsd = tsd_tryget()) != NULL) {
-			if ((config_prof && opt_prof) || config_stats ||
-			    (config_valgrind && unlikely(in_valgrind)))
-				old_usize = isalloc(ptr, config_prof);
-			if (config_valgrind && unlikely(in_valgrind)) {
-				old_rzsize = config_prof ? p2rz(ptr) :
-				    u2rz(old_usize);
-			}
+		if ((config_prof && opt_prof) || config_stats ||
+		    (config_valgrind && unlikely(in_valgrind)))
+			old_usize = isalloc(ptr, config_prof);
+		if (config_valgrind && unlikely(in_valgrind))
+			old_rzsize = config_prof ? p2rz(ptr) : u2rz(old_usize);
 
-			if (config_prof && opt_prof) {
-				usize = s2u(size);
-				ret = irealloc_prof(tsd, ptr, old_usize, usize);
-			} else {
-				if (config_stats || (config_valgrind &&
-				    unlikely(in_valgrind)))
-					usize = s2u(size);
-				ret = iralloc(tsd, ptr, size, 0, false);
-			}
-		} else
-			ret = NULL;
+		if (config_prof && opt_prof) {
+			usize = s2u(size);
+			ret = irealloc_prof(tsd, ptr, old_usize, usize);
+		} else {
+			if (config_stats || (config_valgrind &&
+			    unlikely(in_valgrind)))
+				usize = s2u(size);
+			ret = iralloc(tsd, ptr, size, 0, false);
+		}
 	} else {
 		/* realloc(NULL, size) is equivalent to malloc(size). */
 		ret = imalloc_body(size, &tsd, &usize);
@@ -1313,10 +1313,8 @@ je_realloc(void *ptr, size_t size)
 	}
 	if (config_stats && likely(ret != NULL)) {
 		assert(usize == isalloc(ret, config_prof));
-		if (tsd != NULL) {
-			*tsd_thread_allocatedp_get(tsd) += usize;
-			*tsd_thread_deallocatedp_get(tsd) += old_usize;
-		}
+		*tsd_thread_allocatedp_get(tsd) += usize;
+		*tsd_thread_deallocatedp_get(tsd) += old_usize;
 	}
 	UTRACE(ptr, size, ret);
 	JEMALLOC_VALGRIND_REALLOC(true, ret, usize, true, ptr, old_usize,
@@ -1330,7 +1328,7 @@ je_free(void *ptr)
 
 	UTRACE(ptr, 0, 0);
 	if (likely(ptr != NULL))
-		ifree(tsd_tryget(), ptr, true);
+		ifree(tsd_fetch(), ptr, true);
 }
 
 /*
@@ -1543,8 +1541,9 @@ je_mallocx(size_t size, int flags)
 
 	assert(size != 0);
 
-	if (unlikely(malloc_init()) || unlikely((tsd = tsd_tryget()) == NULL))
+	if (unlikely(malloc_init()))
 		goto label_oom;
+	tsd = tsd_fetch();
 
 	if (config_prof && opt_prof)
 		p = imallocx_prof(tsd, size, flags, &usize);
@@ -1554,10 +1553,8 @@ je_mallocx(size_t size, int flags)
 		goto label_oom;
 
 	if (config_stats) {
-		tsd_t *tsd = tsd_tryget();
 		assert(usize == isalloc(p, config_prof));
-		if (tsd != NULL)
-			*tsd_thread_allocatedp_get(tsd) += usize;
+		*tsd_thread_allocatedp_get(tsd) += usize;
 	}
 	UTRACE(0, size, p);
 	JEMALLOC_VALGRIND_MALLOC(true, p, usize, MALLOCX_ZERO_GET(flags));
@@ -1649,9 +1646,7 @@ je_rallocx(void *ptr, size_t size, int flags)
 	assert(size != 0);
 	assert(malloc_initialized || IS_INITIALIZER);
 	malloc_thread_init();
-
-	if (unlikely((tsd = tsd_tryget()) == NULL))
-		goto label_oom;
+	tsd = tsd_fetch();
 
 	if (unlikely((flags & MALLOCX_ARENA_MASK) != 0)) {
 		unsigned arena_ind = MALLOCX_ARENA_GET(flags);
@@ -1794,6 +1789,7 @@ je_xallocx(void *ptr, size_t size, size_t extra, int flags)
 	assert(SIZE_T_MAX - size >= extra);
 	assert(malloc_initialized || IS_INITIALIZER);
 	malloc_thread_init();
+	tsd = tsd_fetch();
 
 	if (unlikely((flags & MALLOCX_ARENA_MASK) != 0)) {
 		unsigned arena_ind = MALLOCX_ARENA_GET(flags);
@@ -1802,10 +1798,6 @@ je_xallocx(void *ptr, size_t size, size_t extra, int flags)
 		arena = NULL;
 
 	old_usize = isalloc(ptr, config_prof);
-	if (unlikely((tsd = tsd_tryget()) == NULL)) {
-		usize = old_usize;
-		goto label_not_resized;
-	}
 	if (config_valgrind && unlikely(in_valgrind))
 		old_rzsize = u2rz(old_usize);
 
@@ -1865,7 +1857,7 @@ je_dallocx(void *ptr, int flags)
 		try_tcache = true;
 
 	UTRACE(ptr, 0, 0);
-	ifree(tsd_tryget(), ptr, try_tcache);
+	ifree(tsd_fetch(), ptr, try_tcache);
 }
 
 JEMALLOC_ALWAYS_INLINE_C size_t
@@ -1901,7 +1893,7 @@ je_sdallocx(void *ptr, size_t size, int flags)
 		try_tcache = true;
 
 	UTRACE(ptr, 0, 0);
-	isfree(tsd_tryget(), ptr, usize, try_tcache);
+	isfree(tsd_fetch(), ptr, usize, try_tcache);
 }
 
 size_t
src/prof.c (29 lines changed)
@@ -850,8 +850,7 @@ prof_bt_count(void)
 	tsd_t *tsd;
 	prof_tdata_t *tdata;
 
-	if ((tsd = tsd_tryget()) == NULL)
-		return (0);
+	tsd = tsd_fetch();
 	tdata = prof_tdata_get(tsd, false);
 	if (tdata == NULL)
 		return (0);
@@ -1475,8 +1474,7 @@ prof_fdump(void)
 
 	if (!prof_booted)
 		return;
-	if ((tsd = tsd_tryget()) == NULL)
-		return;
+	tsd = tsd_fetch();
 
 	if (opt_prof_final && opt_prof_prefix[0] != '\0') {
 		malloc_mutex_lock(&prof_dump_seq_mtx);
@@ -1497,8 +1495,7 @@ prof_idump(void)
 
 	if (!prof_booted)
 		return;
-	if ((tsd = tsd_tryget()) == NULL)
-		return;
+	tsd = tsd_fetch();
 	tdata = prof_tdata_get(tsd, false);
 	if (tdata == NULL)
 		return;
@@ -1526,8 +1523,7 @@ prof_mdump(const char *filename)
 
 	if (!opt_prof || !prof_booted)
 		return (true);
-	if ((tsd = tsd_tryget()) == NULL)
-		return (true);
+	tsd = tsd_fetch();
 
 	if (filename == NULL) {
 		/* No filename specified, so automatically generate one. */
@@ -1553,8 +1549,7 @@ prof_gdump(void)
 
 	if (!prof_booted)
 		return;
-	if ((tsd = tsd_tryget()) == NULL)
-		return;
+	tsd = tsd_fetch();
 	tdata = prof_tdata_get(tsd, false);
 	if (tdata == NULL)
 		return;
@@ -1677,6 +1672,7 @@ prof_tdata_destroy_locked(tsd_t *tsd, prof_tdata_t *tdata)
 {
 
 	assert(prof_tdata_should_destroy(tdata));
+	assert(tsd_prof_tdata_get(tsd) != tdata);
 
 	tdata_tree_remove(&tdatas, tdata);
 
@@ -1704,6 +1700,7 @@ prof_tdata_detach(tsd_t *tsd, prof_tdata_t *tdata)
 	if (tdata->attached) {
 		tdata->attached = false;
 		destroy_tdata = prof_tdata_should_destroy(tdata);
+		tsd_prof_tdata_set(tsd, NULL);
 	} else
 		destroy_tdata = false;
 	malloc_mutex_unlock(tdata->lock);
@@ -1819,8 +1816,7 @@ prof_thread_name_get(void)
 	tsd_t *tsd;
 	prof_tdata_t *tdata;
 
-	if ((tsd = tsd_tryget()) == NULL)
-		return ("");
+	tsd = tsd_fetch();
 	tdata = prof_tdata_get(tsd, true);
 	if (tdata == NULL)
 		return ("");
@@ -1886,8 +1882,7 @@ prof_thread_active_get(void)
 	tsd_t *tsd;
 	prof_tdata_t *tdata;
 
-	if ((tsd = tsd_tryget()) == NULL)
-		return (false);
+	tsd = tsd_fetch();
 	tdata = prof_tdata_get(tsd, true);
 	if (tdata == NULL)
 		return (false);
@@ -1900,8 +1895,7 @@ prof_thread_active_set(bool active)
 	tsd_t *tsd;
 	prof_tdata_t *tdata;
 
-	if ((tsd = tsd_tryget()) == NULL)
-		return (true);
+	tsd = tsd_fetch();
 	tdata = prof_tdata_get(tsd, true);
 	if (tdata == NULL)
 		return (true);
@@ -1988,8 +1982,7 @@ prof_boot2(void)
 		if (malloc_mutex_init(&prof_thread_active_init_mtx))
 			return (true);
 
-		if ((tsd = tsd_tryget()) == NULL)
-			return (true);
+		tsd = tsd_fetch();
 		if (ckh_new(tsd, &bt2gctx, PROF_CKH_MINITEMS, prof_bt_hash,
 		    prof_bt_keycomp))
 			return (true);
src/tcache.c
@@ -263,7 +263,8 @@ tcache_get_hard(tsd_t *tsd)
 {
 
 	if (!tcache_enabled_get()) {
-		tcache_enabled_set(false); /* Memoize. */
+		if (tsd_nominal(tsd))
+			tcache_enabled_set(false); /* Memoize. */
 		return (NULL);
 	}
 	return (tcache_create(choose_arena(tsd, NULL)));