Push down ctl_mtx.
Many mallctl*() endpoints require no locking, so push the locking down to just the functions that need it. This is particularly important for "thread.allocated" and "thread.deallocated", which are intended as a low-overhead way to introspect per-thread allocation activity.
parent 1f17bd9395
commit fc4dcfa2f5
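
The low-overhead claim is easy to exercise from the caller's side. Below is a minimal sketch (not part of this commit) that reads the two per-thread counters through the by-name interface; it assumes a build whose public entry point is plain mallctl() with a <jemalloc/jemalloc.h> header, whereas prefixed builds would call e.g. je_mallctl() instead.

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <jemalloc/jemalloc.h>

int
main(void)
{
    void *p = malloc(4096);
    uint64_t allocated, deallocated;
    size_t sz = sizeof(uint64_t);

    /* Read-only queries: oldp/oldlenp carry the result, newp is unused. */
    if (mallctl("thread.allocated", &allocated, &sz, NULL, 0) != 0 ||
        mallctl("thread.deallocated", &deallocated, &sz, NULL, 0) != 0) {
        fprintf(stderr, "mallctl failed\n");
        return (1);
    }

    printf("thread.allocated=%llu thread.deallocated=%llu\n",
        (unsigned long long)allocated, (unsigned long long)deallocated);
    free(p);
    return (0);
}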
@@ -4,6 +4,13 @@
 /******************************************************************************/
 /* Data. */
 
+/*
+ * ctl_mtx protects the following:
+ * - ctl_stats.*
+ * - opt_prof_active
+ * - swap_enabled
+ * - swap_prezeroed
+ */
 static malloc_mutex_t ctl_mtx;
 static bool ctl_initialized;
 static uint64_t ctl_epoch;
@@ -680,7 +687,9 @@ ctl_refresh(void)
 static bool
 ctl_init(void)
 {
+    bool ret;
 
+    malloc_mutex_lock(&ctl_mtx);
     if (ctl_initialized == false) {
 #ifdef JEMALLOC_STATS
         unsigned i;
@@ -692,8 +701,10 @@ ctl_init(void)
          */
         ctl_stats.arenas = (ctl_arena_stats_t *)base_alloc(
             (narenas + 1) * sizeof(ctl_arena_stats_t));
-        if (ctl_stats.arenas == NULL)
-            return (true);
+        if (ctl_stats.arenas == NULL) {
+            ret = true;
+            goto RETURN;
+        }
         memset(ctl_stats.arenas, 0, (narenas + 1) *
             sizeof(ctl_arena_stats_t));
 
@@ -704,8 +715,10 @@ ctl_init(void)
          */
 #ifdef JEMALLOC_STATS
         for (i = 0; i <= narenas; i++) {
-            if (ctl_arena_init(&ctl_stats.arenas[i]))
-                return (true);
+            if (ctl_arena_init(&ctl_stats.arenas[i])) {
+                ret = true;
+                goto RETURN;
+            }
         }
 #endif
         ctl_stats.arenas[narenas].initialized = true;
@@ -715,7 +728,10 @@ ctl_init(void)
         ctl_initialized = true;
     }
 
-    return (false);
+    ret = false;
+RETURN:
+    malloc_mutex_unlock(&ctl_mtx);
+    return (ret);
 }
 
 static int
@@ -825,8 +841,7 @@ ctl_byname(const char *name, void *oldp, size_t *oldlenp, void *newp,
     ctl_node_t const *nodes[CTL_MAX_DEPTH];
     size_t mib[CTL_MAX_DEPTH];
 
-    malloc_mutex_lock(&ctl_mtx);
-    if (ctl_init()) {
+    if (ctl_initialized == false && ctl_init()) {
         ret = EAGAIN;
         goto RETURN;
     }
@@ -841,10 +856,9 @@ ctl_byname(const char *name, void *oldp, size_t *oldlenp, void *newp,
         ret = ENOENT;
         goto RETURN;
     }
-    ret = nodes[depth-1]->ctl(mib, depth, oldp, oldlenp, newp, newlen);
 
+    ret = nodes[depth-1]->ctl(mib, depth, oldp, oldlenp, newp, newlen);
 RETURN:
-    malloc_mutex_unlock(&ctl_mtx);
     return(ret);
 }
 
@@ -853,16 +867,13 @@ ctl_nametomib(const char *name, size_t *mibp, size_t *miblenp)
 {
     int ret;
 
-    malloc_mutex_lock(&ctl_mtx);
-    if (ctl_init()) {
+    if (ctl_initialized == false && ctl_init()) {
         ret = EAGAIN;
         goto RETURN;
     }
 
     ret = ctl_lookup(name, NULL, mibp, miblenp);
-
 RETURN:
-    malloc_mutex_unlock(&ctl_mtx);
     return(ret);
 }
 
@@ -874,8 +885,7 @@ ctl_bymib(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
     const ctl_node_t *node;
     size_t i;
 
-    malloc_mutex_lock(&ctl_mtx);
-    if (ctl_init()) {
+    if (ctl_initialized == false && ctl_init()) {
         ret = EAGAIN;
         goto RETURN;
     }
@@ -912,7 +922,6 @@ ctl_bymib(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
     ret = node->ctl(mib, miblen, oldp, oldlenp, newp, newlen);
 
 RETURN:
-    malloc_mutex_unlock(&ctl_mtx);
     return(ret);
 }
 
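
The hunks above touch ctl_nametomib() and ctl_bymib(), which back the public mallctlnametomib()/mallctlbymib() entry points. A small usage sketch (again assuming an unprefixed build with <jemalloc/jemalloc.h>) of resolving the name to a MIB once and then reading by MIB repeatedly, which skips the per-call name lookup on the hot path:

#include <stdint.h>
#include <stdio.h>
#include <jemalloc/jemalloc.h>

int
main(void)
{
    size_t mib[2];  /* "thread.allocated" has depth 2. */
    size_t miblen = sizeof(mib) / sizeof(mib[0]);
    uint64_t allocated;
    size_t sz = sizeof(allocated);
    int i;

    /* Resolve the name to a MIB once... */
    if (mallctlnametomib("thread.allocated", mib, &miblen) != 0)
        return (1);

    /* ...then query by MIB as often as needed. */
    for (i = 0; i < 3; i++) {
        if (mallctlbymib(mib, miblen, &allocated, &sz, NULL, 0) != 0)
            return (1);
        printf("thread.allocated = %llu\n", (unsigned long long)allocated);
    }
    return (0);
}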
@@ -975,6 +984,29 @@ ctl_boot(void)
 
 #define CTL_RO_GEN(n, v, t) \
 static int \
+n##_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp, \
+    void *newp, size_t newlen) \
+{ \
+    int ret; \
+    t oldval; \
+ \
+    malloc_mutex_lock(&ctl_mtx); \
+    READONLY(); \
+    oldval = v; \
+    READ(oldval, t); \
+ \
+    ret = 0; \
+RETURN: \
+    malloc_mutex_unlock(&ctl_mtx); \
+    return (ret); \
+}
+
+/*
+ * ctl_mtx is not acquired, under the assumption that no pertinent data will
+ * mutate during the call.
+ */
+#define CTL_RO_NL_GEN(n, v, t) \
+static int \
 n##_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp, \
     void *newp, size_t newlen) \
 { \
@@ -1024,7 +1056,7 @@ RETURN: \
     return (ret); \
 }
 
-CTL_RO_GEN(version, JEMALLOC_VERSION, const char *)
+CTL_RO_NL_GEN(version, JEMALLOC_VERSION, const char *)
 
 static int
 epoch_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
@@ -1033,6 +1065,7 @@ epoch_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
     int ret;
     uint64_t newval;
 
+    malloc_mutex_lock(&ctl_mtx);
     newval = 0;
     WRITE(newval, uint64_t);
     if (newval != 0)
@@ -1041,6 +1074,7 @@ epoch_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
 
     ret = 0;
 RETURN:
+    malloc_mutex_unlock(&ctl_mtx);
     return (ret);
 }
 
@@ -1107,8 +1141,8 @@ RETURN:
 }
 
 #ifdef JEMALLOC_STATS
-CTL_RO_GEN(thread_allocated, ALLOCATED_GET(), uint64_t);
-CTL_RO_GEN(thread_deallocated, DEALLOCATED_GET(), uint64_t);
+CTL_RO_NL_GEN(thread_allocated, ALLOCATED_GET(), uint64_t);
+CTL_RO_NL_GEN(thread_deallocated, DEALLOCATED_GET(), uint64_t);
 #endif
 
 /******************************************************************************/
@@ -1205,48 +1239,48 @@ CTL_RO_FALSE_GEN(config_xmalloc)
 
 /******************************************************************************/
 
-CTL_RO_GEN(opt_abort, opt_abort, bool)
-CTL_RO_GEN(opt_lg_qspace_max, opt_lg_qspace_max, size_t)
-CTL_RO_GEN(opt_lg_cspace_max, opt_lg_cspace_max, size_t)
-CTL_RO_GEN(opt_lg_chunk, opt_lg_chunk, size_t)
-CTL_RO_GEN(opt_narenas, opt_narenas, size_t)
-CTL_RO_GEN(opt_lg_dirty_mult, opt_lg_dirty_mult, ssize_t)
-CTL_RO_GEN(opt_stats_print, opt_stats_print, bool)
+CTL_RO_NL_GEN(opt_abort, opt_abort, bool)
+CTL_RO_NL_GEN(opt_lg_qspace_max, opt_lg_qspace_max, size_t)
+CTL_RO_NL_GEN(opt_lg_cspace_max, opt_lg_cspace_max, size_t)
+CTL_RO_NL_GEN(opt_lg_chunk, opt_lg_chunk, size_t)
+CTL_RO_NL_GEN(opt_narenas, opt_narenas, size_t)
+CTL_RO_NL_GEN(opt_lg_dirty_mult, opt_lg_dirty_mult, ssize_t)
+CTL_RO_NL_GEN(opt_stats_print, opt_stats_print, bool)
 #ifdef JEMALLOC_FILL
-CTL_RO_GEN(opt_junk, opt_junk, bool)
-CTL_RO_GEN(opt_zero, opt_zero, bool)
+CTL_RO_NL_GEN(opt_junk, opt_junk, bool)
+CTL_RO_NL_GEN(opt_zero, opt_zero, bool)
 #endif
 #ifdef JEMALLOC_SYSV
-CTL_RO_GEN(opt_sysv, opt_sysv, bool)
+CTL_RO_NL_GEN(opt_sysv, opt_sysv, bool)
 #endif
 #ifdef JEMALLOC_XMALLOC
-CTL_RO_GEN(opt_xmalloc, opt_xmalloc, bool)
+CTL_RO_NL_GEN(opt_xmalloc, opt_xmalloc, bool)
 #endif
 #ifdef JEMALLOC_TCACHE
-CTL_RO_GEN(opt_tcache, opt_tcache, bool)
-CTL_RO_GEN(opt_lg_tcache_gc_sweep, opt_lg_tcache_gc_sweep, ssize_t)
+CTL_RO_NL_GEN(opt_tcache, opt_tcache, bool)
+CTL_RO_NL_GEN(opt_lg_tcache_gc_sweep, opt_lg_tcache_gc_sweep, ssize_t)
 #endif
 #ifdef JEMALLOC_PROF
-CTL_RO_GEN(opt_prof, opt_prof, bool)
-CTL_RO_GEN(opt_prof_prefix, opt_prof_prefix, const char *)
-CTL_RO_GEN(opt_prof_active, opt_prof_active, bool)
-CTL_RO_GEN(opt_lg_prof_bt_max, opt_lg_prof_bt_max, size_t)
-CTL_RO_GEN(opt_lg_prof_sample, opt_lg_prof_sample, size_t)
-CTL_RO_GEN(opt_lg_prof_interval, opt_lg_prof_interval, ssize_t)
-CTL_RO_GEN(opt_prof_gdump, opt_prof_gdump, bool)
-CTL_RO_GEN(opt_prof_leak, opt_prof_leak, bool)
-CTL_RO_GEN(opt_prof_accum, opt_prof_accum, bool)
-CTL_RO_GEN(opt_lg_prof_tcmax, opt_lg_prof_tcmax, ssize_t)
+CTL_RO_NL_GEN(opt_prof, opt_prof, bool)
+CTL_RO_NL_GEN(opt_prof_prefix, opt_prof_prefix, const char *)
+CTL_RO_GEN(opt_prof_active, opt_prof_active, bool) /* Mutable. */
+CTL_RO_NL_GEN(opt_lg_prof_bt_max, opt_lg_prof_bt_max, size_t)
+CTL_RO_NL_GEN(opt_lg_prof_sample, opt_lg_prof_sample, size_t)
+CTL_RO_NL_GEN(opt_lg_prof_interval, opt_lg_prof_interval, ssize_t)
+CTL_RO_NL_GEN(opt_prof_gdump, opt_prof_gdump, bool)
+CTL_RO_NL_GEN(opt_prof_leak, opt_prof_leak, bool)
+CTL_RO_NL_GEN(opt_prof_accum, opt_prof_accum, bool)
+CTL_RO_NL_GEN(opt_lg_prof_tcmax, opt_lg_prof_tcmax, ssize_t)
 #endif
 #ifdef JEMALLOC_SWAP
-CTL_RO_GEN(opt_overcommit, opt_overcommit, bool)
+CTL_RO_NL_GEN(opt_overcommit, opt_overcommit, bool)
 #endif
 
 /******************************************************************************/
 
-CTL_RO_GEN(arenas_bin_i_size, arenas[0]->bins[mib[2]].reg_size, size_t)
-CTL_RO_GEN(arenas_bin_i_nregs, arenas[0]->bins[mib[2]].nregs, uint32_t)
-CTL_RO_GEN(arenas_bin_i_run_size, arenas[0]->bins[mib[2]].run_size, size_t)
+CTL_RO_NL_GEN(arenas_bin_i_size, arenas[0]->bins[mib[2]].reg_size, size_t)
+CTL_RO_NL_GEN(arenas_bin_i_nregs, arenas[0]->bins[mib[2]].nregs, uint32_t)
+CTL_RO_NL_GEN(arenas_bin_i_run_size, arenas[0]->bins[mib[2]].run_size, size_t)
 const ctl_node_t *
 arenas_bin_i_index(const size_t *mib, size_t miblen, size_t i)
 {
@@ -1256,7 +1290,7 @@ arenas_bin_i_index(const size_t *mib, size_t miblen, size_t i)
     return (super_arenas_bin_i_node);
 }
 
-CTL_RO_GEN(arenas_lrun_i_size, ((mib[2]+1) << PAGE_SHIFT), size_t)
+CTL_RO_NL_GEN(arenas_lrun_i_size, ((mib[2]+1) << PAGE_SHIFT), size_t)
 const ctl_node_t *
 arenas_lrun_i_index(const size_t *mib, size_t miblen, size_t i)
 {
@@ -1266,7 +1300,7 @@ arenas_lrun_i_index(const size_t *mib, size_t miblen, size_t i)
     return (super_arenas_lrun_i_node);
 }
 
-CTL_RO_GEN(arenas_narenas, narenas, unsigned)
+CTL_RO_NL_GEN(arenas_narenas, narenas, unsigned)
 
 static int
 arenas_initialized_ctl(const size_t *mib, size_t miblen, void *oldp,
@@ -1275,6 +1309,7 @@ arenas_initialized_ctl(const size_t *mib, size_t miblen, void *oldp,
     int ret;
     unsigned nread, i;
 
+    malloc_mutex_lock(&ctl_mtx);
     READONLY();
     if (*oldlenp != narenas * sizeof(bool)) {
         ret = EINVAL;
@@ -1289,36 +1324,37 @@ arenas_initialized_ctl(const size_t *mib, size_t miblen, void *oldp,
         ((bool *)oldp)[i] = ctl_stats.arenas[i].initialized;
 
 RETURN:
+    malloc_mutex_unlock(&ctl_mtx);
     return (ret);
 }
 
-CTL_RO_GEN(arenas_quantum, QUANTUM, size_t)
-CTL_RO_GEN(arenas_cacheline, CACHELINE, size_t)
-CTL_RO_GEN(arenas_subpage, SUBPAGE, size_t)
-CTL_RO_GEN(arenas_pagesize, PAGE_SIZE, size_t)
-CTL_RO_GEN(arenas_chunksize, chunksize, size_t)
+CTL_RO_NL_GEN(arenas_quantum, QUANTUM, size_t)
+CTL_RO_NL_GEN(arenas_cacheline, CACHELINE, size_t)
+CTL_RO_NL_GEN(arenas_subpage, SUBPAGE, size_t)
+CTL_RO_NL_GEN(arenas_pagesize, PAGE_SIZE, size_t)
+CTL_RO_NL_GEN(arenas_chunksize, chunksize, size_t)
 #ifdef JEMALLOC_TINY
-CTL_RO_GEN(arenas_tspace_min, (1U << LG_TINY_MIN), size_t)
-CTL_RO_GEN(arenas_tspace_max, (qspace_min >> 1), size_t)
+CTL_RO_NL_GEN(arenas_tspace_min, (1U << LG_TINY_MIN), size_t)
+CTL_RO_NL_GEN(arenas_tspace_max, (qspace_min >> 1), size_t)
 #endif
-CTL_RO_GEN(arenas_qspace_min, qspace_min, size_t)
-CTL_RO_GEN(arenas_qspace_max, qspace_max, size_t)
-CTL_RO_GEN(arenas_cspace_min, cspace_min, size_t)
-CTL_RO_GEN(arenas_cspace_max, cspace_max, size_t)
-CTL_RO_GEN(arenas_sspace_min, sspace_min, size_t)
-CTL_RO_GEN(arenas_sspace_max, sspace_max, size_t)
+CTL_RO_NL_GEN(arenas_qspace_min, qspace_min, size_t)
+CTL_RO_NL_GEN(arenas_qspace_max, qspace_max, size_t)
+CTL_RO_NL_GEN(arenas_cspace_min, cspace_min, size_t)
+CTL_RO_NL_GEN(arenas_cspace_max, cspace_max, size_t)
+CTL_RO_NL_GEN(arenas_sspace_min, sspace_min, size_t)
+CTL_RO_NL_GEN(arenas_sspace_max, sspace_max, size_t)
 #ifdef JEMALLOC_TCACHE
-CTL_RO_GEN(arenas_tcache_max, tcache_maxclass, size_t)
+CTL_RO_NL_GEN(arenas_tcache_max, tcache_maxclass, size_t)
 #endif
-CTL_RO_GEN(arenas_ntbins, ntbins, unsigned)
-CTL_RO_GEN(arenas_nqbins, nqbins, unsigned)
-CTL_RO_GEN(arenas_ncbins, ncbins, unsigned)
-CTL_RO_GEN(arenas_nsbins, nsbins, unsigned)
-CTL_RO_GEN(arenas_nbins, nbins, unsigned)
+CTL_RO_NL_GEN(arenas_ntbins, ntbins, unsigned)
+CTL_RO_NL_GEN(arenas_nqbins, nqbins, unsigned)
+CTL_RO_NL_GEN(arenas_ncbins, ncbins, unsigned)
+CTL_RO_NL_GEN(arenas_nsbins, nsbins, unsigned)
+CTL_RO_NL_GEN(arenas_nbins, nbins, unsigned)
 #ifdef JEMALLOC_TCACHE
-CTL_RO_GEN(arenas_nhbins, nhbins, unsigned)
+CTL_RO_NL_GEN(arenas_nhbins, nhbins, unsigned)
 #endif
-CTL_RO_GEN(arenas_nlruns, nlclasses, size_t)
+CTL_RO_NL_GEN(arenas_nlruns, nlclasses, size_t)
 
 static int
 arenas_purge_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
@@ -1368,6 +1404,7 @@ prof_active_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
     int ret;
     bool oldval;
 
+    malloc_mutex_lock(&ctl_mtx); /* Protect opt_prof_active. */
     oldval = opt_prof_active;
     if (newp != NULL) {
         /*
@@ -1382,6 +1419,7 @@ prof_active_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
 
     ret = 0;
 RETURN:
+    malloc_mutex_unlock(&ctl_mtx);
     return (ret);
 }
 
@@ -1405,7 +1443,7 @@ RETURN:
     return (ret);
 }
 
-CTL_RO_GEN(prof_interval, prof_interval, uint64_t)
+CTL_RO_NL_GEN(prof_interval, prof_interval, uint64_t)
 #endif
 
 /******************************************************************************/
@@ -1503,10 +1541,18 @@ CTL_RO_GEN(stats_arenas_i_purged, ctl_stats.arenas[mib[2]].astats.purged,
 const ctl_node_t *
 stats_arenas_i_index(const size_t *mib, size_t miblen, size_t i)
 {
+    const ctl_node_t * ret;
 
-    if (ctl_stats.arenas[i].initialized == false)
-        return (NULL);
-    return (super_stats_arenas_i_node);
+    malloc_mutex_lock(&ctl_mtx);
+    if (ctl_stats.arenas[i].initialized == false) {
+        ret = NULL;
+        goto RETURN;
+    }
+
+    ret = super_stats_arenas_i_node;
+RETURN:
+    malloc_mutex_unlock(&ctl_mtx);
+    return (ret);
 }
 
 #ifdef JEMALLOC_STATS
@@ -1528,6 +1574,7 @@ swap_prezeroed_ctl(const size_t *mib, size_t miblen, void *oldp,
 {
     int ret;
 
+    malloc_mutex_lock(&ctl_mtx);
     if (swap_enabled) {
         READONLY();
     } else {
@@ -1545,6 +1592,7 @@ swap_prezeroed_ctl(const size_t *mib, size_t miblen, void *oldp,
 
     ret = 0;
 RETURN:
+    malloc_mutex_unlock(&ctl_mtx);
     return (ret);
 }
 
@@ -1556,6 +1604,7 @@ swap_fds_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
 {
     int ret;
 
+    malloc_mutex_lock(&ctl_mtx);
     if (swap_enabled) {
         READONLY();
     } else if (newp != NULL) {
@@ -1586,6 +1635,7 @@ swap_fds_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
 
     ret = 0;
 RETURN:
+    malloc_mutex_unlock(&ctl_mtx);
     return (ret);
 }
 #endif