Add sampling activation/deactivation control.

Add the E/e options to control whether the application starts with
sampling active/inactive (secondary control to F/f).  Add the
prof.active mallctl so that the application can activate/deactivate
sampling on the fly.
Jason Evans 2010-03-31 18:43:24 -07:00
parent a02fc08ec9
commit f18c982001
5 changed files with 68 additions and 1 deletion
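
For context, a minimal sketch of how an application might drive the new mallctl at runtime. The unprefixed mallctl symbol and the <jemalloc/jemalloc.h> header are assumptions; depending on configuration, the public entry point may be name-mangled (e.g. through the JEMALLOC_P() macro in this era of the tree):

#include <stdbool.h>
#include <jemalloc/jemalloc.h>

/* Enable or disable profile sampling on the fly via "prof.active". */
static int
set_prof_active(bool active)
{

	return (mallctl("prof.active", NULL, NULL, &active,
	    sizeof(active)));
}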

View File

@@ -341,6 +341,16 @@ physical memory becomes scarce and the pages remain unused.
The default minimum ratio is 32:1;
.Ev JEMALLOC_OPTIONS=6D
will disable dirty page purging.
@roff_prof@.It E
@roff_prof@Activate/deactivate profiling.
@roff_prof@This is a secondary control mechanism that makes it possible to
@roff_prof@start the application with profiling enabled (see the
@roff_prof@.Dq F
@roff_prof@option) but inactive, then toggle profiling at any time during
@roff_prof@program execution with the
@roff_prof@.Dq prof.active
@roff_prof@mallctl.
@roff_prof@This option is enabled by default.
@roff_prof@.It F
@roff_prof@Profile memory allocation activity, and use an
@roff_prof@.Xr atexit 3
@@ -356,6 +366,9 @@ will disable dirty page purging.
@roff_prof@.Dq B
@roff_prof@option for backtrace depth control.
@roff_prof@See the
@roff_prof@.Dq E
@roff_prof@option for on-the-fly activation/deactivation.
@roff_prof@See the
@roff_prof@.Dq S
@roff_prof@option for probabilistic sampling control.
@roff_prof@See the
@@ -993,6 +1006,14 @@ Total number of large size classes.
Maximum size supported by this large size class.
.Ed
.\"-----------------------------------------------------------------------------
@roff_prof@.It Sy "prof.active (bool) rw"
@roff_prof@.Bd -ragged -offset indent -compact
@roff_prof@Control whether sampling is currently active.
@roff_prof@See the
@roff_prof@.Dq E
@roff_prof@option for additional information.
@roff_prof@.Ed
.\"-----------------------------------------------------------------------------
@roff_prof@.It Sy "prof.dump (const char *) -w"
@roff_prof@.Bd -ragged -offset indent -compact
@roff_prof@Dump a memory profile to the specified file, or if NULL is specified,
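
Since the prof.active entry above is marked rw, the current state can also be queried without changing it; a small fragment under the same naming assumption (plus <stdio.h>), with newp left NULL so no write occurs:

bool active;
size_t sz = sizeof(active);

/* Query only: oldp/oldlenp receive the current value. */
if (mallctl("prof.active", &active, &sz, NULL, 0) == 0)
	printf("sampling is %s\n", active ? "active" : "inactive");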

View File

@@ -119,6 +119,13 @@ struct prof_ctx_s {
#ifdef JEMALLOC_H_EXTERNS
extern bool opt_prof;
/*
 * Even if opt_prof is true, sampling can be temporarily disabled by setting
 * opt_prof_active to false.  No locking is used when updating opt_prof_active,
 * so there are no guarantees regarding how long it will take for all threads
 * to notice state changes.
 */
extern bool opt_prof_active;
extern size_t opt_lg_prof_bt_max;	/* Maximum backtrace depth. */
extern size_t opt_lg_prof_sample;	/* Mean bytes between samples. */
extern ssize_t opt_lg_prof_interval;	/* lg(prof_interval). */
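
The comment above is the operative caveat: opt_prof_active is read without locks, so other threads observe a toggle only approximately. Bracketing a region of interest still works when a slightly ragged edge is tolerable. A sketch; run_workload() is a hypothetical application function, and the final call uses the existing prof.dump mallctl with a NULL filename to request the default dump file:

bool on = true, off = false;

mallctl("prof.active", NULL, NULL, &on, sizeof(on));
run_workload();		/* hypothetical: the code to be profiled */
mallctl("prof.active", NULL, NULL, &off, sizeof(off));
mallctl("prof.dump", NULL, NULL, NULL, 0);	/* default dump file */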

View File

@@ -75,6 +75,7 @@ CTL_PROTO(opt_lg_tcache_gc_sweep)
#endif
#ifdef JEMALLOC_PROF
CTL_PROTO(opt_prof)
CTL_PROTO(opt_prof_active)
CTL_PROTO(opt_lg_prof_bt_max)
CTL_PROTO(opt_lg_prof_sample)
CTL_PROTO(opt_lg_prof_interval)
@@ -125,6 +126,7 @@ CTL_PROTO(arenas_nhbins)
#endif
CTL_PROTO(arenas_nlruns)
#ifdef JEMALLOC_PROF
CTL_PROTO(prof_active)
CTL_PROTO(prof_dump)
CTL_PROTO(prof_interval)
#endif
@@ -246,6 +248,7 @@ static const ctl_node_t opt_node[] = {
#endif
#ifdef JEMALLOC_PROF
	{NAME("prof"), CTL(opt_prof)},
	{NAME("prof_active"), CTL(opt_prof_active)},
	{NAME("lg_prof_bt_max"), CTL(opt_lg_prof_bt_max)},
	{NAME("lg_prof_sample"), CTL(opt_lg_prof_sample)},
	{NAME("lg_prof_interval"), CTL(opt_lg_prof_interval)},
@@ -323,6 +326,7 @@ static const ctl_node_t arenas_node[] = {
#ifdef JEMALLOC_PROF
static const ctl_node_t prof_node[] = {
	{NAME("active"), CTL(prof_active)},
	{NAME("dump"), CTL(prof_dump)},
	{NAME("interval"), CTL(prof_interval)}
};
@@ -1151,6 +1155,7 @@ CTL_RO_GEN(opt_lg_tcache_gc_sweep, opt_lg_tcache_gc_sweep, ssize_t)
#endif
#ifdef JEMALLOC_PROF
CTL_RO_GEN(opt_prof, opt_prof, bool)
CTL_RO_GEN(opt_prof_active, opt_prof_active, bool)
CTL_RO_GEN(opt_lg_prof_bt_max, opt_lg_prof_bt_max, size_t)
CTL_RO_GEN(opt_lg_prof_sample, opt_lg_prof_sample, size_t)
CTL_RO_GEN(opt_lg_prof_interval, opt_lg_prof_interval, ssize_t)
@@ -1247,6 +1252,30 @@ CTL_RO_GEN(arenas_nlruns, nlclasses, size_t)
/******************************************************************************/
#ifdef JEMALLOC_PROF
static int
prof_active_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
    void *newp, size_t newlen)
{
	int ret;
	bool oldval;

	oldval = opt_prof_active;
	if (newp != NULL) {
		/*
		 * The memory barriers will tend to make opt_prof_active
		 * propagate faster on systems with weak memory ordering.
		 */
		mb_write();
		WRITE(opt_prof_active, bool);
		mb_write();
	}
	READ(oldval, bool);

	ret = 0;
RETURN:
	return (ret);
}

static int
prof_dump_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
    void *newp, size_t newlen)
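
Note how prof_active_ctl captures oldval before applying the write: per the usual ctl convention, one call can return the previous state through oldp while installing a new value from newp. A minimal fetch-and-set sketch under the same naming assumptions as earlier:

bool was_active, enable = false;
size_t sz = sizeof(was_active);

/* Disable sampling and learn whether it had been enabled. */
mallctl("prof.active", &was_active, &sz, &enable, sizeof(enable));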

View File

@@ -460,6 +460,12 @@ MALLOC_OUT:
				opt_lg_dirty_mult--;
				break;
#ifdef JEMALLOC_PROF
			case 'e':
				opt_prof_active = false;
				break;
			case 'E':
				opt_prof_active = true;
				break;
			case 'f':
				opt_prof = false;
				break;

View File

@@ -18,6 +18,7 @@
/* Data. */

bool opt_prof = false;
bool opt_prof_active = true;
size_t opt_lg_prof_bt_max = LG_PROF_BT_MAX_DEFAULT;
size_t opt_lg_prof_sample = LG_PROF_SAMPLE_DEFAULT;
ssize_t opt_lg_prof_interval = LG_PROF_INTERVAL_DEFAULT;
@@ -537,7 +538,10 @@ prof_alloc_prep(size_t size)
	void *vec[prof_bt_max];
	prof_bt_t bt;

	if (opt_prof_active == false) {
		/* Sampling is currently inactive, so avoid sampling. */
		ret = (prof_thr_cnt_t *)(uintptr_t)1U;
	} else if (opt_lg_prof_sample == 0) {
		/*
		 * Don't bother with sampling logic, since sampling interval is
		 * 1.