Remove obsolete stats.arenas.<i>.metadata.mapped mallctl.

Rename stats.arenas.<i>.metadata.allocated mallctl to
stats.arenas.<i>.metadata.
Jason Evans 2016-06-01 13:40:48 -07:00
parent 03eea4fb8b
commit 751f2c332d
8 changed files with 34 additions and 73 deletions
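From the application's side only the control name changes; a minimal consumer-side sketch (not part of this commit, assuming the unprefixed public mallctl() API and a --enable-stats build) that refreshes the stats epoch and then reads the renamed statistic for arena 0:

    #include <stdio.h>
    #include <stdint.h>
    #include <jemalloc/jemalloc.h>

    int
    main(void)
    {
        /* Refresh the cached statistics before reading any stats.* control. */
        uint64_t epoch = 1;
        size_t sz = sizeof(epoch);
        if (mallctl("epoch", &epoch, &sz, &epoch, sz) != 0)
            return (1);

        /* Read the per-arena internal-metadata statistic for arena 0. */
        size_t metadata;
        sz = sizeof(metadata);
        if (mallctl("stats.arenas.0.metadata", &metadata, &sz, NULL, 0) != 0)
            return (1);
        printf("arena 0 metadata: %zu bytes\n", metadata);
        return (0);
    }

The "epoch" write matters because stats.* controls return cached snapshots; a read only reflects allocator state as of the most recent epoch update.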

------------------------------------------------------------

@@ -2059,10 +2059,8 @@ typedef struct {
 </term>
 <listitem><para>Total number of bytes dedicated to metadata, which
 comprise base allocations used for bootstrap-sensitive internal
-allocator data structures, arena chunk headers (see <link
-linkend="stats.arenas.i.metadata.mapped"><mallctl>stats.arenas.&lt;i&gt;.metadata.mapped</mallctl></link>),
-and internal allocations (see <link
-linkend="stats.arenas.i.metadata.allocated"><mallctl>stats.arenas.&lt;i&gt;.metadata.allocated</mallctl></link>).</para></listitem>
+allocator data structures and internal allocations (see <link
+linkend="stats.arenas.i.metadata"><mallctl>stats.arenas.&lt;i&gt;.metadata</mallctl></link>).</para></listitem>
 </varlistentry>
 <varlistentry id="stats.resident">
@@ -2210,20 +2208,9 @@ typedef struct {
 details.</para></listitem>
 </varlistentry>
-<varlistentry id="stats.arenas.i.metadata.mapped">
+<varlistentry id="stats.arenas.i.metadata">
 <term>
-<mallctl>stats.arenas.&lt;i&gt;.metadata.mapped</mallctl>
-(<type>size_t</type>)
-<literal>r-</literal>
-[<option>--enable-stats</option>]
-</term>
-<listitem><para>Number of mapped bytes in arena chunk headers, which
-track the states of the non-metadata pages.</para></listitem>
-</varlistentry>
-<varlistentry id="stats.arenas.i.metadata.allocated">
-<term>
-<mallctl>stats.arenas.&lt;i&gt;.metadata.allocated</mallctl>
+<mallctl>stats.arenas.&lt;i&gt;.metadata</mallctl>
 (<type>size_t</type>)
 <literal>r-</literal>
 [<option>--enable-stats</option>]
@@ -2232,9 +2219,7 @@ typedef struct {
 Internal allocations differ from application-originated allocations in
 that they are for internal use, and that they are omitted from heap
 profiles. This statistic is reported separately from <link
-linkend="stats.metadata"><mallctl>stats.metadata</mallctl></link> and
-<link
-linkend="stats.arenas.i.metadata.mapped"><mallctl>stats.arenas.&lt;i&gt;.metadata.mapped</mallctl></link>
+linkend="stats.metadata"><mallctl>stats.metadata</mallctl></link>
 because it overlaps with e.g. the <link
 linkend="stats.allocated"><mallctl>stats.allocated</mallctl></link> and
 <link linkend="stats.active"><mallctl>stats.active</mallctl></link>
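When the per-arena control documented above is polled repeatedly, translating the name to a MIB once and substituting the arena index mirrors the CTL_M2_GET() usage visible in stats_arena_print() further down; a hedged helper sketch (the helper name and array size are illustrative, while mallctlnametomib()/mallctlbymib() are the standard public API):

    #include <stddef.h>
    #include <jemalloc/jemalloc.h>

    /* Read stats.arenas.<i>.metadata for a given arena index via a MIB. */
    static int
    arena_metadata_read(unsigned arena_ind, size_t *metadata)
    {
        size_t mib[5];
        size_t miblen = sizeof(mib) / sizeof(size_t);
        size_t sz = sizeof(*metadata);

        /* Translate the name once; mib[2] is the arena-index slot. */
        if (mallctlnametomib("stats.arenas.0.metadata", mib, &miblen) != 0)
            return (1);
        mib[2] = arena_ind;
        return (mallctlbymib(mib, miblen, metadata, &sz, NULL, 0));
    }

As with the direct-name read, an "epoch" update should precede the query so the statistics snapshot is current.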

------------------------------------------------------------

@@ -359,9 +359,9 @@ void arena_postfork_child(tsdn_t *tsdn, arena_t *arena);
 #ifdef JEMALLOC_H_INLINES
 #ifndef JEMALLOC_ENABLE_INLINE
-void arena_metadata_allocated_add(arena_t *arena, size_t size);
-void arena_metadata_allocated_sub(arena_t *arena, size_t size);
-size_t arena_metadata_allocated_get(arena_t *arena);
+void arena_metadata_add(arena_t *arena, size_t size);
+void arena_metadata_sub(arena_t *arena, size_t size);
+size_t arena_metadata_get(arena_t *arena);
 bool arena_prof_accum_impl(arena_t *arena, uint64_t accumbytes);
 bool arena_prof_accum_locked(arena_t *arena, uint64_t accumbytes);
 bool arena_prof_accum(tsdn_t *tsdn, arena_t *arena, uint64_t accumbytes);
@@ -387,24 +387,24 @@ void arena_sdalloc(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t size,
 #if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_ARENA_C_))
 # ifdef JEMALLOC_ARENA_INLINE_A
 JEMALLOC_INLINE void
-arena_metadata_allocated_add(arena_t *arena, size_t size)
+arena_metadata_add(arena_t *arena, size_t size)
 {
-atomic_add_z(&arena->stats.metadata_allocated, size);
+atomic_add_z(&arena->stats.metadata, size);
 }
 JEMALLOC_INLINE void
-arena_metadata_allocated_sub(arena_t *arena, size_t size)
+arena_metadata_sub(arena_t *arena, size_t size)
 {
-atomic_sub_z(&arena->stats.metadata_allocated, size);
+atomic_sub_z(&arena->stats.metadata, size);
 }
 JEMALLOC_INLINE size_t
-arena_metadata_allocated_get(arena_t *arena)
+arena_metadata_get(arena_t *arena)
 {
-return (atomic_read_z(&arena->stats.metadata_allocated));
+return (atomic_read_z(&arena->stats.metadata));
 }
 JEMALLOC_INLINE bool
------------------------------------------------------------

@@ -1006,7 +1006,7 @@ iallocztm(tsdn_t *tsdn, size_t size, szind_t ind, bool zero, tcache_t *tcache,
 ret = arena_malloc(tsdn, arena, size, ind, zero, tcache, slow_path);
 if (config_stats && is_metadata && likely(ret != NULL)) {
-arena_metadata_allocated_add(iaalloc(tsdn, ret), isalloc(tsdn,
+arena_metadata_add(iaalloc(tsdn, ret), isalloc(tsdn,
 iealloc(tsdn, ret), ret));
 }
 return (ret);
@@ -1034,7 +1034,7 @@ ipallocztm(tsdn_t *tsdn, size_t usize, size_t alignment, bool zero,
 ret = arena_palloc(tsdn, arena, usize, alignment, zero, tcache);
 assert(ALIGNMENT_ADDR2BASE(ret, alignment) == ret);
 if (config_stats && is_metadata && likely(ret != NULL)) {
-arena_metadata_allocated_add(iaalloc(tsdn, ret), isalloc(tsdn,
+arena_metadata_add(iaalloc(tsdn, ret), isalloc(tsdn,
 iealloc(tsdn, ret), ret));
 }
 return (ret);
@@ -1088,8 +1088,8 @@ idalloctm(tsdn_t *tsdn, extent_t *extent, void *ptr, tcache_t *tcache,
 assert(!is_metadata || tcache == NULL);
 assert(!is_metadata || iaalloc(tsdn, ptr)->ind < narenas_auto);
 if (config_stats && is_metadata) {
-arena_metadata_allocated_sub(iaalloc(tsdn, ptr), isalloc(tsdn,
-extent, ptr));
+arena_metadata_sub(iaalloc(tsdn, ptr), isalloc(tsdn, extent,
+ptr));
 }
 arena_dalloc(tsdn, extent, ptr, tcache, slow_path);

------------------------------------------------------------

@@ -41,9 +41,9 @@ arena_lg_dirty_mult_set
 arena_malloc
 arena_malloc_hard
 arena_maybe_purge
-arena_metadata_allocated_add
-arena_metadata_allocated_get
-arena_metadata_allocated_sub
+arena_metadata_add
+arena_metadata_get
+arena_metadata_sub
 arena_migrate
 arena_new
 arena_nthreads_dec

------------------------------------------------------------

@@ -101,12 +101,8 @@ struct arena_stats_s {
 uint64_t nmadvise;
 uint64_t purged;
-/*
- * Number of bytes currently mapped purely for metadata purposes, and
- * number of bytes currently allocated for internal metadata.
- */
-size_t metadata_mapped;
-size_t metadata_allocated; /* Protected via atomic_*_z(). */
+/* Number of bytes currently allocated for internal metadata. */
+size_t metadata; /* Protected via atomic_*_z(). */
 size_t allocated_large;
 uint64_t nmalloc_large;

------------------------------------------------------------

@@ -1686,8 +1686,7 @@ arena_stats_merge(tsdn_t *tsdn, arena_t *arena, unsigned *nthreads,
 astats->npurge += arena->stats.npurge;
 astats->nmadvise += arena->stats.nmadvise;
 astats->purged += arena->stats.purged;
-astats->metadata_mapped += arena->stats.metadata_mapped;
-astats->metadata_allocated += arena_metadata_allocated_get(arena);
+astats->metadata += arena_metadata_get(arena);
 astats->allocated_large += arena->stats.allocated_large;
 astats->nmalloc_large += arena->stats.nmalloc_large;
 astats->ndalloc_large += arena->stats.ndalloc_large;

------------------------------------------------------------

@@ -180,8 +180,7 @@ CTL_PROTO(stats_arenas_i_retained)
 CTL_PROTO(stats_arenas_i_npurge)
 CTL_PROTO(stats_arenas_i_nmadvise)
 CTL_PROTO(stats_arenas_i_purged)
-CTL_PROTO(stats_arenas_i_metadata_mapped)
-CTL_PROTO(stats_arenas_i_metadata_allocated)
+CTL_PROTO(stats_arenas_i_metadata)
 INDEX_PROTO(stats_arenas_i)
 CTL_PROTO(stats_cactive)
 CTL_PROTO(stats_allocated)
@@ -347,11 +346,6 @@ static const ctl_named_node_t prof_node[] = {
 {NAME("lg_sample"), CTL(lg_prof_sample)}
 };
-static const ctl_named_node_t stats_arenas_i_metadata_node[] = {
-{NAME("mapped"), CTL(stats_arenas_i_metadata_mapped)},
-{NAME("allocated"), CTL(stats_arenas_i_metadata_allocated)}
-};
 static const ctl_named_node_t stats_arenas_i_small_node[] = {
 {NAME("allocated"), CTL(stats_arenas_i_small_allocated)},
 {NAME("nmalloc"), CTL(stats_arenas_i_small_nmalloc)},
@@ -411,7 +405,7 @@ static const ctl_named_node_t stats_arenas_i_node[] = {
 {NAME("npurge"), CTL(stats_arenas_i_npurge)},
 {NAME("nmadvise"), CTL(stats_arenas_i_nmadvise)},
 {NAME("purged"), CTL(stats_arenas_i_purged)},
-{NAME("metadata"), CHILD(named, stats_arenas_i_metadata)},
+{NAME("metadata"), CTL(stats_arenas_i_metadata)},
 {NAME("small"), CHILD(named, stats_arenas_i_small)},
 {NAME("large"), CHILD(named, stats_arenas_i_large)},
 {NAME("bins"), CHILD(indexed, stats_arenas_i_bins)},
@@ -522,10 +516,7 @@ ctl_arena_stats_smerge(ctl_arena_stats_t *sstats, ctl_arena_stats_t *astats)
 sstats->astats.nmadvise += astats->astats.nmadvise;
 sstats->astats.purged += astats->astats.purged;
-sstats->astats.metadata_mapped +=
-astats->astats.metadata_mapped;
-sstats->astats.metadata_allocated +=
-astats->astats.metadata_allocated;
+sstats->astats.metadata += astats->astats.metadata;
 sstats->allocated_small += astats->allocated_small;
 sstats->nmalloc_small += astats->nmalloc_small;
@@ -649,11 +640,8 @@ ctl_refresh(tsdn_t *tsdn)
 ctl_stats.active =
 (ctl_stats.arenas[ctl_stats.narenas].pactive << LG_PAGE);
 ctl_stats.metadata = base_allocated +
-ctl_stats.arenas[ctl_stats.narenas].astats.metadata_mapped +
-ctl_stats.arenas[ctl_stats.narenas].astats
-.metadata_allocated;
+ctl_stats.arenas[ctl_stats.narenas].astats.metadata;
 ctl_stats.resident = base_resident +
-ctl_stats.arenas[ctl_stats.narenas].astats.metadata_mapped +
 ((ctl_stats.arenas[ctl_stats.narenas].pactive +
 ctl_stats.arenas[ctl_stats.narenas].pdirty) << LG_PAGE);
 ctl_stats.mapped = base_mapped +
@@ -2001,10 +1989,8 @@ CTL_RO_CGEN(config_stats, stats_arenas_i_nmadvise,
 ctl_stats.arenas[mib[2]].astats.nmadvise, uint64_t)
 CTL_RO_CGEN(config_stats, stats_arenas_i_purged,
 ctl_stats.arenas[mib[2]].astats.purged, uint64_t)
-CTL_RO_CGEN(config_stats, stats_arenas_i_metadata_mapped,
-ctl_stats.arenas[mib[2]].astats.metadata_mapped, size_t)
-CTL_RO_CGEN(config_stats, stats_arenas_i_metadata_allocated,
-ctl_stats.arenas[mib[2]].astats.metadata_allocated, size_t)
+CTL_RO_CGEN(config_stats, stats_arenas_i_metadata,
+ctl_stats.arenas[mib[2]].astats.metadata, size_t)
 CTL_RO_CGEN(config_stats, stats_arenas_i_small_allocated,
 ctl_stats.arenas[mib[2]].allocated_small, size_t)

------------------------------------------------------------

@@ -211,8 +211,7 @@ stats_arena_print(void (*write_cb)(void *, const char *), void *cbopaque,
 unsigned nthreads;
 const char *dss;
 ssize_t lg_dirty_mult, decay_time;
-size_t page, pactive, pdirty, mapped, retained;
-size_t metadata_mapped, metadata_allocated;
+size_t page, pactive, pdirty, mapped, retained, metadata;
 uint64_t npurge, nmadvise, purged;
 size_t small_allocated;
 uint64_t small_nmalloc, small_ndalloc, small_nrequests;
@@ -291,13 +290,9 @@ stats_arena_print(void (*write_cb)(void *, const char *), void *cbopaque,
 CTL_M2_GET("stats.arenas.0.retained", i, &retained, size_t);
 malloc_cprintf(write_cb, cbopaque,
 "retained: %12zu\n", retained);
-CTL_M2_GET("stats.arenas.0.metadata.mapped", i, &metadata_mapped,
-size_t);
-CTL_M2_GET("stats.arenas.0.metadata.allocated", i, &metadata_allocated,
-size_t);
-malloc_cprintf(write_cb, cbopaque,
-"metadata: mapped: %zu, allocated: %zu\n",
-metadata_mapped, metadata_allocated);
+CTL_M2_GET("stats.arenas.0.metadata", i, &metadata, size_t);
+malloc_cprintf(write_cb, cbopaque, "metadata: %12zu\n",
+metadata);
 if (bins)
 stats_arena_bins_print(write_cb, cbopaque, i);
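The user-visible effect of the change above is that each arena block in the textual report now carries a single metadata figure instead of the old mapped/allocated pair; a quick way to see it (a sketch under the same --enable-stats assumption, not part of this commit) is to dump the full report after doing some work:

    #include <stdlib.h>
    #include <jemalloc/jemalloc.h>

    int
    main(void)
    {
        /* Do a little allocation work so the arenas have something to report. */
        for (int i = 0; i < 1000; i++)
            free(malloc(64 * (i % 16 + 1)));

        /*
         * Print the full statistics report; each per-arena block now
         * contains a single "metadata:" line.
         */
        malloc_stats_print(NULL, NULL, NULL);
        return (0);
    }

With the default NULL options string the report includes the merged totals as well as every arena's block, which is where the new single-line metadata statistic appears.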