From 81c6027592d59383107b3a7a26caddb787ed10c7 Mon Sep 17 00:00:00 2001
From: David Goldblatt
Date: Thu, 12 Mar 2020 09:36:25 -0700
Subject: [PATCH] Arena stats: Give it its own "mapped".

This distinguishes it from the PA mapped stat, which is now named
"pa_mapped" to avoid confusion. The (derived) arena stat includes base
memory, and the PA stat is no longer partially derived.
---
 include/jemalloc/internal/arena_stats.h |  1 +
 include/jemalloc/internal/pa.h          |  9 +++++----
 src/arena.c                             |  5 ++---
 src/ctl.c                               | 11 +++--------
 src/extent.c                            |  2 +-
 src/pa.c                                | 10 +++++-----
 6 files changed, 17 insertions(+), 21 deletions(-)

diff --git a/include/jemalloc/internal/arena_stats.h b/include/jemalloc/internal/arena_stats.h
index 3b3441f1..9effa61b 100644
--- a/include/jemalloc/internal/arena_stats.h
+++ b/include/jemalloc/internal/arena_stats.h
@@ -53,6 +53,7 @@ struct arena_stats_s {
 	size_t base; /* Derived. */
 	size_t resident; /* Derived. */
 	size_t metadata_thp; /* Derived. */
+	size_t mapped; /* Derived. */
 
 	atomic_zu_t internal;
 
diff --git a/include/jemalloc/internal/pa.h b/include/jemalloc/internal/pa.h
index 8c82823c..1bffa9ef 100644
--- a/include/jemalloc/internal/pa.h
+++ b/include/jemalloc/internal/pa.h
@@ -70,12 +70,13 @@ struct pa_shard_stats_s {
 	size_t retained; /* Derived. */
 
 	/*
-	 * Number of bytes currently mapped, excluding retained memory.
+	 * Number of bytes currently mapped, excluding retained memory (and any
+	 * base-allocated memory, which is tracked by the arena stats).
 	 *
-	 * Partially derived -- we maintain our own counter, but add in the
-	 * base's own counter at merge.
+	 * We name this "pa_mapped" to avoid confusion with the arena_stats
+	 * "mapped".
 	 */
-	atomic_zu_t mapped;
+	atomic_zu_t pa_mapped;
 
 	/* Number of edata_t structs allocated by base, but not being used. */
 	size_t edata_avail; /* Derived. */
diff --git a/src/arena.c b/src/arena.c
index 10a24688..07a60510 100644
--- a/src/arena.c
+++ b/src/arena.c
@@ -88,10 +88,9 @@ arena_stats_merge(tsdn_t *tsdn, arena_t *arena, unsigned *nthreads,
 	size_t base_allocated, base_resident, base_mapped, metadata_thp;
 	base_stats_get(tsdn, arena->base, &base_allocated, &base_resident,
 	    &base_mapped, &metadata_thp);
-	size_t mapped = atomic_load_zu(&arena->pa_shard.stats->mapped,
+	size_t pa_mapped = atomic_load_zu(&arena->pa_shard.stats->pa_mapped,
 	    ATOMIC_RELAXED);
-	atomic_load_add_store_zu(&astats->pa_shard_stats.mapped,
-	    base_mapped + mapped);
+	astats->mapped += base_mapped + pa_mapped;
 
 	LOCKEDINT_MTX_LOCK(tsdn, arena->stats.mtx);
 
diff --git a/src/ctl.c b/src/ctl.c
index a3cc74ac..00fd7441 100644
--- a/src/ctl.c
+++ b/src/ctl.c
@@ -855,9 +855,7 @@ ctl_arena_stats_sdmerge(ctl_arena_t *ctl_sdarena, ctl_arena_t *ctl_arena,
 	ctl_arena_stats_t *astats = ctl_arena->astats;
 
 	if (!destroyed) {
-		ctl_accum_atomic_zu(
-		    &sdstats->astats.pa_shard_stats.mapped,
-		    &astats->astats.pa_shard_stats.mapped);
+		sdstats->astats.mapped += astats->astats.mapped;
 		sdstats->astats.pa_shard_stats.retained
 		    += astats->astats.pa_shard_stats.retained;
 		sdstats->astats.pa_shard_stats.edata_avail
@@ -1085,9 +1083,7 @@ ctl_refresh(tsdn_t *tsdn) {
 		ctl_stats->resident = ctl_sarena->astats->astats.resident;
 		ctl_stats->metadata_thp =
 		    ctl_sarena->astats->astats.metadata_thp;
-		ctl_stats->mapped = atomic_load_zu(
-		    &ctl_sarena->astats->astats.pa_shard_stats.mapped,
-		    ATOMIC_RELAXED);
+		ctl_stats->mapped = ctl_sarena->astats->astats.mapped;
 		ctl_stats->retained =
 		    ctl_sarena->astats->astats.pa_shard_stats.retained;
@@ -2898,8 +2894,7 @@ CTL_RO_GEN(stats_arenas_i_pactive, arenas_i(mib[2])->pactive, size_t)
 CTL_RO_GEN(stats_arenas_i_pdirty, arenas_i(mib[2])->pdirty, size_t)
 CTL_RO_GEN(stats_arenas_i_pmuzzy, arenas_i(mib[2])->pmuzzy, size_t)
 CTL_RO_CGEN(config_stats, stats_arenas_i_mapped,
-    atomic_load_zu(&arenas_i(mib[2])->astats->astats.pa_shard_stats.mapped,
-    ATOMIC_RELAXED), size_t)
+    arenas_i(mib[2])->astats->astats.mapped, size_t)
 CTL_RO_CGEN(config_stats, stats_arenas_i_retained,
     arenas_i(mib[2])->astats->astats.pa_shard_stats.retained, size_t)
 
diff --git a/src/extent.c b/src/extent.c
index 62ebff52..05d1755e 100644
--- a/src/extent.c
+++ b/src/extent.c
@@ -967,7 +967,7 @@ extent_maximally_purge(tsdn_t *tsdn, pa_shard_t *shard, ehooks_t *ehooks,
 		    &shard->stats->decay_dirty.purged,
 		    extent_size >> LG_PAGE);
 		LOCKEDINT_MTX_UNLOCK(tsdn, *shard->stats_mtx);
-		atomic_fetch_sub_zu(&shard->stats->mapped, extent_size,
+		atomic_fetch_sub_zu(&shard->stats->pa_mapped, extent_size,
 		    ATOMIC_RELAXED);
 	}
 }
diff --git a/src/pa.c b/src/pa.c
index 10a4401e..1b7d374c 100644
--- a/src/pa.c
+++ b/src/pa.c
@@ -101,8 +101,8 @@ pa_alloc(tsdn_t *tsdn, pa_shard_t *shard, size_t size, size_t alignment,
 	if (edata != NULL) {
 		pa_nactive_add(shard, size >> LG_PAGE);
 		if (config_stats && mapped_add > 0) {
-			atomic_fetch_add_zu(&shard->stats->mapped, mapped_add,
-			    ATOMIC_RELAXED);
+			atomic_fetch_add_zu(&shard->stats->pa_mapped,
+			    mapped_add, ATOMIC_RELAXED);
 		}
 	}
 	return edata;
@@ -147,7 +147,7 @@ pa_expand(tsdn_t *tsdn, pa_shard_t *shard, edata_t *edata, size_t old_size,
 		return true;
 	}
 	if (config_stats && mapped_add > 0) {
-		atomic_fetch_add_zu(&shard->stats->mapped, mapped_add,
+		atomic_fetch_add_zu(&shard->stats->pa_mapped, mapped_add,
 		    ATOMIC_RELAXED);
 	}
 	pa_nactive_add(shard, expand_amount >> LG_PAGE);
@@ -270,8 +270,8 @@ pa_decay_stashed(tsdn_t *tsdn, pa_shard_t *shard, decay_t *decay,
 		locked_inc_u64(tsdn, LOCKEDINT_MTX(*shard->stats_mtx),
 		    &decay_stats->purged, npurged);
 		LOCKEDINT_MTX_UNLOCK(tsdn, *shard->stats_mtx);
-		atomic_fetch_sub_zu(&shard->stats->mapped, nunmapped << LG_PAGE,
-		    ATOMIC_RELAXED);
+		atomic_fetch_sub_zu(&shard->stats->pa_mapped,
+		    nunmapped << LG_PAGE, ATOMIC_RELAXED);
 	}
 
 	return npurged;
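
As a quick illustration of the accounting this patch settles on, here is a
minimal, self-contained C sketch (the *_sketch types and function names are
stand-ins invented for this note, not the real jemalloc definitions): the PA
shard keeps its own "pa_mapped" counter, updated directly with relaxed atomics
and excluding retained and base-allocated memory, while the arena-level
"mapped" stat is purely derived at merge time as base-allocated mapped bytes
plus the shard's pa_mapped.

/*
 * Sketch only: mirrors the split between the directly-maintained shard
 * counter ("pa_mapped") and the derived arena stat ("mapped").
 */
#include <stdatomic.h>
#include <stddef.h>

typedef struct {
	/* Updated directly; excludes retained and base-allocated memory. */
	atomic_size_t pa_mapped;
} pa_shard_stats_sketch_t;

typedef struct {
	/* Derived; only computed when arena stats are merged. */
	size_t mapped;
} arena_stats_sketch_t;

/* Merge-time derivation, as in arena_stats_merge(): base + pa_mapped. */
static void
merge_mapped_sketch(arena_stats_sketch_t *astats, size_t base_mapped,
    pa_shard_stats_sketch_t *pa_stats) {
	size_t pa_mapped = atomic_load_explicit(&pa_stats->pa_mapped,
	    memory_order_relaxed);
	astats->mapped += base_mapped + pa_mapped;
}

int
main(void) {
	pa_shard_stats_sketch_t pa_stats = {0};
	arena_stats_sketch_t astats = {0};

	/* pa_alloc()-style update: new mapping charged to the shard counter. */
	atomic_fetch_add_explicit(&pa_stats.pa_mapped, (size_t)4096,
	    memory_order_relaxed);
	/* 8192 stands in for the base_mapped value from base_stats_get(). */
	merge_mapped_sketch(&astats, 8192, &pa_stats);
	return astats.mapped == 12288 ? 0 : 1;
}

Keeping base memory out of the shard counter is what lets the PA stat stop
being partially derived, which is the point of the rename.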