PA: Move mapped into pa stats.

Committed by David Goldblatt
parent 6ca918d0cf
commit 70d12ffa05

src/arena.c (15 lines changed)
@@ -95,9 +95,10 @@ arena_stats_merge(tsdn_t *tsdn, arena_t *arena, unsigned *nthreads,
 
 	LOCKEDINT_MTX_LOCK(tsdn, arena->stats.mtx);
 
-	locked_inc_zu_unsynchronized(&astats->mapped, base_mapped
-	    + locked_read_zu(tsdn, LOCKEDINT_MTX(arena->stats.mtx),
-	    &arena->stats.mapped));
+	locked_inc_zu_unsynchronized(&astats->pa_shard_stats.mapped,
+	    base_mapped + locked_read_zu(tsdn,
+	    LOCKEDINT_MTX(*arena->pa_shard.stats_mtx),
+	    &arena->pa_shard.stats->mapped));
 	locked_inc_zu_unsynchronized(&astats->retained,
 	    ecache_npages_get(&arena->pa_shard.ecache_retained) << LG_PAGE);
 
@@ -488,7 +489,7 @@ arena_extent_alloc_large(tsdn_t *tsdn, arena_t *arena, size_t usize,
 		if (mapped_add != 0) {
 			locked_inc_zu(tsdn,
 			    LOCKEDINT_MTX(arena->stats.mtx),
-			    &arena->stats.mapped, mapped_add);
+			    &arena->pa_shard.stats->mapped, mapped_add);
 		}
 		LOCKEDINT_MTX_UNLOCK(tsdn, arena->stats.mtx);
 	}
@@ -919,7 +920,7 @@ arena_decay_stashed(tsdn_t *tsdn, arena_t *arena, ehooks_t *ehooks,
 		locked_inc_u64(tsdn, LOCKEDINT_MTX(arena->stats.mtx),
 		    &decay->stats->purged, npurged);
 		locked_dec_zu(tsdn, LOCKEDINT_MTX(arena->stats.mtx),
-		    &arena->stats.mapped, nunmapped << LG_PAGE);
+		    &arena->pa_shard.stats->mapped, nunmapped << LG_PAGE);
 		LOCKEDINT_MTX_UNLOCK(tsdn, arena->stats.mtx);
 	}
 
@@ -1240,7 +1241,7 @@ arena_slab_alloc_hard(tsdn_t *tsdn, arena_t *arena, ehooks_t *ehooks,
 	    true, szind, &zero);
 
 	if (config_stats && slab != NULL) {
-		arena_stats_mapped_add(tsdn, &arena->stats,
+		pa_shard_stats_mapped_add(tsdn, &arena->pa_shard,
 		    bin_info->slab_size);
 	}
 
@@ -2039,7 +2040,7 @@ arena_new(tsdn_t *tsdn, unsigned ind, extent_hooks_t *extent_hooks) {
 	}
 
 	if (pa_shard_init(tsdn, &arena->pa_shard, base, ind,
-	    &arena->stats.pa_shard_stats)) {
+	    &arena->stats.pa_shard_stats, LOCKEDINT_MTX(arena->stats.mtx))) {
 		goto label_error;
 	}
 
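
The arena.c hunks above (and the large.c hunk below) switch call sites from arena_stats_mapped_add() to pa_shard_stats_mapped_add(), whose definition is not part of the hunks shown on this page. The following is a hedged sketch of that helper, inferred from the accessors this diff does use (the shard's borrowed stats_mtx and its stats->mapped counter), not copied from the committed header:

/*
 * Sketch only: bump the shard's mapped counter under the stats mutex the
 * shard borrows from the arena (see pa_shard_init below).
 */
static inline void
pa_shard_stats_mapped_add(tsdn_t *tsdn, pa_shard_t *shard, size_t size) {
	LOCKEDINT_MTX_LOCK(tsdn, *shard->stats_mtx);
	locked_inc_zu(tsdn, LOCKEDINT_MTX(*shard->stats_mtx),
	    &shard->stats->mapped, size);
	LOCKEDINT_MTX_UNLOCK(tsdn, *shard->stats_mtx);
}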

src/ctl.c (11 lines changed)
@@ -861,8 +861,9 @@ ctl_arena_stats_sdmerge(ctl_arena_t *ctl_sdarena, ctl_arena_t *ctl_arena,
 	ctl_arena_stats_t *astats = ctl_arena->astats;
 
 	if (!destroyed) {
-		ctl_accum_locked_zu(&sdstats->astats.mapped,
-		    &astats->astats.mapped);
+		ctl_accum_locked_zu(
+		    &sdstats->astats.pa_shard_stats.mapped,
+		    &astats->astats.pa_shard_stats.mapped);
 		ctl_accum_locked_zu(&sdstats->astats.retained,
 		    &astats->astats.retained);
 		ctl_accum_atomic_zu(&sdstats->astats.edata_avail,
@@ -1103,7 +1104,7 @@ ctl_refresh(tsdn_t *tsdn) {
 		ctl_stats->resident = atomic_load_zu(
 		    &ctl_sarena->astats->astats.resident, ATOMIC_RELAXED);
 		ctl_stats->mapped = locked_read_atomic_zu(
-		    &ctl_sarena->astats->astats.mapped);
+		    &ctl_sarena->astats->astats.pa_shard_stats.mapped);
 		ctl_stats->retained = locked_read_atomic_zu(
 		    &ctl_sarena->astats->astats.retained);
 
@@ -2914,8 +2915,8 @@ CTL_RO_GEN(stats_arenas_i_pactive, arenas_i(mib[2])->pactive, size_t)
 CTL_RO_GEN(stats_arenas_i_pdirty, arenas_i(mib[2])->pdirty, size_t)
 CTL_RO_GEN(stats_arenas_i_pmuzzy, arenas_i(mib[2])->pmuzzy, size_t)
 CTL_RO_CGEN(config_stats, stats_arenas_i_mapped,
-    locked_read_atomic_zu(&arenas_i(mib[2])->astats->astats.mapped),
-    size_t)
+    locked_read_atomic_zu(&arenas_i(
+    mib[2])->astats->astats.pa_shard_stats.mapped), size_t)
 CTL_RO_CGEN(config_stats, stats_arenas_i_retained,
     locked_read_atomic_zu(&arenas_i(mib[2])->astats->astats.retained),
     size_t)
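
The ctl.c hunks only change which field backs the existing counters; the mallctl names themselves ("stats.mapped", "stats.arenas.<i>.mapped") are untouched. Below is a small consumer-side sketch, not part of this commit, assuming a stats-enabled build and that arena 0 exists; it shows that readers keep using the same interface:

#include <stdint.h>
#include <stdio.h>
#include <jemalloc/jemalloc.h>

int
main(void) {
	/* Bump the epoch so the stats snapshot is refreshed before reading. */
	uint64_t epoch = 1;
	size_t sz = sizeof(epoch);
	mallctl("epoch", &epoch, &sz, &epoch, sz);

	/* Same mallctl name as before this commit; only the backing field moved. */
	size_t mapped;
	sz = sizeof(mapped);
	if (mallctl("stats.arenas.0.mapped", &mapped, &sz, NULL, 0) == 0) {
		printf("arena 0 mapped: %zu bytes\n", mapped);
	}
	return 0;
}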

src/large.c (2 lines changed)
@@ -151,7 +151,7 @@ large_ralloc_no_move_expand(tsdn_t *tsdn, edata_t *edata, size_t usize,
 	emap_remap(tsdn, &emap_global, edata, szind, false);
 
 	if (config_stats && new_mapping) {
-		arena_stats_mapped_add(tsdn, &arena->stats, trailsize);
+		pa_shard_stats_mapped_add(tsdn, &arena->pa_shard, trailsize);
 	}
 
 	if (zero) {

src/pa.c (3 lines changed)
@@ -3,7 +3,7 @@
 
 bool
 pa_shard_init(tsdn_t *tsdn, pa_shard_t *shard, base_t *base, unsigned ind,
-    pa_shard_stats_t *stats) {
+    pa_shard_stats_t *stats, malloc_mutex_t *stats_mtx) {
 	/* This will change eventually, but for now it should hold. */
 	assert(base_ind_get(base) == ind);
 	/*
@@ -44,6 +44,7 @@ pa_shard_init(tsdn_t *tsdn, pa_shard_t *shard, base_t *base, unsigned ind,
 
 	atomic_store_zu(&shard->extent_sn_next, 0, ATOMIC_RELAXED);
 
+	shard->stats_mtx = stats_mtx;
 	shard->stats = stats;
 	memset(shard->stats, 0, sizeof(*shard->stats));
 
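
The pa.c hunk stores the borrowed mutex on the shard; the corresponding declarations live in include/jemalloc/internal/pa.h and are not shown on this page. The following sketch lists only the fields this diff relies on, with types inferred from the accessors used above rather than taken from that header:

typedef struct pa_shard_stats_s pa_shard_stats_t;
struct pa_shard_stats_s {
	/* Formerly arena_stats_t's mapped counter; guarded by *stats_mtx. */
	locked_zu_t mapped;
};

typedef struct pa_shard_s pa_shard_t;
struct pa_shard_s {
	/*
	 * The shard does not own a stats mutex yet; pa_shard_init() receives
	 * LOCKEDINT_MTX(arena->stats.mtx) and the shard borrows it.
	 */
	malloc_mutex_t *stats_mtx;
	pa_shard_stats_t *stats;
	/* Other fields (ecache_retained, extent_sn_next, ...) elided. */
};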