Add UNUSED to avoid compiler warnings.
Parent: 2a80d6f15b
Commit: 0fadf4a2e3
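The diff below annotates parameters that are only consulted in some build configurations with UNUSED, and drops parameters that are genuinely dead, so that -Wunused-parameter builds compile cleanly. For reference, here is a minimal sketch of how such an UNUSED macro is commonly defined; the exact definition in jemalloc's internal headers may differ, so treat the macro body as an assumption rather than the project's verbatim code:

/*
 * Hedged sketch of an UNUSED parameter annotation (assumption; not
 * necessarily jemalloc's exact definition). On GCC/Clang it expands to
 * the `unused` attribute, which suppresses -Wunused-parameter for the
 * annotated declaration; on other compilers it expands to nothing.
 */
#if defined(__GNUC__) || defined(__clang__)
#  define UNUSED __attribute__((unused))
#else
#  define UNUSED
#endif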
@@ -28,7 +28,7 @@ arena_prof_tctx_get(tsdn_t *tsdn, const void *ptr, alloc_ctx_t *alloc_ctx) {
 }
 
 JEMALLOC_ALWAYS_INLINE void
-arena_prof_tctx_set(tsdn_t *tsdn, const void *ptr, size_t usize,
+arena_prof_tctx_set(tsdn_t *tsdn, const void *ptr, UNUSED size_t usize,
     alloc_ctx_t *alloc_ctx, prof_tctx_t *tctx) {
 	cassert(config_prof);
 	assert(ptr != NULL);
@@ -47,7 +47,7 @@ arena_prof_tctx_set(tsdn_t *tsdn, const void *ptr, size_t usize,
 }
 
 static inline void
-arena_prof_tctx_reset(tsdn_t *tsdn, const void *ptr, prof_tctx_t *tctx) {
+arena_prof_tctx_reset(tsdn_t *tsdn, const void *ptr, UNUSED prof_tctx_t *tctx) {
 	cassert(config_prof);
 	assert(ptr != NULL);
 
@@ -95,7 +95,7 @@ struct arena_stats_s {
 };
 
 static inline bool
-arena_stats_init(tsdn_t *tsdn, arena_stats_t *arena_stats) {
+arena_stats_init(UNUSED tsdn_t *tsdn, arena_stats_t *arena_stats) {
 	if (config_debug) {
 		for (size_t i = 0; i < sizeof(arena_stats_t); i++) {
 			assert(((char *)arena_stats)[i] == 0);
@@ -190,7 +190,7 @@ extent_addr_set(extent_t *extent, void *addr) {
 }
 
 static inline void
-extent_addr_randomize(tsdn_t *tsdn, extent_t *extent, size_t alignment) {
+extent_addr_randomize(UNUSED tsdn_t *tsdn, extent_t *extent, size_t alignment) {
 	assert(extent_base_get(extent) == extent_addr_get(extent));
 
 	if (alignment < PAGE) {
@@ -208,8 +208,8 @@ rtree_leaf_elm_bits_slab_get(uintptr_t bits) {
 # endif
 
 JEMALLOC_ALWAYS_INLINE extent_t *
-rtree_leaf_elm_extent_read(tsdn_t *tsdn, rtree_t *rtree, rtree_leaf_elm_t *elm,
-    bool dependent) {
+rtree_leaf_elm_extent_read(UNUSED tsdn_t *tsdn, UNUSED rtree_t *rtree,
+    rtree_leaf_elm_t *elm, bool dependent) {
 #ifdef RTREE_LEAF_COMPACT
 	uintptr_t bits = rtree_leaf_elm_bits_read(tsdn, rtree, elm, dependent);
 	return rtree_leaf_elm_bits_extent_get(bits);
@@ -221,8 +221,8 @@ rtree_leaf_elm_extent_read(tsdn_t *tsdn, rtree_t *rtree, rtree_leaf_elm_t *elm,
 }
 
 JEMALLOC_ALWAYS_INLINE szind_t
-rtree_leaf_elm_szind_read(tsdn_t *tsdn, rtree_t *rtree, rtree_leaf_elm_t *elm,
-    bool dependent) {
+rtree_leaf_elm_szind_read(UNUSED tsdn_t *tsdn, UNUSED rtree_t *rtree,
+    rtree_leaf_elm_t *elm, bool dependent) {
 #ifdef RTREE_LEAF_COMPACT
 	uintptr_t bits = rtree_leaf_elm_bits_read(tsdn, rtree, elm, dependent);
 	return rtree_leaf_elm_bits_szind_get(bits);
@@ -233,8 +233,8 @@ rtree_leaf_elm_szind_read(tsdn_t *tsdn, rtree_t *rtree, rtree_leaf_elm_t *elm,
 }
 
 JEMALLOC_ALWAYS_INLINE bool
-rtree_leaf_elm_slab_read(tsdn_t *tsdn, rtree_t *rtree, rtree_leaf_elm_t *elm,
-    bool dependent) {
+rtree_leaf_elm_slab_read(UNUSED tsdn_t *tsdn, UNUSED rtree_t *rtree,
+    rtree_leaf_elm_t *elm, bool dependent) {
 #ifdef RTREE_LEAF_COMPACT
 	uintptr_t bits = rtree_leaf_elm_bits_read(tsdn, rtree, elm, dependent);
 	return rtree_leaf_elm_bits_slab_get(bits);
@@ -245,8 +245,8 @@ rtree_leaf_elm_slab_read(tsdn_t *tsdn, rtree_t *rtree, rtree_leaf_elm_t *elm,
 }
 
 static inline void
-rtree_leaf_elm_extent_write(tsdn_t *tsdn, rtree_t *rtree, rtree_leaf_elm_t *elm,
-    extent_t *extent) {
+rtree_leaf_elm_extent_write(UNUSED tsdn_t *tsdn, UNUSED rtree_t *rtree,
+    rtree_leaf_elm_t *elm, extent_t *extent) {
 #ifdef RTREE_LEAF_COMPACT
 	uintptr_t old_bits = rtree_leaf_elm_bits_read(tsdn, rtree, elm, true);
 	uintptr_t bits = ((uintptr_t)rtree_leaf_elm_bits_szind_get(old_bits) <<
@@ -259,8 +259,8 @@ rtree_leaf_elm_extent_write(tsdn_t *tsdn, rtree_t *rtree, rtree_leaf_elm_t *elm,
 }
 
 static inline void
-rtree_leaf_elm_szind_write(tsdn_t *tsdn, rtree_t *rtree, rtree_leaf_elm_t *elm,
-    szind_t szind) {
+rtree_leaf_elm_szind_write(UNUSED tsdn_t *tsdn, UNUSED rtree_t *rtree,
+    rtree_leaf_elm_t *elm, szind_t szind) {
 	assert(szind <= NSIZES);
 
 #ifdef RTREE_LEAF_COMPACT
@@ -277,8 +277,8 @@ rtree_leaf_elm_szind_write(tsdn_t *tsdn, rtree_t *rtree, rtree_leaf_elm_t *elm,
 }
 
 static inline void
-rtree_leaf_elm_slab_write(tsdn_t *tsdn, rtree_t *rtree, rtree_leaf_elm_t *elm,
-    bool slab) {
+rtree_leaf_elm_slab_write(UNUSED tsdn_t *tsdn, UNUSED rtree_t *rtree,
+    rtree_leaf_elm_t *elm, bool slab) {
 #ifdef RTREE_LEAF_COMPACT
 	uintptr_t old_bits = rtree_leaf_elm_bits_read(tsdn, rtree, elm,
 	    true);
@@ -39,8 +39,8 @@ tcache_event(tsd_t *tsd, tcache_t *tcache) {
 }
 
 JEMALLOC_ALWAYS_INLINE void *
-tcache_alloc_small(tsd_t *tsd, arena_t *arena, tcache_t *tcache, size_t size,
-    szind_t binind, bool zero, bool slow_path) {
+tcache_alloc_small(tsd_t *tsd, arena_t *arena, tcache_t *tcache,
+    UNUSED size_t size, szind_t binind, bool zero, bool slow_path) {
 	void *ret;
 	cache_bin_t *bin;
 	bool tcache_success;
@@ -39,7 +39,7 @@ tsd_get_allocates(void) {
 
 /* Get/set. */
 JEMALLOC_ALWAYS_INLINE tsd_t *
-tsd_get(bool init) {
+tsd_get(UNUSED bool init) {
 	assert(tsd_booted);
 	return &tsd_tls;
 }
src/arena.c (33 changed lines)
@@ -61,7 +61,7 @@ static void arena_bin_lower_slab(tsdn_t *tsdn, arena_t *arena, extent_t *slab,
 /******************************************************************************/
 
 void
-arena_basic_stats_merge(tsdn_t *tsdn, arena_t *arena, unsigned *nthreads,
+arena_basic_stats_merge(UNUSED tsdn_t *tsdn, arena_t *arena, unsigned *nthreads,
     const char **dss, ssize_t *dirty_decay_ms, ssize_t *muzzy_decay_ms,
     size_t *nactive, size_t *ndirty, size_t *nmuzzy) {
 	*nthreads += arena_nthreads_get(arena, false);
@@ -221,7 +221,7 @@ arena_extents_dirty_dalloc(tsdn_t *tsdn, arena_t *arena,
 }
 
 static void *
-arena_slab_reg_alloc(tsdn_t *tsdn, extent_t *slab, const bin_info_t *bin_info) {
+arena_slab_reg_alloc(extent_t *slab, const bin_info_t *bin_info) {
 	void *ret;
 	arena_slab_data_t *slab_data = extent_slab_data_get(slab);
 	size_t regind;
@@ -261,8 +261,7 @@ arena_slab_regind(extent_t *slab, szind_t binind, const void *ptr) {
 }
 
 static void
-arena_slab_reg_dalloc(tsdn_t *tsdn, extent_t *slab,
-    arena_slab_data_t *slab_data, void *ptr) {
+arena_slab_reg_dalloc(extent_t *slab, arena_slab_data_t *slab_data, void *ptr) {
 	szind_t binind = extent_szind_get(slab);
 	const bin_info_t *bin_info = &bin_infos[binind];
 	size_t regind = arena_slab_regind(slab, binind, ptr);
@@ -561,7 +560,7 @@ arena_decay_epoch_advance(tsdn_t *tsdn, arena_t *arena, arena_decay_t *decay,
 }
 
 static void
-arena_decay_reinit(arena_decay_t *decay, extents_t *extents, ssize_t decay_ms) {
+arena_decay_reinit(arena_decay_t *decay, ssize_t decay_ms) {
 	arena_decay_ms_write(decay, decay_ms);
 	if (decay_ms > 0) {
 		nstime_init(&decay->interval, (uint64_t)decay_ms *
@@ -578,7 +577,7 @@ arena_decay_reinit(arena_decay_t *decay, extents_t *extents, ssize_t decay_ms) {
 }
 
 static bool
-arena_decay_init(arena_decay_t *decay, extents_t *extents, ssize_t decay_ms,
+arena_decay_init(arena_decay_t *decay, ssize_t decay_ms,
     arena_stats_decay_t *stats) {
 	if (config_debug) {
 		for (size_t i = 0; i < sizeof(arena_decay_t); i++) {
@@ -591,7 +590,7 @@ arena_decay_init(arena_decay_t *decay, extents_t *extents, ssize_t decay_ms,
 		return true;
 	}
 	decay->purging = false;
-	arena_decay_reinit(decay, extents, decay_ms);
+	arena_decay_reinit(decay, decay_ms);
 	/* Memory is zeroed, so there is no need to clear stats. */
 	if (config_stats) {
 		decay->stats = stats;
@@ -700,7 +699,7 @@ arena_decay_ms_set(tsdn_t *tsdn, arena_t *arena, arena_decay_t *decay,
 	 * infrequent, either between the {-1, 0, >0} states, or a one-time
 	 * arbitrary change during initial arena configuration.
 	 */
-	arena_decay_reinit(decay, extents, decay_ms);
+	arena_decay_reinit(decay, decay_ms);
 	arena_maybe_decay(tsdn, arena, decay, extents, false);
 	malloc_mutex_unlock(tsdn, &decay->mtx);
 
@@ -1210,7 +1209,7 @@ arena_bin_malloc_hard(tsdn_t *tsdn, arena_t *arena, bin_t *bin,
 	 * bin lock in arena_bin_nonfull_slab_get().
 	 */
 	if (extent_nfree_get(bin->slabcur) > 0) {
-		void *ret = arena_slab_reg_alloc(tsdn, bin->slabcur,
+		void *ret = arena_slab_reg_alloc(bin->slabcur,
 		    bin_info);
 		if (slab != NULL) {
 			/*
@@ -1244,7 +1243,7 @@ arena_bin_malloc_hard(tsdn_t *tsdn, arena_t *arena, bin_t *bin,
 
 	assert(extent_nfree_get(bin->slabcur) > 0);
 
-	return arena_slab_reg_alloc(tsdn, slab, bin_info);
+	return arena_slab_reg_alloc(slab, bin_info);
 }
 
 void
@@ -1266,8 +1265,7 @@ arena_tcache_fill_small(tsdn_t *tsdn, arena_t *arena, tcache_t *tcache,
 			void *ptr;
 			if ((slab = bin->slabcur) != NULL && extent_nfree_get(slab) >
 			    0) {
-				ptr = arena_slab_reg_alloc(tsdn, slab,
-				    &bin_infos[binind]);
+				ptr = arena_slab_reg_alloc(slab, &bin_infos[binind]);
 			} else {
 				ptr = arena_bin_malloc_hard(tsdn, arena, bin, binind);
 			}
@@ -1328,7 +1326,7 @@ arena_malloc_small(tsdn_t *tsdn, arena_t *arena, szind_t binind, bool zero) {
 
 	malloc_mutex_lock(tsdn, &bin->lock);
 	if ((slab = bin->slabcur) != NULL && extent_nfree_get(slab) > 0) {
-		ret = arena_slab_reg_alloc(tsdn, slab, &bin_infos[binind]);
+		ret = arena_slab_reg_alloc(slab, &bin_infos[binind]);
 	} else {
 		ret = arena_bin_malloc_hard(tsdn, arena, bin, binind);
 	}
@@ -1501,7 +1499,8 @@ arena_dalloc_bin_slab(tsdn_t *tsdn, arena_t *arena, extent_t *slab,
 }
 
 static void
-arena_bin_lower_slab(tsdn_t *tsdn, arena_t *arena, extent_t *slab, bin_t *bin) {
+arena_bin_lower_slab(UNUSED tsdn_t *tsdn, arena_t *arena, extent_t *slab,
+    bin_t *bin) {
 	assert(extent_nfree_get(slab) > 0);
 
 	/*
@@ -1538,7 +1537,7 @@ arena_dalloc_bin_locked_impl(tsdn_t *tsdn, arena_t *arena, extent_t *slab,
 		arena_dalloc_junk_small(ptr, bin_info);
 	}
 
-	arena_slab_reg_dalloc(tsdn, slab, slab_data, ptr);
+	arena_slab_reg_dalloc(slab, slab_data, ptr);
 	unsigned nfree = extent_nfree_get(slab);
 	if (nfree == bin_info->nregs) {
 		arena_dissociate_bin_slab(arena, slab, bin);
@@ -1856,11 +1855,11 @@ arena_new(tsdn_t *tsdn, unsigned ind, extent_hooks_t *extent_hooks) {
 		goto label_error;
 	}
 
-	if (arena_decay_init(&arena->decay_dirty, &arena->extents_dirty,
+	if (arena_decay_init(&arena->decay_dirty,
 	    arena_dirty_decay_ms_default_get(), &arena->stats.decay_dirty)) {
 		goto label_error;
 	}
-	if (arena_decay_init(&arena->decay_muzzy, &arena->extents_muzzy,
+	if (arena_decay_init(&arena->decay_muzzy,
 	    arena_muzzy_decay_ms_default_get(), &arena->stats.decay_muzzy)) {
 		goto label_error;
 	}
src/base.c (14 changed lines)
@@ -195,8 +195,8 @@ base_extent_bump_alloc_helper(extent_t *extent, size_t *gap_size, size_t size,
 }
 
 static void
-base_extent_bump_alloc_post(tsdn_t *tsdn, base_t *base, extent_t *extent,
-    size_t gap_size, void *addr, size_t size) {
+base_extent_bump_alloc_post(base_t *base, extent_t *extent, size_t gap_size,
+    void *addr, size_t size) {
 	if (extent_bsize_get(extent) > 0) {
 		/*
 		 * Compute the index for the largest size class that does not
@@ -229,13 +229,13 @@ base_extent_bump_alloc_post(tsdn_t *tsdn, base_t *base, extent_t *extent,
 }
 
 static void *
-base_extent_bump_alloc(tsdn_t *tsdn, base_t *base, extent_t *extent,
-    size_t size, size_t alignment) {
+base_extent_bump_alloc(base_t *base, extent_t *extent, size_t size,
+    size_t alignment) {
 	void *ret;
 	size_t gap_size;
 
 	ret = base_extent_bump_alloc_helper(extent, &gap_size, size, alignment);
-	base_extent_bump_alloc_post(tsdn, base, extent, gap_size, ret, size);
+	base_extent_bump_alloc_post(base, extent, gap_size, ret, size);
 	return ret;
 }
 
@@ -386,7 +386,7 @@ base_new(tsdn_t *tsdn, unsigned ind, extent_hooks_t *extent_hooks) {
 		assert(base->resident <= base->mapped);
 		assert(base->n_thp << LG_HUGEPAGE <= base->mapped);
 	}
-	base_extent_bump_alloc_post(tsdn, base, &block->extent, gap_size, base,
+	base_extent_bump_alloc_post(base, &block->extent, gap_size, base,
 	    base_size);
 
 	return base;
@@ -443,7 +443,7 @@ base_alloc_impl(tsdn_t *tsdn, base_t *base, size_t size, size_t alignment,
 		goto label_return;
 	}
 
-	ret = base_extent_bump_alloc(tsdn, base, extent, usize, alignment);
+	ret = base_extent_bump_alloc(base, extent, usize, alignment);
 	if (esn != NULL) {
 		*esn = extent_sn_get(extent);
 	}
@@ -2893,7 +2893,7 @@ label_not_resized:
 
 JEMALLOC_EXPORT size_t JEMALLOC_NOTHROW
 JEMALLOC_ATTR(pure)
-je_sallocx(const void *ptr, int flags) {
+je_sallocx(const void *ptr, UNUSED int flags) {
 	size_t usize;
 	tsdn_t *tsdn;
 
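For a standalone illustration of the warning class these changes silence, the following hypothetical file (demo.c; none of these names come from jemalloc) warns under `cc -Wall -Wextra -c demo.c` in its first function and compiles cleanly in the second. Dropping the parameter outright, as the diff does for arena_slab_reg_alloc() and base_extent_bump_alloc(), is the other remedy when no caller needs the argument:

/* demo.c — hypothetical example; not jemalloc source. */
#include <stddef.h>

#if defined(__GNUC__) || defined(__clang__)
#  define UNUSED __attribute__((unused))
#else
#  define UNUSED
#endif

typedef struct tsdn_s tsdn_t;	/* opaque handle, mirroring jemalloc's tsdn_t */

/* warning: unused parameter 'tsdn' [-Wunused-parameter] */
size_t
stats_total(tsdn_t *tsdn, const size_t *stats, size_t n) {
	size_t sum = 0;
	for (size_t i = 0; i < n; i++) {
		sum += stats[i];
	}
	return sum;
}

/* Annotated variant: same interface, no warning under the same flags. */
size_t
stats_total_annotated(UNUSED tsdn_t *tsdn, const size_t *stats, size_t n) {
	size_t sum = 0;
	for (size_t i = 0; i < n; i++) {
		sum += stats[i];
	}
	return sum;
}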