Add UNUSED to avoid compiler warnings.

Authored by Qi Wang on 2018-04-16 12:08:27 -07:00; committed by Qi Wang
parent 2a80d6f15b
commit 0fadf4a2e3
9 changed files with 43 additions and 44 deletions
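
The warnings being silenced here are unused-parameter diagnostics: parameters that go unused in some or all build configurations (typically a tsdn_t * handle) are annotated UNUSED, while a few parameters that were never read at all are dropped entirely, together with their arguments at the call sites (for example, the tsdn argument of arena_slab_reg_alloc); that is why the commit removes roughly as many lines as it adds. For readers unfamiliar with the annotation, the following is a minimal illustrative sketch of how an UNUSED-style macro is typically defined for GCC/Clang; it is not copied from jemalloc's internal headers, and the example function is hypothetical.

/* Illustrative sketch only -- not jemalloc's actual macro or functions. */
#if defined(__GNUC__) || defined(__clang__)
#  define UNUSED __attribute__((unused))  /* silences -Wunused-parameter */
#else
#  define UNUSED                          /* no-op on other compilers */
#endif

/* A parameter kept for interface consistency but not read in this build
 * configuration can then be annotated without triggering a warning. */
static inline void
example_stats_init(UNUSED void *tsdn, int *nstats) {
        *nstats = 0;
}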


@@ -28,7 +28,7 @@ arena_prof_tctx_get(tsdn_t *tsdn, const void *ptr, alloc_ctx_t *alloc_ctx) {
 }
 
 JEMALLOC_ALWAYS_INLINE void
-arena_prof_tctx_set(tsdn_t *tsdn, const void *ptr, size_t usize,
+arena_prof_tctx_set(tsdn_t *tsdn, const void *ptr, UNUSED size_t usize,
     alloc_ctx_t *alloc_ctx, prof_tctx_t *tctx) {
         cassert(config_prof);
         assert(ptr != NULL);
@@ -47,7 +47,7 @@ arena_prof_tctx_set(tsdn_t *tsdn, const void *ptr, size_t usize,
 }
 
 static inline void
-arena_prof_tctx_reset(tsdn_t *tsdn, const void *ptr, prof_tctx_t *tctx) {
+arena_prof_tctx_reset(tsdn_t *tsdn, const void *ptr, UNUSED prof_tctx_t *tctx) {
         cassert(config_prof);
         assert(ptr != NULL);
 


@@ -95,7 +95,7 @@ struct arena_stats_s {
 };
 
 static inline bool
-arena_stats_init(tsdn_t *tsdn, arena_stats_t *arena_stats) {
+arena_stats_init(UNUSED tsdn_t *tsdn, arena_stats_t *arena_stats) {
         if (config_debug) {
                 for (size_t i = 0; i < sizeof(arena_stats_t); i++) {
                         assert(((char *)arena_stats)[i] == 0);


@@ -190,7 +190,7 @@ extent_addr_set(extent_t *extent, void *addr) {
 }
 
 static inline void
-extent_addr_randomize(tsdn_t *tsdn, extent_t *extent, size_t alignment) {
+extent_addr_randomize(UNUSED tsdn_t *tsdn, extent_t *extent, size_t alignment) {
         assert(extent_base_get(extent) == extent_addr_get(extent));
 
         if (alignment < PAGE) {


@@ -208,8 +208,8 @@ rtree_leaf_elm_bits_slab_get(uintptr_t bits) {
 # endif
 
 JEMALLOC_ALWAYS_INLINE extent_t *
-rtree_leaf_elm_extent_read(tsdn_t *tsdn, rtree_t *rtree, rtree_leaf_elm_t *elm,
-    bool dependent) {
+rtree_leaf_elm_extent_read(UNUSED tsdn_t *tsdn, UNUSED rtree_t *rtree,
+    rtree_leaf_elm_t *elm, bool dependent) {
 #ifdef RTREE_LEAF_COMPACT
         uintptr_t bits = rtree_leaf_elm_bits_read(tsdn, rtree, elm, dependent);
         return rtree_leaf_elm_bits_extent_get(bits);
@@ -221,8 +221,8 @@ rtree_leaf_elm_extent_read(tsdn_t *tsdn, rtree_t *rtree, rtree_leaf_elm_t *elm,
 }
 
 JEMALLOC_ALWAYS_INLINE szind_t
-rtree_leaf_elm_szind_read(tsdn_t *tsdn, rtree_t *rtree, rtree_leaf_elm_t *elm,
-    bool dependent) {
+rtree_leaf_elm_szind_read(UNUSED tsdn_t *tsdn, UNUSED rtree_t *rtree,
+    rtree_leaf_elm_t *elm, bool dependent) {
 #ifdef RTREE_LEAF_COMPACT
         uintptr_t bits = rtree_leaf_elm_bits_read(tsdn, rtree, elm, dependent);
         return rtree_leaf_elm_bits_szind_get(bits);
@@ -233,8 +233,8 @@ rtree_leaf_elm_szind_read(tsdn_t *tsdn, rtree_t *rtree, rtree_leaf_elm_t *elm,
 }
 
 JEMALLOC_ALWAYS_INLINE bool
-rtree_leaf_elm_slab_read(tsdn_t *tsdn, rtree_t *rtree, rtree_leaf_elm_t *elm,
-    bool dependent) {
+rtree_leaf_elm_slab_read(UNUSED tsdn_t *tsdn, UNUSED rtree_t *rtree,
+    rtree_leaf_elm_t *elm, bool dependent) {
 #ifdef RTREE_LEAF_COMPACT
         uintptr_t bits = rtree_leaf_elm_bits_read(tsdn, rtree, elm, dependent);
         return rtree_leaf_elm_bits_slab_get(bits);
@@ -245,8 +245,8 @@ rtree_leaf_elm_slab_read(tsdn_t *tsdn, rtree_t *rtree, rtree_leaf_elm_t *elm,
 }
 
 static inline void
-rtree_leaf_elm_extent_write(tsdn_t *tsdn, rtree_t *rtree, rtree_leaf_elm_t *elm,
-    extent_t *extent) {
+rtree_leaf_elm_extent_write(UNUSED tsdn_t *tsdn, UNUSED rtree_t *rtree,
+    rtree_leaf_elm_t *elm, extent_t *extent) {
 #ifdef RTREE_LEAF_COMPACT
         uintptr_t old_bits = rtree_leaf_elm_bits_read(tsdn, rtree, elm, true);
         uintptr_t bits = ((uintptr_t)rtree_leaf_elm_bits_szind_get(old_bits) <<
@@ -259,8 +259,8 @@ rtree_leaf_elm_extent_write(tsdn_t *tsdn, rtree_t *rtree, rtree_leaf_elm_t *elm,
 }
 
 static inline void
-rtree_leaf_elm_szind_write(tsdn_t *tsdn, rtree_t *rtree, rtree_leaf_elm_t *elm,
-    szind_t szind) {
+rtree_leaf_elm_szind_write(UNUSED tsdn_t *tsdn, UNUSED rtree_t *rtree,
+    rtree_leaf_elm_t *elm, szind_t szind) {
         assert(szind <= NSIZES);
 
 #ifdef RTREE_LEAF_COMPACT
@@ -277,8 +277,8 @@ rtree_leaf_elm_szind_write(tsdn_t *tsdn, rtree_t *rtree, rtree_leaf_elm_t *elm,
 }
 
 static inline void
-rtree_leaf_elm_slab_write(tsdn_t *tsdn, rtree_t *rtree, rtree_leaf_elm_t *elm,
-    bool slab) {
+rtree_leaf_elm_slab_write(UNUSED tsdn_t *tsdn, UNUSED rtree_t *rtree,
+    rtree_leaf_elm_t *elm, bool slab) {
 #ifdef RTREE_LEAF_COMPACT
         uintptr_t old_bits = rtree_leaf_elm_bits_read(tsdn, rtree, elm,
             true);


@@ -39,8 +39,8 @@ tcache_event(tsd_t *tsd, tcache_t *tcache) {
 }
 
 JEMALLOC_ALWAYS_INLINE void *
-tcache_alloc_small(tsd_t *tsd, arena_t *arena, tcache_t *tcache, size_t size,
-    szind_t binind, bool zero, bool slow_path) {
+tcache_alloc_small(tsd_t *tsd, arena_t *arena, tcache_t *tcache,
+    UNUSED size_t size, szind_t binind, bool zero, bool slow_path) {
         void *ret;
         cache_bin_t *bin;
         bool tcache_success;


@@ -39,7 +39,7 @@ tsd_get_allocates(void) {
 
 /* Get/set. */
 JEMALLOC_ALWAYS_INLINE tsd_t *
-tsd_get(bool init) {
+tsd_get(UNUSED bool init) {
         assert(tsd_booted);
         return &tsd_tls;
 }


@@ -61,7 +61,7 @@ static void arena_bin_lower_slab(tsdn_t *tsdn, arena_t *arena, extent_t *slab,
 /******************************************************************************/
 
 void
-arena_basic_stats_merge(tsdn_t *tsdn, arena_t *arena, unsigned *nthreads,
+arena_basic_stats_merge(UNUSED tsdn_t *tsdn, arena_t *arena, unsigned *nthreads,
     const char **dss, ssize_t *dirty_decay_ms, ssize_t *muzzy_decay_ms,
     size_t *nactive, size_t *ndirty, size_t *nmuzzy) {
         *nthreads += arena_nthreads_get(arena, false);
@@ -221,7 +221,7 @@ arena_extents_dirty_dalloc(tsdn_t *tsdn, arena_t *arena,
 }
 
 static void *
-arena_slab_reg_alloc(tsdn_t *tsdn, extent_t *slab, const bin_info_t *bin_info) {
+arena_slab_reg_alloc(extent_t *slab, const bin_info_t *bin_info) {
         void *ret;
         arena_slab_data_t *slab_data = extent_slab_data_get(slab);
         size_t regind;
@@ -261,8 +261,7 @@ arena_slab_regind(extent_t *slab, szind_t binind, const void *ptr) {
 }
 
 static void
-arena_slab_reg_dalloc(tsdn_t *tsdn, extent_t *slab,
-    arena_slab_data_t *slab_data, void *ptr) {
+arena_slab_reg_dalloc(extent_t *slab, arena_slab_data_t *slab_data, void *ptr) {
         szind_t binind = extent_szind_get(slab);
         const bin_info_t *bin_info = &bin_infos[binind];
         size_t regind = arena_slab_regind(slab, binind, ptr);
@@ -561,7 +560,7 @@ arena_decay_epoch_advance(tsdn_t *tsdn, arena_t *arena, arena_decay_t *decay,
 }
 
 static void
-arena_decay_reinit(arena_decay_t *decay, extents_t *extents, ssize_t decay_ms) {
+arena_decay_reinit(arena_decay_t *decay, ssize_t decay_ms) {
         arena_decay_ms_write(decay, decay_ms);
         if (decay_ms > 0) {
                 nstime_init(&decay->interval, (uint64_t)decay_ms *
@@ -578,7 +577,7 @@ arena_decay_reinit(arena_decay_t *decay, extents_t *extents, ssize_t decay_ms) {
 }
 
 static bool
-arena_decay_init(arena_decay_t *decay, extents_t *extents, ssize_t decay_ms,
+arena_decay_init(arena_decay_t *decay, ssize_t decay_ms,
     arena_stats_decay_t *stats) {
         if (config_debug) {
                 for (size_t i = 0; i < sizeof(arena_decay_t); i++) {
@@ -591,7 +590,7 @@ arena_decay_init(arena_decay_t *decay, extents_t *extents, ssize_t decay_ms,
                 return true;
         }
         decay->purging = false;
-        arena_decay_reinit(decay, extents, decay_ms);
+        arena_decay_reinit(decay, decay_ms);
         /* Memory is zeroed, so there is no need to clear stats. */
         if (config_stats) {
                 decay->stats = stats;
@@ -700,7 +699,7 @@ arena_decay_ms_set(tsdn_t *tsdn, arena_t *arena, arena_decay_t *decay,
          * infrequent, either between the {-1, 0, >0} states, or a one-time
          * arbitrary change during initial arena configuration.
          */
-        arena_decay_reinit(decay, extents, decay_ms);
+        arena_decay_reinit(decay, decay_ms);
         arena_maybe_decay(tsdn, arena, decay, extents, false);
         malloc_mutex_unlock(tsdn, &decay->mtx);
 
@@ -1210,7 +1209,7 @@ arena_bin_malloc_hard(tsdn_t *tsdn, arena_t *arena, bin_t *bin,
                  * bin lock in arena_bin_nonfull_slab_get().
                  */
                 if (extent_nfree_get(bin->slabcur) > 0) {
-                        void *ret = arena_slab_reg_alloc(tsdn, bin->slabcur,
+                        void *ret = arena_slab_reg_alloc(bin->slabcur,
                             bin_info);
                         if (slab != NULL) {
                                 /*
@@ -1244,7 +1243,7 @@ arena_bin_malloc_hard(tsdn_t *tsdn, arena_t *arena, bin_t *bin,
 
         assert(extent_nfree_get(bin->slabcur) > 0);
 
-        return arena_slab_reg_alloc(tsdn, slab, bin_info);
+        return arena_slab_reg_alloc(slab, bin_info);
 }
 
 void
@@ -1266,8 +1265,7 @@ arena_tcache_fill_small(tsdn_t *tsdn, arena_t *arena, tcache_t *tcache,
                 void *ptr;
                 if ((slab = bin->slabcur) != NULL && extent_nfree_get(slab) >
                     0) {
-                        ptr = arena_slab_reg_alloc(tsdn, slab,
-                            &bin_infos[binind]);
+                        ptr = arena_slab_reg_alloc(slab, &bin_infos[binind]);
                 } else {
                         ptr = arena_bin_malloc_hard(tsdn, arena, bin, binind);
                 }
@@ -1328,7 +1326,7 @@ arena_malloc_small(tsdn_t *tsdn, arena_t *arena, szind_t binind, bool zero) {
 
         malloc_mutex_lock(tsdn, &bin->lock);
         if ((slab = bin->slabcur) != NULL && extent_nfree_get(slab) > 0) {
-                ret = arena_slab_reg_alloc(tsdn, slab, &bin_infos[binind]);
+                ret = arena_slab_reg_alloc(slab, &bin_infos[binind]);
         } else {
                 ret = arena_bin_malloc_hard(tsdn, arena, bin, binind);
         }
@@ -1501,7 +1499,8 @@ arena_dalloc_bin_slab(tsdn_t *tsdn, arena_t *arena, extent_t *slab,
 }
 
 static void
-arena_bin_lower_slab(tsdn_t *tsdn, arena_t *arena, extent_t *slab, bin_t *bin) {
+arena_bin_lower_slab(UNUSED tsdn_t *tsdn, arena_t *arena, extent_t *slab,
+    bin_t *bin) {
         assert(extent_nfree_get(slab) > 0);
 
         /*
@@ -1538,7 +1537,7 @@ arena_dalloc_bin_locked_impl(tsdn_t *tsdn, arena_t *arena, extent_t *slab,
                 arena_dalloc_junk_small(ptr, bin_info);
         }
 
-        arena_slab_reg_dalloc(tsdn, slab, slab_data, ptr);
+        arena_slab_reg_dalloc(slab, slab_data, ptr);
         unsigned nfree = extent_nfree_get(slab);
         if (nfree == bin_info->nregs) {
                 arena_dissociate_bin_slab(arena, slab, bin);
@@ -1856,11 +1855,11 @@ arena_new(tsdn_t *tsdn, unsigned ind, extent_hooks_t *extent_hooks) {
                 goto label_error;
         }
 
-        if (arena_decay_init(&arena->decay_dirty, &arena->extents_dirty,
+        if (arena_decay_init(&arena->decay_dirty,
             arena_dirty_decay_ms_default_get(), &arena->stats.decay_dirty)) {
                 goto label_error;
         }
-        if (arena_decay_init(&arena->decay_muzzy, &arena->extents_muzzy,
+        if (arena_decay_init(&arena->decay_muzzy,
             arena_muzzy_decay_ms_default_get(), &arena->stats.decay_muzzy)) {
                 goto label_error;
         }


@@ -195,8 +195,8 @@ base_extent_bump_alloc_helper(extent_t *extent, size_t *gap_size, size_t size,
 }
 
 static void
-base_extent_bump_alloc_post(tsdn_t *tsdn, base_t *base, extent_t *extent,
-    size_t gap_size, void *addr, size_t size) {
+base_extent_bump_alloc_post(base_t *base, extent_t *extent, size_t gap_size,
+    void *addr, size_t size) {
         if (extent_bsize_get(extent) > 0) {
                 /*
                  * Compute the index for the largest size class that does not
@@ -229,13 +229,13 @@ base_extent_bump_alloc_post(tsdn_t *tsdn, base_t *base, extent_t *extent,
 }
 
 static void *
-base_extent_bump_alloc(tsdn_t *tsdn, base_t *base, extent_t *extent,
-    size_t size, size_t alignment) {
+base_extent_bump_alloc(base_t *base, extent_t *extent, size_t size,
+    size_t alignment) {
         void *ret;
         size_t gap_size;
 
         ret = base_extent_bump_alloc_helper(extent, &gap_size, size, alignment);
-        base_extent_bump_alloc_post(tsdn, base, extent, gap_size, ret, size);
+        base_extent_bump_alloc_post(base, extent, gap_size, ret, size);
         return ret;
 }
 
@@ -386,7 +386,7 @@ base_new(tsdn_t *tsdn, unsigned ind, extent_hooks_t *extent_hooks) {
                 assert(base->resident <= base->mapped);
                 assert(base->n_thp << LG_HUGEPAGE <= base->mapped);
         }
-        base_extent_bump_alloc_post(tsdn, base, &block->extent, gap_size, base,
+        base_extent_bump_alloc_post(base, &block->extent, gap_size, base,
             base_size);
 
         return base;
@@ -443,7 +443,7 @@ base_alloc_impl(tsdn_t *tsdn, base_t *base, size_t size, size_t alignment,
                 goto label_return;
         }
 
-        ret = base_extent_bump_alloc(tsdn, base, extent, usize, alignment);
+        ret = base_extent_bump_alloc(base, extent, usize, alignment);
         if (esn != NULL) {
                 *esn = extent_sn_get(extent);
         }


@@ -2893,7 +2893,7 @@ label_not_resized:
 
 JEMALLOC_EXPORT size_t JEMALLOC_NOTHROW
 JEMALLOC_ATTR(pure)
-je_sallocx(const void *ptr, int flags) {
+je_sallocx(const void *ptr, UNUSED int flags) {
         size_t usize;
         tsdn_t *tsdn;
 