Fix shadowed variable usage.
Verified with EXTRA_CFLAGS=-Wshadow.
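For readers unfamiliar with the warning class, here is a minimal, hypothetical sketch of what -Wshadow catches (illustrative only, not taken from this patch; the file and variable names are made up). An inner declaration that reuses an outer variable's name triggers the warning; renaming one of the two, as this commit does throughout the tests, silences it without changing behavior.

    /* shadow_demo.c -- hypothetical example; compile with: cc -Wshadow -c shadow_demo.c */
    int
    demo(int n) {
        int err = 0;
        for (int i = 0; i < n; i++) {
            int err = i % 2;    /* -Wshadow: this declaration shadows the outer 'err' */
            if (err != 0) {
                return i;
            }
        }
        /* The fix used in this commit: rename one of the two, e.g. 'int loop_err'. */
        return err;
    }
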
@@ -45,9 +45,9 @@ do_allocs(size_t sz, size_t cnt, bool do_frees) {
 
 int
 main(void) {
-    size_t lg_prof_sample = 19;
-    int err = mallctl("prof.reset", NULL, NULL, (void *)&lg_prof_sample,
-        sizeof(lg_prof_sample));
+    size_t lg_prof_sample_local = 19;
+    int err = mallctl("prof.reset", NULL, NULL,
+        (void *)&lg_prof_sample_local, sizeof(lg_prof_sample_local));
     assert(err == 0);
 
     prof_backtrace_hook_set(mock_backtrace);
@@ -87,8 +87,8 @@ test_fail(const char *format, ...) {
 }
 
 static const char *
-test_status_string(test_status_t test_status) {
-    switch (test_status) {
+test_status_string(test_status_t current_status) {
+    switch (current_status) {
     case test_status_pass: return "pass";
     case test_status_skip: return "skip";
     case test_status_fail: return "fail";
@@ -258,12 +258,12 @@ TEST_BEGIN(test_arena_destroy_hooks_default) {
 
     /* Try arena.create with custom hooks. */
     size_t sz = sizeof(extent_hooks_t *);
-    extent_hooks_t *default_hooks;
-    expect_d_eq(mallctl("arena.0.extent_hooks", (void *)&default_hooks,
+    extent_hooks_t *a0_default_hooks;
+    expect_d_eq(mallctl("arena.0.extent_hooks", (void *)&a0_default_hooks,
         &sz, NULL, 0), 0, "Unexpected mallctlnametomib() failure");
 
     /* Default impl; but wrapped as "customized". */
-    extent_hooks_t new_hooks = *default_hooks;
+    extent_hooks_t new_hooks = *a0_default_hooks;
     extent_hooks_t *hook = &new_hooks;
     sz = sizeof(unsigned);
     expect_d_eq(mallctl("arenas.create", (void *)&arena_ind, &sz,
@@ -45,7 +45,7 @@
      */ \
     atomic_store_##ta(&atom, val1, ATOMIC_RELAXED); \
     success = false; \
-    for (int i = 0; i < 10 && !success; i++) { \
+    for (int retry = 0; retry < 10 && !success; retry++) { \
         expected = val2; \
         success = atomic_compare_exchange_weak_##ta(&atom, \
             &expected, val3, ATOMIC_RELAXED, ATOMIC_RELAXED); \
@@ -1,7 +1,7 @@
 #include "test/jemalloc_test.h"
 
 #define BATCH_MAX ((1U << 16) + 1024)
-static void *ptrs[BATCH_MAX];
+static void *global_ptrs[BATCH_MAX];
 
 #define PAGE_ALIGNED(ptr) (((uintptr_t)ptr & PAGE_MASK) == 0)
 
@@ -122,13 +122,14 @@ test_wrapper(size_t size, size_t alignment, bool zero, unsigned arena_flag) {
             }
             size_t batch = base + (size_t)j;
             assert(batch < BATCH_MAX);
-            size_t filled = batch_alloc_wrapper(ptrs, batch, size,
-                flags);
+            size_t filled = batch_alloc_wrapper(global_ptrs, batch,
+                size, flags);
             assert_zu_eq(filled, batch, "");
-            verify_batch_basic(tsd, ptrs, batch, usize, zero);
-            verify_batch_locality(tsd, ptrs, batch, usize, arena,
-                nregs);
-            release_batch(ptrs, batch, usize);
+            verify_batch_basic(tsd, global_ptrs, batch, usize,
+                zero);
+            verify_batch_locality(tsd, global_ptrs, batch, usize,
+                arena, nregs);
+            release_batch(global_ptrs, batch, usize);
         }
     }
 
@@ -163,16 +164,16 @@ TEST_BEGIN(test_batch_alloc_large) {
     size_t size = SC_LARGE_MINCLASS;
     for (size_t batch = 0; batch < 4; ++batch) {
         assert(batch < BATCH_MAX);
-        size_t filled = batch_alloc(ptrs, batch, size, 0);
+        size_t filled = batch_alloc(global_ptrs, batch, size, 0);
         assert_zu_eq(filled, batch, "");
-        release_batch(ptrs, batch, size);
+        release_batch(global_ptrs, batch, size);
     }
     size = tcache_maxclass + 1;
     for (size_t batch = 0; batch < 4; ++batch) {
         assert(batch < BATCH_MAX);
-        size_t filled = batch_alloc(ptrs, batch, size, 0);
+        size_t filled = batch_alloc(global_ptrs, batch, size, 0);
         assert_zu_eq(filled, batch, "");
-        release_batch(ptrs, batch, size);
+        release_batch(global_ptrs, batch, size);
     }
 }
 TEST_END
 
@@ -69,10 +69,10 @@ test_data_t *init_test_data(ssize_t dirty_decay_ms, ssize_t muzzy_decay_ms) {
         &hpa_hooks_default);
     assert_false(err, "");
 
-    const size_t oversize_threshold = 8 * 1024 * 1024;
+    const size_t pa_oversize_threshold = 8 * 1024 * 1024;
     err = pa_shard_init(TSDN_NULL, &test_data->shard, &test_data->central,
         &test_data->emap, test_data->base, /* ind */ 1, &test_data->stats,
-        &test_data->stats_mtx, &time, oversize_threshold, dirty_decay_ms,
+        &test_data->stats_mtx, &time, pa_oversize_threshold, dirty_decay_ms,
         muzzy_decay_ms);
     assert_false(err, "");
 
@@ -26,14 +26,14 @@ TEST_BEGIN(test_idump) {
     bool active;
     void *p;
 
-    const char *prefix = TEST_PREFIX;
+    const char *test_prefix = TEST_PREFIX;
 
     test_skip_if(!config_prof);
 
     active = true;
 
-    expect_d_eq(mallctl("prof.prefix", NULL, NULL, (void *)&prefix,
-        sizeof(prefix)), 0,
+    expect_d_eq(mallctl("prof.prefix", NULL, NULL, (void *)&test_prefix,
+        sizeof(test_prefix)), 0,
         "Unexpected mallctl failure while overwriting dump prefix");
 
     expect_d_eq(mallctl("prof.active", NULL, NULL, (void *)&active,
@@ -15,7 +15,7 @@ confirm_prof_setup() {
         "opt_prof_recent_alloc_max not set correctly");
 
     /* Dynamics */
-    assert_true(prof_active, "prof_active not on");
+    assert_true(prof_active_state, "prof_active not on");
     assert_zd_eq(prof_recent_alloc_max_ctl_read(), OPT_ALLOC_MAX,
         "prof_recent_alloc_max not set correctly");
 }
@@ -21,26 +21,25 @@ set_prof_active(bool active) {
 
 static size_t
 get_lg_prof_sample(void) {
-    size_t lg_prof_sample;
+    size_t ret;
     size_t sz = sizeof(size_t);
 
-    expect_d_eq(mallctl("prof.lg_sample", (void *)&lg_prof_sample, &sz,
-        NULL, 0), 0,
+    expect_d_eq(mallctl("prof.lg_sample", (void *)&ret, &sz, NULL, 0), 0,
         "Unexpected mallctl failure while reading profiling sample rate");
-    return lg_prof_sample;
+    return ret;
 }
 
 static void
-do_prof_reset(size_t lg_prof_sample) {
+do_prof_reset(size_t lg_prof_sample_input) {
     expect_d_eq(mallctl("prof.reset", NULL, NULL,
-        (void *)&lg_prof_sample, sizeof(size_t)), 0,
+        (void *)&lg_prof_sample_input, sizeof(size_t)), 0,
         "Unexpected mallctl failure while resetting profile data");
-    expect_zu_eq(lg_prof_sample, get_lg_prof_sample(),
+    expect_zu_eq(lg_prof_sample_input, get_lg_prof_sample(),
         "Expected profile sample rate change");
 }
 
 TEST_BEGIN(test_prof_reset_basic) {
-    size_t lg_prof_sample_orig, lg_prof_sample, lg_prof_sample_next;
+    size_t lg_prof_sample_orig, lg_prof_sample_cur, lg_prof_sample_next;
     size_t sz;
     unsigned i;
 
@@ -52,8 +51,8 @@ TEST_BEGIN(test_prof_reset_basic) {
         "Unexpected mallctl failure while reading profiling sample rate");
     expect_zu_eq(lg_prof_sample_orig, 0,
         "Unexpected profiling sample rate");
-    lg_prof_sample = get_lg_prof_sample();
-    expect_zu_eq(lg_prof_sample_orig, lg_prof_sample,
+    lg_prof_sample_cur = get_lg_prof_sample();
+    expect_zu_eq(lg_prof_sample_orig, lg_prof_sample_cur,
         "Unexpected disagreement between \"opt.lg_prof_sample\" and "
         "\"prof.lg_sample\"");
 
@@ -61,8 +60,8 @@ TEST_BEGIN(test_prof_reset_basic) {
     for (i = 0; i < 2; i++) {
         expect_d_eq(mallctl("prof.reset", NULL, NULL, NULL, 0), 0,
             "Unexpected mallctl failure while resetting profile data");
-        lg_prof_sample = get_lg_prof_sample();
-        expect_zu_eq(lg_prof_sample_orig, lg_prof_sample,
+        lg_prof_sample_cur = get_lg_prof_sample();
+        expect_zu_eq(lg_prof_sample_orig, lg_prof_sample_cur,
             "Unexpected profile sample rate change");
     }
 
@@ -70,15 +69,15 @@ TEST_BEGIN(test_prof_reset_basic) {
     lg_prof_sample_next = 1;
     for (i = 0; i < 2; i++) {
         do_prof_reset(lg_prof_sample_next);
-        lg_prof_sample = get_lg_prof_sample();
-        expect_zu_eq(lg_prof_sample, lg_prof_sample_next,
+        lg_prof_sample_cur = get_lg_prof_sample();
+        expect_zu_eq(lg_prof_sample_cur, lg_prof_sample_next,
             "Expected profile sample rate change");
         lg_prof_sample_next = lg_prof_sample_orig;
     }
 
     /* Make sure the test code restored prof.lg_sample. */
-    lg_prof_sample = get_lg_prof_sample();
-    expect_zu_eq(lg_prof_sample_orig, lg_prof_sample,
+    lg_prof_sample_cur = get_lg_prof_sample();
+    expect_zu_eq(lg_prof_sample_orig, lg_prof_sample_cur,
         "Unexpected disagreement between \"opt.lg_prof_sample\" and "
         "\"prof.lg_sample\"");
 }
@@ -964,7 +964,7 @@ do_update_search_test(int nnodes, int ntrees, int nremovals,
                 tree_insert(&tree, &nodes[j]);
             }
         }
-        for (int i = 0; i < nupdates; i++) {
+        for (int j = 0; j < nupdates; j++) {
             uint32_t ind = gen_rand32_range(sfmt, nnodes);
             nodes[ind].specialness = 1 - nodes[ind].specialness;
             tree_update_summaries(&tree, &nodes[ind]);
@@ -13,43 +13,43 @@ static atomic_u_t nfinished;
 
 static unsigned
 do_arena_create(extent_hooks_t *h) {
-    unsigned arena_ind;
-    size_t sz = sizeof(unsigned);
-    expect_d_eq(mallctl("arenas.create", (void *)&arena_ind, &sz,
+    unsigned new_arena_ind;
+    size_t ind_sz = sizeof(unsigned);
+    expect_d_eq(mallctl("arenas.create", (void *)&new_arena_ind, &ind_sz,
         (void *)(h != NULL ? &h : NULL), (h != NULL ? sizeof(h) : 0)), 0,
         "Unexpected mallctl() failure");
-    return arena_ind;
+    return new_arena_ind;
 }
 
 static void
-do_arena_destroy(unsigned arena_ind) {
+do_arena_destroy(unsigned ind) {
     size_t mib[3];
     size_t miblen;
 
     miblen = sizeof(mib)/sizeof(size_t);
     expect_d_eq(mallctlnametomib("arena.0.destroy", mib, &miblen), 0,
         "Unexpected mallctlnametomib() failure");
-    mib[1] = (size_t)arena_ind;
+    mib[1] = (size_t)ind;
     expect_d_eq(mallctlbymib(mib, miblen, NULL, NULL, NULL, 0), 0,
         "Unexpected mallctlbymib() failure");
 }
 
 static void
 do_refresh(void) {
-    uint64_t epoch = 1;
-    expect_d_eq(mallctl("epoch", NULL, NULL, (void *)&epoch,
-        sizeof(epoch)), 0, "Unexpected mallctl() failure");
+    uint64_t refresh_epoch = 1;
+    expect_d_eq(mallctl("epoch", NULL, NULL, (void *)&refresh_epoch,
+        sizeof(refresh_epoch)), 0, "Unexpected mallctl() failure");
 }
 
 static size_t
-do_get_size_impl(const char *cmd, unsigned arena_ind) {
+do_get_size_impl(const char *cmd, unsigned ind) {
     size_t mib[4];
     size_t miblen = sizeof(mib) / sizeof(size_t);
     size_t z = sizeof(size_t);
 
     expect_d_eq(mallctlnametomib(cmd, mib, &miblen),
         0, "Unexpected mallctlnametomib(\"%s\", ...) failure", cmd);
-    mib[2] = arena_ind;
+    mib[2] = ind;
     size_t size;
     expect_d_eq(mallctlbymib(mib, miblen, (void *)&size, &z, NULL, 0),
         0, "Unexpected mallctlbymib([\"%s\"], ...) failure", cmd);
@@ -58,13 +58,13 @@ do_get_size_impl(const char *cmd, unsigned arena_ind) {
 }
 
 static size_t
-do_get_active(unsigned arena_ind) {
-    return do_get_size_impl("stats.arenas.0.pactive", arena_ind) * PAGE;
+do_get_active(unsigned ind) {
+    return do_get_size_impl("stats.arenas.0.pactive", ind) * PAGE;
 }
 
 static size_t
-do_get_mapped(unsigned arena_ind) {
-    return do_get_size_impl("stats.arenas.0.mapped", arena_ind);
+do_get_mapped(unsigned ind) {
+    return do_get_size_impl("stats.arenas.0.mapped", ind);
 }
 
 static void *