Rename huge to large.

Author: Jason Evans
Date:   2016-05-31 14:50:21 -07:00
commit 7d63fed0fd (parent 714d1640f3)
37 changed files with 587 additions and 626 deletions
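
The rename reaches the public mallctl namespace as well as internal symbols: "arenas.nhchunks" becomes "arenas.nlextents", "arenas.hchunk.<i>.size" becomes "arenas.lextent.<i>.size", and the "stats.arenas.<i>.huge.*" / "stats.arenas.<i>.hchunks.*" statistics become "stats.arenas.<i>.large.*" / "stats.arenas.<i>.lextents.*". A minimal sketch of querying the new names (not part of this commit; it assumes a default, unprefixed jemalloc build with <jemalloc/jemalloc.h> on the include path):

#include <stdio.h>
#include <jemalloc/jemalloc.h>

int
main(void)
{
	unsigned nlextents;
	size_t lextent0, sz;

	/* Number of large size classes; formerly "arenas.nhchunks". */
	sz = sizeof(unsigned);
	if (mallctl("arenas.nlextents", &nlextents, &sz, NULL, 0) != 0)
		return (1);

	/* Size of the smallest large class; formerly "arenas.hchunk.0.size". */
	sz = sizeof(size_t);
	if (mallctl("arenas.lextent.0.size", &lextent0, &sz, NULL, 0) != 0)
		return (1);

	printf("%u large size classes; smallest is %zu bytes\n", nlextents,
	    lextent0);
	return (0);
}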

View File

@@ -25,10 +25,10 @@ get_nsmall(void)
}
static unsigned
get_nhuge(void)
get_nlarge(void)
{
return (get_nsizes_impl("arenas.nhchunks"));
return (get_nsizes_impl("arenas.nlextents"));
}
static size_t
@@ -58,10 +58,10 @@ get_small_size(size_t ind)
}
static size_t
get_huge_size(size_t ind)
get_large_size(size_t ind)
{
return (get_size_impl("arenas.hchunk.0.size", ind));
return (get_size_impl("arenas.lextent.0.size", ind));
}
/* Like ivsalloc(), but safe to call on discarded allocations. */
@@ -81,8 +81,8 @@ vsalloc(tsdn_t *tsdn, const void *ptr)
TEST_BEGIN(test_arena_reset)
{
#define NHUGE 32
unsigned arena_ind, nsmall, nhuge, nptrs, i;
#define NLARGE 32
unsigned arena_ind, nsmall, nlarge, nptrs, i;
size_t sz, miblen;
void **ptrs;
int flags;
@@ -96,8 +96,8 @@ TEST_BEGIN(test_arena_reset)
flags = MALLOCX_ARENA(arena_ind) | MALLOCX_TCACHE_NONE;
nsmall = get_nsmall();
nhuge = get_nhuge() > NHUGE ? NHUGE : get_nhuge();
nptrs = nsmall + nhuge;
nlarge = get_nlarge() > NLARGE ? NLARGE : get_nlarge();
nptrs = nsmall + nlarge;
ptrs = (void **)malloc(nptrs * sizeof(void *));
assert_ptr_not_null(ptrs, "Unexpected malloc() failure");
@@ -108,8 +108,8 @@ TEST_BEGIN(test_arena_reset)
assert_ptr_not_null(ptrs[i],
"Unexpected mallocx(%zu, %#x) failure", sz, flags);
}
for (i = 0; i < nhuge; i++) {
sz = get_huge_size(i);
for (i = 0; i < nlarge; i++) {
sz = get_large_size(i);
ptrs[nsmall + i] = mallocx(sz, flags);
assert_ptr_not_null(ptrs[i],
"Unexpected mallocx(%zu, %#x) failure", sz, flags);

View File

@@ -22,7 +22,7 @@ TEST_BEGIN(test_decay_ticks)
{
ticker_t *decay_ticker;
unsigned tick0, tick1;
size_t sz, huge0;
size_t sz, large0;
void *p;
test_skip_if(opt_purge != purge_mode_decay);
@@ -32,18 +32,18 @@ TEST_BEGIN(test_decay_ticks)
"Unexpected failure getting decay ticker");
sz = sizeof(size_t);
assert_d_eq(mallctl("arenas.hchunk.0.size", &huge0, &sz, NULL, 0), 0,
assert_d_eq(mallctl("arenas.lextent.0.size", &large0, &sz, NULL, 0), 0,
"Unexpected mallctl failure");
/*
* Test the standard APIs using a huge size class, since we can't
* Test the standard APIs using a large size class, since we can't
* control tcache interactions for small size classes (except by
* completely disabling tcache for the entire test program).
*/
/* malloc(). */
tick0 = ticker_read(decay_ticker);
p = malloc(huge0);
p = malloc(large0);
assert_ptr_not_null(p, "Unexpected malloc() failure");
tick1 = ticker_read(decay_ticker);
assert_u32_ne(tick1, tick0, "Expected ticker to tick during malloc()");
@@ -55,7 +55,7 @@ TEST_BEGIN(test_decay_ticks)
/* calloc(). */
tick0 = ticker_read(decay_ticker);
p = calloc(1, huge0);
p = calloc(1, large0);
assert_ptr_not_null(p, "Unexpected calloc() failure");
tick1 = ticker_read(decay_ticker);
assert_u32_ne(tick1, tick0, "Expected ticker to tick during calloc()");
@@ -63,7 +63,7 @@ TEST_BEGIN(test_decay_ticks)
/* posix_memalign(). */
tick0 = ticker_read(decay_ticker);
assert_d_eq(posix_memalign(&p, sizeof(size_t), huge0), 0,
assert_d_eq(posix_memalign(&p, sizeof(size_t), large0), 0,
"Unexpected posix_memalign() failure");
tick1 = ticker_read(decay_ticker);
assert_u32_ne(tick1, tick0,
@@ -72,7 +72,7 @@ TEST_BEGIN(test_decay_ticks)
/* aligned_alloc(). */
tick0 = ticker_read(decay_ticker);
p = aligned_alloc(sizeof(size_t), huge0);
p = aligned_alloc(sizeof(size_t), large0);
assert_ptr_not_null(p, "Unexpected aligned_alloc() failure");
tick1 = ticker_read(decay_ticker);
assert_u32_ne(tick1, tick0,
@@ -82,13 +82,13 @@ TEST_BEGIN(test_decay_ticks)
/* realloc(). */
/* Allocate. */
tick0 = ticker_read(decay_ticker);
p = realloc(NULL, huge0);
p = realloc(NULL, large0);
assert_ptr_not_null(p, "Unexpected realloc() failure");
tick1 = ticker_read(decay_ticker);
assert_u32_ne(tick1, tick0, "Expected ticker to tick during realloc()");
/* Reallocate. */
tick0 = ticker_read(decay_ticker);
p = realloc(p, huge0);
p = realloc(p, large0);
assert_ptr_not_null(p, "Unexpected realloc() failure");
tick1 = ticker_read(decay_ticker);
assert_u32_ne(tick1, tick0, "Expected ticker to tick during realloc()");
@@ -99,13 +99,13 @@ TEST_BEGIN(test_decay_ticks)
assert_u32_ne(tick1, tick0, "Expected ticker to tick during realloc()");
/*
* Test the *allocx() APIs using huge and small size classes, with
* Test the *allocx() APIs using large and small size classes, with
* tcache explicitly disabled.
*/
{
unsigned i;
size_t allocx_sizes[2];
allocx_sizes[0] = huge0;
allocx_sizes[0] = large0;
allocx_sizes[1] = 1;
for (i = 0; i < sizeof(allocx_sizes) / sizeof(size_t); i++) {
@@ -154,13 +154,13 @@ TEST_BEGIN(test_decay_ticks)
}
/*
* Test tcache fill/flush interactions for huge and small size classes,
* Test tcache fill/flush interactions for large and small size classes,
* using an explicit tcache.
*/
if (config_tcache) {
unsigned tcache_ind, i;
size_t tcache_sizes[2];
tcache_sizes[0] = huge0;
tcache_sizes[0] = large0;
tcache_sizes[1] = 1;
sz = sizeof(unsigned);
@@ -201,14 +201,14 @@ TEST_BEGIN(test_decay_ticker)
uint64_t epoch;
uint64_t npurge0 = 0;
uint64_t npurge1 = 0;
size_t sz, huge;
size_t sz, large;
unsigned i, nupdates0;
nstime_t time, decay_time, deadline;
test_skip_if(opt_purge != purge_mode_decay);
/*
* Allocate a bunch of huge objects, pause the clock, deallocate the
* Allocate a bunch of large objects, pause the clock, deallocate the
* objects, restore the clock, then [md]allocx() in a tight loop to
* verify the ticker triggers purging.
*/
@@ -219,10 +219,10 @@ TEST_BEGIN(test_decay_ticker)
sz = sizeof(size_t);
assert_d_eq(mallctl("arenas.tcache_max", &tcache_max, &sz, NULL,
0), 0, "Unexpected mallctl failure");
huge = nallocx(tcache_max + 1, flags);
large = nallocx(tcache_max + 1, flags);
} else {
sz = sizeof(size_t);
assert_d_eq(mallctl("arenas.hchunk.0.size", &huge, &sz, NULL,
assert_d_eq(mallctl("arenas.lextent.0.size", &large, &sz, NULL,
0), 0, "Unexpected mallctl failure");
}
@@ -235,7 +235,7 @@ TEST_BEGIN(test_decay_ticker)
config_stats ? 0 : ENOENT, "Unexpected mallctl result");
for (i = 0; i < NPS; i++) {
ps[i] = mallocx(huge, flags);
ps[i] = mallocx(large, flags);
assert_ptr_not_null(ps[i], "Unexpected mallocx() failure");
}
@@ -293,13 +293,13 @@ TEST_BEGIN(test_decay_nonmonotonic)
uint64_t epoch;
uint64_t npurge0 = 0;
uint64_t npurge1 = 0;
size_t sz, huge0;
size_t sz, large0;
unsigned i, nupdates0;
test_skip_if(opt_purge != purge_mode_decay);
sz = sizeof(size_t);
assert_d_eq(mallctl("arenas.hchunk.0.size", &huge0, &sz, NULL, 0), 0,
assert_d_eq(mallctl("arenas.lextent.0.size", &large0, &sz, NULL, 0), 0,
"Unexpected mallctl failure");
assert_d_eq(mallctl("arena.0.purge", NULL, NULL, NULL, 0), 0,
@@ -319,7 +319,7 @@ TEST_BEGIN(test_decay_nonmonotonic)
nstime_update = nstime_update_mock;
for (i = 0; i < NPS; i++) {
ps[i] = mallocx(huge0, flags);
ps[i] = mallocx(large0, flags);
assert_ptr_not_null(ps[i], "Unexpected mallocx() failure");
}
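
Each of the three decay tests above follows the same pattern: read the smallest large size class, then allocate and free objects of that size so the requests reach the arena and advance its decay ticker. A minimal sketch of that step (the real tests build their flags elsewhere in the file; MALLOCX_TCACHE_NONE is assumed here purely for illustration):

size_t large0, sz = sizeof(size_t);
void *p;

/* Formerly "arenas.hchunk.0.size". */
assert_d_eq(mallctl("arenas.lextent.0.size", &large0, &sz, NULL, 0), 0,
    "Unexpected mallctl failure");
/* Bypass the tcache so the request is served by the arena. */
p = mallocx(large0, MALLOCX_TCACHE_NONE);
assert_ptr_not_null(p, "Unexpected mallocx() failure");
dallocx(p, MALLOCX_TCACHE_NONE);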

View File

@@ -35,16 +35,16 @@ TEST_BEGIN(test_small_extent_size)
}
TEST_END
TEST_BEGIN(test_huge_extent_size)
TEST_BEGIN(test_large_extent_size)
{
bool cache_oblivious;
unsigned nhchunks, i;
unsigned nlextents, i;
size_t sz, extent_size_prev, ceil_prev;
size_t mib[4];
size_t miblen = sizeof(mib) / sizeof(size_t);
/*
* Iterate over all huge size classes, get their extent sizes, and
* Iterate over all large size classes, get their extent sizes, and
* verify that the quantized size is the same as the extent size.
*/
@@ -53,12 +53,12 @@ TEST_BEGIN(test_huge_extent_size)
NULL, 0), 0, "Unexpected mallctl failure");
sz = sizeof(unsigned);
assert_d_eq(mallctl("arenas.nhchunks", &nhchunks, &sz, NULL, 0), 0,
assert_d_eq(mallctl("arenas.nlextents", &nlextents, &sz, NULL, 0), 0,
"Unexpected mallctl failure");
assert_d_eq(mallctlnametomib("arenas.hchunk.0.size", mib, &miblen), 0,
assert_d_eq(mallctlnametomib("arenas.lextent.0.size", mib, &miblen), 0,
"Unexpected mallctlnametomib failure");
for (i = 0; i < nhchunks; i++) {
for (i = 0; i < nlextents; i++) {
size_t lextent_size, extent_size, floor, ceil;
mib[2] = i;
@@ -91,7 +91,7 @@ TEST_BEGIN(test_huge_extent_size)
ceil_prev, extent_size);
}
}
if (i + 1 < nhchunks) {
if (i + 1 < nlextents) {
extent_size_prev = floor;
ceil_prev = extent_size_quantize_ceil(extent_size +
PAGE);
@@ -141,6 +141,6 @@ main(void)
return (test(
test_small_extent_size,
test_huge_extent_size,
test_large_extent_size,
test_monotonic));
}

View File

@@ -9,7 +9,7 @@ const char *malloc_conf =
#endif
static arena_dalloc_junk_small_t *arena_dalloc_junk_small_orig;
static huge_dalloc_junk_t *huge_dalloc_junk_orig;
static large_dalloc_junk_t *large_dalloc_junk_orig;
static void *watch_for_junking;
static bool saw_junking;
@@ -37,10 +37,10 @@ arena_dalloc_junk_small_intercept(void *ptr, const arena_bin_info_t *bin_info)
}
static void
huge_dalloc_junk_intercept(void *ptr, size_t usize)
large_dalloc_junk_intercept(void *ptr, size_t usize)
{
huge_dalloc_junk_orig(ptr, usize);
large_dalloc_junk_orig(ptr, usize);
/*
* The conditions under which junk filling actually occurs are nuanced
* enough that it doesn't make sense to duplicate the decision logic in
@@ -59,8 +59,8 @@ test_junk(size_t sz_min, size_t sz_max)
if (opt_junk_free) {
arena_dalloc_junk_small_orig = arena_dalloc_junk_small;
arena_dalloc_junk_small = arena_dalloc_junk_small_intercept;
huge_dalloc_junk_orig = huge_dalloc_junk;
huge_dalloc_junk = huge_dalloc_junk_intercept;
large_dalloc_junk_orig = large_dalloc_junk;
large_dalloc_junk = large_dalloc_junk_intercept;
}
sz_prev = 0;
@@ -110,7 +110,7 @@ test_junk(size_t sz_min, size_t sz_max)
if (opt_junk_free) {
arena_dalloc_junk_small = arena_dalloc_junk_small_orig;
huge_dalloc_junk = huge_dalloc_junk_orig;
large_dalloc_junk = large_dalloc_junk_orig;
}
}
@@ -122,7 +122,7 @@ TEST_BEGIN(test_junk_small)
}
TEST_END
TEST_BEGIN(test_junk_huge)
TEST_BEGIN(test_junk_large)
{
test_skip_if(!config_fill);
@@ -136,5 +136,5 @@ main(void)
return (test(
test_junk_small,
test_junk_huge));
test_junk_large));
}

View File

@@ -596,7 +596,7 @@ TEST_BEGIN(test_arenas_constants)
TEST_ARENAS_CONSTANT(size_t, quantum, QUANTUM);
TEST_ARENAS_CONSTANT(size_t, page, PAGE);
TEST_ARENAS_CONSTANT(unsigned, nbins, NBINS);
TEST_ARENAS_CONSTANT(unsigned, nhchunks, NSIZES - NBINS);
TEST_ARENAS_CONSTANT(unsigned, nlextents, NSIZES - NBINS);
#undef TEST_ARENAS_CONSTANT
}
@@ -622,13 +622,13 @@ TEST_BEGIN(test_arenas_bin_constants)
}
TEST_END
TEST_BEGIN(test_arenas_hchunk_constants)
TEST_BEGIN(test_arenas_lextent_constants)
{
#define TEST_ARENAS_HCHUNK_CONSTANT(t, name, expected) do { \
t name; \
size_t sz = sizeof(t); \
assert_d_eq(mallctl("arenas.hchunk.0."#name, &name, &sz, NULL, \
assert_d_eq(mallctl("arenas.lextent.0."#name, &name, &sz, NULL, \
0), 0, "Unexpected mallctl() failure"); \
assert_zu_eq(name, expected, "Incorrect "#name" size"); \
} while (0)
@@ -704,7 +704,7 @@ main(void)
test_arenas_decay_time,
test_arenas_constants,
test_arenas_bin_constants,
test_arenas_hchunk_constants,
test_arenas_lextent_constants,
test_arenas_extend,
test_stats_arenas));
}

View File

@@ -3,18 +3,18 @@
static size_t
get_max_size_class(void)
{
unsigned nhchunks;
unsigned nlextents;
size_t mib[4];
size_t sz, miblen, max_size_class;
sz = sizeof(unsigned);
assert_d_eq(mallctl("arenas.nhchunks", &nhchunks, &sz, NULL, 0), 0,
assert_d_eq(mallctl("arenas.nlextents", &nlextents, &sz, NULL, 0), 0,
"Unexpected mallctl() error");
miblen = sizeof(mib) / sizeof(size_t);
assert_d_eq(mallctlnametomib("arenas.hchunk.0.size", mib, &miblen), 0,
assert_d_eq(mallctlnametomib("arenas.lextent.0.size", mib, &miblen), 0,
"Unexpected mallctlnametomib() error");
mib[2] = nhchunks - 1;
mib[2] = nlextents - 1;
sz = sizeof(size_t);
assert_d_eq(mallctlbymib(mib, miblen, &max_size_class, &sz, NULL, 0), 0,

View File

@@ -33,7 +33,7 @@ TEST_BEGIN(test_stats_summary)
}
TEST_END
TEST_BEGIN(test_stats_huge)
TEST_BEGIN(test_stats_large)
{
void *p;
uint64_t epoch;
@@ -49,14 +49,14 @@ TEST_BEGIN(test_stats_huge)
"Unexpected mallctl() failure");
sz = sizeof(size_t);
assert_d_eq(mallctl("stats.arenas.0.huge.allocated", &allocated, &sz,
assert_d_eq(mallctl("stats.arenas.0.large.allocated", &allocated, &sz,
NULL, 0), expected, "Unexpected mallctl() result");
sz = sizeof(uint64_t);
assert_d_eq(mallctl("stats.arenas.0.huge.nmalloc", &nmalloc, &sz, NULL,
assert_d_eq(mallctl("stats.arenas.0.large.nmalloc", &nmalloc, &sz, NULL,
0), expected, "Unexpected mallctl() result");
assert_d_eq(mallctl("stats.arenas.0.huge.ndalloc", &ndalloc, &sz, NULL,
assert_d_eq(mallctl("stats.arenas.0.large.ndalloc", &ndalloc, &sz, NULL,
0), expected, "Unexpected mallctl() result");
assert_d_eq(mallctl("stats.arenas.0.huge.nrequests", &nrequests, &sz,
assert_d_eq(mallctl("stats.arenas.0.large.nrequests", &nrequests, &sz,
NULL, 0), expected, "Unexpected mallctl() result");
if (config_stats) {
@@ -75,7 +75,7 @@ TEST_END
TEST_BEGIN(test_stats_arenas_summary)
{
unsigned arena;
void *little, *huge;
void *little, *large;
uint64_t epoch;
size_t sz;
int expected = config_stats ? 0 : ENOENT;
@@ -88,11 +88,11 @@ TEST_BEGIN(test_stats_arenas_summary)
little = mallocx(SMALL_MAXCLASS, 0);
assert_ptr_not_null(little, "Unexpected mallocx() failure");
huge = mallocx(chunksize, 0);
assert_ptr_not_null(huge, "Unexpected mallocx() failure");
large = mallocx(chunksize, 0);
assert_ptr_not_null(large, "Unexpected mallocx() failure");
dallocx(little, 0);
dallocx(huge, 0);
dallocx(large, 0);
assert_d_eq(mallctl("arena.0.purge", NULL, NULL, NULL, 0), 0,
"Unexpected mallctl() failure");
@@ -185,7 +185,7 @@ TEST_BEGIN(test_stats_arenas_small)
}
TEST_END
TEST_BEGIN(test_stats_arenas_huge)
TEST_BEGIN(test_stats_arenas_large)
{
unsigned arena;
void *p;
@@ -204,12 +204,12 @@ TEST_BEGIN(test_stats_arenas_huge)
"Unexpected mallctl() failure");
sz = sizeof(size_t);
assert_d_eq(mallctl("stats.arenas.0.huge.allocated", &allocated, &sz,
assert_d_eq(mallctl("stats.arenas.0.large.allocated", &allocated, &sz,
NULL, 0), expected, "Unexpected mallctl() result");
sz = sizeof(uint64_t);
assert_d_eq(mallctl("stats.arenas.0.huge.nmalloc", &nmalloc, &sz,
assert_d_eq(mallctl("stats.arenas.0.large.nmalloc", &nmalloc, &sz,
NULL, 0), expected, "Unexpected mallctl() result");
assert_d_eq(mallctl("stats.arenas.0.huge.ndalloc", &ndalloc, &sz,
assert_d_eq(mallctl("stats.arenas.0.large.ndalloc", &ndalloc, &sz,
NULL, 0), expected, "Unexpected mallctl() result");
if (config_stats) {
@@ -299,12 +299,12 @@ TEST_BEGIN(test_stats_arenas_bins)
}
TEST_END
TEST_BEGIN(test_stats_arenas_hchunks)
TEST_BEGIN(test_stats_arenas_lextents)
{
unsigned arena;
void *p;
uint64_t epoch, nmalloc, ndalloc;
size_t curhchunks, sz, hsize;
size_t curlextents, sz, hsize;
int expected = config_stats ? 0 : ENOENT;
arena = 0;
@@ -312,7 +312,7 @@ TEST_BEGIN(test_stats_arenas_hchunks)
0, "Unexpected mallctl() failure");
sz = sizeof(size_t);
assert_d_eq(mallctl("arenas.hchunk.0.size", &hsize, &sz, NULL, 0), 0,
assert_d_eq(mallctl("arenas.lextent.0.size", &hsize, &sz, NULL, 0), 0,
"Unexpected mallctl() failure");
p = mallocx(hsize, 0);
@@ -322,20 +322,20 @@ TEST_BEGIN(test_stats_arenas_hchunks)
"Unexpected mallctl() failure");
sz = sizeof(uint64_t);
assert_d_eq(mallctl("stats.arenas.0.hchunks.0.nmalloc", &nmalloc, &sz,
assert_d_eq(mallctl("stats.arenas.0.lextents.0.nmalloc", &nmalloc, &sz,
NULL, 0), expected, "Unexpected mallctl() result");
assert_d_eq(mallctl("stats.arenas.0.hchunks.0.ndalloc", &ndalloc, &sz,
assert_d_eq(mallctl("stats.arenas.0.lextents.0.ndalloc", &ndalloc, &sz,
NULL, 0), expected, "Unexpected mallctl() result");
sz = sizeof(size_t);
assert_d_eq(mallctl("stats.arenas.0.hchunks.0.curhchunks", &curhchunks,
&sz, NULL, 0), expected, "Unexpected mallctl() result");
assert_d_eq(mallctl("stats.arenas.0.lextents.0.curlextents",
&curlextents, &sz, NULL, 0), expected, "Unexpected mallctl() result");
if (config_stats) {
assert_u64_gt(nmalloc, 0,
"nmalloc should be greater than zero");
assert_u64_ge(nmalloc, ndalloc,
"nmalloc should be at least as large as ndalloc");
assert_u64_gt(curhchunks, 0,
assert_u64_gt(curlextents, 0,
"At least one chunk should be currently allocated");
}
@@ -349,10 +349,10 @@ main(void)
return (test(
test_stats_summary,
test_stats_huge,
test_stats_large,
test_stats_arenas_summary,
test_stats_arenas_small,
test_stats_arenas_huge,
test_stats_arenas_large,
test_stats_arenas_bins,
test_stats_arenas_hchunks));
test_stats_arenas_lextents));
}
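
Outside the test harness, the renamed statistics are read the same way: refresh the stats snapshot through the standard "epoch" control, then query the "stats.arenas.<i>.large.*" counters. A brief sketch (assumes a build with stats enabled; error handling omitted):

uint64_t epoch = 1, nmalloc, ndalloc;
size_t allocated, sz;

/* Advance the epoch so subsequent reads reflect current values. */
sz = sizeof(uint64_t);
mallctl("epoch", &epoch, &sz, &epoch, sizeof(epoch));

sz = sizeof(size_t);
mallctl("stats.arenas.0.large.allocated", &allocated, &sz, NULL, 0);
sz = sizeof(uint64_t);
mallctl("stats.arenas.0.large.nmalloc", &nmalloc, &sz, NULL, 0);
mallctl("stats.arenas.0.large.ndalloc", &ndalloc, &sz, NULL, 0);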

View File

@@ -53,7 +53,7 @@ TEST_BEGIN(test_zero_small)
}
TEST_END
TEST_BEGIN(test_zero_huge)
TEST_BEGIN(test_zero_large)
{
test_skip_if(!config_fill);
@@ -67,5 +67,5 @@ main(void)
return (test(
test_zero_small,
test_zero_huge));
test_zero_large));
}