Use huge size class infrastructure for large size classes.

Jason Evans
2016-05-28 00:17:28 -07:00
parent b46261d58b
commit ed2c2427a7
34 changed files with 463 additions and 1979 deletions
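The mechanical core of the change, visible throughout the test diffs below, is the retirement of the large-size-class mallctl namespace (arenas.nlruns, arenas.lrun.<i>.size, stats.arenas.<i>.lruns.*) in favor of the huge namespace (arenas.nhchunks, arenas.hchunk.<i>.size, stats.arenas.<i>.hchunks.*). A minimal sketch of the query pattern the updated tests standardize on, assuming a build at this revision:

    unsigned nhchunks, i;
    size_t sz;
    size_t mib[4];
    size_t miblen = sizeof(mib) / sizeof(size_t);

    /* Count the huge classes, which now also cover the former large ones. */
    sz = sizeof(unsigned);
    assert_d_eq(mallctl("arenas.nhchunks", &nhchunks, &sz, NULL, 0), 0,
        "Unexpected mallctl failure");

    /* Query each class's size via a cached MIB. */
    assert_d_eq(mallctlnametomib("arenas.hchunk.0.size", mib, &miblen), 0,
        "Unexpected mallctlnametomib failure");
    for (i = 0; i < nhchunks; i++) {
        size_t hsize;
        mib[2] = i;
        sz = sizeof(size_t);
        assert_d_eq(mallctlbymib(mib, miblen, &hsize, &sz, NULL, 0), 0,
            "Unexpected mallctlbymib failure");
    }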

View File

@@ -120,7 +120,7 @@ chunk_merge(void *chunk_a, size_t size_a, void *chunk_b, size_t size_b,
TEST_BEGIN(test_chunk)
{
void *p;
size_t old_size, new_size, large0, large1, huge0, huge1, huge2, sz;
size_t old_size, new_size, huge0, huge1, huge2, sz;
unsigned arena_ind;
int flags;
size_t hooks_mib[3], purge_mib[3];
@@ -162,14 +162,8 @@ TEST_BEGIN(test_chunk)
assert_ptr_ne(old_hooks.split, chunk_split, "Unexpected split error");
assert_ptr_ne(old_hooks.merge, chunk_merge, "Unexpected merge error");
/* Get large size classes. */
sz = sizeof(size_t);
assert_d_eq(mallctl("arenas.lrun.0.size", &large0, &sz, NULL, 0), 0,
"Unexpected arenas.lrun.0.size failure");
assert_d_eq(mallctl("arenas.lrun.1.size", &large1, &sz, NULL, 0), 0,
"Unexpected arenas.lrun.1.size failure");
/* Get huge size classes. */
sz = sizeof(size_t);
assert_d_eq(mallctl("arenas.hchunk.0.size", &huge0, &sz, NULL, 0), 0,
"Unexpected arenas.hchunk.0.size failure");
assert_d_eq(mallctl("arenas.hchunk.1.size", &huge1, &sz, NULL, 0), 0,
@@ -224,24 +218,6 @@ TEST_BEGIN(test_chunk)
do_dalloc = true;
do_decommit = false;
/* Test decommit for large allocations. */
do_decommit = true;
p = mallocx(large1, flags);
assert_ptr_not_null(p, "Unexpected mallocx() error");
assert_d_eq(mallctlbymib(purge_mib, purge_miblen, NULL, NULL, NULL, 0),
0, "Unexpected arena.%u.purge error", arena_ind);
did_decommit = false;
assert_zu_eq(xallocx(p, large0, 0, flags), large0,
"Unexpected xallocx() failure");
assert_d_eq(mallctlbymib(purge_mib, purge_miblen, NULL, NULL, NULL, 0),
0, "Unexpected arena.%u.purge error", arena_ind);
did_commit = false;
assert_zu_eq(xallocx(p, large1, 0, flags), large1,
"Unexpected xallocx() failure");
assert_b_eq(did_decommit, did_commit, "Expected decommit/commit match");
dallocx(p, flags);
do_decommit = false;
/* Make sure non-huge allocation succeeds. */
p = mallocx(42, flags);
assert_ptr_not_null(p, "Unexpected mallocx() error");
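For reference, the purge calls in this test go through a pre-translated MIB so the arena index can be patched in without repeated name lookups; a hedged sketch of the setup behind the purge_mib/purge_miblen variables declared above, assuming the arena.<i>.purge mallctl of this revision:

    size_t purge_mib[3];
    size_t purge_miblen = sizeof(purge_mib) / sizeof(size_t);

    assert_d_eq(mallctlnametomib("arena.0.purge", purge_mib, &purge_miblen),
        0, "Unexpected mallctlnametomib failure");
    purge_mib[1] = (size_t)arena_ind;   /* Retarget the MIB at the test arena. */
    assert_d_eq(mallctlbymib(purge_mib, purge_miblen, NULL, NULL, NULL, 0),
        0, "Unexpected arena.%u.purge error", arena_ind);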

View File

@@ -91,13 +91,6 @@ get_nsmall(void)
return (get_nsizes_impl("arenas.nbins"));
}
static unsigned
get_nlarge(void)
{
return (get_nsizes_impl("arenas.nlruns"));
}
static unsigned
get_nhuge(void)
{
@@ -131,13 +124,6 @@ get_small_size(size_t ind)
return (get_size_impl("arenas.bin.0.size", ind));
}
static size_t
get_large_size(size_t ind)
{
return (get_size_impl("arenas.lrun.0.size", ind));
}
static size_t
get_huge_size(size_t ind)
{
@@ -239,81 +225,14 @@ TEST_BEGIN(test_extra_small)
}
TEST_END
TEST_BEGIN(test_extra_large)
TEST_BEGIN(test_extra_huge)
{
int flags = MALLOCX_ARENA(arena_ind());
size_t smallmax, large0, large1, large2, huge0, hugemax;
size_t smallmax, huge1, huge2, huge3, hugemax;
void *p;
/* Get size classes. */
smallmax = get_small_size(get_nsmall()-1);
large0 = get_large_size(0);
large1 = get_large_size(1);
large2 = get_large_size(2);
huge0 = get_huge_size(0);
hugemax = get_huge_size(get_nhuge()-1);
p = mallocx(large2, flags);
assert_ptr_not_null(p, "Unexpected mallocx() error");
assert_zu_eq(xallocx(p, large2, 0, flags), large2,
"Unexpected xallocx() behavior");
/* Test size decrease with zero extra. */
assert_zu_eq(xallocx(p, large0, 0, flags), large0,
"Unexpected xallocx() behavior");
assert_zu_eq(xallocx(p, smallmax, 0, flags), large0,
"Unexpected xallocx() behavior");
assert_zu_eq(xallocx(p, large2, 0, flags), large2,
"Unexpected xallocx() behavior");
/* Test size decrease with non-zero extra. */
assert_zu_eq(xallocx(p, large0, large2 - large0, flags), large2,
"Unexpected xallocx() behavior");
assert_zu_eq(xallocx(p, large1, large2 - large1, flags), large2,
"Unexpected xallocx() behavior");
assert_zu_eq(xallocx(p, large0, large1 - large0, flags), large1,
"Unexpected xallocx() behavior");
assert_zu_eq(xallocx(p, smallmax, large0 - smallmax, flags), large0,
"Unexpected xallocx() behavior");
assert_zu_eq(xallocx(p, large0, 0, flags), large0,
"Unexpected xallocx() behavior");
/* Test size increase with zero extra. */
assert_zu_eq(xallocx(p, large2, 0, flags), large2,
"Unexpected xallocx() behavior");
assert_zu_eq(xallocx(p, huge0, 0, flags), large2,
"Unexpected xallocx() behavior");
assert_zu_eq(xallocx(p, large0, 0, flags), large0,
"Unexpected xallocx() behavior");
/* Test size increase with non-zero extra. */
assert_zu_lt(xallocx(p, large0, huge0 - large0, flags), huge0,
"Unexpected xallocx() behavior");
assert_zu_eq(xallocx(p, large0, 0, flags), large0,
"Unexpected xallocx() behavior");
/* Test size increase with non-zero extra. */
assert_zu_eq(xallocx(p, large0, large2 - large0, flags), large2,
"Unexpected xallocx() behavior");
assert_zu_eq(xallocx(p, large2, 0, flags), large2,
"Unexpected xallocx() behavior");
/* Test size+extra overflow. */
assert_zu_lt(xallocx(p, large2, hugemax - large2 + 1, flags), huge0,
"Unexpected xallocx() behavior");
dallocx(p, flags);
}
TEST_END
TEST_BEGIN(test_extra_huge)
{
int flags = MALLOCX_ARENA(arena_ind());
size_t largemax, huge1, huge2, huge3, hugemax;
void *p;
/* Get size classes. */
largemax = get_large_size(get_nlarge()-1);
huge1 = get_huge_size(1);
huge2 = get_huge_size(2);
huge3 = get_huge_size(3);
@@ -327,7 +246,7 @@ TEST_BEGIN(test_extra_huge)
/* Test size decrease with zero extra. */
assert_zu_ge(xallocx(p, huge1, 0, flags), huge1,
"Unexpected xallocx() behavior");
assert_zu_ge(xallocx(p, largemax, 0, flags), huge1,
assert_zu_ge(xallocx(p, smallmax, 0, flags), huge1,
"Unexpected xallocx() behavior");
assert_zu_eq(xallocx(p, huge3, 0, flags), huge3,
@@ -339,7 +258,7 @@ TEST_BEGIN(test_extra_huge)
"Unexpected xallocx() behavior");
assert_zu_eq(xallocx(p, huge1, huge2 - huge1, flags), huge2,
"Unexpected xallocx() behavior");
assert_zu_ge(xallocx(p, largemax, huge1 - largemax, flags), huge1,
assert_zu_ge(xallocx(p, smallmax, huge1 - smallmax, flags), huge1,
"Unexpected xallocx() behavior");
assert_zu_ge(xallocx(p, huge1, 0, flags), huge1,
@@ -455,18 +374,6 @@ test_zero(size_t szmin, size_t szmax)
dallocx(p, flags);
}
TEST_BEGIN(test_zero_large)
{
size_t large0, largemax;
/* Get size classes. */
large0 = get_large_size(0);
largemax = get_large_size(get_nlarge()-1);
test_zero(large0, largemax);
}
TEST_END
TEST_BEGIN(test_zero_huge)
{
size_t huge0, huge1;
@@ -490,8 +397,6 @@ main(void)
test_size,
test_size_extra_overflow,
test_extra_small,
test_extra_large,
test_extra_huge,
test_zero_large,
test_zero_huge));
}
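The assertions above lean on the xallocx() contract: it resizes strictly in place and returns the resulting usable size, which may legitimately overshoot the request when the allocation cannot cross a class boundary in place, hence the assert_zu_ge checks. A condensed sketch, reusing the smallmax/huge1/huge3 classes fetched in the test:

    void *p = mallocx(huge3, flags);
    assert_ptr_not_null(p, "Unexpected mallocx() error");
    /* Shrink to a smaller huge class. */
    assert_zu_ge(xallocx(p, huge1, 0, flags), huge1,
        "Unexpected xallocx() behavior");
    /* A huge allocation cannot leave the huge range in place. */
    assert_zu_ge(xallocx(p, smallmax, 0, flags), huge1,
        "Unexpected xallocx() behavior");
    /* Grow back with zero extra. */
    assert_zu_eq(xallocx(p, huge3, 0, flags), huge3,
        "Unexpected xallocx() behavior");
    dallocx(p, flags);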

View File

@@ -24,13 +24,6 @@ get_nsmall(void)
return (get_nsizes_impl("arenas.nbins"));
}
static unsigned
get_nlarge(void)
{
return (get_nsizes_impl("arenas.nlruns"));
}
static unsigned
get_nhuge(void)
{
@@ -64,13 +57,6 @@ get_small_size(size_t ind)
return (get_size_impl("arenas.bin.0.size", ind));
}
static size_t
get_large_size(size_t ind)
{
return (get_size_impl("arenas.lrun.0.size", ind));
}
static size_t
get_huge_size(size_t ind)
{
@@ -90,13 +76,13 @@ vsalloc(tsdn_t *tsdn, const void *ptr)
if (!extent_active_get(extent))
return (0);
return (isalloc(tsdn, extent, ptr, false));
return (isalloc(tsdn, extent, ptr));
}
TEST_BEGIN(test_arena_reset)
{
#define NHUGE 4
unsigned arena_ind, nsmall, nlarge, nhuge, nptrs, i;
#define NHUGE 32
unsigned arena_ind, nsmall, nhuge, nptrs, i;
size_t sz, miblen;
void **ptrs;
int flags;
@@ -110,9 +96,8 @@ TEST_BEGIN(test_arena_reset)
flags = MALLOCX_ARENA(arena_ind) | MALLOCX_TCACHE_NONE;
nsmall = get_nsmall();
nlarge = get_nlarge();
nhuge = get_nhuge() > NHUGE ? NHUGE : get_nhuge();
nptrs = nsmall + nlarge + nhuge;
nptrs = nsmall + nhuge;
ptrs = (void **)malloc(nptrs * sizeof(void *));
assert_ptr_not_null(ptrs, "Unexpected malloc() failure");
@@ -123,15 +108,9 @@ TEST_BEGIN(test_arena_reset)
assert_ptr_not_null(ptrs[i],
"Unexpected mallocx(%zu, %#x) failure", sz, flags);
}
for (i = 0; i < nlarge; i++) {
sz = get_large_size(i);
ptrs[nsmall + i] = mallocx(sz, flags);
assert_ptr_not_null(ptrs[i],
"Unexpected mallocx(%zu, %#x) failure", sz, flags);
}
for (i = 0; i < nhuge; i++) {
sz = get_huge_size(i);
ptrs[nsmall + nlarge + i] = mallocx(sz, flags);
ptrs[nsmall + i] = mallocx(sz, flags);
assert_ptr_not_null(ptrs[i],
"Unexpected mallocx(%zu, %#x) failure", sz, flags);
}
@@ -140,7 +119,7 @@ TEST_BEGIN(test_arena_reset)
/* Verify allocations. */
for (i = 0; i < nptrs; i++) {
assert_zu_gt(ivsalloc(tsdn, ptrs[i], false), 0,
assert_zu_gt(ivsalloc(tsdn, ptrs[i]), 0,
"Allocation should have queryable size");
}
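A sketch of the pointer-table layout after the change, with the null check aimed at the slot just written (the huge loop in the diff re-checks ptrs[i], i.e. a small-class slot that was already verified):

    /* Small classes fill ptrs[0 .. nsmall), huge classes the remainder. */
    nptrs = nsmall + nhuge;
    for (i = 0; i < nhuge; i++) {
        sz = get_huge_size(i);
        ptrs[nsmall + i] = mallocx(sz, flags);
        assert_ptr_not_null(ptrs[nsmall + i],
            "Unexpected mallocx(%zu, %#x) failure", sz, flags);
    }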

View File

@@ -1,6 +1,6 @@
#include "test/jemalloc_test.h"
const char *malloc_conf = "purge:decay,decay_time:1";
const char *malloc_conf = "purge:decay,decay_time:1,lg_tcache_max:0";
static nstime_update_t *nstime_update_orig;
@@ -22,7 +22,7 @@ TEST_BEGIN(test_decay_ticks)
{
ticker_t *decay_ticker;
unsigned tick0, tick1;
size_t sz, huge0, large0;
size_t sz, huge0;
void *p;
test_skip_if(opt_purge != purge_mode_decay);
@@ -34,13 +34,11 @@ TEST_BEGIN(test_decay_ticks)
sz = sizeof(size_t);
assert_d_eq(mallctl("arenas.hchunk.0.size", &huge0, &sz, NULL, 0), 0,
"Unexpected mallctl failure");
assert_d_eq(mallctl("arenas.lrun.0.size", &large0, &sz, NULL, 0), 0,
"Unexpected mallctl failure");
/*
* Test the standard APIs using a huge size class, since we can't
* control tcache interactions (except by completely disabling tcache
* for the entire test program).
* control tcache interactions for small size classes (except by
* completely disabling tcache for the entire test program).
*/
/* malloc(). */
@@ -101,15 +99,14 @@ TEST_BEGIN(test_decay_ticks)
assert_u32_ne(tick1, tick0, "Expected ticker to tick during realloc()");
/*
* Test the *allocx() APIs using huge, large, and small size classes,
* with tcache explicitly disabled.
* Test the *allocx() APIs using huge and small size classes, with
* tcache explicitly disabled.
*/
{
unsigned i;
size_t allocx_sizes[3];
size_t allocx_sizes[2];
allocx_sizes[0] = huge0;
allocx_sizes[1] = large0;
allocx_sizes[2] = 1;
allocx_sizes[1] = 1;
for (i = 0; i < sizeof(allocx_sizes) / sizeof(size_t); i++) {
sz = allocx_sizes[i];
@@ -157,13 +154,13 @@ TEST_BEGIN(test_decay_ticks)
}
/*
* Test tcache fill/flush interactions for large and small size classes,
* Test tcache fill/flush interactions for huge and small size classes,
* using an explicit tcache.
*/
if (config_tcache) {
unsigned tcache_ind, i;
size_t tcache_sizes[2];
tcache_sizes[0] = large0;
tcache_sizes[0] = huge0;
tcache_sizes[1] = 1;
sz = sizeof(unsigned);
@@ -204,14 +201,14 @@ TEST_BEGIN(test_decay_ticker)
uint64_t epoch;
uint64_t npurge0 = 0;
uint64_t npurge1 = 0;
size_t sz, large;
size_t sz, huge;
unsigned i, nupdates0;
nstime_t time, decay_time, deadline;
test_skip_if(opt_purge != purge_mode_decay);
/*
* Allocate a bunch of large objects, pause the clock, deallocate the
* Allocate a bunch of huge objects, pause the clock, deallocate the
* objects, restore the clock, then [md]allocx() in a tight loop to
* verify the ticker triggers purging.
*/
@@ -222,11 +219,11 @@ TEST_BEGIN(test_decay_ticker)
sz = sizeof(size_t);
assert_d_eq(mallctl("arenas.tcache_max", &tcache_max, &sz, NULL,
0), 0, "Unexpected mallctl failure");
large = nallocx(tcache_max + 1, flags);
huge = nallocx(tcache_max + 1, flags);
} else {
sz = sizeof(size_t);
assert_d_eq(mallctl("arenas.lrun.0.size", &large, &sz, NULL, 0),
0, "Unexpected mallctl failure");
assert_d_eq(mallctl("arenas.hchunk.0.size", &huge, &sz, NULL,
0), 0, "Unexpected mallctl failure");
}
assert_d_eq(mallctl("arena.0.purge", NULL, NULL, NULL, 0), 0,
@@ -238,7 +235,7 @@ TEST_BEGIN(test_decay_ticker)
config_stats ? 0 : ENOENT, "Unexpected mallctl result");
for (i = 0; i < NPS; i++) {
ps[i] = mallocx(large, flags);
ps[i] = mallocx(huge, flags);
assert_ptr_not_null(ps[i], "Unexpected mallocx() failure");
}
@@ -296,13 +293,13 @@ TEST_BEGIN(test_decay_nonmonotonic)
uint64_t epoch;
uint64_t npurge0 = 0;
uint64_t npurge1 = 0;
size_t sz, large0;
size_t sz, huge0;
unsigned i, nupdates0;
test_skip_if(opt_purge != purge_mode_decay);
sz = sizeof(size_t);
assert_d_eq(mallctl("arenas.lrun.0.size", &large0, &sz, NULL, 0), 0,
assert_d_eq(mallctl("arenas.hchunk.0.size", &huge0, &sz, NULL, 0), 0,
"Unexpected mallctl failure");
assert_d_eq(mallctl("arena.0.purge", NULL, NULL, NULL, 0), 0,
@@ -322,7 +319,7 @@ TEST_BEGIN(test_decay_nonmonotonic)
nstime_update = nstime_update_mock;
for (i = 0; i < NPS; i++) {
ps[i] = mallocx(large0, flags);
ps[i] = mallocx(huge0, flags);
assert_ptr_not_null(ps[i], "Unexpected mallocx() failure");
}
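The new lg_tcache_max:0 in malloc_conf matters here: with the large classes folded into huge, the smallest huge classes fall under the default tcache ceiling, so caching is pinned off to keep the ticker behavior deterministic (this is the likely rationale; the diff does not state it). The size selection above follows the same principle; a sketch:

    /* nallocx() reports the class mallocx() would use, without allocating;
     * rounding tcache_max + 1 up yields the smallest class guaranteed to
     * bypass an enabled tcache. */
    size_t tcache_max, huge;
    size_t sz = sizeof(size_t);

    assert_d_eq(mallctl("arenas.tcache_max", &tcache_max, &sz, NULL, 0), 0,
        "Unexpected mallctl failure");
    huge = nallocx(tcache_max + 1, 0);
    assert_zu_gt(huge, tcache_max,
        "nallocx() should round up past the tcache ceiling");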

View File

@@ -35,16 +35,16 @@ TEST_BEGIN(test_small_extent_size)
}
TEST_END
TEST_BEGIN(test_large_extent_size)
TEST_BEGIN(test_huge_extent_size)
{
bool cache_oblivious;
unsigned nlruns, i;
unsigned nhchunks, i;
size_t sz, extent_size_prev, ceil_prev;
size_t mib[4];
size_t miblen = sizeof(mib) / sizeof(size_t);
/*
* Iterate over all large size classes, get their extent sizes, and
* Iterate over all huge size classes, get their extent sizes, and
* verify that the quantized size is the same as the extent size.
*/
@@ -53,12 +53,12 @@ TEST_BEGIN(test_large_extent_size)
NULL, 0), 0, "Unexpected mallctl failure");
sz = sizeof(unsigned);
assert_d_eq(mallctl("arenas.nlruns", &nlruns, &sz, NULL, 0), 0,
assert_d_eq(mallctl("arenas.nhchunks", &nhchunks, &sz, NULL, 0), 0,
"Unexpected mallctl failure");
assert_d_eq(mallctlnametomib("arenas.lrun.0.size", mib, &miblen), 0,
assert_d_eq(mallctlnametomib("arenas.hchunk.0.size", mib, &miblen), 0,
"Unexpected mallctlnametomib failure");
for (i = 0; i < nlruns; i++) {
for (i = 0; i < nhchunks; i++) {
size_t lextent_size, extent_size, floor, ceil;
mib[2] = i;
@@ -91,33 +91,24 @@ TEST_BEGIN(test_large_extent_size)
ceil_prev, extent_size);
}
}
extent_size_prev = floor;
ceil_prev = extent_size_quantize_ceil(extent_size + PAGE);
if (i + 1 < nhchunks) {
extent_size_prev = floor;
ceil_prev = extent_size_quantize_ceil(extent_size +
PAGE);
}
}
}
TEST_END
TEST_BEGIN(test_monotonic)
{
unsigned nbins, nlruns, i;
size_t sz, floor_prev, ceil_prev;
/*
* Iterate over all extent sizes and verify that
* extent_size_quantize_{floor,ceil}() are monotonic.
*/
sz = sizeof(unsigned);
assert_d_eq(mallctl("arenas.nbins", &nbins, &sz, NULL, 0), 0,
"Unexpected mallctl failure");
sz = sizeof(unsigned);
assert_d_eq(mallctl("arenas.nlruns", &nlruns, &sz, NULL, 0), 0,
"Unexpected mallctl failure");
#define SZ_MAX ZU(4 * 1024 * 1024)
unsigned i;
size_t floor_prev, ceil_prev;
floor_prev = 0;
ceil_prev = 0;
for (i = 1; i <= large_maxclass >> LG_PAGE; i++) {
for (i = 1; i <= SZ_MAX >> LG_PAGE; i++) {
size_t extent_size, floor, ceil;
extent_size = i << LG_PAGE;
@@ -150,6 +141,6 @@ main(void)
return (test(
test_small_extent_size,
test_large_extent_size,
test_huge_extent_size,
test_monotonic));
}
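The invariants test_monotonic exercises can be stated compactly; a sketch assuming the internal extent_size_quantize_{floor,ceil}() helpers from this revision:

    size_t extent_size = 5 * PAGE;  /* Hypothetical unquantized size. */
    size_t floor = extent_size_quantize_floor(extent_size);
    size_t ceil = extent_size_quantize_ceil(extent_size);

    /* Bracketing: floor <= size <= ceil. */
    assert_zu_le(floor, extent_size, "Floor should be <=");
    assert_zu_ge(ceil, extent_size, "Ceiling should be >=");
    /* Quantization is a no-op on already-quantized sizes. */
    assert_zu_eq(floor, extent_size_quantize_floor(floor),
        "Floor should be idempotent");
    assert_zu_eq(ceil, extent_size_quantize_ceil(ceil),
        "Ceiling should be idempotent");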

View File

@@ -9,7 +9,6 @@ const char *malloc_conf =
#endif
static arena_dalloc_junk_small_t *arena_dalloc_junk_small_orig;
static arena_dalloc_junk_large_t *arena_dalloc_junk_large_orig;
static huge_dalloc_junk_t *huge_dalloc_junk_orig;
static void *watch_for_junking;
static bool saw_junking;
@@ -38,25 +37,10 @@ arena_dalloc_junk_small_intercept(void *ptr, const arena_bin_info_t *bin_info)
}
static void
arena_dalloc_junk_large_intercept(void *ptr, size_t usize)
{
size_t i;
arena_dalloc_junk_large_orig(ptr, usize);
for (i = 0; i < usize; i++) {
assert_u_eq(((uint8_t *)ptr)[i], JEMALLOC_FREE_JUNK,
"Missing junk fill for byte %zu/%zu of deallocated region",
i, usize);
}
if (ptr == watch_for_junking)
saw_junking = true;
}
static void
huge_dalloc_junk_intercept(tsdn_t *tsdn, void *ptr, size_t usize)
huge_dalloc_junk_intercept(void *ptr, size_t usize)
{
huge_dalloc_junk_orig(tsdn, ptr, usize);
huge_dalloc_junk_orig(ptr, usize);
/*
* The conditions under which junk filling actually occurs are nuanced
* enough that it doesn't make sense to duplicate the decision logic in
@@ -75,8 +59,6 @@ test_junk(size_t sz_min, size_t sz_max)
if (opt_junk_free) {
arena_dalloc_junk_small_orig = arena_dalloc_junk_small;
arena_dalloc_junk_small = arena_dalloc_junk_small_intercept;
arena_dalloc_junk_large_orig = arena_dalloc_junk_large;
arena_dalloc_junk_large = arena_dalloc_junk_large_intercept;
huge_dalloc_junk_orig = huge_dalloc_junk;
huge_dalloc_junk = huge_dalloc_junk_intercept;
}
@@ -106,13 +88,18 @@ test_junk(size_t sz_min, size_t sz_max)
}
if (xallocx(s, sz+1, 0, 0) == sz) {
uint8_t *t;
watch_junking(s);
s = (uint8_t *)rallocx(s, sz+1, 0);
assert_ptr_not_null((void *)s,
t = (uint8_t *)rallocx(s, sz+1, 0);
assert_ptr_not_null((void *)t,
"Unexpected rallocx() failure");
assert_ptr_ne(s, t, "Unexpected in-place rallocx()");
assert_zu_ge(sallocx(t, 0), sz+1,
"Unexpectedly small rallocx() result");
assert_true(!opt_junk_free || saw_junking,
"Expected region of size %zu to be junk-filled",
sz);
s = t;
}
}
@@ -123,7 +110,6 @@ test_junk(size_t sz_min, size_t sz_max)
if (opt_junk_free) {
arena_dalloc_junk_small = arena_dalloc_junk_small_orig;
arena_dalloc_junk_large = arena_dalloc_junk_large_orig;
huge_dalloc_junk = huge_dalloc_junk_orig;
}
}
@@ -136,64 +122,11 @@ TEST_BEGIN(test_junk_small)
}
TEST_END
TEST_BEGIN(test_junk_large)
{
test_skip_if(!config_fill);
test_junk(SMALL_MAXCLASS+1, large_maxclass);
}
TEST_END
TEST_BEGIN(test_junk_huge)
{
test_skip_if(!config_fill);
test_junk(large_maxclass+1, chunksize*2);
}
TEST_END
arena_ralloc_junk_large_t *arena_ralloc_junk_large_orig;
static void *most_recently_trimmed;
static size_t
shrink_size(size_t size)
{
size_t shrink_size;
for (shrink_size = size - 1; nallocx(shrink_size, 0) == size;
shrink_size--)
; /* Do nothing. */
return (shrink_size);
}
static void
arena_ralloc_junk_large_intercept(void *ptr, size_t old_usize, size_t usize)
{
arena_ralloc_junk_large_orig(ptr, old_usize, usize);
assert_zu_eq(old_usize, large_maxclass, "Unexpected old_usize");
assert_zu_eq(usize, shrink_size(large_maxclass), "Unexpected usize");
most_recently_trimmed = ptr;
}
TEST_BEGIN(test_junk_large_ralloc_shrink)
{
void *p1, *p2;
p1 = mallocx(large_maxclass, 0);
assert_ptr_not_null(p1, "Unexpected mallocx() failure");
arena_ralloc_junk_large_orig = arena_ralloc_junk_large;
arena_ralloc_junk_large = arena_ralloc_junk_large_intercept;
p2 = rallocx(p1, shrink_size(large_maxclass), 0);
assert_ptr_eq(p1, p2, "Unexpected move during shrink");
arena_ralloc_junk_large = arena_ralloc_junk_large_orig;
assert_ptr_eq(most_recently_trimmed, p1,
"Expected trimmed portion of region to be junk-filled");
test_junk(SMALL_MAXCLASS+1, chunksize*2);
}
TEST_END
@@ -203,7 +136,5 @@ main(void)
return (test(
test_junk_small,
test_junk_large,
test_junk_huge,
test_junk_large_ralloc_shrink));
test_junk_huge));
}
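The surviving huge-junk check uses jemalloc's hook-interception idiom: in testing (JEMALLOC_JET) builds the junk fillers are writable function pointers, so a test wraps one, watches for a particular pointer, and restores the original. A condensed sketch of the pattern retained above:

    static huge_dalloc_junk_t *huge_dalloc_junk_orig;

    static void
    huge_dalloc_junk_intercept(void *ptr, size_t usize)
    {
        huge_dalloc_junk_orig(ptr, usize);
        if (ptr == watch_for_junking)
            saw_junking = true;
    }

    /* Install before exercising the free()/rallocx() paths, restore after: */
    huge_dalloc_junk_orig = huge_dalloc_junk;
    huge_dalloc_junk = huge_dalloc_junk_intercept;
    /* ... */
    huge_dalloc_junk = huge_dalloc_junk_orig;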

View File

@@ -596,8 +596,7 @@ TEST_BEGIN(test_arenas_constants)
TEST_ARENAS_CONSTANT(size_t, quantum, QUANTUM);
TEST_ARENAS_CONSTANT(size_t, page, PAGE);
TEST_ARENAS_CONSTANT(unsigned, nbins, NBINS);
TEST_ARENAS_CONSTANT(unsigned, nlruns, nlclasses);
TEST_ARENAS_CONSTANT(unsigned, nhchunks, nhclasses);
TEST_ARENAS_CONSTANT(unsigned, nhchunks, NSIZES - NBINS);
#undef TEST_ARENAS_CONSTANT
}
@@ -622,23 +621,6 @@ TEST_BEGIN(test_arenas_bin_constants)
}
TEST_END
TEST_BEGIN(test_arenas_lrun_constants)
{
#define TEST_ARENAS_LRUN_CONSTANT(t, name, expected) do { \
t name; \
size_t sz = sizeof(t); \
assert_d_eq(mallctl("arenas.lrun.0."#name, &name, &sz, NULL, \
0), 0, "Unexpected mallctl() failure"); \
assert_zu_eq(name, expected, "Incorrect "#name" size"); \
} while (0)
TEST_ARENAS_LRUN_CONSTANT(size_t, size, LARGE_MINCLASS);
#undef TEST_ARENAS_LRUN_CONSTANT
}
TEST_END
TEST_BEGIN(test_arenas_hchunk_constants)
{
@@ -650,7 +632,7 @@ TEST_BEGIN(test_arenas_hchunk_constants)
assert_zu_eq(name, expected, "Incorrect "#name" size"); \
} while (0)
TEST_ARENAS_HCHUNK_CONSTANT(size_t, size, chunksize);
TEST_ARENAS_HCHUNK_CONSTANT(size_t, size, LARGE_MINCLASS);
#undef TEST_ARENAS_HCHUNK_CONSTANT
}
@@ -721,7 +703,6 @@ main(void)
test_arenas_decay_time,
test_arenas_constants,
test_arenas_bin_constants,
test_arenas_lrun_constants,
test_arenas_hchunk_constants,
test_arenas_extend,
test_stats_arenas));
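The updated nhchunks constant encodes a partition: with no separate large category, every size class that is not a small bin is a huge (hchunk) class. A sketch of the identity:

    unsigned nbins, nhchunks;
    size_t sz = sizeof(unsigned);

    assert_d_eq(mallctl("arenas.nbins", &nbins, &sz, NULL, 0), 0,
        "Unexpected mallctl() failure");
    assert_d_eq(mallctl("arenas.nhchunks", &nhchunks, &sz, NULL, 0), 0,
        "Unexpected mallctl() failure");
    assert_u_eq(nbins + nhchunks, NSIZES,
        "Bin and hchunk classes should partition all NSIZES classes");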

View File

@@ -1,10 +1,17 @@
#include "test/jemalloc_test.h"
const char *malloc_conf = ""
#ifdef JEMALLOC_PROF
const char *malloc_conf =
"prof:true,prof_accum:true,prof_active:false,lg_prof_sample:0,"
"lg_prof_interval:0";
"prof:true,prof_accum:true,prof_active:false,lg_prof_sample:0"
",lg_prof_interval:0"
# ifdef JEMALLOC_TCACHE
","
# endif
#endif
#ifdef JEMALLOC_TCACHE
"tcache:false"
#endif
;
static bool did_prof_dump_open;
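The conditional concatenation above is easy to misread; spelled out, the preprocessor yields one of four strings (the lone "," clause exists only to splice the two settings when both features are configured):

    /*
     * JEMALLOC_PROF && JEMALLOC_TCACHE:
     *   "prof:true,prof_accum:true,prof_active:false,lg_prof_sample:0,lg_prof_interval:0,tcache:false"
     * JEMALLOC_PROF only:
     *   "prof:true,prof_accum:true,prof_active:false,lg_prof_sample:0,lg_prof_interval:0"
     * JEMALLOC_TCACHE only:
     *   "tcache:false"
     * Neither:
     *   ""
     */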

View File

@@ -1,149 +0,0 @@
#include "test/jemalloc_test.h"
TEST_BEGIN(test_small_run_size)
{
unsigned nbins, i;
size_t sz, run_size;
size_t mib[4];
size_t miblen = sizeof(mib) / sizeof(size_t);
/*
* Iterate over all small size classes, get their run sizes, and verify
* that the quantized size is the same as the run size.
*/
sz = sizeof(unsigned);
assert_d_eq(mallctl("arenas.nbins", &nbins, &sz, NULL, 0), 0,
"Unexpected mallctl failure");
assert_d_eq(mallctlnametomib("arenas.bin.0.run_size", mib, &miblen), 0,
"Unexpected mallctlnametomib failure");
for (i = 0; i < nbins; i++) {
mib[2] = i;
sz = sizeof(size_t);
assert_d_eq(mallctlbymib(mib, miblen, &run_size, &sz, NULL, 0),
0, "Unexpected mallctlbymib failure");
assert_zu_eq(run_size, run_quantize_floor(run_size),
"Small run quantization should be a no-op (run_size=%zu)",
run_size);
assert_zu_eq(run_size, run_quantize_ceil(run_size),
"Small run quantization should be a no-op (run_size=%zu)",
run_size);
}
}
TEST_END
TEST_BEGIN(test_large_run_size)
{
bool cache_oblivious;
unsigned nlruns, i;
size_t sz, run_size_prev, ceil_prev;
size_t mib[4];
size_t miblen = sizeof(mib) / sizeof(size_t);
/*
* Iterate over all large size classes, get their run sizes, and verify
* that the quantized size is the same as the run size.
*/
sz = sizeof(bool);
assert_d_eq(mallctl("config.cache_oblivious", &cache_oblivious, &sz,
NULL, 0), 0, "Unexpected mallctl failure");
sz = sizeof(unsigned);
assert_d_eq(mallctl("arenas.nlruns", &nlruns, &sz, NULL, 0), 0,
"Unexpected mallctl failure");
assert_d_eq(mallctlnametomib("arenas.lrun.0.size", mib, &miblen), 0,
"Unexpected mallctlnametomib failure");
for (i = 0; i < nlruns; i++) {
size_t lrun_size, run_size, floor, ceil;
mib[2] = i;
sz = sizeof(size_t);
assert_d_eq(mallctlbymib(mib, miblen, &lrun_size, &sz, NULL, 0),
0, "Unexpected mallctlbymib failure");
run_size = cache_oblivious ? lrun_size + PAGE : lrun_size;
floor = run_quantize_floor(run_size);
ceil = run_quantize_ceil(run_size);
assert_zu_eq(run_size, floor,
"Large run quantization should be a no-op for precise "
"size (lrun_size=%zu, run_size=%zu)", lrun_size, run_size);
assert_zu_eq(run_size, ceil,
"Large run quantization should be a no-op for precise "
"size (lrun_size=%zu, run_size=%zu)", lrun_size, run_size);
if (i > 0) {
assert_zu_eq(run_size_prev, run_quantize_floor(run_size
- PAGE), "Floor should be a precise size");
if (run_size_prev < ceil_prev) {
assert_zu_eq(ceil_prev, run_size,
"Ceiling should be a precise size "
"(run_size_prev=%zu, ceil_prev=%zu, "
"run_size=%zu)", run_size_prev, ceil_prev,
run_size);
}
}
run_size_prev = floor;
ceil_prev = run_quantize_ceil(run_size + PAGE);
}
}
TEST_END
TEST_BEGIN(test_monotonic)
{
unsigned nbins, nlruns, i;
size_t sz, floor_prev, ceil_prev;
/*
* Iterate over all run sizes and verify that
* run_quantize_{floor,ceil}() are monotonic.
*/
sz = sizeof(unsigned);
assert_d_eq(mallctl("arenas.nbins", &nbins, &sz, NULL, 0), 0,
"Unexpected mallctl failure");
sz = sizeof(unsigned);
assert_d_eq(mallctl("arenas.nlruns", &nlruns, &sz, NULL, 0), 0,
"Unexpected mallctl failure");
floor_prev = 0;
ceil_prev = 0;
for (i = 1; i <= large_maxclass >> LG_PAGE; i++) {
size_t run_size, floor, ceil;
run_size = i << LG_PAGE;
floor = run_quantize_floor(run_size);
ceil = run_quantize_ceil(run_size);
assert_zu_le(floor, run_size,
"Floor should be <= (floor=%zu, run_size=%zu, ceil=%zu)",
floor, run_size, ceil);
assert_zu_ge(ceil, run_size,
"Ceiling should be >= (floor=%zu, run_size=%zu, ceil=%zu)",
floor, run_size, ceil);
assert_zu_le(floor_prev, floor, "Floor should be monotonic "
"(floor_prev=%zu, floor=%zu, run_size=%zu, ceil=%zu)",
floor_prev, floor, run_size, ceil);
assert_zu_le(ceil_prev, ceil, "Ceiling should be monotonic "
"(floor=%zu, run_size=%zu, ceil_prev=%zu, ceil=%zu)",
floor, run_size, ceil_prev, ceil);
floor_prev = floor;
ceil_prev = ceil;
}
}
TEST_END
int
main(void)
{
return (test(
test_small_run_size,
test_large_run_size,
test_monotonic));
}
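One detail of this deleted test worth keeping in mind, since the extent-based replacement earlier in the commit repeats it: the cache-oblivious adjustment. When config.cache_oblivious is enabled, each large run carries one extra page so the usable region can be placed at a randomized offset, which is why quantization is checked against lrun_size + PAGE rather than lrun_size. A sketch of the adjustment, with hypothetical values mirroring the deleted loop body:

    size_t run_size = cache_oblivious ? lrun_size + PAGE : lrun_size;
    assert_zu_eq(run_size, run_quantize_floor(run_size),
        "Quantization should be a no-op for precise sizes");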

View File

@@ -42,7 +42,7 @@ TEST_BEGIN(test_stats_huge)
size_t sz;
int expected = config_stats ? 0 : ENOENT;
p = mallocx(large_maxclass+1, 0);
p = mallocx(SMALL_MAXCLASS+1, 0);
assert_ptr_not_null(p, "Unexpected mallocx() failure");
assert_d_eq(mallctl("epoch", NULL, NULL, &epoch, sizeof(epoch)), 0,
@@ -75,7 +75,7 @@ TEST_END
TEST_BEGIN(test_stats_arenas_summary)
{
unsigned arena;
void *little, *large, *huge;
void *little, *huge;
uint64_t epoch;
size_t sz;
int expected = config_stats ? 0 : ENOENT;
@@ -88,13 +88,10 @@ TEST_BEGIN(test_stats_arenas_summary)
little = mallocx(SMALL_MAXCLASS, 0);
assert_ptr_not_null(little, "Unexpected mallocx() failure");
large = mallocx(large_maxclass, 0);
assert_ptr_not_null(large, "Unexpected mallocx() failure");
huge = mallocx(chunksize, 0);
assert_ptr_not_null(huge, "Unexpected mallocx() failure");
dallocx(little, 0);
dallocx(large, 0);
dallocx(huge, 0);
assert_d_eq(mallctl("arena.0.purge", NULL, NULL, NULL, 0), 0,
@@ -188,50 +185,6 @@ TEST_BEGIN(test_stats_arenas_small)
}
TEST_END
TEST_BEGIN(test_stats_arenas_large)
{
unsigned arena;
void *p;
size_t sz, allocated;
uint64_t epoch, nmalloc, ndalloc, nrequests;
int expected = config_stats ? 0 : ENOENT;
arena = 0;
assert_d_eq(mallctl("thread.arena", NULL, NULL, &arena, sizeof(arena)),
0, "Unexpected mallctl() failure");
p = mallocx(large_maxclass, 0);
assert_ptr_not_null(p, "Unexpected mallocx() failure");
assert_d_eq(mallctl("epoch", NULL, NULL, &epoch, sizeof(epoch)), 0,
"Unexpected mallctl() failure");
sz = sizeof(size_t);
assert_d_eq(mallctl("stats.arenas.0.large.allocated", &allocated, &sz,
NULL, 0), expected, "Unexpected mallctl() result");
sz = sizeof(uint64_t);
assert_d_eq(mallctl("stats.arenas.0.large.nmalloc", &nmalloc, &sz,
NULL, 0), expected, "Unexpected mallctl() result");
assert_d_eq(mallctl("stats.arenas.0.large.ndalloc", &ndalloc, &sz,
NULL, 0), expected, "Unexpected mallctl() result");
assert_d_eq(mallctl("stats.arenas.0.large.nrequests", &nrequests, &sz,
NULL, 0), expected, "Unexpected mallctl() result");
if (config_stats) {
assert_zu_gt(allocated, 0,
"allocated should be greater than zero");
assert_u64_gt(nmalloc, 0,
"nmalloc should be greater than zero");
assert_u64_ge(nmalloc, ndalloc,
"nmalloc should be at least as large as ndalloc");
assert_u64_gt(nrequests, 0,
"nrequests should be greater than zero");
}
dallocx(p, 0);
}
TEST_END
TEST_BEGIN(test_stats_arenas_huge)
{
unsigned arena;
@@ -346,63 +299,23 @@ TEST_BEGIN(test_stats_arenas_bins)
}
TEST_END
TEST_BEGIN(test_stats_arenas_lruns)
{
unsigned arena;
void *p;
uint64_t epoch, nmalloc, ndalloc, nrequests;
size_t curruns, sz;
int expected = config_stats ? 0 : ENOENT;
arena = 0;
assert_d_eq(mallctl("thread.arena", NULL, NULL, &arena, sizeof(arena)),
0, "Unexpected mallctl() failure");
p = mallocx(LARGE_MINCLASS, 0);
assert_ptr_not_null(p, "Unexpected mallocx() failure");
assert_d_eq(mallctl("epoch", NULL, NULL, &epoch, sizeof(epoch)), 0,
"Unexpected mallctl() failure");
sz = sizeof(uint64_t);
assert_d_eq(mallctl("stats.arenas.0.lruns.0.nmalloc", &nmalloc, &sz,
NULL, 0), expected, "Unexpected mallctl() result");
assert_d_eq(mallctl("stats.arenas.0.lruns.0.ndalloc", &ndalloc, &sz,
NULL, 0), expected, "Unexpected mallctl() result");
assert_d_eq(mallctl("stats.arenas.0.lruns.0.nrequests", &nrequests, &sz,
NULL, 0), expected, "Unexpected mallctl() result");
sz = sizeof(size_t);
assert_d_eq(mallctl("stats.arenas.0.lruns.0.curruns", &curruns, &sz,
NULL, 0), expected, "Unexpected mallctl() result");
if (config_stats) {
assert_u64_gt(nmalloc, 0,
"nmalloc should be greater than zero");
assert_u64_ge(nmalloc, ndalloc,
"nmalloc should be at least as large as ndalloc");
assert_u64_gt(nrequests, 0,
"nrequests should be greater than zero");
assert_u64_gt(curruns, 0,
"At least one run should be currently allocated");
}
dallocx(p, 0);
}
TEST_END
TEST_BEGIN(test_stats_arenas_hchunks)
{
unsigned arena;
void *p;
uint64_t epoch, nmalloc, ndalloc;
size_t curhchunks, sz;
size_t curhchunks, sz, hsize;
int expected = config_stats ? 0 : ENOENT;
arena = 0;
assert_d_eq(mallctl("thread.arena", NULL, NULL, &arena, sizeof(arena)),
0, "Unexpected mallctl() failure");
p = mallocx(chunksize, 0);
sz = sizeof(size_t);
assert_d_eq(mallctl("arenas.hchunk.0.size", &hsize, &sz, NULL, 0), 0,
"Unexpected mallctl() failure");
p = mallocx(hsize, 0);
assert_ptr_not_null(p, "Unexpected mallocx() failure");
assert_d_eq(mallctl("epoch", NULL, NULL, &epoch, sizeof(epoch)), 0,
@@ -439,9 +352,7 @@ main(void)
test_stats_huge,
test_stats_arenas_summary,
test_stats_arenas_small,
test_stats_arenas_large,
test_stats_arenas_huge,
test_stats_arenas_bins,
test_stats_arenas_lruns,
test_stats_arenas_hchunks));
}
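All of these stats reads share one prerequisite the diffs only show in passing: jemalloc snapshots statistics, so a reader must advance the epoch before querying. A sketch of the refresh-then-read pattern with the new hchunks namespace:

    uint64_t epoch = 1;
    uint64_t nmalloc;
    size_t sz;

    /* Refresh the stats snapshot. */
    assert_d_eq(mallctl("epoch", NULL, NULL, &epoch, sizeof(epoch)), 0,
        "Unexpected mallctl() failure");
    sz = sizeof(uint64_t);
    assert_d_eq(mallctl("stats.arenas.0.hchunks.0.nmalloc", &nmalloc, &sz,
        NULL, 0), config_stats ? 0 : ENOENT, "Unexpected mallctl() result");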

View File

@@ -53,19 +53,11 @@ TEST_BEGIN(test_zero_small)
}
TEST_END
TEST_BEGIN(test_zero_large)
{
test_skip_if(!config_fill);
test_zero(SMALL_MAXCLASS+1, large_maxclass);
}
TEST_END
TEST_BEGIN(test_zero_huge)
{
test_skip_if(!config_fill);
test_zero(large_maxclass+1, chunksize*2);
test_zero(SMALL_MAXCLASS+1, chunksize*2);
}
TEST_END
@@ -75,6 +67,5 @@ main(void)
return (test(
test_zero_small,
test_zero_large,
test_zero_huge));
}
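For completeness, a sketch of the property test_zero() verifies across the widened range, assuming this test's usual malloc_conf of zero:true (under which, with config_fill, freshly allocated and grown regions are zero-filled):

    size_t i, sz = SMALL_MAXCLASS + 1;  /* Smallest formerly-large size. */
    uint8_t *s = (uint8_t *)mallocx(sz, 0);

    assert_ptr_not_null((void *)s, "Unexpected mallocx() failure");
    for (i = 0; i < sz; i++) {
        assert_u_eq(s[i], 0,
            "Byte %zu/%zu should be zero-filled", i, sz);
    }
    dallocx((void *)s, 0);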