Fix huge-aligned allocation.

This regression was caused by b9408d77a6 (Fix/simplify chunk_recycle()
allocation size computations.).

This resolves #647.
Jason Evans 2017-02-26 12:58:15 -08:00
parent ed19a48928
commit 1e2c9ef8d6
6 changed files with 141 additions and 11 deletions

Makefile.in

@@ -156,6 +156,7 @@ TESTS_UNIT := \
 	$(srcroot)test/unit/bitmap.c \
 	$(srcroot)test/unit/ckh.c \
 	$(srcroot)test/unit/decay.c \
+	$(srcroot)test/unit/extent_quantize.c \
 	$(srcroot)test/unit/fork.c \
 	$(srcroot)test/unit/hash.c \
 	$(srcroot)test/unit/junk.c \

include/jemalloc/internal/extent.h

@@ -75,6 +75,11 @@ typedef rb_tree(extent_node_t) extent_tree_t;
 /******************************************************************************/
 #ifdef JEMALLOC_H_EXTERNS
 
+#ifdef JEMALLOC_JET
+size_t extent_size_quantize_floor(size_t size);
+#endif
+size_t extent_size_quantize_ceil(size_t size);
+
 rb_proto(, extent_tree_szsnad_, extent_tree_t, extent_node_t)
 rb_proto(, extent_tree_ad_, extent_tree_t, extent_node_t)
 
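The #ifdef JEMALLOC_JET prototype above pairs with the matching guard in src/extent.c below: extent_size_quantize_floor stays static in production builds and is exported only so the new unit test can exercise it, while extent_size_quantize_ceil is declared unconditionally because src/chunk.c now calls it. A minimal sketch of that visibility pattern, using a hypothetical helper name rather than the actual jemalloc sources:

#include <stddef.h>

/* Header side (sketch): the prototype is visible only to test builds. */
#ifdef JEMALLOC_JET
size_t demo_quantize_floor(size_t size);
#endif

/* Source side (sketch): static in production builds, but given external
 * linkage when the test harness defines JEMALLOC_JET, so a white-box test
 * can call the helper directly. */
#ifndef JEMALLOC_JET
static
#endif
size_t
demo_quantize_floor(size_t size) {
	return size; /* stand-in for the real quantization logic */
}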

include/jemalloc/internal/private_symbols.txt

@@ -223,6 +223,8 @@ extent_node_sn_get
 extent_node_sn_set
 extent_node_zeroed_get
 extent_node_zeroed_set
+extent_size_quantize_ceil
+extent_size_quantize_floor
 extent_tree_ad_destroy
 extent_tree_ad_destroy_recurse
 extent_tree_ad_empty

src/chunk.c

@@ -188,12 +188,17 @@ chunk_deregister(const void *chunk, const extent_node_t *node)
 static extent_node_t *
 chunk_first_best_fit(arena_t *arena, extent_tree_t *chunks_szsnad, size_t size)
 {
+	extent_node_t *node;
+	size_t qsize;
 	extent_node_t key;
 
 	assert(size == CHUNK_CEILING(size));
 
-	extent_node_init(&key, arena, NULL, size, 0, false, false);
-	return (extent_tree_szsnad_nsearch(chunks_szsnad, &key));
+	qsize = extent_size_quantize_ceil(size);
+	extent_node_init(&key, arena, NULL, qsize, 0, false, false);
+	node = extent_tree_szsnad_nsearch(chunks_szsnad, &key);
+	assert(node == NULL || extent_node_size_get(node) >= size);
+	return node;
 }
 
 static void *
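Note: the chunks_szsnad tree searched here orders cached chunks by floor-quantized size (see extent_sz_comp in src/extent.c below), so an nsearch keyed by the raw request can return a node that falls in the request's floor class yet is smaller than the request, e.g. a cached 2.5 MiB extent for a 2.75 MiB request produced by huge-aligned allocation (sizes illustrative, not taken from #647). Keying the search by extent_size_quantize_ceil(size) rules that case out, which the new assert on the returned node makes explicit; a worked sketch of the quantization itself follows the src/extent.c hunk below.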

src/extent.c

@@ -3,13 +3,11 @@
 
 /******************************************************************************/
 
-/*
- * Round down to the nearest chunk size that can actually be requested during
- * normal huge allocation.
- */
-JEMALLOC_INLINE_C size_t
-extent_quantize(size_t size)
-{
+#ifndef JEMALLOC_JET
+static
+#endif
+size_t
+extent_size_quantize_floor(size_t size) {
 	size_t ret;
 	szind_t ind;
 
@@ -25,11 +23,32 @@ extent_quantize(size_t size)
 	return (ret);
 }
 
+size_t
+extent_size_quantize_ceil(size_t size) {
+	size_t ret;
+
+	assert(size > 0);
+
+	ret = extent_size_quantize_floor(size);
+	if (ret < size) {
+		/*
+		 * Skip a quantization that may have an adequately large extent,
+		 * because under-sized extents may be mixed in. This only
+		 * happens when an unusual size is requested, i.e. for aligned
+		 * allocation, and is just one of several places where linear
+		 * search would potentially find sufficiently aligned available
+		 * memory somewhere lower.
+		 */
+		ret = index2size(size2index(ret + 1));
+	}
+	return ret;
+}
+
 JEMALLOC_INLINE_C int
 extent_sz_comp(const extent_node_t *a, const extent_node_t *b)
 {
-	size_t a_qsize = extent_quantize(extent_node_size_get(a));
-	size_t b_qsize = extent_quantize(extent_node_size_get(b));
+	size_t a_qsize = extent_size_quantize_floor(extent_node_size_get(a));
+	size_t b_qsize = extent_size_quantize_floor(extent_node_size_get(b));
 
 	return ((a_qsize > b_qsize) - (a_qsize < b_qsize));
 }
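To make the floor/ceil relationship concrete, here is a self-contained sketch that mimics the two functions over a tiny hypothetical size-class table; the table and the quantize_floor()/quantize_ceil() names stand in for jemalloc's index2size()/size2index() machinery and are illustrative only:

#include <assert.h>
#include <stddef.h>
#include <stdio.h>

/* Hypothetical size-class table standing in for index2size()/size2index(). */
static const size_t classes[] = {2048, 2560, 3072, 3584, 4096};
#define NCLASSES (sizeof(classes) / sizeof(classes[0]))

/* Largest class <= size (assumes classes[0] <= size <= classes[NCLASSES-1]). */
static size_t
quantize_floor(size_t size) {
	size_t ret = classes[0];

	for (size_t i = 0; i < NCLASSES && classes[i] <= size; i++) {
		ret = classes[i];
	}
	return ret;
}

/* Smallest class >= size: start from the floor and, when the size is not
 * itself a class, skip up to the next class. */
static size_t
quantize_ceil(size_t size) {
	size_t ret = quantize_floor(size);

	if (ret < size) {
		for (size_t i = 0; i < NCLASSES; i++) {
			if (classes[i] >= size) {
				ret = classes[i];
				break;
			}
		}
	}
	return ret;
}

int
main(void) {
	/* An "unusual" size such as one produced by aligned allocation:
	 * floor and ceil bracket it, so a best-fit search keyed by the
	 * ceil can never land on a class that is too small. */
	size_t size = 2800;

	printf("floor(%zu) = %zu, ceil(%zu) = %zu\n",
	    size, quantize_floor(size), size, quantize_ceil(size));
	assert(quantize_floor(size) <= size && size <= quantize_ceil(size));
	return 0;
}

The real extent_size_quantize_ceil() gets the same effect with index2size(size2index(ret + 1)), deliberately skipping a quantization level that might still contain a large-enough extent; the in-code comment above explains that trade-off.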

test/unit/extent_quantize.c (new file)

@@ -0,0 +1,98 @@
#include "test/jemalloc_test.h"

TEST_BEGIN(test_huge_extent_size) {
	unsigned nhchunks, i;
	size_t sz, extent_size_prev, ceil_prev;
	size_t mib[4];
	size_t miblen = sizeof(mib) / sizeof(size_t);

	/*
	 * Iterate over all huge size classes, get their extent sizes, and
	 * verify that the quantized size is the same as the extent size.
	 */

	sz = sizeof(unsigned);
	assert_d_eq(mallctl("arenas.nhchunks", (void *)&nhchunks, &sz, NULL,
	    0), 0, "Unexpected mallctl failure");

	assert_d_eq(mallctlnametomib("arenas.hchunk.0.size", mib, &miblen), 0,
	    "Unexpected mallctlnametomib failure");
	for (i = 0; i < nhchunks; i++) {
		size_t extent_size, floor, ceil;

		mib[2] = i;
		sz = sizeof(size_t);
		assert_d_eq(mallctlbymib(mib, miblen, (void *)&extent_size,
		    &sz, NULL, 0), 0, "Unexpected mallctlbymib failure");
		floor = extent_size_quantize_floor(extent_size);
		ceil = extent_size_quantize_ceil(extent_size);

		assert_zu_eq(extent_size, floor,
		    "Extent quantization should be a no-op for precise size "
		    "(extent_size=%zu)", extent_size);
		assert_zu_eq(extent_size, ceil,
		    "Extent quantization should be a no-op for precise size "
		    "(extent_size=%zu)", extent_size);

		if (i > 0) {
			assert_zu_eq(extent_size_prev,
			    extent_size_quantize_floor(extent_size - PAGE),
			    "Floor should be a precise size");
			if (extent_size_prev < ceil_prev) {
				assert_zu_eq(ceil_prev, extent_size,
				    "Ceiling should be a precise size "
				    "(extent_size_prev=%zu, ceil_prev=%zu, "
				    "extent_size=%zu)", extent_size_prev,
				    ceil_prev, extent_size);
			}
		}
		if (i + 1 < nhchunks) {
			extent_size_prev = floor;
			ceil_prev = extent_size_quantize_ceil(extent_size +
			    PAGE);
		}
	}
}
TEST_END

TEST_BEGIN(test_monotonic) {
#define SZ_MAX ZU(4 * 1024 * 1024)
	unsigned i;
	size_t floor_prev, ceil_prev;

	floor_prev = 0;
	ceil_prev = 0;
	for (i = 1; i <= SZ_MAX >> LG_PAGE; i++) {
		size_t extent_size, floor, ceil;

		extent_size = i << LG_PAGE;
		floor = extent_size_quantize_floor(extent_size);
		ceil = extent_size_quantize_ceil(extent_size);

		assert_zu_le(floor, extent_size,
		    "Floor should be <= (floor=%zu, extent_size=%zu, ceil=%zu)",
		    floor, extent_size, ceil);
		assert_zu_ge(ceil, extent_size,
		    "Ceiling should be >= (floor=%zu, extent_size=%zu, "
		    "ceil=%zu)", floor, extent_size, ceil);

		assert_zu_le(floor_prev, floor, "Floor should be monotonic "
		    "(floor_prev=%zu, floor=%zu, extent_size=%zu, ceil=%zu)",
		    floor_prev, floor, extent_size, ceil);
		assert_zu_le(ceil_prev, ceil, "Ceiling should be monotonic "
		    "(floor=%zu, extent_size=%zu, ceil_prev=%zu, ceil=%zu)",
		    floor, extent_size, ceil_prev, ceil);

		floor_prev = floor;
		ceil_prev = ceil;
	}
}
TEST_END

int
main(void) {
	return test(
	    test_huge_extent_size,
	    test_monotonic);
}