Add a test-case for small profiled allocations

Validate that small allocations (i.e. those with `size <= SC_SMALL_MAXCLASS`)
that are sampled for profiling maintain the expected invariants, even though
they now take up less space.
Author: Kevin Svetlitski, 2023-06-12 14:13:17 -07:00 (committed by Qi Wang)
Parent: 5a858c64d6
Commit: ebd7e99f5c
3 changed files with 85 additions and 0 deletions
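
Before the diff itself, a condensed sketch of the invariant under test may help: with profiling active and lg_prof_sample:0 (every allocation sampled, as set by test/unit/prof_small.sh below), a small malloc should land on its own page-aligned, non-slab extent. The test name here is made up, and the snippet leans on the same jemalloc-internal symbols the new unit test uses (emap_edata_lookup, edata_slab_get, edata_base_get, arena_emap_global), so it only builds inside jemalloc's test harness; it is a sketch, not part of the commit.

#include "test/jemalloc_test.h"

/*
 * Sketch only: with every allocation sampled, a small malloc should be backed
 * by its own page-aligned, non-slab extent instead of a slab.
 */
TEST_BEGIN(sketch_sampled_small_allocation) {
    test_skip_if(!config_prof);

    void *ptr = malloc(SC_SMALL_MAXCLASS);
    assert_ptr_not_null(ptr, "Unexpected malloc failure");

    edata_t *edata = emap_edata_lookup(TSDN_NULL, &arena_emap_global, ptr);
    assert_ptr_not_null(edata, "Unable to find edata for allocation");
    expect_false(edata_slab_get(edata),
        "Sampled small allocations should not be placed on slabs");
    expect_ptr_eq(edata_base_get(edata), ptr,
        "Sampled allocations should be page-aligned");

    free(ptr);
}
TEST_END

int
main(void) {
    return test(sketch_sampled_small_allocation);
}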

Makefile.in

@@ -256,6 +256,7 @@ TESTS_UNIT := \
     $(srcroot)test/unit/prof_mdump.c \
     $(srcroot)test/unit/prof_recent.c \
     $(srcroot)test/unit/prof_reset.c \
+    $(srcroot)test/unit/prof_small.c \
     $(srcroot)test/unit/prof_stats.c \
     $(srcroot)test/unit/prof_tctx.c \
     $(srcroot)test/unit/prof_thread_name.c \

test/unit/prof_small.c (new file, 78 lines)

@@ -0,0 +1,78 @@
#include "test/jemalloc_test.h"

static void assert_small_allocation_sampled(void *ptr, size_t size) {
    assert_ptr_not_null(ptr, "Unexpected malloc failure");
    assert_zu_le(size, SC_SMALL_MAXCLASS, "Unexpected large size class");
    edata_t *edata = emap_edata_lookup(TSDN_NULL, &arena_emap_global, ptr);
    assert_ptr_not_null(edata, "Unable to find edata for allocation");
    expect_false(edata_slab_get(edata),
        "Sampled small allocations should not be placed on slabs");
    expect_ptr_eq(edata_base_get(edata), ptr,
        "Sampled allocations should be page-aligned");
    expect_zu_eq(edata_usize_get(edata), size,
        "Edata usize did not match requested size");
    expect_zu_eq(edata_size_get(edata), PAGE_CEILING(size) + sz_large_pad,
        "Edata actual size was not a multiple of PAGE");
    prof_tctx_t *prof_tctx = edata_prof_tctx_get(edata);
    expect_ptr_not_null(prof_tctx, "Edata had null prof_tctx");
    expect_ptr_not_null(prof_tctx->tdata,
        "Edata had null prof_tdata despite being sampled");
}

TEST_BEGIN(test_profile_small_allocations) {
    test_skip_if(!config_prof);

    for (szind_t index = 0; index < SC_NBINS; index++) {
        size_t size = sz_index2size(index);
        void *ptr = malloc(size);
        assert_small_allocation_sampled(ptr, size);
        free(ptr);
    }
}
TEST_END

TEST_BEGIN(test_profile_small_reallocations_growing) {
    test_skip_if(!config_prof);

    void *ptr = NULL;
    for (szind_t index = 0; index < SC_NBINS; index++) {
        size_t size = sz_index2size(index);
        ptr = realloc(ptr, size);
        assert_small_allocation_sampled(ptr, size);
    }
}
TEST_END

TEST_BEGIN(test_profile_small_reallocations_shrinking) {
    test_skip_if(!config_prof);

    void *ptr = NULL;
    for (szind_t index = SC_NBINS; index-- > 0;) {
        size_t size = sz_index2size(index);
        ptr = realloc(ptr, size);
        assert_small_allocation_sampled(ptr, size);
    }
}
TEST_END

TEST_BEGIN(test_profile_small_reallocations_same_size_class) {
    test_skip_if(!config_prof);

    for (szind_t index = 0; index < SC_NBINS; index++) {
        size_t size = sz_index2size(index);
        void *ptr = malloc(size);
        assert_small_allocation_sampled(ptr, size);
        ptr = realloc(ptr, size - 1);
        assert_small_allocation_sampled(ptr, size);
        free(ptr);
    }
}
TEST_END

int
main(void) {
    return test(test_profile_small_allocations,
        test_profile_small_reallocations_growing,
        test_profile_small_reallocations_shrinking,
        test_profile_small_reallocations_same_size_class);
}

test/unit/prof_small.sh (new file, 6 lines)

@@ -0,0 +1,6 @@
#!/bin/sh

if [ "x${enable_prof}" = "x1" ] ; then
  export MALLOC_CONF="prof:true,lg_prof_sample:0"
fi
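
A note on the MALLOC_CONF used above: lg_prof_sample is the base-2 logarithm of the average number of allocated bytes between profile samples, so lg_prof_sample:0 yields an average interval of 2^0 = 1 byte, i.e. every allocation is sampled. That is what lets the unit test check the sampling invariants deterministically for every small size class rather than relying on a probabilistic sample.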