server-skynet-source-3rd-je.../test/unit/prof_active.c
Commit fc12c0b8bc (Jason Evans): Implement/test/fix prof-related mallctl's.

Implement/test/fix the opt.prof_thread_active_init,
prof.thread_active_init, and thread.prof.active mallctl's.

Test/fix the thread.prof.name mallctl.

Refactor opt_prof_active to be read-only and move mutable state into the
prof_active variable.  Stop leaning on ctl-related locking for
protection.
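
For reference, a minimal sketch of how an application might flip per-thread profiling through the mallctl introduced here. The helper name set_thread_prof_active is illustrative only, and this assumes a jemalloc build with profiling enabled and unprefixed public symbols:

#include <stdbool.h>
#include <jemalloc/jemalloc.h>

/*
 * Illustrative helper (not part of this test): toggle profiling for the
 * calling thread via the thread.prof.active mallctl.  Returns 0 on success.
 */
static int
set_thread_prof_active(bool active)
{
	return (mallctl("thread.prof.active", NULL, NULL, &active,
	    sizeof(active)));
}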

#include "test/jemalloc_test.h"
#ifdef JEMALLOC_PROF
const char *malloc_conf =
"prof:true,prof_thread_active_init:false,lg_prof_sample:0,prof_final:false";
#endif
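
/*
 * Read the boolean mallctl `name` and assert that its current value equals
 * `expected`; func/line identify the caller in assertion messages.
 */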
static void
mallctl_bool_get(const char *name, bool expected, const char *func, int line)
{
	bool old;
	size_t sz;

	sz = sizeof(old);
	assert_d_eq(mallctl(name, &old, &sz, NULL, 0), 0,
	    "%s():%d: Unexpected mallctl failure reading %s", func, line, name);
	assert_b_eq(old, expected, "%s():%d: Unexpected %s value", func, line,
	    name);
}
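
/*
 * Write `val_new` to the boolean mallctl `name` and assert that the previous
 * value returned by the same read/write call equals `old_expected`.
 */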
static void
mallctl_bool_set(const char *name, bool old_expected, bool val_new,
    const char *func, int line)
{
	bool old;
	size_t sz;

	sz = sizeof(old);
	assert_d_eq(mallctl(name, &old, &sz, &val_new, sizeof(val_new)), 0,
	    "%s():%d: Unexpected mallctl failure reading/writing %s", func,
	    line, name);
	assert_b_eq(old, old_expected, "%s():%d: Unexpected %s value", func,
	    line, name);
}
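
/*
 * Wrappers around the global prof.active and per-thread thread.prof.active
 * mallctls; the corresponding macros capture the caller's __func__/__LINE__
 * so that assertion failures point at the test site.
 */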
static void
mallctl_prof_active_get_impl(bool prof_active_old_expected, const char *func,
    int line)
{
	mallctl_bool_get("prof.active", prof_active_old_expected, func, line);
}
#define mallctl_prof_active_get(a) \
	mallctl_prof_active_get_impl(a, __func__, __LINE__)

static void
mallctl_prof_active_set_impl(bool prof_active_old_expected,
    bool prof_active_new, const char *func, int line)
{
	mallctl_bool_set("prof.active", prof_active_old_expected,
	    prof_active_new, func, line);
}
#define mallctl_prof_active_set(a, b) \
	mallctl_prof_active_set_impl(a, b, __func__, __LINE__)

static void
mallctl_thread_prof_active_get_impl(bool thread_prof_active_old_expected,
    const char *func, int line)
{
	mallctl_bool_get("thread.prof.active", thread_prof_active_old_expected,
	    func, line);
}
#define mallctl_thread_prof_active_get(a) \
	mallctl_thread_prof_active_get_impl(a, __func__, __LINE__)

static void
mallctl_thread_prof_active_set_impl(bool thread_prof_active_old_expected,
    bool thread_prof_active_new, const char *func, int line)
{
	mallctl_bool_set("thread.prof.active", thread_prof_active_old_expected,
	    thread_prof_active_new, func, line);
}
#define mallctl_thread_prof_active_set(a, b) \
	mallctl_thread_prof_active_set_impl(a, b, __func__, __LINE__)
static void
prof_sampling_probe_impl(bool expect_sample, const char *func, int line)
{
	void *p;
	size_t expected_backtraces = expect_sample ? 1 : 0;

	assert_zu_eq(prof_bt_count(), 0, "%s():%d: Expected 0 backtraces", func,
	    line);
	p = mallocx(1, 0);
	assert_ptr_not_null(p, "Unexpected mallocx() failure");
	assert_zu_eq(prof_bt_count(), expected_backtraces,
	    "%s():%d: Unexpected backtrace count", func, line);
	dallocx(p, 0);
}
#define prof_sampling_probe(a) \
	prof_sampling_probe_impl(a, __func__, __LINE__)
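
/*
 * Walk through all four combinations of prof.active and thread.prof.active,
 * verifying that sampling occurs only when both are enabled, then restore the
 * initial settings.
 */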
TEST_BEGIN(test_prof_active)
{
	test_skip_if(!config_prof);

	mallctl_prof_active_get(true);
	mallctl_thread_prof_active_get(false);

	mallctl_prof_active_set(true, true);
	mallctl_thread_prof_active_set(false, false);
	/* prof.active, !thread.prof.active. */
	prof_sampling_probe(false);

	mallctl_prof_active_set(true, false);
	mallctl_thread_prof_active_set(false, false);
	/* !prof.active, !thread.prof.active. */
	prof_sampling_probe(false);

	mallctl_prof_active_set(false, false);
	mallctl_thread_prof_active_set(false, true);
	/* !prof.active, thread.prof.active. */
	prof_sampling_probe(false);

	mallctl_prof_active_set(false, true);
	mallctl_thread_prof_active_set(true, true);
	/* prof.active, thread.prof.active. */
	prof_sampling_probe(true);

	/* Restore settings. */
	mallctl_prof_active_set(true, true);
	mallctl_thread_prof_active_set(true, false);
}
TEST_END

int
main(void)
{
	return (test(
	    test_prof_active));
}