2019-09-04 06:04:48 +08:00
|
|
|
#include "test/jemalloc_test.h"
|
|
|
|
|
|
|
|
/*
 * Verify the fast-path next-event threshold rolls back correctly: with the
 * event context placed just below TE_NEXT_EVENT_FAST_MAX, a small allocation
 * must succeed without tripping over the threshold bookkeeping.
 */
TEST_BEGIN(test_next_event_fast_roll_back) {
	tsd_t *tsd = tsd_fetch();
	te_ctx_t ctx;
	/* true: operate on the allocation-side event context. */
	te_ctx_get(tsd, &ctx, true);

	/* Position the counters 8 bytes below the fast-path ceiling. */
	te_ctx_last_event_set(&ctx, 0);
	te_ctx_current_bytes_set(&ctx, TE_NEXT_EVENT_FAST_MAX - 8U);
	te_ctx_next_event_set(tsd, &ctx, TE_NEXT_EVENT_FAST_MAX);
	/*
	 * Pin every active allocation event's wait counter at the ceiling so
	 * that the next-event value above is consistent with all event types.
	 */
#define E(event, condition, is_alloc) \
	if (is_alloc && condition) { \
		event##_event_wait_set(tsd, TE_NEXT_EVENT_FAST_MAX); \
	}
	ITERATE_OVER_ALL_EVENTS
#undef E
	/* 16 bytes crosses the remaining 8-byte headroom, forcing the event path. */
	void *p = malloc(16U);
	expect_ptr_not_null(p, "malloc() failed");
	free(p);
}
TEST_END
|
|
|
|
|
|
|
|
/*
 * Verify the fast-path threshold resumes correctly: with the event context
 * already past TE_NEXT_EVENT_FAST_MAX (slow path), a large allocation must
 * still succeed and the counters must be handled consistently.
 */
TEST_BEGIN(test_next_event_fast_resume) {
	tsd_t *tsd = tsd_fetch();
	te_ctx_t ctx;
	/* true: operate on the allocation-side event context. */
	te_ctx_get(tsd, &ctx, true);

	/* Position the counters beyond the fast-path ceiling. */
	te_ctx_last_event_set(&ctx, 0);
	te_ctx_current_bytes_set(&ctx, TE_NEXT_EVENT_FAST_MAX + 8U);
	te_ctx_next_event_set(tsd, &ctx, TE_NEXT_EVENT_FAST_MAX + 16U);
	/*
	 * Pin every active allocation event's wait counter to match the
	 * next-event value set above.
	 */
#define E(event, condition, is_alloc) \
	if (is_alloc && condition) { \
		event##_event_wait_set(tsd, \
		    TE_NEXT_EVENT_FAST_MAX + 16U); \
	}
	ITERATE_OVER_ALL_EVENTS
#undef E
	/* A max-lookup-class allocation exercises the event-resume path. */
	void *p = malloc(SC_LOOKUP_MAXCLASS);
	expect_ptr_not_null(p, "malloc() failed");
	free(p);
}
TEST_END
|
|
|
|
|
|
|
|
TEST_BEGIN(test_event_rollback) {
|
|
|
|
tsd_t *tsd = tsd_fetch();
|
2020-01-31 08:31:45 +08:00
|
|
|
const uint64_t diff = TE_MAX_INTERVAL >> 2;
|
2019-09-04 06:04:48 +08:00
|
|
|
size_t count = 10;
|
|
|
|
uint64_t thread_allocated = thread_allocated_get(tsd);
|
|
|
|
while (count-- != 0) {
|
2020-01-31 08:31:45 +08:00
|
|
|
te_alloc_rollback(tsd, diff);
|
2019-09-04 06:04:48 +08:00
|
|
|
uint64_t thread_allocated_after = thread_allocated_get(tsd);
|
2020-02-19 06:39:06 +08:00
|
|
|
expect_u64_eq(thread_allocated - thread_allocated_after, diff,
|
2019-09-04 06:04:48 +08:00
|
|
|
"thread event counters are not properly rolled back");
|
|
|
|
thread_allocated = thread_allocated_after;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
TEST_END
|
|
|
|
|
|
|
|
int
main(void) {
	/* Run all thread-event unit tests via the jemalloc test harness. */
	return test(
	    test_next_event_fast_roll_back,
	    test_next_event_fast_resume,
	    test_event_rollback);
}
|