Subvert tail call optimization in backtrace test.
Re-structure alloc_[01](), which are mutually tail-recursive functions, to do (unnecessary) work post-recursion so that the compiler cannot perform tail call optimization, thus preserving intentionally unique call paths in captured backtraces.
commit 898960247a
parent e2206edebc
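As background for the change below, here is a minimal self-contained sketch contrasting the two shapes involved (the tail_*() and frame_*() names are hypothetical and do not appear in the patch): when a recursive call is a function's final action, an optimizing compiler may emit it as a jump and reuse the caller's stack frame, collapsing the intermediate frames out of any captured backtrace; doing any work after the call returns keeps every frame live.

#include <stdlib.h>

static void	*tail_0(unsigned bits);
static void	*tail_1(unsigned bits);
static void	*frame_0(unsigned bits);
static void	*frame_1(unsigned bits);

/*
 * Tail-call shape: each recursive call is the function's final action,
 * so an optimizing compiler may emit it as a jump, collapsing the
 * mutually recursive frames out of any captured backtrace.
 */
static void *
tail_0(unsigned bits)
{

	if (bits == 0)
		return (malloc(1));
	return ((bits & 0x1U) ? tail_1(bits >> 1) : tail_0(bits >> 1));
}

static void *
tail_1(unsigned bits)
{

	if (bits == 0)
		return (malloc(1));
	return ((bits & 0x1U) ? tail_1(bits >> 1) : tail_0(bits >> 1));
}

/*
 * TCO-resistant shape, analogous to the patched alloc_0()/alloc_1():
 * the NULL check runs after the recursive call returns, so the call is
 * no longer in tail position and every frame on the path stays live.
 */
static void *
frame_0(unsigned bits)
{
	void *p;

	if (bits == 0)
		p = malloc(1);
	else
		p = (bits & 0x1U) ? frame_1(bits >> 1) : frame_0(bits >> 1);
	if (p == NULL)
		abort();	/* Post-recursion work subverts TCO. */
	return (p);
}

static void *
frame_1(unsigned bits)
{
	void *p;

	if (bits == 0)
		p = malloc(1);
	else
		p = (bits & 0x1U) ? frame_1(bits >> 1) : frame_0(bits >> 1);
	if (p == NULL)
		abort();
	return (p);
}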
@@ -28,18 +28,24 @@ static void *alloc_##n(unsigned bits);
 static void *							\
 alloc_##n(unsigned bits)					\
 {								\
+	void *p;						\
 								\
-	if (bits == 0) {					\
-		void *p = mallocx(1, 0);			\
-		assert_ptr_not_null(p, "Unexpected mallocx() failure");	\
-		return (p);					\
-	}							\
-								\
-	switch (bits & 0x1U) {					\
-	case 0: return (alloc_0(bits >> 1));			\
-	case 1: return (alloc_1(bits >> 1));			\
-	default: not_reached();					\
-	}							\
+	if (bits == 0)						\
+		p = mallocx(1, 0);				\
+	else {							\
+		switch (bits & 0x1U) {				\
+		case 0:						\
+			p = alloc_0(bits >> 1);			\
+			break;					\
+		case 1:						\
+			p = alloc_1(bits >> 1);			\
+			break;					\
+		default: not_reached();				\
+		}						\
+	}							\
+	/* Intentionally sabotage tail call optimization. */	\
+	assert_ptr_not_null(p, "Unexpected mallocx() failure");	\
+	return (p);						\
 }
 alloc_n_proto(0)
 alloc_n_proto(1)
@@ -74,7 +80,7 @@ thd_start(void *varg)
 		    i+1 == NALLOCS_PER_THREAD) {
 			bt_count = prof_bt_count();
 			assert_zu_le(bt_count_prev+(i-i_prev), bt_count,
-			    "Expected larger bactrace count increase");
+			    "Expected larger backtrace count increase");
 			i_prev = i;
 			bt_count_prev = bt_count;
 		}
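A note on why the preserved call paths matter (an interpretation of the test, assuming the recursion is entered via alloc_0()): each recursion level consumes one bit of bits, least significant first, and terminates when no bits remain, so distinct bits values drive distinct interleavings of alloc_0() and alloc_1() frames; the assertion in the second hunk then expects prof_bt_count() to have grown by at least one backtrace per allocation since the previous check. A hypothetical helper that prints the frame sequence for a given bits value:

#include <stdio.h>

/*
 * Hypothetical helper (not part of the patch): print the frame sequence
 * that a given bits value drives, assuming the path starts at alloc_0().
 * Each level consumes one bit, LSB first, and recursion stops when no
 * bits remain, so distinct bits values yield distinct backtraces.
 */
static void
print_call_path(unsigned bits)
{

	fputs("alloc_0", stdout);
	while (bits != 0) {
		printf(" -> alloc_%u", bits & 0x1U);
		bits >>= 1;
	}
	putchar('\n');
}

int
main(void)
{

	print_call_path(5);	/* alloc_0 -> alloc_1 -> alloc_0 -> alloc_1 */
	print_call_path(6);	/* alloc_0 -> alloc_0 -> alloc_1 -> alloc_1 */
	return (0);
}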