Commit: Remove prof -> thread_event dependency
Parent: 441d88d1c7
This commit: ba783b3a0f
@@ -85,11 +85,11 @@ prof_info_set(tsd_t *tsd, edata_t *edata, prof_tctx_t *tctx) {
 }
 
 JEMALLOC_ALWAYS_INLINE bool
-prof_sample_should_skip(tsd_t *tsd, size_t usize) {
+prof_sample_should_skip(tsd_t *tsd, bool sample_event) {
 	cassert(config_prof);
 
 	/* Fastpath: no need to load tdata */
-	if (likely(!te_prof_sample_event_lookahead(tsd, usize))) {
+	if (likely(!sample_event)) {
 		return true;
 	}
 
@@ -106,12 +106,11 @@ prof_sample_should_skip(tsd_t *tsd, size_t usize) {
 }
 
 JEMALLOC_ALWAYS_INLINE prof_tctx_t *
-prof_alloc_prep(tsd_t *tsd, size_t usize, bool prof_active) {
+prof_alloc_prep(tsd_t *tsd, bool prof_active, bool sample_event) {
 	prof_tctx_t *ret;
 
-	assert(usize == sz_s2u(usize));
-
-	if (!prof_active || likely(prof_sample_should_skip(tsd, usize))) {
+	if (!prof_active ||
+	    likely(prof_sample_should_skip(tsd, sample_event))) {
 		ret = (prof_tctx_t *)(uintptr_t)1U;
 	} else {
 		ret = prof_tctx_create(tsd);
@@ -137,7 +136,7 @@ prof_malloc(tsd_t *tsd, const void *ptr, size_t size, size_t usize,
 JEMALLOC_ALWAYS_INLINE void
 prof_realloc(tsd_t *tsd, const void *ptr, size_t size, size_t usize,
     prof_tctx_t *tctx, bool prof_active, const void *old_ptr, size_t old_usize,
-    prof_info_t *old_prof_info) {
+    prof_info_t *old_prof_info, bool sample_event) {
 	bool sampled, old_sampled, moved;
 
 	cassert(config_prof);
@@ -145,7 +144,7 @@ prof_realloc(tsd_t *tsd, const void *ptr, size_t size, size_t usize,
 
 	if (prof_active && ptr != NULL) {
 		assert(usize == isalloc(tsd_tsdn(tsd), ptr));
-		if (prof_sample_should_skip(tsd, usize)) {
+		if (prof_sample_should_skip(tsd, sample_event)) {
			/*
			 * Don't sample. The usize passed to prof_alloc_prep()
			 * was larger than what actually got allocated, so a
@@ -220,6 +220,7 @@ te_ctx_get(tsd_t *tsd, te_ctx_t *ctx, bool is_alloc) {
 
 JEMALLOC_ALWAYS_INLINE bool
 te_prof_sample_event_lookahead(tsd_t *tsd, size_t usize) {
+	assert(usize == sz_s2u(usize));
 	return tsd_thread_allocated_get(tsd) + usize -
 	    tsd_thread_allocated_last_event_get(tsd) >=
 	    tsd_prof_sample_event_wait_get(tsd);
@@ -2186,7 +2186,9 @@ imalloc_body(static_opts_t *sopts, dynamic_opts_t *dopts, tsd_t *tsd) {
 	/* If profiling is on, get our profiling context. */
 	if (config_prof && opt_prof) {
 		bool prof_active = prof_active_get_unlocked();
-		prof_tctx_t *tctx = prof_alloc_prep(tsd, usize, prof_active);
+		bool sample_event = te_prof_sample_event_lookahead(tsd, usize);
+		prof_tctx_t *tctx = prof_alloc_prep(tsd, prof_active,
+		    sample_event);
 
 		emap_alloc_ctx_t alloc_ctx;
 		if (likely((uintptr_t)tctx == (uintptr_t)1U)) {
@@ -3131,7 +3133,8 @@ irallocx_prof(tsd_t *tsd, void *old_ptr, size_t old_usize, size_t size,
 	prof_info_t old_prof_info;
 	prof_info_get_and_reset_recent(tsd, old_ptr, alloc_ctx, &old_prof_info);
 	bool prof_active = prof_active_get_unlocked();
-	prof_tctx_t *tctx = prof_alloc_prep(tsd, *usize, prof_active);
+	bool sample_event = te_prof_sample_event_lookahead(tsd, *usize);
+	prof_tctx_t *tctx = prof_alloc_prep(tsd, prof_active, sample_event);
 	void *p;
 	if (unlikely((uintptr_t)tctx != (uintptr_t)1U)) {
 		p = irallocx_prof_sample(tsd_tsdn(tsd), old_ptr, old_usize,
@@ -3158,8 +3161,9 @@ irallocx_prof(tsd_t *tsd, void *old_ptr, size_t old_usize, size_t size,
 		*usize = isalloc(tsd_tsdn(tsd), p);
 	}
 
+	sample_event = te_prof_sample_event_lookahead(tsd, *usize);
 	prof_realloc(tsd, p, size, *usize, tctx, prof_active, old_ptr,
-	    old_usize, &old_prof_info);
+	    old_usize, &old_prof_info, sample_event);
 
 	return p;
 }
@@ -3416,7 +3420,8 @@ ixallocx_prof(tsd_t *tsd, void *ptr, size_t old_usize, size_t size,
 		}
 	}
 	bool prof_active = prof_active_get_unlocked();
-	prof_tctx_t *tctx = prof_alloc_prep(tsd, usize_max, prof_active);
+	bool sample_event = te_prof_sample_event_lookahead(tsd, usize_max);
+	prof_tctx_t *tctx = prof_alloc_prep(tsd, prof_active, sample_event);
 
 	size_t usize;
 	if (unlikely((uintptr_t)tctx != (uintptr_t)1U)) {
@@ -3442,8 +3447,9 @@ ixallocx_prof(tsd_t *tsd, void *ptr, size_t old_usize, size_t size,
 	} else {
 		prof_info_get_and_reset_recent(tsd, ptr, alloc_ctx, &prof_info);
 		assert(usize <= usize_max);
+		sample_event = te_prof_sample_event_lookahead(tsd, usize);
 		prof_realloc(tsd, ptr, size, usize, tctx, prof_active, ptr,
-		    old_usize, &prof_info);
+		    old_usize, &prof_info, sample_event);
 	}
 
 	assert(old_prof_info.alloc_tctx == prof_info.alloc_tctx);
Loading…
Reference in New Issue
Block a user