diff --git a/include/jemalloc/internal/prof_externs.h b/include/jemalloc/internal/prof_externs.h
index 6e020be1..86f4193a 100644
--- a/include/jemalloc/internal/prof_externs.h
+++ b/include/jemalloc/internal/prof_externs.h
@@ -54,7 +54,7 @@ void prof_malloc_sample_object(tsd_t *tsd, const void *ptr, size_t usize,
 void prof_free_sampled_object(tsd_t *tsd, size_t usize, prof_info_t *prof_info);
 void bt_init(prof_bt_t *bt, void **vec);
 void prof_backtrace(tsd_t *tsd, prof_bt_t *bt);
-prof_tctx_t *prof_lookup(tsd_t *tsd, prof_bt_t *bt);
+prof_tctx_t *prof_tctx_create(tsd_t *tsd);
 #ifdef JEMALLOC_JET
 size_t prof_tdata_count(void);
 size_t prof_bt_count(void);
diff --git a/include/jemalloc/internal/prof_inlines_b.h b/include/jemalloc/internal/prof_inlines_b.h
index 3c0594ef..2aebb3de 100644
--- a/include/jemalloc/internal/prof_inlines_b.h
+++ b/include/jemalloc/internal/prof_inlines_b.h
@@ -75,8 +75,7 @@ prof_alloc_time_set(tsd_t *tsd, const void *ptr, nstime_t t) {
 }
 
 JEMALLOC_ALWAYS_INLINE bool
-prof_sample_accum_update(tsd_t *tsd, size_t usize, bool update,
-    prof_tdata_t **tdata_out) {
+prof_sample_accum_update(tsd_t *tsd, size_t usize, bool update) {
 	cassert(config_prof);
 
 	/* Fastpath: no need to load tdata */
@@ -90,14 +89,6 @@ prof_sample_accum_update(tsd_t *tsd, size_t usize, bool update,
 
 	prof_tdata_t *tdata = prof_tdata_get(tsd, true);
 	if (unlikely((uintptr_t)tdata <= (uintptr_t)PROF_TDATA_STATE_MAX)) {
-		tdata = NULL;
-	}
-
-	if (tdata_out != NULL) {
-		*tdata_out = tdata;
-	}
-
-	if (unlikely(tdata == NULL)) {
 		return true;
 	}
 
@@ -111,18 +102,14 @@ JEMALLOC_ALWAYS_INLINE prof_tctx_t *
 prof_alloc_prep(tsd_t *tsd, size_t usize, bool prof_active, bool update) {
 	prof_tctx_t *ret;
-	prof_tdata_t *tdata;
-	prof_bt_t bt;
 
 	assert(usize == sz_s2u(usize));
 
-	if (!prof_active || likely(prof_sample_accum_update(tsd, usize, update,
-	    &tdata))) {
+	if (!prof_active ||
+	    likely(prof_sample_accum_update(tsd, usize, update))) {
 		ret = (prof_tctx_t *)(uintptr_t)1U;
 	} else {
-		bt_init(&bt, tdata->vec);
-		prof_backtrace(tsd, &bt);
-		ret = prof_lookup(tsd, &bt);
+		ret = prof_tctx_create(tsd);
 	}
 
 	return ret;
 }
@@ -154,7 +141,7 @@ prof_realloc(tsd_t *tsd, const void *ptr, size_t usize, prof_tctx_t *tctx,
 
 	if (prof_active && !updated && ptr != NULL) {
 		assert(usize == isalloc(tsd_tsdn(tsd), ptr));
-		if (prof_sample_accum_update(tsd, usize, true, NULL)) {
+		if (prof_sample_accum_update(tsd, usize, true)) {
 			/*
 			 * Don't sample.  The usize passed to prof_alloc_prep()
 			 * was larger than what actually got allocated, so a
diff --git a/src/prof_data.c b/src/prof_data.c
index 2f8bd2de..1b321528 100644
--- a/src/prof_data.c
+++ b/src/prof_data.c
@@ -300,7 +300,7 @@ prof_lookup_global(tsd_t *tsd, prof_bt_t *bt, prof_tdata_t *tdata,
 	return false;
 }
 
-prof_tctx_t *
+static prof_tctx_t *
 prof_lookup(tsd_t *tsd, prof_bt_t *bt) {
 	union {
 		prof_tctx_t *p;
@@ -312,9 +312,7 @@ prof_lookup(tsd_t *tsd, prof_bt_t *bt) {
 	cassert(config_prof);
 
 	tdata = prof_tdata_get(tsd, false);
-	if (tdata == NULL) {
-		return NULL;
-	}
+	assert(tdata != NULL);
 
 	malloc_mutex_lock(tsd_tsdn(tsd), tdata->lock);
 	not_found = ckh_search(&tdata->bt2tctx, bt, NULL, &ret.v);
@@ -374,6 +372,16 @@ prof_lookup(tsd_t *tsd, prof_bt_t *bt) {
 	return ret.p;
 }
 
+prof_tctx_t *
+prof_tctx_create(tsd_t *tsd) {
+	prof_tdata_t *tdata = prof_tdata_get(tsd, false);
+	assert(tdata != NULL);
+	prof_bt_t bt;
+	bt_init(&bt, tdata->vec);
+	prof_backtrace(tsd, &bt);
+	return prof_lookup(tsd, &bt);
+}
+
 #ifdef JEMALLOC_JET
 static prof_tdata_t *
 prof_tdata_count_iter(prof_tdata_tree_t *tdatas, prof_tdata_t *tdata,