move bytes until sample to tsd. Fastpath allocation does not need
to load tdata now, avoiding several branches.
Dave Watson 2018-10-09 10:59:02 -07:00
parent 09adf18f1a
commit 9ed3bdc848
4 changed files with 13 additions and 11 deletions
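
The gist of the change: bytes_until_sample is the per-thread countdown that decides when an allocation gets sampled by the profiler. It used to live in prof_tdata_t, so checking it meant loading the tdata pointer and taking the branches that validate it; after this commit it is an ordinary uint64_t slot in tsd, accessed through generated getters/setters. A rough sketch of the difference on the allocation path (illustrative only, not the exact jemalloc code):

    /* Before: the countdown lives behind the tdata pointer. */
    if (likely(tdata->bytes_until_sample >= usize)) { /* no sample yet */ }

    /* After: the countdown is a plain tsd slot; no tdata load on this path. */
    if (likely(tsd_bytes_until_sample_get(tsd) >= usize)) { /* no sample yet */ }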

@@ -82,6 +82,7 @@ JEMALLOC_ALWAYS_INLINE bool
 prof_sample_accum_update(tsd_t *tsd, size_t usize, bool update,
     prof_tdata_t **tdata_out) {
         prof_tdata_t *tdata;
+        uint64_t bytes_until_sample;
 
         cassert(config_prof);
@@ -98,9 +99,10 @@ prof_sample_accum_update(tsd_t *tsd, size_t usize, bool update,
                 return true;
         }
 
-        if (likely(tdata->bytes_until_sample >= usize)) {
-                if (update) {
-                        tdata->bytes_until_sample -= usize;
+        bytes_until_sample = tsd_bytes_until_sample_get(tsd);
+        if (likely(bytes_until_sample >= usize)) {
+                if (update && tsd_nominal(tsd)) {
+                        tsd_bytes_until_sample_set(tsd, bytes_until_sample - usize);
                 }
                 return true;
         } else {
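
Two details worth noting in the new version of this hunk. First, tsd_bytes_until_sample_get/set are not hand-written: they are the accessors the tsd machinery generates for every slot registered in the O() list (see the tsd.h hunk below, along with the extra 0 in the static initializer that keeps it in step with the new slot). Second, the write-back is now guarded by tsd_nominal(tsd), presumably because a tsd that is not in its nominal state (e.g. during thread teardown) should not be written through the setter; the old tdata field had no such restriction. Roughly, and glossing over the real implementation, the generated accessors behave like the following hypothetical sketch:

    /* Hypothetical sketch of the macro-generated accessors for the
     * O(bytes_until_sample, uint64_t, uint64_t) entry added in tsd.h. */
    static inline uint64_t
    tsd_bytes_until_sample_get(tsd_t *tsd) {
            return tsd->bytes_until_sample;   /* real field access is name-mangled */
    }

    static inline void
    tsd_bytes_until_sample_set(tsd_t *tsd, uint64_t val) {
            assert(tsd_nominal(tsd));         /* why the guard above is needed */
            tsd->bytes_until_sample = val;
    }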

@@ -169,7 +169,6 @@ struct prof_tdata_s {
         /* Sampling state. */
         uint64_t                prng_state;
-        uint64_t                bytes_until_sample;
 
         /* State used to avoid dumping while operating on prof internals. */
         bool                    enq;

@@ -68,6 +68,7 @@ typedef void (*test_callback_t)(int *);
         O(offset_state,         uint64_t,       uint64_t)               \
         O(thread_allocated,     uint64_t,       uint64_t)               \
         O(thread_deallocated,   uint64_t,       uint64_t)               \
+        O(bytes_until_sample,   uint64_t,       uint64_t)               \
         O(prof_tdata,           prof_tdata_t *, prof_tdata_t *)         \
         O(rtree_ctx,            rtree_ctx_t,    rtree_ctx_t)            \
         O(iarena,               arena_t *,      arena_t *)              \
@@ -86,6 +87,7 @@ typedef void (*test_callback_t)(int *);
         0,                      \
         0,                      \
         0,                      \
+        0,                      \
         NULL,                   \
         RTREE_CTX_ZERO_INITIALIZER,     \
         NULL,                   \

@@ -1136,15 +1136,12 @@ prof_lookup(tsd_t *tsd, prof_bt_t *bt) {
 void
 prof_sample_threshold_update(prof_tdata_t *tdata) {
 #ifdef JEMALLOC_PROF
-        uint64_t r;
-        double u;
-
         if (!config_prof) {
                 return;
         }
 
         if (lg_prof_sample == 0) {
-                tdata->bytes_until_sample = 0;
+                tsd_bytes_until_sample_set(tsd_fetch(), 0);
                 return;
         }
@@ -1166,11 +1163,13 @@ prof_sample_threshold_update(prof_tdata_t *tdata) {
          * pp 500
          * (http://luc.devroye.org/rnbookindex.html)
          */
-        r = prng_lg_range_u64(&tdata->prng_state, 53);
-        u = (double)r * (1.0/9007199254740992.0L);
-        tdata->bytes_until_sample = (uint64_t)(log(u) /
+        uint64_t r = prng_lg_range_u64(&tdata->prng_state, 53);
+        double u = (double)r * (1.0/9007199254740992.0L);
+        uint64_t bytes_until_sample = (uint64_t)(log(u) /
             log(1.0 - (1.0 / (double)((uint64_t)1U << lg_prof_sample))))
             + (uint64_t)1U;
+        tsd_bytes_until_sample_set(tsd_fetch(), bytes_until_sample);
 #endif
 }
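
For reference, the threshold computation itself is unchanged by this commit: r supplies 53 random bits, u = r * 2^-53 (9007199254740992 is 2^53) acts as a uniform variate, and inverting the geometric distribution's CDF yields

    bytes_until_sample = floor(log(u) / log(1 - 2^-lg_prof_sample)) + 1

so allocations are sampled on average once every 2^lg_prof_sample bytes. The only behavioral change here is where the result lands: it is now stored into the tsd slot via tsd_bytes_until_sample_set(tsd_fetch(), ...) rather than into tdata->bytes_until_sample.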