Move bytes_until_sample to tsd. Fast-path allocation no longer needs
to load tdata, avoiding several branches.
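For context, here is a minimal standalone sketch of the shape of this change. It is not jemalloc's actual code: the types tdata_t/tsd_t and the functions accum_update_old/accum_update_new are made-up stand-ins, and the real prof_sample_accum_update does more (tdata lookup and validation, plus the tsd_nominal guard visible in the diff below). The point it illustrates is that the per-thread sampling countdown used to live in a separately reached prof_tdata struct, so the fast path had to load and NULL-check a pointer before the compare; once the counter is a plain uint64_t slot in tsd, the common case is a single load and compare.

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

/* Made-up stand-ins for jemalloc's prof_tdata_t and tsd_t. */
typedef struct {
	uint64_t bytes_until_sample;	/* old home of the countdown */
} tdata_t;

typedef struct {
	tdata_t *tdata;			/* may be NULL / not yet created */
	uint64_t bytes_until_sample;	/* new home: directly in tsd */
} tsd_t;

/* Old shape: load and NULL-check the tdata pointer on every allocation. */
static bool
accum_update_old(tsd_t *tsd, size_t usize) {
	tdata_t *tdata = tsd->tdata;
	if (tdata == NULL) {
		return true;		/* extra branch on the fast path */
	}
	if (tdata->bytes_until_sample >= usize) {
		tdata->bytes_until_sample -= usize;
		return true;		/* don't sample this allocation */
	}
	return false;			/* time to take a sample */
}

/* New shape: one load and one compare in the common case. */
static bool
accum_update_new(tsd_t *tsd, size_t usize) {
	uint64_t bytes_until_sample = tsd->bytes_until_sample;
	if (bytes_until_sample >= usize) {
		tsd->bytes_until_sample = bytes_until_sample - usize;
		return true;		/* don't sample this allocation */
	}
	return false;			/* time to take a sample */
}

int
main(void) {
	tsd_t tsd = {NULL, 4096};
	printf("old (NULL tdata): skip=%d\n", accum_update_old(&tsd, 64));
	printf("new:              skip=%d\n", accum_update_new(&tsd, 64));
	printf("remaining: %llu\n", (unsigned long long)tsd.bytes_until_sample);
	return 0;
}

The actual diff additionally guards the store with update && tsd_nominal(tsd), presumably so the tsd slot is only written while tsd is in its fully initialized ("nominal") state; the sketch omits that.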
@@ -82,6 +82,7 @@ JEMALLOC_ALWAYS_INLINE bool
 prof_sample_accum_update(tsd_t *tsd, size_t usize, bool update,
     prof_tdata_t **tdata_out) {
 	prof_tdata_t *tdata;
+	uint64_t bytes_until_sample;
 
 	cassert(config_prof);
 
@@ -98,9 +99,10 @@ prof_sample_accum_update(tsd_t *tsd, size_t usize, bool update,
 		return true;
 	}
 
-	if (likely(tdata->bytes_until_sample >= usize)) {
-		if (update) {
-			tdata->bytes_until_sample -= usize;
+	bytes_until_sample = tsd_bytes_until_sample_get(tsd);
+	if (likely(bytes_until_sample >= usize)) {
+		if (update && tsd_nominal(tsd)) {
+			tsd_bytes_until_sample_set(tsd, bytes_until_sample - usize);
 		}
 		return true;
 	} else {
@@ -169,7 +169,6 @@ struct prof_tdata_s {
 
 	/* Sampling state. */
 	uint64_t prng_state;
-	uint64_t bytes_until_sample;
 
 	/* State used to avoid dumping while operating on prof internals. */
 	bool enq;
@@ -68,6 +68,7 @@ typedef void (*test_callback_t)(int *);
 	O(offset_state, uint64_t, uint64_t) \
 	O(thread_allocated, uint64_t, uint64_t) \
 	O(thread_deallocated, uint64_t, uint64_t) \
+	O(bytes_until_sample, uint64_t, uint64_t) \
 	O(prof_tdata, prof_tdata_t *, prof_tdata_t *) \
 	O(rtree_ctx, rtree_ctx_t, rtree_ctx_t) \
 	O(iarena, arena_t *, arena_t *) \
@@ -86,6 +87,7 @@ typedef void (*test_callback_t)(int *);
 	0, \
 	0, \
 	0, \
+	0, \
 	NULL, \
 	RTREE_CTX_ZERO_INITIALIZER, \
 	NULL, \
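The two tsd hunks above add a bytes_until_sample entry to tsd's O(name, type, ...) field list and a matching 0 to its initializer (the position of the added 0 is inferred from the O() ordering, just before prof_tdata's NULL). In jemalloc that O() list is an X-macro that gets expanded several ways, declaring the struct field, its initializer slot, and typed tsd_<name>_get/tsd_<name>_set accessors, which is where the tsd_bytes_until_sample_get/set calls in the first hunk come from. A minimal sketch of that pattern, with made-up names (MY_TSD_FIELDS, my_tsd_t), not jemalloc's actual macros:

#include <stdint.h>
#include <stdio.h>

/* Field list: each O(name, type) entry is expanded several ways below. */
#define MY_TSD_FIELDS				\
	O(thread_allocated, uint64_t)		\
	O(bytes_until_sample, uint64_t)

/* Expansion 1: the struct definition. */
typedef struct {
#define O(n, t) t n;
	MY_TSD_FIELDS
#undef O
} my_tsd_t;

/* Expansion 2: typed accessors, e.g. my_tsd_bytes_until_sample_get/_set. */
#define O(n, t)								\
	static inline t my_tsd_##n##_get(my_tsd_t *tsd) {		\
		return tsd->n;						\
	}								\
	static inline void my_tsd_##n##_set(my_tsd_t *tsd, t val) {	\
		tsd->n = val;						\
	}
MY_TSD_FIELDS
#undef O

int
main(void) {
	my_tsd_t tsd = {0, 0};
	my_tsd_bytes_until_sample_set(&tsd, 4096);
	printf("%llu\n", (unsigned long long)my_tsd_bytes_until_sample_get(&tsd));
	return 0;
}

With this pattern, adding a single O() line yields a struct slot plus a get/set pair, which is why the rest of the diff only needs the extra 0 in the initializer.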