#ifndef JEMALLOC_INTERNAL_PROF_INLINES_H
#define JEMALLOC_INTERNAL_PROF_INLINES_H

#include "jemalloc/internal/jemalloc_preamble.h"
#include "jemalloc/internal/arena_inlines_b.h"
#include "jemalloc/internal/jemalloc_internal_inlines_c.h"
#include "jemalloc/internal/prof_externs.h"
#include "jemalloc/internal/prof_structs.h"
#include "jemalloc/internal/safety_check.h"
#include "jemalloc/internal/sz.h"
#include "jemalloc/internal/thread_event.h"

JEMALLOC_ALWAYS_INLINE void
prof_active_assert(void) {
	cassert(config_prof);
	/*
	 * If opt_prof is off, then prof_active must always be off, regardless
	 * of whether prof_active_mtx is in effect or not.
	 */
	assert(opt_prof || !prof_active_state);
}

JEMALLOC_ALWAYS_INLINE bool
prof_active_get_unlocked(void) {
	prof_active_assert();
	/*
	 * Even if opt_prof is true, sampling can be temporarily disabled by
	 * setting prof_active to false. No locking is used when reading
	 * prof_active in the fast path, so there are no guarantees regarding
	 * how long it will take for all threads to notice state changes.
	 */
	return prof_active_state;
}

JEMALLOC_ALWAYS_INLINE bool
prof_gdump_get_unlocked(void) {
	/*
	 * No locking is used when reading prof_gdump_val in the fast path, so
	 * there are no guarantees regarding how long it will take for all
	 * threads to notice state changes.
	 */
	return prof_gdump_val;
}
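
/*
 * Debug-only sanity check: verify that tdata->thread_name contains a NUL
 * terminator within its PROF_THREAD_NAME_MAX_LEN buffer.
 */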
JEMALLOC_ALWAYS_INLINE void
prof_thread_name_assert(prof_tdata_t *tdata) {
	if (!config_debug) {
		return;
	}
	prof_active_assert();

	bool terminated = false;
	for (unsigned i = 0; i < PROF_THREAD_NAME_MAX_LEN; i++) {
		if (tdata->thread_name[i] == '\0') {
			terminated = true;
		}
	}
	assert(terminated);
}
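
/*
 * Return the calling thread's profiling data, or NULL if unavailable.  When
 * create is true, lazily initialize the tdata if it does not exist yet, and
 * reinitialize it if it has expired.
 */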
JEMALLOC_ALWAYS_INLINE prof_tdata_t *
prof_tdata_get(tsd_t *tsd, bool create) {
	prof_tdata_t *tdata;

	cassert(config_prof);

	tdata = tsd_prof_tdata_get(tsd);
	if (create) {
		assert(tsd_reentrancy_level_get(tsd) == 0);
		if (unlikely(tdata == NULL)) {
			if (tsd_nominal(tsd)) {
				tdata = prof_tdata_init(tsd);
				tsd_prof_tdata_set(tsd, tdata);
			}
		} else if (unlikely(tdata->expired)) {
			tdata = prof_tdata_reinit(tsd, tdata);
			tsd_prof_tdata_set(tsd, tdata);
		}
		assert(tdata == NULL || tdata->attached);
	}

	if (tdata != NULL) {
		prof_thread_name_assert(tdata);
	}

	return tdata;
}
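
/*
 * Copy the profiling info recorded for ptr into *prof_info; alloc_ctx is an
 * optional cached metadata lookup result and may be NULL.
 */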
JEMALLOC_ALWAYS_INLINE void
prof_info_get(tsd_t *tsd, const void *ptr, emap_alloc_ctx_t *alloc_ctx,
    prof_info_t *prof_info) {
	cassert(config_prof);
	assert(ptr != NULL);
	assert(prof_info != NULL);

	arena_prof_info_get(tsd, ptr, alloc_ctx, prof_info, false);
}
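
/*
 * Like prof_info_get(), but also reset the recent-allocation state recorded
 * for ptr; used on the deallocation path (see prof_free()).
 */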
JEMALLOC_ALWAYS_INLINE void
prof_info_get_and_reset_recent(tsd_t *tsd, const void *ptr,
    emap_alloc_ctx_t *alloc_ctx, prof_info_t *prof_info) {
	cassert(config_prof);
	assert(ptr != NULL);
	assert(prof_info != NULL);

	arena_prof_info_get(tsd, ptr, alloc_ctx, prof_info, true);
}
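
/*
 * A tctx is "valid" (i.e. refers to an actual sampled backtrace) only if it
 * is neither NULL nor PROF_TCTX_SENTINEL, the marker used for unsampled
 * allocations.
 */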
JEMALLOC_ALWAYS_INLINE bool
prof_tctx_is_valid(const prof_tctx_t *tctx) {
	return tctx != NULL && tctx != PROF_TCTX_SENTINEL;
}
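
/*
 * Mark ptr as unsampled in the arena metadata.  prof_tctx_reset_sampled() is
 * a slightly cheaper variant for pointers known to be currently sampled (see
 * the comment in prof_realloc()).
 */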
JEMALLOC_ALWAYS_INLINE void
prof_tctx_reset(tsd_t *tsd, const void *ptr, emap_alloc_ctx_t *alloc_ctx) {
	cassert(config_prof);
	assert(ptr != NULL);

	arena_prof_tctx_reset(tsd, ptr, alloc_ctx);
}

JEMALLOC_ALWAYS_INLINE void
prof_tctx_reset_sampled(tsd_t *tsd, const void *ptr) {
	cassert(config_prof);
	assert(ptr != NULL);

	arena_prof_tctx_reset_sampled(tsd, ptr);
}
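
/*
 * Record tctx (which must be a valid tctx) and the allocation size in
 * edata's profiling info when a sampled allocation is set up.
 */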
JEMALLOC_ALWAYS_INLINE void
prof_info_set(tsd_t *tsd, edata_t *edata, prof_tctx_t *tctx, size_t size) {
	cassert(config_prof);
	assert(edata != NULL);
	assert(prof_tctx_is_valid(tctx));

	arena_prof_info_set(tsd, edata, tctx, size);
}
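
/*
 * Decide whether the current allocation should not be sampled: returns true
 * (skip) unless a sample event has fired and the thread's tdata is available
 * and active.
 */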
JEMALLOC_ALWAYS_INLINE bool
prof_sample_should_skip(tsd_t *tsd, bool sample_event) {
	cassert(config_prof);

	/* Fastpath: no need to load tdata */
	if (likely(!sample_event)) {
		return true;
	}

	/*
	 * sample_event is always obtained from the thread event module, and
	 * whenever it's true, it means that the thread event module has
	 * already checked the reentrancy level.
	 */
	assert(tsd_reentrancy_level_get(tsd) == 0);

	prof_tdata_t *tdata = prof_tdata_get(tsd, true);
	if (unlikely(tdata == NULL)) {
		return true;
	}

	return !tdata->active;
}
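
/*
 * Decide up front whether an allocation will be sampled: returns
 * PROF_TCTX_SENTINEL when not sampling, or a freshly created tctx when
 * sampling.  Roughly, callers on the allocation path are expected to combine
 * these helpers along the following lines (simplified sketch; real call
 * sites also deal with thread events, prof_sample_align(), etc.):
 *
 *	prof_tctx_t *tctx = prof_alloc_prep(tsd, prof_active, sample_event);
 *	void *ptr = ...allocate, aligned appropriately if sampled...;
 *	prof_malloc(tsd, ptr, size, usize, alloc_ctx, tctx);
 */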
JEMALLOC_ALWAYS_INLINE prof_tctx_t *
prof_alloc_prep(tsd_t *tsd, bool prof_active, bool sample_event) {
	prof_tctx_t *ret;

	if (!prof_active ||
	    likely(prof_sample_should_skip(tsd, sample_event))) {
		ret = PROF_TCTX_SENTINEL;
	} else {
		ret = prof_tctx_create(tsd);
	}

	return ret;
}
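
/*
 * Finish profiling bookkeeping for a new allocation: if tctx says the
 * allocation is sampled, record it as a sampled object; otherwise mark ptr
 * as unsampled.
 */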
JEMALLOC_ALWAYS_INLINE void
prof_malloc(tsd_t *tsd, const void *ptr, size_t size, size_t usize,
    emap_alloc_ctx_t *alloc_ctx, prof_tctx_t *tctx) {
	cassert(config_prof);
	assert(ptr != NULL);
	assert(usize == isalloc(tsd_tsdn(tsd), ptr));

	if (unlikely(prof_tctx_is_valid(tctx))) {
		prof_malloc_sample_object(tsd, ptr, size, usize, tctx);
	} else {
		prof_tctx_reset(tsd, ptr, alloc_ctx);
	}
}
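
/*
 * Profiling bookkeeping for reallocation: roll back a sampling decision that
 * no longer applies, record the new allocation if it is sampled, and release
 * the old sampled object (if any) last.
 */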
JEMALLOC_ALWAYS_INLINE void
prof_realloc(tsd_t *tsd, const void *ptr, size_t size, size_t usize,
    prof_tctx_t *tctx, bool prof_active, const void *old_ptr, size_t old_usize,
    prof_info_t *old_prof_info, bool sample_event) {
	bool sampled, old_sampled, moved;

	cassert(config_prof);
	assert(ptr != NULL || !prof_tctx_is_valid(tctx));

	if (prof_active && ptr != NULL) {
		assert(usize == isalloc(tsd_tsdn(tsd), ptr));
		if (prof_sample_should_skip(tsd, sample_event)) {
			/*
			 * Don't sample. The usize passed to prof_alloc_prep()
			 * was larger than what actually got allocated, so a
			 * backtrace was captured for this allocation, even
			 * though its actual usize was insufficient to cross the
			 * sample threshold.
			 */
			prof_alloc_rollback(tsd, tctx);
			tctx = PROF_TCTX_SENTINEL;
		}
	}

	sampled = prof_tctx_is_valid(tctx);
	old_sampled = prof_tctx_is_valid(old_prof_info->alloc_tctx);
	moved = (ptr != old_ptr);

	if (unlikely(sampled)) {
		prof_malloc_sample_object(tsd, ptr, size, usize, tctx);
	} else if (moved) {
		prof_tctx_reset(tsd, ptr, NULL);
	} else if (unlikely(old_sampled)) {
		/*
		 * prof_tctx_reset() would work for the !moved case as well,
		 * but prof_tctx_reset_sampled() is slightly cheaper, and is
		 * the proper thing to do here given explicit knowledge of the
		 * moved state.
		 */
		prof_tctx_reset_sampled(tsd, ptr);
	} else {
		prof_info_t prof_info;
		prof_info_get(tsd, ptr, NULL, &prof_info);
		assert(prof_info.alloc_tctx == PROF_TCTX_SENTINEL);
	}

	/*
	 * The prof_free_sampled_object() call must come after the
	 * prof_malloc_sample_object() call, because tctx and
	 * old_prof_info->alloc_tctx may be the same, in which case reversing
	 * the call order could cause the tctx to be prematurely destroyed as
	 * a side effect of momentarily zeroed counters.
	 */
	if (unlikely(old_sampled)) {
		prof_free_sampled_object(tsd, old_ptr, old_usize,
		    old_prof_info);
	}
}

JEMALLOC_ALWAYS_INLINE size_t
prof_sample_align(size_t usize, size_t orig_align) {
	/*
	 * Enforce alignment, so that sampled allocations can be identified
	 * w/o metadata lookup.
	 */
	assert(opt_prof);
	return (orig_align < PROF_SAMPLE_ALIGNMENT &&
	    (sz_can_use_slab(usize) || opt_cache_oblivious)) ?
	    PROF_SAMPLE_ALIGNMENT : orig_align;
}
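
/*
 * Return whether ptr is a sampled allocation.  Sampled allocations are kept
 * at PROF_SAMPLE_ALIGNMENT (see prof_sample_align()), which the debug-only
 * assertion cross-checks via prof_sample_aligned().
 */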
JEMALLOC_ALWAYS_INLINE bool
prof_sampled(tsd_t *tsd, const void *ptr) {
	prof_info_t prof_info;
	prof_info_get(tsd, ptr, NULL, &prof_info);
	bool sampled = prof_tctx_is_valid(prof_info.alloc_tctx);
	if (sampled) {
		assert(prof_sample_aligned(ptr));
	}
	return sampled;
}
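
/*
 * Profiling bookkeeping for deallocation: fetch (and reset the recent state
 * of) ptr's profiling info, and release the sampled-object accounting if the
 * allocation was sampled.
 */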
JEMALLOC_ALWAYS_INLINE void
prof_free(tsd_t *tsd, const void *ptr, size_t usize,
    emap_alloc_ctx_t *alloc_ctx) {
	prof_info_t prof_info;
	prof_info_get_and_reset_recent(tsd, ptr, alloc_ctx, &prof_info);

	cassert(config_prof);
	assert(usize == isalloc(tsd_tsdn(tsd), ptr));

	if (unlikely(prof_tctx_is_valid(prof_info.alloc_tctx))) {
		assert(prof_sample_aligned(ptr));
		prof_free_sampled_object(tsd, ptr, usize, &prof_info);
	}
}
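
/* Helpers for the thread name stored in tdata->thread_name. */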
JEMALLOC_ALWAYS_INLINE bool
prof_thread_name_empty(prof_tdata_t *tdata) {
	prof_active_assert();

	return (tdata->thread_name[0] == '\0');
}

JEMALLOC_ALWAYS_INLINE void
prof_thread_name_clear(prof_tdata_t *tdata) {
	prof_active_assert();

	tdata->thread_name[0] = '\0';
}

#endif /* JEMALLOC_INTERNAL_PROF_INLINES_H */