#ifndef JEMALLOC_INTERNAL_PROF_EXTERNS_H
#define JEMALLOC_INTERNAL_PROF_EXTERNS_H

#include "jemalloc/internal/jemalloc_preamble.h"
#include "jemalloc/internal/base.h"
#include "jemalloc/internal/mutex.h"
#include "jemalloc/internal/prof_hook.h"
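
/*
 * The opt_* variables below hold the boot-time values of the opt.prof*
 * options.  As an illustrative example (values chosen arbitrarily, not
 * defaults mandated here), a profiling-enabled build might be run with:
 *
 *   MALLOC_CONF="prof:true,prof_active:true,lg_prof_sample:19,prof_prefix:jeprof"
 */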
extern bool opt_prof;
extern bool opt_prof_active;
extern bool opt_prof_thread_active_init;
extern unsigned opt_prof_bt_max;
extern size_t opt_lg_prof_sample; /* Mean bytes between samples. */
extern ssize_t opt_lg_prof_interval; /* lg(prof_interval). */
extern bool opt_prof_gdump; /* High-water memory dumping. */
extern bool opt_prof_final; /* Final profile dumping. */
extern bool opt_prof_leak; /* Dump leak summary at exit. */
extern bool opt_prof_leak_error; /* Exit with error code if memory leaked */
extern bool opt_prof_accum; /* Report cumulative bytes. */
extern bool opt_prof_log; /* Turn logging on at boot. */
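/*
 * Filename prefix for heap profile dumps (the opt.prof_prefix option); dump
 * files are named roughly <prefix>.<pid>.<seq>.<type>.heap.
 */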
extern char opt_prof_prefix[
    /* Minimize memory bloat for non-prof builds. */
#ifdef JEMALLOC_PROF
    PATH_MAX +
#endif
    1];
extern bool opt_prof_unbias;

/* For recording recent allocations */
extern ssize_t opt_prof_recent_alloc_max;

/* Whether to use thread name provided by the system or by mallctl. */
extern bool opt_prof_sys_thread_name;

/* Whether to record per size class counts and request size totals. */
extern bool opt_prof_stats;

/* Accessed via prof_active_[gs]et{_unlocked,}(). */
extern bool prof_active_state;

/* Accessed via prof_gdump_[gs]et{_unlocked,}(). */
extern bool prof_gdump_val;

/* Profile dump interval, measured in bytes allocated. */
extern uint64_t prof_interval;

/*
 * Initialized as opt_lg_prof_sample, and potentially modified during profiling
 * resets.
 */
extern size_t lg_prof_sample;

extern bool prof_booted;
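
/*
 * Setters/getters for the optional profiling hooks declared in prof_hook.h:
 * a backtrace hook can replace the default stack unwinder, the dump hook is
 * invoked around heap profile dumps, and the sample/sample_free hooks observe
 * sampled allocations and the freeing of sampled objects.  A minimal sketch,
 * assuming my_backtrace_hook is a user function matching prof_backtrace_hook_t:
 *
 *   prof_backtrace_hook_set(my_backtrace_hook);
 *   assert(prof_backtrace_hook_get() == my_backtrace_hook);
 */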
void prof_backtrace_hook_set(prof_backtrace_hook_t hook);
prof_backtrace_hook_t prof_backtrace_hook_get(void);

void prof_dump_hook_set(prof_dump_hook_t hook);
prof_dump_hook_t prof_dump_hook_get(void);

void prof_sample_hook_set(prof_sample_hook_t hook);
prof_sample_hook_t prof_sample_hook_get(void);

void prof_sample_free_hook_set(prof_sample_free_hook_t hook);
prof_sample_free_hook_t prof_sample_free_hook_get(void);

/* Functions only accessed in prof_inlines.h */
prof_tdata_t *prof_tdata_init(tsd_t *tsd);
prof_tdata_t *prof_tdata_reinit(tsd_t *tsd, prof_tdata_t *tdata);
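
/*
 * Bookkeeping for sampled allocations, reached from the allocation fast paths
 * through prof_inlines.h: prof_malloc_sample_object() records a newly sampled
 * allocation under its tctx, prof_free_sampled_object() releases that record
 * when the object is freed, and prof_alloc_rollback() undoes a tctx
 * reservation when the allocation it was created for does not complete.
 */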
void prof_alloc_rollback(tsd_t *tsd, prof_tctx_t *tctx);
void prof_malloc_sample_object(tsd_t *tsd, const void *ptr, size_t size,
    size_t usize, prof_tctx_t *tctx);
void prof_free_sampled_object(tsd_t *tsd, const void *ptr, size_t usize,
    prof_info_t *prof_info);
prof_tctx_t *prof_tctx_create(tsd_t *tsd);
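/*
 * Dump triggers: prof_idump() is the interval-triggered dump (every
 * prof_interval bytes allocated), prof_mdump() is the on-demand dump backing
 * the prof.dump mallctl (filename may be NULL to use the default name), and
 * prof_gdump() is the high-water-mark-triggered dump.
 */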
void prof_idump(tsdn_t *tsdn);
bool prof_mdump(tsd_t *tsd, const char *filename);
void prof_gdump(tsdn_t *tsdn);

void prof_tdata_cleanup(tsd_t *tsd);
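/*
 * Runtime control knobs, roughly the backing implementations for the
 * prof.active, thread.prof.name, thread.prof.active, prof.thread_active_init,
 * and prof.gdump mallctls.
 */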
bool prof_active_get(tsdn_t *tsdn);
bool prof_active_set(tsdn_t *tsdn, bool active);
const char *prof_thread_name_get(tsd_t *tsd);
int prof_thread_name_set(tsd_t *tsd, const char *thread_name);
bool prof_thread_active_get(tsd_t *tsd);
bool prof_thread_active_set(tsd_t *tsd, bool active);
bool prof_thread_active_init_get(tsdn_t *tsdn);
bool prof_thread_active_init_set(tsdn_t *tsdn, bool active_init);
bool prof_gdump_get(tsdn_t *tsdn);
bool prof_gdump_set(tsdn_t *tsdn, bool active);
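/*
 * Staged bootstrapping (prof_boot0/1/2) plus fork protection: the prefork
 * functions acquire the profiling mutexes before fork(), and the postfork
 * functions release them in the parent and reinitialize them in the child.
 */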
void prof_boot0(void);
void prof_boot1(void);
bool prof_boot2(tsd_t *tsd, base_t *base);
void prof_prefork0(tsdn_t *tsdn);
void prof_prefork1(tsdn_t *tsdn);
void prof_postfork_parent(tsdn_t *tsdn);
void prof_postfork_child(tsdn_t *tsdn);
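
/*
 * The waits below are expressed in bytes of allocation activity; the next
 * sampled allocation is chosen so that samples are, on average, roughly
 * 2^lg_prof_sample bytes apart.
 */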
/* Only accessed by thread event. */
uint64_t prof_sample_new_event_wait(tsd_t *tsd);
uint64_t prof_sample_postponed_event_wait(tsd_t *tsd);
void prof_sample_event_handler(tsd_t *tsd, uint64_t elapsed);

#endif /* JEMALLOC_INTERNAL_PROF_EXTERNS_H */
|