Fix threads-related profiling bugs.
Initialize bt2cnt_tsd so that cleanup at thread exit actually happens. Associate (prof_ctx_t *) with allocated objects, rather than (prof_thr_cnt_t *). Each thread must always operate on its own (prof_thr_cnt_t *), and an object may outlive the thread that allocated it.
This commit is contained in:
@@ -98,7 +98,7 @@ struct arena_chunk_map_s {

 #ifdef JEMALLOC_PROF
 	/* Profile counters, used for large object runs. */
-	prof_thr_cnt_t	*prof_cnt;
+	prof_ctx_t	*prof_ctx;
 #endif

 	/*
@@ -246,10 +246,10 @@ struct arena_bin_s {

 #ifdef JEMALLOC_PROF
 	/*
-	 * Offset of first (prof_cnt_t *) in a run header for this bin's size
+	 * Offset of first (prof_ctx_t *) in a run header for this bin's size
 	 * class, or 0 if (opt_prof == false).
 	 */
-	uint32_t	cnt0_offset;
+	uint32_t	ctx0_offset;
 #endif

 	/* Offset of first region in a run for this bin's size class. */
@@ -438,8 +438,8 @@ size_t arena_salloc(const void *ptr);
 #ifdef JEMALLOC_PROF
 void	arena_prof_promoted(const void *ptr, size_t size);
 size_t	arena_salloc_demote(const void *ptr);
-prof_thr_cnt_t	*arena_prof_cnt_get(const void *ptr);
-void	arena_prof_cnt_set(const void *ptr, prof_thr_cnt_t *cnt);
+prof_ctx_t	*arena_prof_ctx_get(const void *ptr);
+void	arena_prof_ctx_set(const void *ptr, prof_ctx_t *ctx);
 #endif
 void	arena_dalloc_bin(arena_t *arena, arena_chunk_t *chunk, void *ptr,
     arena_chunk_map_t *mapelm);
@@ -19,7 +19,7 @@ struct extent_node_s {

 #ifdef JEMALLOC_PROF
 	/* Profile counters, used for huge objects. */
-	prof_thr_cnt_t	*prof_cnt;
+	prof_ctx_t	*prof_ctx;
 #endif

 	/* Pointer to the extent that this tree node is responsible for. */
@@ -25,8 +25,8 @@ void *huge_ralloc(void *ptr, size_t size, size_t oldsize);
 void	huge_dalloc(void *ptr);
 size_t	huge_salloc(const void *ptr);
 #ifdef JEMALLOC_PROF
-prof_thr_cnt_t	*huge_prof_cnt_get(const void *ptr);
-void	huge_prof_cnt_set(const void *ptr, prof_thr_cnt_t *cnt);
+prof_ctx_t	*huge_prof_ctx_get(const void *ptr);
+void	huge_prof_ctx_set(const void *ptr, prof_ctx_t *ctx);
 #endif
 bool	huge_boot(void);
@@ -98,6 +98,9 @@ struct prof_thr_cnt_s {
 };

 struct prof_ctx_s {
+	/* Associated backtrace. */
+	prof_bt_t	*bt;
+
 	/* Protects cnt_merged and sets_ql. */
 	malloc_mutex_t	lock;
@@ -151,10 +154,10 @@ bool prof_init(prof_t *prof, bool master);
 void	prof_destroy(prof_t *prof);

 prof_thr_cnt_t	*prof_alloc_prep(size_t size);
-prof_thr_cnt_t	*prof_cnt_get(const void *ptr);
+prof_ctx_t	*prof_ctx_get(const void *ptr);
 void	prof_malloc(const void *ptr, prof_thr_cnt_t *cnt);
 void	prof_realloc(const void *ptr, prof_thr_cnt_t *cnt, const void *old_ptr,
-    size_t old_size, prof_thr_cnt_t *old_cnt);
+    size_t old_size, prof_ctx_t *old_ctx);
 void	prof_free(const void *ptr);
 void	prof_idump(void);
 bool	prof_mdump(const char *filename);
Reference in New Issue
Block a user