diff --git a/include/jemalloc/internal/arena_inlines_b.h b/include/jemalloc/internal/arena_inlines_b.h
index c7d08227..44a73373 100644
--- a/include/jemalloc/internal/arena_inlines_b.h
+++ b/include/jemalloc/internal/arena_inlines_b.h
@@ -104,15 +104,15 @@ arena_prof_info_get(tsd_t *tsd, const void *ptr, emap_alloc_ctx_t *alloc_ctx,
 		if (reset_recent &&
 		    large_dalloc_safety_checks(edata, ptr,
 		    edata_szind_get(edata))) {
-			prof_info->alloc_tctx = (prof_tctx_t *)(uintptr_t)1U;
+			prof_info->alloc_tctx = PROF_TCTX_SENTINEL;
 			return;
 		}
 		large_prof_info_get(tsd, edata, prof_info, reset_recent);
 	} else {
-		prof_info->alloc_tctx = (prof_tctx_t *)(uintptr_t)1U;
+		prof_info->alloc_tctx = PROF_TCTX_SENTINEL;
 		/*
 		 * No need to set other fields in prof_info; they will never be
-		 * accessed if (uintptr_t)alloc_tctx == (uintptr_t)1U.
+		 * accessed if alloc_tctx == PROF_TCTX_SENTINEL.
 		 */
 	}
 }
diff --git a/include/jemalloc/internal/prof_inlines.h b/include/jemalloc/internal/prof_inlines.h
index 6cb73735..75300ee4 100644
--- a/include/jemalloc/internal/prof_inlines.h
+++ b/include/jemalloc/internal/prof_inlines.h
@@ -106,6 +106,11 @@ prof_info_get_and_reset_recent(tsd_t *tsd, const void *ptr,
 	arena_prof_info_get(tsd, ptr, alloc_ctx, prof_info, true);
 }
 
+JEMALLOC_ALWAYS_INLINE bool
+prof_tctx_is_valid(const prof_tctx_t *tctx) {
+	return tctx != NULL && tctx != PROF_TCTX_SENTINEL;
+}
+
 JEMALLOC_ALWAYS_INLINE void
 prof_tctx_reset(tsd_t *tsd, const void *ptr, emap_alloc_ctx_t *alloc_ctx) {
 	cassert(config_prof);
@@ -126,7 +131,7 @@ JEMALLOC_ALWAYS_INLINE void
 prof_info_set(tsd_t *tsd, edata_t *edata, prof_tctx_t *tctx, size_t size) {
 	cassert(config_prof);
 	assert(edata != NULL);
-	assert((uintptr_t)tctx > (uintptr_t)1U);
+	assert(prof_tctx_is_valid(tctx));
 
 	arena_prof_info_set(tsd, edata, tctx, size);
 }
@@ -161,7 +166,7 @@ prof_alloc_prep(tsd_t *tsd, bool prof_active, bool sample_event) {
 
 	if (!prof_active ||
 	    likely(prof_sample_should_skip(tsd, sample_event))) {
-		ret = (prof_tctx_t *)(uintptr_t)1U;
+		ret = PROF_TCTX_SENTINEL;
 	} else {
 		ret = prof_tctx_create(tsd);
 	}
@@ -176,7 +181,7 @@ prof_malloc(tsd_t *tsd, const void *ptr, size_t size, size_t usize,
 	assert(ptr != NULL);
 	assert(usize == isalloc(tsd_tsdn(tsd), ptr));
 
-	if (unlikely((uintptr_t)tctx > (uintptr_t)1U)) {
+	if (unlikely(prof_tctx_is_valid(tctx))) {
 		prof_malloc_sample_object(tsd, ptr, size, usize, tctx);
 	} else {
 		prof_tctx_reset(tsd, ptr, alloc_ctx);
@@ -190,7 +195,7 @@ prof_realloc(tsd_t *tsd, const void *ptr, size_t size, size_t usize,
 	bool sampled, old_sampled, moved;
 
 	cassert(config_prof);
-	assert(ptr != NULL || (uintptr_t)tctx <= (uintptr_t)1U);
+	assert(ptr != NULL || !prof_tctx_is_valid(tctx));
 
 	if (prof_active && ptr != NULL) {
 		assert(usize == isalloc(tsd_tsdn(tsd), ptr));
@@ -203,12 +208,12 @@ prof_realloc(tsd_t *tsd, const void *ptr, size_t size, size_t usize,
 			 * sample threshold.
 			 */
 			prof_alloc_rollback(tsd, tctx);
-			tctx = (prof_tctx_t *)(uintptr_t)1U;
+			tctx = PROF_TCTX_SENTINEL;
 		}
 	}
 
-	sampled = ((uintptr_t)tctx > (uintptr_t)1U);
-	old_sampled = ((uintptr_t)old_prof_info->alloc_tctx > (uintptr_t)1U);
+	sampled = prof_tctx_is_valid(tctx);
+	old_sampled = prof_tctx_is_valid(old_prof_info->alloc_tctx);
 	moved = (ptr != old_ptr);
 
 	if (unlikely(sampled)) {
@@ -226,7 +231,7 @@ prof_realloc(tsd_t *tsd, const void *ptr, size_t size, size_t usize,
 	} else {
 		prof_info_t prof_info;
 		prof_info_get(tsd, ptr, NULL, &prof_info);
-		assert((uintptr_t)prof_info.alloc_tctx == (uintptr_t)1U);
+		assert(prof_info.alloc_tctx == PROF_TCTX_SENTINEL);
 	}
 
 	/*
@@ -258,7 +263,7 @@ JEMALLOC_ALWAYS_INLINE bool
 prof_sampled(tsd_t *tsd, const void *ptr) {
 	prof_info_t prof_info;
 	prof_info_get(tsd, ptr, NULL, &prof_info);
-	bool sampled = (uintptr_t)prof_info.alloc_tctx > (uintptr_t)1U;
+	bool sampled = prof_tctx_is_valid(prof_info.alloc_tctx);
 	if (sampled) {
 		assert(prof_sample_aligned(ptr));
 	}
@@ -274,7 +279,7 @@ prof_free(tsd_t *tsd, const void *ptr, size_t usize,
 	cassert(config_prof);
 	assert(usize == isalloc(tsd_tsdn(tsd), ptr));
 
-	if (unlikely((uintptr_t)prof_info.alloc_tctx > (uintptr_t)1U)) {
+	if (unlikely(prof_tctx_is_valid(prof_info.alloc_tctx))) {
 		assert(prof_sample_aligned(ptr));
 		prof_free_sampled_object(tsd, ptr, usize, &prof_info);
 	}
diff --git a/include/jemalloc/internal/prof_types.h b/include/jemalloc/internal/prof_types.h
index 046ea204..921b16fe 100644
--- a/include/jemalloc/internal/prof_types.h
+++ b/include/jemalloc/internal/prof_types.h
@@ -88,4 +88,6 @@ typedef struct prof_recent_s prof_recent_t;
 #define PROF_SAMPLE_ALIGNMENT PAGE
 #define PROF_SAMPLE_ALIGNMENT_MASK PAGE_MASK
 
+#define PROF_TCTX_SENTINEL ((prof_tctx_t *)((uintptr_t)1U))
+
 #endif /* JEMALLOC_INTERNAL_PROF_TYPES_H */
diff --git a/src/jemalloc.c b/src/jemalloc.c
index 3961683a..a36b4974 100644
--- a/src/jemalloc.c
+++ b/src/jemalloc.c
@@ -2529,12 +2529,12 @@ imalloc_body(static_opts_t *sopts, dynamic_opts_t *dopts, tsd_t *tsd) {
 		    sample_event);
 
 		emap_alloc_ctx_t alloc_ctx;
-		if (likely((uintptr_t)tctx == (uintptr_t)1U)) {
+		if (likely(tctx == PROF_TCTX_SENTINEL)) {
 			alloc_ctx.slab = sz_can_use_slab(usize);
 			allocation = imalloc_no_sample(
 			    sopts, dopts, tsd, usize, usize, ind,
 			    alloc_ctx.slab);
-		} else if ((uintptr_t)tctx > (uintptr_t)1U) {
+		} else if (tctx != NULL) {
 			allocation = imalloc_sample(
 			    sopts, dopts, tsd, usize, ind);
 			alloc_ctx.slab = false;
@@ -3366,7 +3366,7 @@ irallocx_prof(tsd_t *tsd, void *old_ptr, size_t old_usize, size_t size,
 	bool sample_event = te_prof_sample_event_lookahead(tsd, usize);
 	prof_tctx_t *tctx = prof_alloc_prep(tsd, prof_active, sample_event);
 	void *p;
-	if (unlikely((uintptr_t)tctx != (uintptr_t)1U)) {
+	if (unlikely(tctx != PROF_TCTX_SENTINEL)) {
 		p = irallocx_prof_sample(tsd_tsdn(tsd), old_ptr, old_usize,
 		    usize, alignment, zero, tcache, arena, tctx, hook_args);
 	} else {
@@ -3612,7 +3612,7 @@ ixallocx_prof(tsd_t *tsd, void *ptr, size_t old_usize, size_t size,
 	prof_tctx_t *tctx = prof_alloc_prep(tsd, prof_active, sample_event);
 
 	size_t usize;
-	if (unlikely((uintptr_t)tctx != (uintptr_t)1U)) {
+	if (unlikely(tctx != PROF_TCTX_SENTINEL)) {
 		usize = ixallocx_prof_sample(tsd_tsdn(tsd), ptr, old_usize,
 		    size, extra, alignment, zero, tctx);
 	} else {
diff --git a/src/large.c b/src/large.c
index 5fc4bf58..10fa652e 100644
--- a/src/large.c
+++ b/src/large.c
@@ -287,7 +287,7 @@ large_prof_info_get(tsd_t *tsd, edata_t *edata, prof_info_t *prof_info,
 
 	prof_tctx_t *alloc_tctx = edata_prof_tctx_get(edata);
 	prof_info->alloc_tctx = alloc_tctx;
-	if ((uintptr_t)alloc_tctx > (uintptr_t)1U) {
+	if (prof_tctx_is_valid(alloc_tctx)) {
 		nstime_copy(&prof_info->alloc_time,
 		    edata_prof_alloc_time_get(edata));
 		prof_info->alloc_size = edata_prof_alloc_size_get(edata);
@@ -308,7 +308,7 @@ large_prof_tctx_set(edata_t *edata, prof_tctx_t *tctx) {
 
 void
 large_prof_tctx_reset(edata_t *edata) {
-	large_prof_tctx_set(edata, (prof_tctx_t *)(uintptr_t)1U);
+	large_prof_tctx_set(edata, PROF_TCTX_SENTINEL);
 }
 
 void
diff --git a/src/prof.c b/src/prof.c
index 9986a329..52869375 100644
--- a/src/prof.c
+++ b/src/prof.c
@@ -91,11 +91,19 @@ prof_alloc_rollback(tsd_t *tsd, prof_tctx_t *tctx) {
 	cassert(config_prof);
 
 	if (tsd_reentrancy_level_get(tsd) > 0) {
-		assert((uintptr_t)tctx == (uintptr_t)1U);
+		assert(tctx == PROF_TCTX_SENTINEL);
 		return;
 	}
 
-	if ((uintptr_t)tctx > (uintptr_t)1U) {
+	if (prof_tctx_is_valid(tctx)) {
+		/*
+		 * This `assert` really shouldn't be necessary.  It's here
+		 * because there's a bug in the clang static analyzer; it
+		 * somehow does not realize that `prof_tctx_is_valid(tctx)`
+		 * being true already ensures that `tctx` is not
+		 * `NULL`.
+		 */
+		assert(tctx != NULL);
 		malloc_mutex_lock(tsd_tsdn(tsd), tctx->tdata->lock);
 		tctx->prepared = false;
 		prof_tctx_try_destroy(tsd, tctx);
@@ -169,7 +177,7 @@ prof_free_sampled_object(tsd_t *tsd, const void *ptr, size_t usize,
 
 	assert(prof_info != NULL);
 	prof_tctx_t *tctx = prof_info->alloc_tctx;
-	assert((uintptr_t)tctx > (uintptr_t)1U);
+	assert(prof_tctx_is_valid(tctx));
 
 	szind_t szind = sz_size2index(usize);
 
diff --git a/test/unit/prof_tctx.c b/test/unit/prof_tctx.c
index e0efdc36..d19dd395 100644
--- a/test/unit/prof_tctx.c
+++ b/test/unit/prof_tctx.c
@@ -18,7 +18,7 @@ TEST_BEGIN(test_prof_realloc) {
 	p = mallocx(1024, flags);
 	expect_ptr_not_null(p, "Unexpected mallocx() failure");
 	prof_info_get(tsd, p, NULL, &prof_info_p);
-	expect_ptr_ne(prof_info_p.alloc_tctx, (prof_tctx_t *)(uintptr_t)1U,
+	expect_ptr_ne(prof_info_p.alloc_tctx, PROF_TCTX_SENTINEL,
 	    "Expected valid tctx");
 	prof_cnt_all(&cnt_1);
 	expect_u64_eq(cnt_0.curobjs + 1, cnt_1.curobjs,
@@ -28,7 +28,7 @@ TEST_BEGIN(test_prof_realloc) {
 	expect_ptr_ne(p, q, "Expected move");
 	expect_ptr_not_null(p, "Unexpected rmallocx() failure");
 	prof_info_get(tsd, q, NULL, &prof_info_q);
-	expect_ptr_ne(prof_info_q.alloc_tctx, (prof_tctx_t *)(uintptr_t)1U,
+	expect_ptr_ne(prof_info_q.alloc_tctx, PROF_TCTX_SENTINEL,
 	    "Expected valid tctx");
 	prof_cnt_all(&cnt_2);
 	expect_u64_eq(cnt_1.curobjs, cnt_2.curobjs,
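
Note for reviewers: below is a minimal, self-contained sketch of the sentinel idiom this patch standardizes on. The stand-in type and the main() driver are invented for illustration and are not jemalloc code; only the shapes of PROF_TCTX_SENTINEL and prof_tctx_is_valid() mirror the definitions added above. The point is that a prof_tctx_t pointer now has three recognizable states (NULL, the sentinel meaning "allocation not sampled", or a live context), and the helper replaces the old (uintptr_t)tctx > (uintptr_t)1U ordering trick with explicit equality checks.

/*
 * Standalone sketch (not jemalloc source): demonstrates the
 * PROF_TCTX_SENTINEL / prof_tctx_is_valid() idiom introduced by this patch.
 */
#include <assert.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

typedef struct prof_tctx_s prof_tctx_t;	/* opaque stand-in type */

/* Same shape as the macro added to prof_types.h. */
#define PROF_TCTX_SENTINEL ((prof_tctx_t *)((uintptr_t)1U))

/* Mirrors the helper added to prof_inlines.h. */
static inline bool
prof_tctx_is_valid(const prof_tctx_t *tctx) {
	return tctx != NULL && tctx != PROF_TCTX_SENTINEL;
}

int
main(void) {
	/* Sentinel: the allocation exists but was not sampled. */
	prof_tctx_t *not_sampled = PROF_TCTX_SENTINEL;
	/* NULL: no context at all. */
	prof_tctx_t *missing = NULL;
	/* Any real object address is neither NULL nor the sentinel. */
	int backing;
	prof_tctx_t *live = (prof_tctx_t *)&backing;	/* hypothetical live tctx */

	assert(!prof_tctx_is_valid(not_sampled));
	assert(!prof_tctx_is_valid(missing));
	assert(prof_tctx_is_valid(live));

	puts("sentinel idiom ok");
	return 0;
}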