Make one call to prof_active_get_unlocked() per allocation event.
Make one call to prof_active_get_unlocked() per allocation event, and use the result throughout the relevant functions that handle an allocation event. Also add a missing check in prof_realloc(). These fixes protect allocation events against concurrent prof_active changes.
commit cec0d63d8b
parent ef363de701
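
What follows is the commit's diff. For orientation, the race it closes is that prof_alloc_prep() and prof_realloc() each read prof_active on their own, so a concurrent toggle of prof.active could make the two halves of a single allocation event disagree. The sketch below is a minimal, self-contained illustration of that pattern and of the fix (read the flag once, thread the snapshot through); it is not jemalloc code, and the helpers are hypothetical stand-ins.

#include <stdatomic.h>
#include <stdbool.h>

/* Stand-in for jemalloc's prof_active flag, toggled concurrently elsewhere. */
static atomic_bool prof_active_flag = true;

static bool
prof_active_get_unlocked(void)
{
    return (atomic_load_explicit(&prof_active_flag, memory_order_relaxed));
}

/* Racy: the prep and commit steps may observe different flag values. */
static void
alloc_event_racy(void)
{
    if (prof_active_get_unlocked()) {
        /* prepare sampling state, e.g. capture a backtrace */
    }
    /* ... the flag may be toggled here by another thread ... */
    if (prof_active_get_unlocked()) {
        /* commit sampling state the prep step may never have set up */
    }
}

/* Fixed: one read per allocation event, passed to both steps. */
static void
alloc_event_fixed(void)
{
    bool prof_active = prof_active_get_unlocked();

    if (prof_active) {
        /* prepare sampling state */
    }
    if (prof_active) {
        /* commit sampling state, consistent with the prep step */
    }
}
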
@@ -29,6 +29,10 @@ brevity. Much more detail can be found in the git revision history:
   - Fix xallocx() bugs related to the 'extra' parameter when specified as
     non-zero.
   - Fix irealloc_prof() to prof_alloc_rollback() on OOM.
+  - Make one call to prof_active_get_unlocked() per allocation event, and use
+    the result throughout the relevant functions that handle an allocation
+    event.  Also add a missing check in prof_realloc().  These fixes protect
+    allocation events against concurrent prof_active changes.
 
 * 4.0.0 (August 17, 2015)
 
@@ -331,14 +331,16 @@ bool prof_gdump_get_unlocked(void);
 prof_tdata_t *prof_tdata_get(tsd_t *tsd, bool create);
 bool prof_sample_accum_update(tsd_t *tsd, size_t usize, bool commit,
     prof_tdata_t **tdata_out);
-prof_tctx_t *prof_alloc_prep(tsd_t *tsd, size_t usize, bool update);
+prof_tctx_t *prof_alloc_prep(tsd_t *tsd, size_t usize, bool prof_active,
+    bool update);
 prof_tctx_t *prof_tctx_get(const void *ptr);
 void prof_tctx_set(const void *ptr, size_t usize, prof_tctx_t *tctx);
 void prof_malloc_sample_object(const void *ptr, size_t usize,
     prof_tctx_t *tctx);
 void prof_malloc(const void *ptr, size_t usize, prof_tctx_t *tctx);
 void prof_realloc(tsd_t *tsd, const void *ptr, size_t usize,
-    prof_tctx_t *tctx, bool updated, size_t old_usize, prof_tctx_t *old_tctx);
+    prof_tctx_t *tctx, bool prof_active, bool updated,
+    size_t old_usize, prof_tctx_t *old_tctx);
 void prof_free(tsd_t *tsd, const void *ptr, size_t usize);
 #endif
 
@@ -443,7 +445,7 @@ prof_sample_accum_update(tsd_t *tsd, size_t usize, bool update,
 }
 
 JEMALLOC_ALWAYS_INLINE prof_tctx_t *
-prof_alloc_prep(tsd_t *tsd, size_t usize, bool update)
+prof_alloc_prep(tsd_t *tsd, size_t usize, bool prof_active, bool update)
 {
     prof_tctx_t *ret;
     prof_tdata_t *tdata;
@@ -451,8 +453,8 @@ prof_alloc_prep(tsd_t *tsd, size_t usize, bool update)
 
     assert(usize == s2u(usize));
 
-    if (!prof_active_get_unlocked() || likely(prof_sample_accum_update(tsd,
-        usize, update, &tdata)))
+    if (!prof_active || likely(prof_sample_accum_update(tsd, usize, update,
+        &tdata)))
         ret = (prof_tctx_t *)(uintptr_t)1U;
     else {
         bt_init(&bt, tdata->vec);
@@ -479,17 +481,17 @@ prof_malloc(const void *ptr, size_t usize, prof_tctx_t *tctx)
 
 JEMALLOC_ALWAYS_INLINE void
 prof_realloc(tsd_t *tsd, const void *ptr, size_t usize, prof_tctx_t *tctx,
-    bool updated, size_t old_usize, prof_tctx_t *old_tctx)
+    bool prof_active, bool updated, size_t old_usize, prof_tctx_t *old_tctx)
 {
 
     cassert(config_prof);
     assert(ptr != NULL || (uintptr_t)tctx <= (uintptr_t)1U);
 
-    if (!updated && ptr != NULL) {
+    if (prof_active && !updated && ptr != NULL) {
         assert(usize == isalloc(ptr, true));
         if (prof_sample_accum_update(tsd, usize, true, NULL)) {
             /*
-             * Don't sample.  The usize passed to PROF_ALLOC_PREP()
+             * Don't sample.  The usize passed to prof_alloc_prep()
              * was larger than what actually got allocated, so a
              * backtrace was captured for this allocation, even
              * though its actual usize was insufficient to cross the
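
The "missing check" from the commit message is the prof_active guard added to prof_realloc() above: without it, prof_realloc() could advance the per-thread sample accumulator for an event whose prof_alloc_prep() had skipped that update because profiling was inactive. The fragment below is a rough, hypothetical stand-in (not the real jemalloc types or logic) for the invariant the guard preserves: within one allocation event, either both halves consume sampling budget or neither does, according to the single prof_active snapshot.

#include <stdbool.h>
#include <stddef.h>

/* Hypothetical stand-in for tdata->bytes_until_sample. */
static size_t bytes_until_sample = 1 << 20;

/* Hypothetical stand-in for prof_sample_accum_update(): spend budget. */
static void
accum_update(size_t usize)
{
    bytes_until_sample = (usize >= bytes_until_sample) ?
        0 : bytes_until_sample - usize;
}

static void
alloc_prep(size_t usize, bool prof_active)
{
    /* prep half: spend sampling budget only when the snapshot is active */
    if (prof_active)
        accum_update(usize);
}

static void
realloc_commit(size_t usize, bool prof_active, bool updated)
{
    /*
     * New behavior: only touch the accumulator when this event's snapshot
     * was active, matching what alloc_prep() did.  The old code lacked the
     * prof_active check, so an event prepared while profiling was off
     * could still consume budget here.
     */
    if (prof_active && !updated)
        accum_update(usize);
}
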
@@ -1378,7 +1378,7 @@ imalloc_prof(tsd_t *tsd, size_t usize)
     void *p;
     prof_tctx_t *tctx;
 
-    tctx = prof_alloc_prep(tsd, usize, true);
+    tctx = prof_alloc_prep(tsd, usize, prof_active_get_unlocked(), true);
     if (unlikely((uintptr_t)tctx != (uintptr_t)1U))
         p = imalloc_prof_sample(tsd, usize, tctx);
     else
@@ -1468,7 +1468,7 @@ imemalign_prof(tsd_t *tsd, size_t alignment, size_t usize)
     void *p;
     prof_tctx_t *tctx;
 
-    tctx = prof_alloc_prep(tsd, usize, true);
+    tctx = prof_alloc_prep(tsd, usize, prof_active_get_unlocked(), true);
     if (unlikely((uintptr_t)tctx != (uintptr_t)1U))
         p = imemalign_prof_sample(tsd, alignment, usize, tctx);
     else
@@ -1599,7 +1599,7 @@ icalloc_prof(tsd_t *tsd, size_t usize)
     void *p;
     prof_tctx_t *tctx;
 
-    tctx = prof_alloc_prep(tsd, usize, true);
+    tctx = prof_alloc_prep(tsd, usize, prof_active_get_unlocked(), true);
     if (unlikely((uintptr_t)tctx != (uintptr_t)1U))
         p = icalloc_prof_sample(tsd, usize, tctx);
     else
@@ -1704,10 +1704,12 @@ JEMALLOC_ALWAYS_INLINE_C void *
 irealloc_prof(tsd_t *tsd, void *oldptr, size_t old_usize, size_t usize)
 {
     void *p;
+    bool prof_active;
     prof_tctx_t *old_tctx, *tctx;
 
+    prof_active = prof_active_get_unlocked();
     old_tctx = prof_tctx_get(oldptr);
-    tctx = prof_alloc_prep(tsd, usize, true);
+    tctx = prof_alloc_prep(tsd, usize, prof_active, true);
     if (unlikely((uintptr_t)tctx != (uintptr_t)1U))
         p = irealloc_prof_sample(tsd, oldptr, old_usize, usize, tctx);
     else
@@ -1716,7 +1718,8 @@ irealloc_prof(tsd_t *tsd, void *oldptr, size_t old_usize, size_t usize)
         prof_alloc_rollback(tsd, tctx, true);
         return (NULL);
     }
-    prof_realloc(tsd, p, usize, tctx, true, old_usize, old_tctx);
+    prof_realloc(tsd, p, usize, tctx, prof_active, true, old_usize,
+        old_tctx);
 
     return (p);
 }
@@ -2014,7 +2017,7 @@ imallocx_prof(tsd_t *tsd, size_t size, int flags, size_t *usize)
     if (unlikely(imallocx_flags_decode(tsd, size, flags, usize, &alignment,
         &zero, &tcache, &arena)))
         return (NULL);
-    tctx = prof_alloc_prep(tsd, *usize, true);
+    tctx = prof_alloc_prep(tsd, *usize, prof_active_get_unlocked(), true);
     if (likely((uintptr_t)tctx == (uintptr_t)1U)) {
         p = imallocx_maybe_flags(tsd, size, flags, *usize, alignment,
             zero, tcache, arena);
@@ -2123,10 +2126,12 @@ irallocx_prof(tsd_t *tsd, void *oldptr, size_t old_usize, size_t size,
     arena_t *arena)
 {
     void *p;
+    bool prof_active;
     prof_tctx_t *old_tctx, *tctx;
 
+    prof_active = prof_active_get_unlocked();
     old_tctx = prof_tctx_get(oldptr);
-    tctx = prof_alloc_prep(tsd, *usize, true);
+    tctx = prof_alloc_prep(tsd, *usize, prof_active, true);
     if (unlikely((uintptr_t)tctx != (uintptr_t)1U)) {
         p = irallocx_prof_sample(tsd, oldptr, old_usize, size,
             alignment, *usize, zero, tcache, arena, tctx);
@@ -2150,7 +2155,8 @@ irallocx_prof(tsd_t *tsd, void *oldptr, size_t old_usize, size_t size,
         */
         *usize = isalloc(p, config_prof);
     }
-    prof_realloc(tsd, p, *usize, tctx, true, old_usize, old_tctx);
+    prof_realloc(tsd, p, *usize, tctx, prof_active, true,
+        old_usize, old_tctx);
 
     return (p);
 }
@@ -2274,8 +2280,10 @@ ixallocx_prof(tsd_t *tsd, void *ptr, size_t old_usize, size_t size,
     size_t extra, size_t alignment, bool zero)
 {
     size_t max_usize, usize;
+    bool prof_active;
     prof_tctx_t *old_tctx, *tctx;
 
+    prof_active = prof_active_get_unlocked();
     old_tctx = prof_tctx_get(ptr);
     /* Clamp extra if necessary to avoid (size + extra) overflow. */
     if (unlikely(size + extra > HUGE_MAXCLASS))
@@ -2288,7 +2296,7 @@ ixallocx_prof(tsd_t *tsd, void *ptr, size_t old_usize, size_t size,
         */
     max_usize = (alignment == 0) ? s2u(size+extra) : sa2u(size+extra,
         alignment);
-    tctx = prof_alloc_prep(tsd, max_usize, false);
+    tctx = prof_alloc_prep(tsd, max_usize, prof_active, false);
     if (unlikely((uintptr_t)tctx != (uintptr_t)1U)) {
         usize = ixallocx_prof_sample(ptr, old_usize, size, extra,
             alignment, zero, max_usize, tctx);
@@ -2300,7 +2308,8 @@ ixallocx_prof(tsd_t *tsd, void *ptr, size_t old_usize, size_t size,
         prof_alloc_rollback(tsd, tctx, false);
         return (usize);
     }
-    prof_realloc(tsd, ptr, usize, tctx, false, old_usize, old_tctx);
+    prof_realloc(tsd, ptr, usize, tctx, prof_active, false, old_usize,
+        old_tctx);
 
     return (usize);
 }