Resolve an unsupported special case in arena_prof_tctx_set().

Add arena_prof_tctx_reset() and use it instead of arena_prof_tctx_set()
when resetting the tctx pointer during reallocation, which happens
whenever an originally sampled object is not sampled again when it is
reallocated.

This regression was introduced by 594c759f37 (Optimize arena_prof_tctx_set().).
Jason Evans 2015-09-14 23:48:11 -07:00
parent ea8d97b897
commit 708ed79834
6 changed files with 62 additions and 6 deletions
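
The crux of the fix is the predicate in arena_prof_tctx_reset() (first hunk below): the tctx slot must be rewritten to the "not sampled" sentinel (pointer value 1) only when the new allocation is large (usize > SMALL_MAXCLASS), or when the reallocation was in place (ptr == old_ptr) and the original allocation was sampled, so a stale tctx pointer could be left behind. A minimal standalone sketch of that predicate follows; the toy_* names, the toy tctx slot, and the SMALL_MAXCLASS value are illustrative stand-ins, not jemalloc code (jemalloc actually stores the tctx in chunk map bits or the extent node):

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

typedef struct prof_tctx_s prof_tctx_t;      /* opaque, as in jemalloc */
#define TCTX_NOT_SAMPLED ((prof_tctx_t *)(uintptr_t)1U)
#define TOY_SMALL_MAXCLASS ((size_t)14336)   /* illustrative value only */

typedef struct {
    const void *ptr;
    prof_tctx_t *tctx; /* toy per-object tctx slot */
} toy_obj_t;

/* Mirrors arena_prof_tctx_reset()'s condition: skip the write unless
 * the object can actually be carrying a stale tctx. */
static void
toy_tctx_reset(toy_obj_t *obj, size_t usize, const void *old_ptr,
    prof_tctx_t *old_tctx)
{
    if (usize > TOY_SMALL_MAXCLASS || (obj->ptr == old_ptr &&
        (uintptr_t)old_tctx > (uintptr_t)1U))
        obj->tctx = TCTX_NOT_SAMPLED;
}

int
main(void)
{
    toy_obj_t obj;

    /* In-place reallocation of a formerly sampled small object: the
     * stale tctx must be cleared even though usize is small. */
    obj.ptr = &obj;
    obj.tctx = (prof_tctx_t *)(uintptr_t)0x1000U;
    toy_tctx_reset(&obj, 4096, &obj, obj.tctx);
    assert(obj.tctx == TCTX_NOT_SAMPLED);
    puts("stale tctx cleared");
    return (0);
}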

include/jemalloc/internal/arena.h

@@ -556,6 +556,8 @@ unsigned arena_run_regind(arena_run_t *run, arena_bin_info_t *bin_info,
     const void *ptr);
 prof_tctx_t *arena_prof_tctx_get(const void *ptr);
 void arena_prof_tctx_set(const void *ptr, size_t usize, prof_tctx_t *tctx);
+void arena_prof_tctx_reset(const void *ptr, size_t usize,
+    const void *old_ptr, prof_tctx_t *old_tctx);
 void *arena_malloc(tsd_t *tsd, arena_t *arena, size_t size, bool zero,
     tcache_t *tcache);
 arena_t *arena_aalloc(const void *ptr);
@@ -1126,6 +1128,35 @@ arena_prof_tctx_set(const void *ptr, size_t usize, prof_tctx_t *tctx)
         huge_prof_tctx_set(ptr, tctx);
 }
 
+JEMALLOC_INLINE void
+arena_prof_tctx_reset(const void *ptr, size_t usize, const void *old_ptr,
+    prof_tctx_t *old_tctx)
+{
+
+    cassert(config_prof);
+    assert(ptr != NULL);
+
+    if (unlikely(usize > SMALL_MAXCLASS || (ptr == old_ptr &&
+        (uintptr_t)old_tctx > (uintptr_t)1U))) {
+        arena_chunk_t *chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
+
+        if (likely(chunk != ptr)) {
+            size_t pageind;
+            arena_chunk_map_misc_t *elm;
+
+            pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >>
+                LG_PAGE;
+            assert(arena_mapbits_allocated_get(chunk, pageind) !=
+                0);
+            assert(arena_mapbits_large_get(chunk, pageind) != 0);
+            elm = arena_miscelm_get(chunk, pageind);
+            atomic_write_p(&elm->prof_tctx_pun,
+                (prof_tctx_t *)(uintptr_t)1U);
+        } else
+            huge_prof_tctx_reset(ptr);
+    }
+}
+
 JEMALLOC_ALWAYS_INLINE void *
 arena_malloc(tsd_t *tsd, arena_t *arena, size_t size, bool zero,
     tcache_t *tcache)
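
A note on the chunk != ptr test in arena_prof_tctx_reset() above: chunks are chunksize-aligned, and a huge allocation occupies whole chunks beginning at a chunk base, so rounding a huge pointer down to its chunk boundary (CHUNK_ADDR2BASE()) returns the pointer itself, while a small or large object sits inside an arena chunk and rounds down to a different address. A self-contained sketch of that address arithmetic; the 2 MiB chunk size is an assumption for illustration (jemalloc's chunk size is configurable):

#include <assert.h>
#include <stdint.h>

#define TOY_LG_CHUNK 21 /* assumed 2 MiB chunks, illustration only */
#define TOY_CHUNK_SIZE ((uintptr_t)1 << TOY_LG_CHUNK)

/* Toy version of CHUNK_ADDR2BASE(): round an address down to its
 * containing chunk boundary. */
static void *
toy_chunk_addr2base(const void *ptr)
{
    return ((void *)((uintptr_t)ptr & ~(TOY_CHUNK_SIZE - 1)));
}

int
main(void)
{
    uintptr_t base = 16 * TOY_CHUNK_SIZE; /* a chunk-aligned address */

    /* Huge allocation: the pointer is the chunk base itself, so
     * rounding is a no-op and the huge_prof_tctx_*() path is taken. */
    assert(toy_chunk_addr2base((void *)base) == (void *)base);

    /* Interior pointer (small/large object within an arena chunk):
     * it rounds down to the chunk base, so chunk != ptr. */
    assert(toy_chunk_addr2base((void *)(base + 4096)) == (void *)base);
    return (0);
}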

include/jemalloc/internal/huge.h

@@ -26,6 +26,7 @@ arena_t *huge_aalloc(const void *ptr);
 size_t huge_salloc(const void *ptr);
 prof_tctx_t *huge_prof_tctx_get(const void *ptr);
 void huge_prof_tctx_set(const void *ptr, prof_tctx_t *tctx);
+void huge_prof_tctx_reset(const void *ptr);
 
 #endif /* JEMALLOC_H_EXTERNS */
 /******************************************************************************/

include/jemalloc/internal/private_symbols.txt

@@ -80,6 +80,7 @@ arena_prof_accum_impl
 arena_prof_accum_locked
 arena_prof_promoted
 arena_prof_tctx_get
+arena_prof_tctx_reset
 arena_prof_tctx_set
 arena_ptr_small_binind_get
 arena_purge_all
@@ -250,6 +251,7 @@ huge_dalloc_junk
 huge_malloc
 huge_palloc
 huge_prof_tctx_get
+huge_prof_tctx_reset
 huge_prof_tctx_set
 huge_ralloc
 huge_ralloc_no_move
@@ -379,6 +381,7 @@ prof_reset
 prof_sample_accum_update
 prof_sample_threshold_update
 prof_tctx_get
+prof_tctx_reset
 prof_tctx_set
 prof_tdata_cleanup
 prof_tdata_get

include/jemalloc/internal/prof.h

@@ -335,11 +335,13 @@ prof_tctx_t *prof_alloc_prep(tsd_t *tsd, size_t usize, bool prof_active,
     bool update);
 prof_tctx_t *prof_tctx_get(const void *ptr);
 void prof_tctx_set(const void *ptr, size_t usize, prof_tctx_t *tctx);
+void prof_tctx_reset(const void *ptr, size_t usize, const void *old_ptr,
+    prof_tctx_t *tctx);
 void prof_malloc_sample_object(const void *ptr, size_t usize,
     prof_tctx_t *tctx);
 void prof_malloc(const void *ptr, size_t usize, prof_tctx_t *tctx);
 void prof_realloc(tsd_t *tsd, const void *ptr, size_t usize,
-    prof_tctx_t *tctx, bool prof_active, bool updated,
+    prof_tctx_t *tctx, bool prof_active, bool updated, const void *old_ptr,
     size_t old_usize, prof_tctx_t *old_tctx);
 void prof_free(tsd_t *tsd, const void *ptr, size_t usize);
 #endif
@@ -414,6 +416,17 @@ prof_tctx_set(const void *ptr, size_t usize, prof_tctx_t *tctx)
     arena_prof_tctx_set(ptr, usize, tctx);
 }
 
+JEMALLOC_ALWAYS_INLINE void
+prof_tctx_reset(const void *ptr, size_t usize, const void *old_ptr,
+    prof_tctx_t *old_tctx)
+{
+
+    cassert(config_prof);
+    assert(ptr != NULL);
+
+    arena_prof_tctx_reset(ptr, usize, old_ptr, old_tctx);
+}
+
 JEMALLOC_ALWAYS_INLINE bool
 prof_sample_accum_update(tsd_t *tsd, size_t usize, bool update,
     prof_tdata_t **tdata_out)
@@ -481,7 +494,8 @@ prof_malloc(const void *ptr, size_t usize, prof_tctx_t *tctx)
 
 JEMALLOC_ALWAYS_INLINE void
 prof_realloc(tsd_t *tsd, const void *ptr, size_t usize, prof_tctx_t *tctx,
-    bool prof_active, bool updated, size_t old_usize, prof_tctx_t *old_tctx)
+    bool prof_active, bool updated, const void *old_ptr, size_t old_usize,
+    prof_tctx_t *old_tctx)
 {
     bool sampled, old_sampled;
 
@@ -508,7 +522,7 @@ prof_realloc(tsd_t *tsd, const void *ptr, size_t usize, prof_tctx_t *tctx,
     if (unlikely(sampled))
         prof_malloc_sample_object(ptr, usize, tctx);
     else
-        prof_tctx_set(ptr, usize, (prof_tctx_t *)(uintptr_t)1U);
+        prof_tctx_reset(ptr, usize, old_ptr, old_tctx);
 
     if (unlikely(old_sampled))
         prof_free_sampled_object(tsd, old_usize, old_tctx);
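
For readers new to jemalloc's heap profiling: tctx values 0 and 1 are sentinels (1 meaning "allocated but not sampled"), and anything larger is a pointer to a live prof_tctx_t; that convention is what lets prof_realloc() above treat the new and old allocations independently. A toy mirror of its branch structure (toy_* names are illustrative; the functions named in the comments are the real ones from the diff):

#include <stdint.h>
#include <stdio.h>

typedef struct prof_tctx_s prof_tctx_t;

/* Sentinel convention taken from the diff: values <= 1 mean
 * "not sampled"; larger values are real prof_tctx_t pointers. */
static int
toy_sampled(const prof_tctx_t *tctx)
{
    return ((uintptr_t)tctx > (uintptr_t)1U);
}

/* Toy mirror of prof_realloc()'s two independent decisions. */
static void
toy_prof_realloc(prof_tctx_t *tctx, prof_tctx_t *old_tctx)
{
    if (toy_sampled(tctx))
        puts("record new object");   /* prof_malloc_sample_object() */
    else
        puts("reset tctx sentinel"); /* prof_tctx_reset() */
    if (toy_sampled(old_tctx))
        puts("free old sample");     /* prof_free_sampled_object() */
}

int
main(void)
{
    /* The regression case: old allocation sampled, new one not. */
    toy_prof_realloc((prof_tctx_t *)(uintptr_t)1U,
        (prof_tctx_t *)(uintptr_t)0x1000U);
    return (0);
}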

src/huge.c

@@ -424,3 +424,10 @@ huge_prof_tctx_set(const void *ptr, prof_tctx_t *tctx)
     extent_node_prof_tctx_set(node, tctx);
     malloc_mutex_unlock(&arena->huge_mtx);
 }
+
+void
+huge_prof_tctx_reset(const void *ptr)
+{
+
+    huge_prof_tctx_set(ptr, (prof_tctx_t *)(uintptr_t)1U);
+}

src/jemalloc.c

@@ -1718,7 +1718,7 @@ irealloc_prof(tsd_t *tsd, void *old_ptr, size_t old_usize, size_t usize)
         prof_alloc_rollback(tsd, tctx, true);
         return (NULL);
     }
-    prof_realloc(tsd, p, usize, tctx, prof_active, true, old_usize,
+    prof_realloc(tsd, p, usize, tctx, prof_active, true, old_ptr, old_usize,
         old_tctx);
 
     return (p);
@@ -2155,7 +2155,7 @@ irallocx_prof(tsd_t *tsd, void *old_ptr, size_t old_usize, size_t size,
          */
         *usize = isalloc(p, config_prof);
     }
-    prof_realloc(tsd, p, *usize, tctx, prof_active, true,
+    prof_realloc(tsd, p, *usize, tctx, prof_active, true, old_ptr,
         old_usize, old_tctx);
 
     return (p);
@@ -2308,7 +2308,7 @@ ixallocx_prof(tsd_t *tsd, void *ptr, size_t old_usize, size_t size,
         prof_alloc_rollback(tsd, tctx, false);
         return (usize);
     }
-    prof_realloc(tsd, ptr, usize, tctx, prof_active, false, old_usize,
+    prof_realloc(tsd, ptr, usize, tctx, prof_active, false, ptr, old_usize,
         old_tctx);
 
     return (usize);