Simplify imallocx_prof_sample().

Simplify imallocx_prof_sample() to always operate on usize rather than
sometimes using size.  This avoids redundant usize computations and
more closely fits the style adopted by i[rx]allocx_prof_sample() to fix
sampling bugs.
This commit is contained in:
Jason Evans 2015-09-17 10:19:28 -07:00
parent 4be9c79f88
commit 3263be6efb

View File

@@ -1965,41 +1965,29 @@ imallocx_flags(tsd_t *tsd, size_t usize, size_t alignment, bool zero,
tcache_t *tcache, arena_t *arena) tcache_t *tcache, arena_t *arena)
{ {
if (alignment != 0) if (unlikely(alignment != 0))
return (ipalloct(tsd, usize, alignment, zero, tcache, arena)); return (ipalloct(tsd, usize, alignment, zero, tcache, arena));
if (zero) if (unlikely(zero))
return (icalloct(tsd, usize, tcache, arena)); return (icalloct(tsd, usize, tcache, arena));
return (imalloct(tsd, usize, tcache, arena)); return (imalloct(tsd, usize, tcache, arena));
} }
JEMALLOC_ALWAYS_INLINE_C void *
imallocx_maybe_flags(tsd_t *tsd, size_t size, int flags, size_t usize,
size_t alignment, bool zero, tcache_t *tcache, arena_t *arena)
{
if (likely(flags == 0))
return (imalloc(tsd, size));
return (imallocx_flags(tsd, usize, alignment, zero, tcache, arena));
}
static void * static void *
imallocx_prof_sample(tsd_t *tsd, size_t size, int flags, size_t usize, imallocx_prof_sample(tsd_t *tsd, size_t usize, size_t alignment, bool zero,
size_t alignment, bool zero, tcache_t *tcache, arena_t *arena) tcache_t *tcache, arena_t *arena)
{ {
void *p; void *p;
if (usize <= SMALL_MAXCLASS) { if (usize <= SMALL_MAXCLASS) {
assert(((alignment == 0) ? s2u(LARGE_MINCLASS) : assert(((alignment == 0) ? s2u(LARGE_MINCLASS) :
sa2u(LARGE_MINCLASS, alignment)) == LARGE_MINCLASS); sa2u(LARGE_MINCLASS, alignment)) == LARGE_MINCLASS);
p = imallocx_maybe_flags(tsd, LARGE_MINCLASS, flags, p = imallocx_flags(tsd, LARGE_MINCLASS, alignment, zero, tcache,
LARGE_MINCLASS, alignment, zero, tcache, arena); arena);
if (p == NULL) if (p == NULL)
return (NULL); return (NULL);
arena_prof_promoted(p, usize); arena_prof_promoted(p, usize);
} else { } else
p = imallocx_maybe_flags(tsd, size, flags, usize, alignment, p = imallocx_flags(tsd, usize, alignment, zero, tcache, arena);
zero, tcache, arena);
}
return (p); return (p);
} }
@@ -2018,12 +2006,11 @@ imallocx_prof(tsd_t *tsd, size_t size, int flags, size_t *usize)
&zero, &tcache, &arena))) &zero, &tcache, &arena)))
return (NULL); return (NULL);
tctx = prof_alloc_prep(tsd, *usize, prof_active_get_unlocked(), true); tctx = prof_alloc_prep(tsd, *usize, prof_active_get_unlocked(), true);
if (likely((uintptr_t)tctx == (uintptr_t)1U)) { if (likely((uintptr_t)tctx == (uintptr_t)1U))
p = imallocx_maybe_flags(tsd, size, flags, *usize, alignment, p = imallocx_flags(tsd, *usize, alignment, zero, tcache, arena);
zero, tcache, arena); else if ((uintptr_t)tctx > (uintptr_t)1U) {
} else if ((uintptr_t)tctx > (uintptr_t)1U) { p = imallocx_prof_sample(tsd, *usize, alignment, zero, tcache,
p = imallocx_prof_sample(tsd, size, flags, *usize, alignment, arena);
zero, tcache, arena);
} else } else
p = NULL; p = NULL;
if (unlikely(p == NULL)) { if (unlikely(p == NULL)) {