avoid redundant chunk header reads

* use sized deallocation in iralloct_realign
* iralloc and ixalloc always need the old size, so pass it in from the
  caller where it's often already calculated
Author: Daniel Micay, 2014-10-24 13:18:57 -04:00 (committed by Jason Evans)
parent 809b0ac391
commit d33f834591
2 changed files with 42 additions and 45 deletions
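
The idea behind the diff is a small calling-convention change: the resizing helpers used to re-derive the allocation's current size from its chunk header (via isalloc()) even though the caller had typically just computed that value for stats or valgrind bookkeeping, so the old size is now threaded through as a parameter instead. The stand-alone C sketch below illustrates the same refactor in miniature; it is not jemalloc code, and lookup_size(), resize_before(), and resize_after() are hypothetical names standing in for the real functions.

#include <stddef.h>
#include <stdlib.h>
#include <string.h>

/*
 * Stand-in for a metadata lookup such as isalloc(): in jemalloc the real
 * lookup touches the chunk header, which is the cost the commit avoids
 * paying twice.  Here it is only a stub so the sketch compiles.
 */
size_t
lookup_size(const void *ptr)
{
        (void)ptr;
        return (64);    /* pretend every allocation is 64 bytes */
}

/* Before: the resizing helper re-derives the old size on its own. */
void *
resize_before(void *ptr, size_t size)
{
        size_t oldsize = lookup_size(ptr);      /* redundant if caller knows it */
        void *p = malloc(size);

        if (p != NULL) {
                memcpy(p, ptr, size < oldsize ? size : oldsize);
                free(ptr);
        }
        return (p);
}

/* After: the caller passes in the old size it usually already has. */
void *
resize_after(void *ptr, size_t oldsize, size_t size)
{
        void *p = malloc(size);

        if (p != NULL) {
                memcpy(p, ptr, size < oldsize ? size : oldsize);
                free(ptr);
        }
        return (p);
}

int
main(void)
{
        void *ptr = malloc(64);

        if (ptr == NULL)
                return (1);
        /* The caller often needs the old size anyway (stats, redzones). */
        size_t old_usize = lookup_size(ptr);
        /* resize_before(ptr, 128) would have read the metadata a second time. */
        void *newptr = resize_after(ptr, old_usize, 128);

        free(newptr != NULL ? newptr : ptr);
        return (0);
}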


@@ -790,12 +790,13 @@ void isqalloc(tsd_t *tsd, void *ptr, size_t size, bool try_tcache);
 void *iralloct_realign(tsd_t *tsd, void *ptr, size_t oldsize, size_t size,
     size_t extra, size_t alignment, bool zero, bool try_tcache_alloc,
     bool try_tcache_dalloc, arena_t *arena);
-void *iralloct(tsd_t *tsd, void *ptr, size_t size, size_t alignment,
-    bool zero, bool try_tcache_alloc, bool try_tcache_dalloc, arena_t *arena);
-void *iralloc(tsd_t *tsd, void *ptr, size_t size, size_t alignment,
-    bool zero);
-bool ixalloc(void *ptr, size_t size, size_t extra, size_t alignment,
-    bool zero);
+void *iralloct(tsd_t *tsd, void *ptr, size_t oldsize, size_t size,
+    size_t alignment, bool zero, bool try_tcache_alloc, bool try_tcache_dalloc,
+    arena_t *arena);
+void *iralloc(tsd_t *tsd, void *ptr, size_t oldsize, size_t size,
+    size_t alignment, bool zero);
+bool ixalloc(void *ptr, size_t oldsize, size_t size, size_t extra,
+    size_t alignment, bool zero);
 #endif
 
 #if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_C_))
@@ -1013,21 +1014,18 @@ iralloct_realign(tsd_t *tsd, void *ptr, size_t oldsize, size_t size,
          */
         copysize = (size < oldsize) ? size : oldsize;
         memcpy(p, ptr, copysize);
-        iqalloc(tsd, ptr, try_tcache_dalloc);
+        isqalloc(tsd, ptr, oldsize, try_tcache_dalloc);
         return (p);
 }
 
 JEMALLOC_ALWAYS_INLINE void *
-iralloct(tsd_t *tsd, void *ptr, size_t size, size_t alignment, bool zero,
-    bool try_tcache_alloc, bool try_tcache_dalloc, arena_t *arena)
+iralloct(tsd_t *tsd, void *ptr, size_t oldsize, size_t size, size_t alignment,
+    bool zero, bool try_tcache_alloc, bool try_tcache_dalloc, arena_t *arena)
 {
-        size_t oldsize;
 
         assert(ptr != NULL);
         assert(size != 0);
 
-        oldsize = isalloc(ptr, config_prof);
-
         if (alignment != 0 && ((uintptr_t)ptr & ((uintptr_t)alignment-1))
             != 0) {
                 /*
@@ -1048,21 +1046,22 @@ iralloct(tsd_t *tsd, void *ptr, size_t size, size_t alignment, bool zero,
 }
 
 JEMALLOC_ALWAYS_INLINE void *
-iralloc(tsd_t *tsd, void *ptr, size_t size, size_t alignment, bool zero)
+iralloc(tsd_t *tsd, void *ptr, size_t oldsize, size_t size, size_t alignment,
+    bool zero)
 {
 
-        return (iralloct(tsd, ptr, size, alignment, zero, true, true, NULL));
+        return (iralloct(tsd, ptr, oldsize, size, alignment, zero, true, true,
+            NULL));
 }
 
 JEMALLOC_ALWAYS_INLINE bool
-ixalloc(void *ptr, size_t size, size_t extra, size_t alignment, bool zero)
+ixalloc(void *ptr, size_t oldsize, size_t size, size_t extra, size_t alignment,
+    bool zero)
 {
-        size_t oldsize;
 
         assert(ptr != NULL);
         assert(size != 0);
 
-        oldsize = isalloc(ptr, config_prof);
         if (alignment != 0 && ((uintptr_t)ptr & ((uintptr_t)alignment-1))
             != 0) {
                 /* Existing object alignment is inadequate. */


@@ -1529,19 +1529,20 @@ label_return:
 }
 
 static void *
-irealloc_prof_sample(tsd_t *tsd, void *oldptr, size_t usize, prof_tctx_t *tctx)
+irealloc_prof_sample(tsd_t *tsd, void *oldptr, size_t old_usize, size_t usize,
+    prof_tctx_t *tctx)
 {
         void *p;
 
         if (tctx == NULL)
                 return (NULL);
         if (usize <= SMALL_MAXCLASS) {
-                p = iralloc(tsd, oldptr, LARGE_MINCLASS, 0, false);
+                p = iralloc(tsd, oldptr, old_usize, LARGE_MINCLASS, 0, false);
                 if (p == NULL)
                         return (NULL);
                 arena_prof_promoted(p, usize);
         } else
-                p = iralloc(tsd, oldptr, usize, 0, false);
+                p = iralloc(tsd, oldptr, old_usize, usize, 0, false);
 
         return (p);
 }
@@ -1555,9 +1556,9 @@ irealloc_prof(tsd_t *tsd, void *oldptr, size_t old_usize, size_t usize)
         old_tctx = prof_tctx_get(oldptr);
         tctx = prof_alloc_prep(tsd, usize, true);
         if (unlikely((uintptr_t)tctx != (uintptr_t)1U))
-                p = irealloc_prof_sample(tsd, oldptr, usize, tctx);
+                p = irealloc_prof_sample(tsd, oldptr, old_usize, usize, tctx);
         else
-                p = iralloc(tsd, oldptr, usize, 0, false);
+                p = iralloc(tsd, oldptr, old_usize, usize, 0, false);
         if (p == NULL)
                 return (NULL);
         prof_realloc(tsd, p, usize, tctx, true, old_usize, old_tctx);
@@ -1630,9 +1631,7 @@ je_realloc(void *ptr, size_t size)
                 malloc_thread_init();
                 tsd = tsd_fetch();
 
-                if ((config_prof && opt_prof) || config_stats ||
-                    (config_valgrind && unlikely(in_valgrind)))
-                        old_usize = isalloc(ptr, config_prof);
+                old_usize = isalloc(ptr, config_prof);
                 if (config_valgrind && unlikely(in_valgrind))
                         old_rzsize = config_prof ? p2rz(ptr) : u2rz(old_usize);
 
@@ -1643,7 +1642,7 @@ je_realloc(void *ptr, size_t size)
                         if (config_stats || (config_valgrind &&
                             unlikely(in_valgrind)))
                                 usize = s2u(size);
-                        ret = iralloc(tsd, ptr, size, 0, false);
+                        ret = iralloc(tsd, ptr, old_usize, size, 0, false);
                 }
         } else {
                 /* realloc(NULL, size) is equivalent to malloc(size). */
@@ -1922,22 +1921,22 @@ label_oom:
 }
 
 static void *
-irallocx_prof_sample(tsd_t *tsd, void *oldptr, size_t size, size_t alignment,
-    size_t usize, bool zero, bool try_tcache_alloc, bool try_tcache_dalloc,
-    arena_t *arena, prof_tctx_t *tctx)
+irallocx_prof_sample(tsd_t *tsd, void *oldptr, size_t old_usize, size_t size,
+    size_t alignment, size_t usize, bool zero, bool try_tcache_alloc,
+    bool try_tcache_dalloc, arena_t *arena, prof_tctx_t *tctx)
 {
         void *p;
 
         if (tctx == NULL)
                 return (NULL);
         if (usize <= SMALL_MAXCLASS) {
-                p = iralloct(tsd, oldptr, LARGE_MINCLASS, alignment, zero,
-                    try_tcache_alloc, try_tcache_dalloc, arena);
+                p = iralloct(tsd, oldptr, old_usize, LARGE_MINCLASS, alignment,
+                    zero, try_tcache_alloc, try_tcache_dalloc, arena);
                 if (p == NULL)
                         return (NULL);
                 arena_prof_promoted(p, usize);
         } else {
-                p = iralloct(tsd, oldptr, size, alignment, zero,
+                p = iralloct(tsd, oldptr, old_usize, size, alignment, zero,
                     try_tcache_alloc, try_tcache_dalloc, arena);
         }
 
@@ -1955,10 +1954,11 @@ irallocx_prof(tsd_t *tsd, void *oldptr, size_t old_usize, size_t size,
         old_tctx = prof_tctx_get(oldptr);
         tctx = prof_alloc_prep(tsd, *usize, false);
         if (unlikely((uintptr_t)tctx != (uintptr_t)1U)) {
-                p = irallocx_prof_sample(tsd, oldptr, size, alignment, *usize,
-                    zero, try_tcache_alloc, try_tcache_dalloc, arena, tctx);
+                p = irallocx_prof_sample(tsd, oldptr, old_usize, size,
+                    alignment, *usize, zero, try_tcache_alloc,
+                    try_tcache_dalloc, arena, tctx);
         } else {
-                p = iralloct(tsd, oldptr, size, alignment, zero,
+                p = iralloct(tsd, oldptr, old_usize, size, alignment, zero,
                     try_tcache_alloc, try_tcache_dalloc, arena);
         }
         if (unlikely(p == NULL)) {
@@ -1988,7 +1988,7 @@ je_rallocx(void *ptr, size_t size, int flags)
         void *p;
         tsd_t *tsd;
         size_t usize;
-        UNUSED size_t old_usize JEMALLOC_CC_SILENCE_INIT(0);
+        size_t old_usize;
         UNUSED size_t old_rzsize JEMALLOC_CC_SILENCE_INIT(0);
         size_t alignment = MALLOCX_ALIGN_GET(flags);
         bool zero = flags & MALLOCX_ZERO;
@@ -2016,9 +2016,7 @@ je_rallocx(void *ptr, size_t size, int flags)
                 arena = NULL;
         }
 
-        if ((config_prof && opt_prof) || config_stats ||
-            ((config_valgrind && unlikely(in_valgrind))))
-                old_usize = isalloc(ptr, config_prof);
+        old_usize = isalloc(ptr, config_prof);
         if (config_valgrind && unlikely(in_valgrind))
                 old_rzsize = u2rz(old_usize);
 
@@ -2030,8 +2028,8 @@ je_rallocx(void *ptr, size_t size, int flags)
                 if (unlikely(p == NULL))
                         goto label_oom;
         } else {
-                p = iralloct(tsd, ptr, size, alignment, zero, try_tcache_alloc,
-                    try_tcache_dalloc, arena);
+                p = iralloct(tsd, ptr, old_usize, size, alignment, zero,
+                    try_tcache_alloc, try_tcache_dalloc, arena);
                 if (unlikely(p == NULL))
                         goto label_oom;
                 if (config_stats || (config_valgrind && unlikely(in_valgrind)))
@@ -2061,7 +2059,7 @@ ixallocx_helper(void *ptr, size_t old_usize, size_t size, size_t extra,
 {
         size_t usize;
 
-        if (ixalloc(ptr, size, extra, alignment, zero))
+        if (ixalloc(ptr, old_usize, size, extra, alignment, zero))
                 return (old_usize);
 
         usize = isalloc(ptr, config_prof);
@@ -2080,9 +2078,9 @@ ixallocx_prof_sample(void *ptr, size_t old_usize, size_t size, size_t extra,
         /* Use minimum usize to determine whether promotion may happen. */
         if (((alignment == 0) ? s2u(size) : sa2u(size, alignment)) <=
             SMALL_MAXCLASS) {
-                if (ixalloc(ptr, SMALL_MAXCLASS+1, (SMALL_MAXCLASS+1 >=
-                    size+extra) ? 0 : size+extra - (SMALL_MAXCLASS+1),
-                    alignment, zero))
+                if (ixalloc(ptr, old_usize, SMALL_MAXCLASS+1,
+                    (SMALL_MAXCLASS+1 >= size+extra) ? 0 : size+extra -
+                    (SMALL_MAXCLASS+1), alignment, zero))
                         return (old_usize);
                 usize = isalloc(ptr, config_prof);
                 if (max_usize < LARGE_MINCLASS)