From fbb31029a5c2f556f39e04a8781340d4ee4cf16c Mon Sep 17 00:00:00 2001
From: Max Wang
Date: Thu, 27 Mar 2014 14:46:00 -0700
Subject: [PATCH] Use arena dss prec instead of default for huge allocs.

Pass a dss_prec_t parameter to huge_{m,p,r}alloc instead of defaulting
to the chunk dss prec.
---
 include/jemalloc/internal/huge.h              |  8 +++---
 .../jemalloc/internal/jemalloc_internal.h.in  | 10 +++----
 src/huge.c                                    | 26 ++++++++++++-------
 src/jemalloc.c                                |  2 +-
 4 files changed, 27 insertions(+), 19 deletions(-)

diff --git a/include/jemalloc/internal/huge.h b/include/jemalloc/internal/huge.h
index ddf13138..a2b9c779 100644
--- a/include/jemalloc/internal/huge.h
+++ b/include/jemalloc/internal/huge.h
@@ -17,18 +17,20 @@ extern size_t	huge_allocated;
 /* Protects chunk-related data structures. */
 extern malloc_mutex_t	huge_mtx;
 
-void	*huge_malloc(size_t size, bool zero);
-void	*huge_palloc(size_t size, size_t alignment, bool zero);
+void	*huge_malloc(size_t size, bool zero, dss_prec_t dss_prec);
+void	*huge_palloc(size_t size, size_t alignment, bool zero,
+    dss_prec_t dss_prec);
 bool	huge_ralloc_no_move(void *ptr, size_t oldsize, size_t size,
     size_t extra);
 void	*huge_ralloc(void *ptr, size_t oldsize, size_t size, size_t extra,
-    size_t alignment, bool zero, bool try_tcache_dalloc);
+    size_t alignment, bool zero, bool try_tcache_dalloc, dss_prec_t dss_prec);
 #ifdef JEMALLOC_JET
 typedef void (huge_dalloc_junk_t)(void *, size_t);
 extern huge_dalloc_junk_t *huge_dalloc_junk;
 #endif
 void	huge_dalloc(void *ptr, bool unmap);
 size_t	huge_salloc(const void *ptr);
+dss_prec_t	huge_dss_prec_get(arena_t *arena);
 prof_ctx_t	*huge_prof_ctx_get(const void *ptr);
 void	huge_prof_ctx_set(const void *ptr, prof_ctx_t *ctx);
 bool	huge_boot(void);
diff --git a/include/jemalloc/internal/jemalloc_internal.h.in b/include/jemalloc/internal/jemalloc_internal.h.in
index d24a1fe6..574bbb14 100644
--- a/include/jemalloc/internal/jemalloc_internal.h.in
+++ b/include/jemalloc/internal/jemalloc_internal.h.in
@@ -770,7 +770,7 @@ imalloct(size_t size, bool try_tcache, arena_t *arena)
 	if (size <= arena_maxclass)
 		return (arena_malloc(arena, size, false, try_tcache));
 	else
-		return (huge_malloc(size, false));
+		return (huge_malloc(size, false, huge_dss_prec_get(arena)));
 }
 
 JEMALLOC_ALWAYS_INLINE void *
@@ -787,7 +787,7 @@ icalloct(size_t size, bool try_tcache, arena_t *arena)
 	if (size <= arena_maxclass)
 		return (arena_malloc(arena, size, true, try_tcache));
 	else
-		return (huge_malloc(size, true));
+		return (huge_malloc(size, true, huge_dss_prec_get(arena)));
 }
 
 JEMALLOC_ALWAYS_INLINE void *
@@ -813,9 +813,9 @@ ipalloct(size_t usize, size_t alignment, bool zero, bool try_tcache,
 			ret = arena_palloc(choose_arena(arena), usize,
 			    alignment, zero);
 		} else if (alignment <= chunksize)
-			ret = huge_malloc(usize, zero);
+			ret = huge_malloc(usize, zero, huge_dss_prec_get(arena));
 		else
-			ret = huge_palloc(usize, alignment, zero);
+			ret = huge_palloc(usize, alignment, zero, huge_dss_prec_get(arena));
 	}
 
 	assert(ALIGNMENT_ADDR2BASE(ret, alignment) == ret);
@@ -984,7 +984,7 @@ iralloct(void *ptr, size_t size, size_t extra, size_t alignment, bool zero,
 		    try_tcache_dalloc));
 	} else {
 		return (huge_ralloc(ptr, oldsize, size, extra,
-		    alignment, zero, try_tcache_dalloc));
+		    alignment, zero, try_tcache_dalloc, huge_dss_prec_get(arena)));
 	}
 }
 
diff --git a/src/huge.c b/src/huge.c
index 6d86aed8..d72f2135 100644
--- a/src/huge.c
+++ b/src/huge.c
@@ -16,14 +16,14 @@ malloc_mutex_t	huge_mtx;
 static extent_tree_t	huge;
 
 void *
-huge_malloc(size_t size, bool zero)
+huge_malloc(size_t size, bool zero, dss_prec_t dss_prec)
 {
 
-	return (huge_palloc(size, chunksize, zero));
+	return (huge_palloc(size, chunksize, zero, dss_prec));
 }
 
 void *
-huge_palloc(size_t size, size_t alignment, bool zero)
+huge_palloc(size_t size, size_t alignment, bool zero, dss_prec_t dss_prec)
 {
 	void *ret;
 	size_t csize;
@@ -48,8 +48,7 @@ huge_palloc(size_t size, size_t alignment, bool zero)
 	 * it is possible to make correct junk/zero fill decisions below.
 	 */
 	is_zeroed = zero;
-	ret = chunk_alloc(csize, alignment, false, &is_zeroed,
-	    chunk_dss_prec_get());
+	ret = chunk_alloc(csize, alignment, false, &is_zeroed, dss_prec);
 	if (ret == NULL) {
 		base_node_dealloc(node);
 		return (NULL);
@@ -98,7 +97,7 @@ huge_ralloc_no_move(void *ptr, size_t oldsize, size_t size, size_t extra)
 
 void *
 huge_ralloc(void *ptr, size_t oldsize, size_t size, size_t extra,
-    size_t alignment, bool zero, bool try_tcache_dalloc)
+    size_t alignment, bool zero, bool try_tcache_dalloc, dss_prec_t dss_prec)
 {
 	void *ret;
 	size_t copysize;
@@ -113,18 +112,18 @@ huge_ralloc(void *ptr, size_t oldsize, size_t size, size_t extra,
 	 * space and copying.
 	 */
 	if (alignment > chunksize)
-		ret = huge_palloc(size + extra, alignment, zero);
+		ret = huge_palloc(size + extra, alignment, zero, dss_prec);
 	else
-		ret = huge_malloc(size + extra, zero);
+		ret = huge_malloc(size + extra, zero, dss_prec);
 
 	if (ret == NULL) {
 		if (extra == 0)
 			return (NULL);
 		/* Try again, this time without extra. */
 		if (alignment > chunksize)
-			ret = huge_palloc(size, alignment, zero);
+			ret = huge_palloc(size, alignment, zero, dss_prec);
 		else
-			ret = huge_malloc(size, zero);
+			ret = huge_malloc(size, zero, dss_prec);
 
 		if (ret == NULL)
 			return (NULL);
@@ -264,6 +263,13 @@ huge_salloc(const void *ptr)
 	return (ret);
 }
 
+dss_prec_t
+huge_dss_prec_get(arena_t *arena)
+{
+
+	return (arena_dss_prec_get(choose_arena(arena)));
+}
+
 prof_ctx_t *
 huge_prof_ctx_get(const void *ptr)
 {
diff --git a/src/jemalloc.c b/src/jemalloc.c
index 563d99f8..204778bc 100644
--- a/src/jemalloc.c
+++ b/src/jemalloc.c
@@ -2076,7 +2076,7 @@ a0alloc(size_t size, bool zero)
 	if (size <= arena_maxclass)
 		return (arena_malloc(arenas[0], size, zero, false));
 	else
-		return (huge_malloc(size, zero));
+		return (huge_malloc(size, zero, huge_dss_prec_get(arenas[0])));
 }
 
 void *
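Note for readers unfamiliar with the dss machinery: the sketch below is a
self-contained toy model of the behavior change, not jemalloc code. The names
toy_arena_t, toy_chunk_alloc(), global_dss_prec, and the simplified signatures
are illustrative assumptions; only the shape of the change mirrors the patch.
Before, huge_palloc() read the process-wide preference via
chunk_dss_prec_get(); after, each call site threads its arena's dss_prec_t
down to chunk_alloc().

	#include <stddef.h>
	#include <stdio.h>
	#include <stdlib.h>

	/* Simplified stand-in for jemalloc's dss_prec_t. */
	typedef enum {
		DSS_PREC_DISABLED,
		DSS_PREC_PRIMARY,
		DSS_PREC_SECONDARY
	} dss_prec_t;

	/* Toy arena: each arena carries its own dss preference. */
	typedef struct {
		dss_prec_t	dss_prec;
	} toy_arena_t;

	/* Process-wide default, analogous to chunk_dss_prec_get(). */
	static dss_prec_t global_dss_prec = DSS_PREC_SECONDARY;

	/* Stand-in for chunk_alloc(): report which preference it received. */
	static void *
	toy_chunk_alloc(size_t size, dss_prec_t dss_prec)
	{

		printf("chunk_alloc(%zu, dss_prec=%d)\n", size, (int)dss_prec);
		return (malloc(size));
	}

	/* Before this patch: the huge path ignored the arena and always
	 * used the global default. */
	static void *
	toy_huge_malloc_old(size_t size)
	{

		return (toy_chunk_alloc(size, global_dss_prec));
	}

	/* After this patch: the caller passes the arena's preference down,
	 * as in huge_malloc(size, zero, huge_dss_prec_get(arena)). */
	static void *
	toy_huge_malloc_new(size_t size, dss_prec_t dss_prec)
	{

		return (toy_chunk_alloc(size, dss_prec));
	}

	int
	main(void)
	{
		toy_arena_t arena = { DSS_PREC_PRIMARY };

		free(toy_huge_malloc_old(4096));	/* global default */
		free(toy_huge_malloc_new(4096, arena.dss_prec)); /* arena's */
		return (0);
	}

On the design choice, as I read it: the helper added in src/huge.c,
huge_dss_prec_get(), wraps arena_dss_prec_get(choose_arena(arena)), and
routing through choose_arena() lets call sites that hold a possibly-NULL
arena pointer (imalloct, icalloct, ipalloct, iralloct) fall back to the
current thread's arena preference rather than the global chunk default.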