Add support for user-specified chunk allocators/deallocators.

Add new mallctl endpoints "arena.<i>.chunk.alloc" and
"arena.<i>.chunk.dealloc" to allow userspace to configure
jemalloc's chunk allocator and deallocator on a per-arena
basis.
This commit is contained in:
aravind
2014-05-05 15:16:56 -07:00
committed by Jason Evans
parent 4bbd11b789
commit fb7fe50a88
16 changed files with 283 additions and 45 deletions

View File

@@ -702,7 +702,8 @@ imalloct(size_t size, bool try_tcache, arena_t *arena)
if (size <= arena_maxclass)
return (arena_malloc(arena, size, false, try_tcache));
else
-		return (huge_malloc(size, false, huge_dss_prec_get(arena)));
+		return (huge_malloc(arena, size, false,
+		    huge_dss_prec_get(arena)));
}
JEMALLOC_ALWAYS_INLINE void *
@@ -719,7 +720,8 @@ icalloct(size_t size, bool try_tcache, arena_t *arena)
if (size <= arena_maxclass)
return (arena_malloc(arena, size, true, try_tcache));
else
-		return (huge_malloc(size, true, huge_dss_prec_get(arena)));
+		return (huge_malloc(arena, size, true,
+		    huge_dss_prec_get(arena)));
}
JEMALLOC_ALWAYS_INLINE void *
@@ -745,9 +747,11 @@ ipalloct(size_t usize, size_t alignment, bool zero, bool try_tcache,
ret = arena_palloc(choose_arena(arena), usize,
alignment, zero);
} else if (alignment <= chunksize)
-		ret = huge_malloc(usize, zero, huge_dss_prec_get(arena));
+		ret = huge_malloc(arena, usize, zero,
+		    huge_dss_prec_get(arena));
else
-		ret = huge_palloc(usize, alignment, zero, huge_dss_prec_get(arena));
+		ret = huge_palloc(arena, usize, alignment, zero,
+		    huge_dss_prec_get(arena));
}
assert(ALIGNMENT_ADDR2BASE(ret, alignment) == ret);
@@ -915,7 +919,7 @@ iralloct(void *ptr, size_t size, size_t extra, size_t alignment, bool zero,
alignment, zero, try_tcache_alloc,
try_tcache_dalloc));
} else {
-		return (huge_ralloc(ptr, oldsize, size, extra,
+		return (huge_ralloc(arena, ptr, oldsize, size, extra,
alignment, zero, try_tcache_dalloc, huge_dss_prec_get(arena)));
}
}