Allow chunks to not be naturally aligned.

Precisely size extents for huge size classes that aren't multiples of
chunksize.
This commit is contained in:
Jason Evans
2016-05-26 22:12:38 -07:00
parent 741967e79d
commit 4731cd47f7
11 changed files with 105 additions and 268 deletions

View File

@@ -486,8 +486,6 @@ void arena_chunk_cache_maybe_remove(arena_t *arena, extent_t *extent,
extent_t *arena_chunk_alloc_huge(tsdn_t *tsdn, arena_t *arena,
size_t usize, size_t alignment, bool *zero);
void arena_chunk_dalloc_huge(tsdn_t *tsdn, arena_t *arena, extent_t *extent);
void arena_chunk_ralloc_huge_similar(tsdn_t *tsdn, arena_t *arena,
extent_t *extent, size_t oldsize);
void arena_chunk_ralloc_huge_shrink(tsdn_t *tsdn, arena_t *arena,
extent_t *extent, size_t oldsize);
void arena_chunk_ralloc_huge_expand(tsdn_t *tsdn, arena_t *arena,

View File

@@ -90,6 +90,8 @@ ph_proto(, extent_heap_, extent_heap_t, extent_t)
arena_t *extent_arena_get(const extent_t *extent);
void *extent_addr_get(const extent_t *extent);
size_t extent_size_get(const extent_t *extent);
void *extent_before_get(const extent_t *extent);
void *extent_last_get(const extent_t *extent);
void *extent_past_get(const extent_t *extent);
bool extent_active_get(const extent_t *extent);
bool extent_dirty_get(const extent_t *extent);
@@ -137,6 +139,20 @@ extent_size_get(const extent_t *extent)
return (extent->e_size);
}
JEMALLOC_INLINE void *
extent_before_get(const extent_t *extent)
{

	/*
	 * Address one page before the extent's base.  Do the arithmetic in
	 * uintptr_t and cast back: arithmetic directly on a void * is a GNU
	 * extension, not ISO C.
	 */
	return ((void *)((uintptr_t)extent->e_addr - PAGE));
}
JEMALLOC_INLINE void *
extent_last_get(const extent_t *extent)
{

	/*
	 * Address of the extent's last page (base + size - PAGE).  Perform
	 * the arithmetic in uintptr_t, then cast to void *; pointer
	 * arithmetic on void * is not valid ISO C.
	 */
	return ((void *)((uintptr_t)extent->e_addr + extent->e_size - PAGE));
}
JEMALLOC_INLINE void *
extent_past_get(const extent_t *extent)
{

View File

@@ -797,14 +797,14 @@ sa2u(size_t size, size_t alignment)
return (usize);
}
/* Try for a large size class. */
if (likely(size <= large_maxclass) && likely(alignment < chunksize)) {
/*
* We can't achieve subpage alignment, so round up alignment
* to the minimum that can actually be supported.
*/
alignment = PAGE_CEILING(alignment);
/*
* We can't achieve subpage alignment, so round up alignment to the
* minimum that can actually be supported.
*/
alignment = PAGE_CEILING(alignment);
/* Try for a large size class. */
if (likely(size <= large_maxclass) && likely(alignment == PAGE)) {
/* Make sure result is a large size class. */
usize = (size <= LARGE_MINCLASS) ? LARGE_MINCLASS : s2u(size);
@@ -821,12 +821,6 @@ sa2u(size_t size, size_t alignment)
if (unlikely(alignment > HUGE_MAXCLASS))
return (0);
/*
* We can't achieve subchunk alignment, so round up alignment to the
* minimum that can actually be supported.
*/
alignment = CHUNK_CEILING(alignment);
/* Make sure result is a huge size class. */
if (size <= chunksize)
usize = chunksize;
@@ -839,7 +833,7 @@ sa2u(size_t size, size_t alignment)
}
/*
* Calculate the multi-chunk mapping that huge_palloc() would need in
* Calculate the multi-page mapping that huge_palloc() would need in
* order to guarantee the alignment.
*/
if (usize + alignment < usize) {

View File

@@ -19,7 +19,6 @@ arena_chunk_cache_maybe_remove
arena_chunk_dalloc_huge
arena_chunk_ralloc_huge_expand
arena_chunk_ralloc_huge_shrink
arena_chunk_ralloc_huge_similar
arena_cleanup
arena_dalloc
arena_dalloc_bin
@@ -211,6 +210,7 @@ extent_addr_set
extent_alloc
extent_arena_get
extent_arena_set
extent_before_get
extent_committed_get
extent_committed_set
extent_dalloc
@@ -219,6 +219,7 @@ extent_dirty_insert
extent_dirty_remove
extent_dirty_set
extent_init
extent_last_get
extent_past_get
extent_prof_tctx_get
extent_prof_tctx_set