diff --git a/include/jemalloc/internal/arena.h b/include/jemalloc/internal/arena.h
index 42086ca1..50b296e4 100644
--- a/include/jemalloc/internal/arena.h
+++ b/include/jemalloc/internal/arena.h
@@ -312,7 +312,7 @@ struct arena_s {
 
 	/*
 	 * Size/address-ordered tree of this arena's available runs.  The tree
-	 * is used for first-best-fit run allocation.
+	 * is used for first-fit run allocation.
 	 */
 	arena_avail_tree_t	runs_avail;
 
diff --git a/src/arena.c b/src/arena.c
index 34329a62..6f4197b2 100644
--- a/src/arena.c
+++ b/src/arena.c
@@ -907,23 +907,55 @@ arena_chunk_ralloc_huge_expand(arena_t *arena, void *chunk, size_t oldsize,
 	return (err);
 }
 
+/* Do first-fit run selection. */
+static arena_run_t *
+arena_run_first_fit(arena_t *arena, size_t size)
+{
+	arena_run_t *run;
+	index_t index, max_index;
+
+	assert(size == s2u(size));
+	assert(size == PAGE_CEILING(size));
+
+	/*
+	 * Iterate over all size classes that are at least large enough to
+	 * satisfy the request, search for the lowest run of each size class,
+	 * and choose the lowest of the runs found.
+	 */
+	run = NULL;
+	for (index = size2index(size), max_index =
+	    size2index(arena_maxclass); index <= max_index;) {
+		arena_run_t *currun;
+		arena_chunk_t *currun_chunk;
+		size_t currun_pageind, currun_size;
+		size_t usize = PAGE_CEILING(index2size(index));
+		arena_chunk_map_misc_t *key = (arena_chunk_map_misc_t *)(usize |
+		    CHUNK_MAP_KEY);
+		arena_chunk_map_misc_t *miscelm =
+		    arena_avail_tree_nsearch(&arena->runs_avail, key);
+		if (miscelm == NULL)
+			break;
+		currun = &miscelm->run;
+		if (run == NULL || (uintptr_t)currun < (uintptr_t)run)
+			run = currun;
+		currun_chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(currun);
+		currun_pageind = arena_miscelm_to_pageind(miscelm);
+		currun_size = arena_mapbits_unallocated_size_get(currun_chunk,
+		    currun_pageind);
+		assert(size2index(currun_size) + 1 > index);
+		index = size2index(currun_size) + 1;
+	}
+
+	return (run);
+}
+
 static arena_run_t *
 arena_run_alloc_large_helper(arena_t *arena, size_t size, bool zero)
 {
-	arena_chunk_map_misc_t *miscelm;
-	arena_chunk_map_misc_t *key;
-	size_t usize;
-
-	usize = s2u(size);
-	key = (arena_chunk_map_misc_t *)(usize | CHUNK_MAP_KEY);
-	miscelm = arena_avail_tree_nsearch(&arena->runs_avail, key);
-	if (miscelm != NULL) {
-		arena_run_t *run = &miscelm->run;
-		arena_run_split_large(arena, &miscelm->run, size, zero);
-		return (run);
-	}
-
-	return (NULL);
+	arena_run_t *run = arena_run_first_fit(arena, s2u(size));
+	if (run != NULL)
+		arena_run_split_large(arena, run, size, zero);
+	return (run);
 }
 
 static arena_run_t *
@@ -961,20 +993,10 @@ arena_run_alloc_large(arena_t *arena, size_t size, bool zero)
 static arena_run_t *
 arena_run_alloc_small_helper(arena_t *arena, size_t size, index_t binind)
 {
-	arena_run_t *run;
-	arena_chunk_map_misc_t *miscelm;
-	arena_chunk_map_misc_t *key;
-
-	assert(size == s2u(size));
-	key = (arena_chunk_map_misc_t *)(PAGE_CEILING(size) | CHUNK_MAP_KEY);
-	miscelm = arena_avail_tree_nsearch(&arena->runs_avail, key);
-	if (miscelm != NULL) {
-		run = &miscelm->run;
+	arena_run_t *run = arena_run_first_fit(arena, PAGE_CEILING(size));
+	if (run != NULL)
 		arena_run_split_small(arena, run, size, binind);
-		return (run);
-	}
-
-	return (NULL);
+	return (run);
 }
 
 static arena_run_t *
diff --git a/src/chunk.c b/src/chunk.c
index 972fecde..875fa4cc 100644
--- a/src/chunk.c
+++ b/src/chunk.c
@@ -62,6 +62,39 @@ chunk_deregister(const void *chunk, const extent_node_t *node)
 	}
 }
 
+/* Do first-fit chunk selection. */
+static extent_node_t *
+chunk_first_fit(arena_t *arena, extent_tree_t *chunks_szad, size_t size)
+{
+	extent_node_t *node;
+	index_t index;
+
+	assert(size == CHUNK_CEILING(size));
+
+	/*
+	 * Iterate over all size classes that are at least large enough to
+	 * satisfy the request, search for the lowest chunk of each size class,
+	 * and choose the lowest of the chunks found.
+	 */
+	node = NULL;
+	for (index = size2index(size); index < NSIZES;) {
+		extent_node_t *curnode;
+		extent_node_t key;
+		extent_node_init(&key, arena, NULL,
+		    CHUNK_CEILING(index2size(index)), false);
+		curnode = extent_tree_szad_nsearch(chunks_szad, &key);
+		if (curnode == NULL)
+			break;
+		if (node == NULL || (uintptr_t)extent_node_addr_get(curnode) <
+		    (uintptr_t)extent_node_addr_get(node))
+			node = curnode;
+		assert(size2index(extent_node_size_get(curnode)) + 1 > index);
+		index = size2index(extent_node_size_get(curnode)) + 1;
+	}
+
+	return (node);
+}
+
 static void *
 chunk_recycle(arena_t *arena, extent_tree_t *chunks_szad,
     extent_tree_t *chunks_ad, bool cache, void *new_addr, size_t size,
@@ -69,7 +102,6 @@
 {
 	void *ret;
 	extent_node_t *node;
-	extent_node_t key;
 	size_t alloc_size, leadsize, trailsize;
 	bool zeroed;
 
@@ -80,10 +112,16 @@
 	/* Beware size_t wrap-around. */
 	if (alloc_size < size)
 		return (NULL);
-	extent_node_init(&key, arena, new_addr, alloc_size, false);
 	malloc_mutex_lock(&arena->chunks_mtx);
-	node = (new_addr != NULL) ? extent_tree_ad_search(chunks_ad, &key) :
-	    extent_tree_szad_nsearch(chunks_szad, &key);
+	if (new_addr != NULL) {
+		extent_node_t key;
+		extent_node_init(&key, arena, new_addr, alloc_size, false);
+		node = extent_tree_ad_search(chunks_ad, &key);
+	} else if (alloc_size == chunksize) {
+		/* Any chunk will suffice; select the one lowest in memory. */
+		node = extent_tree_ad_first(chunks_ad);
+	} else
+		node = chunk_first_fit(arena, chunks_szad, alloc_size);
 	if (node == NULL || (new_addr != NULL && extent_node_size_get(node) <
 	    size)) {
 		malloc_mutex_unlock(&arena->chunks_mtx);
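
The sketch below is editorial commentary, not part of the patch: it restates the search strategy that arena_run_first_fit() and chunk_first_fit() share, with a sorted array standing in for jemalloc's size/address-ordered (szad) tree and power-of-two classes standing in for jemalloc's size classes. The names here (extent_t, szad_nsearch(), size2index(), index2size(), first_fit()) are illustrative stand-ins, not jemalloc's definitions.

#include <assert.h>
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

/* Toy free region: address plus size (stand-in for jemalloc's extents). */
typedef struct {
	uintptr_t	addr;
	size_t		size;
} extent_t;

/* Toy size classes: class i holds sizes up to (1 << i). */
static unsigned
size2index(size_t size)
{
	unsigned index = 0;

	while (((size_t)1 << index) < size)
		index++;
	return (index);
}

static size_t
index2size(unsigned index)
{

	return ((size_t)1 << index);
}

/*
 * Return the lowest element in (size, addr) order whose size is at least
 * size, mimicking extent_tree_szad_nsearch(); extents[] must be sorted by
 * (size, addr).
 */
static extent_t *
szad_nsearch(extent_t *extents, size_t nextents, size_t size)
{
	size_t i;

	for (i = 0; i < nextents; i++) {
		if (extents[i].size >= size)
			return (&extents[i]);
	}
	return (NULL);
}

/*
 * First-fit: of all regions large enough to satisfy the request, choose
 * the one lowest in memory.  Instead of scanning every region, probe the
 * lowest region of each size class and then skip past that region's
 * class, so at most one probe is made per populated size class.
 */
static extent_t *
first_fit(extent_t *extents, size_t nextents, size_t size, unsigned max_index)
{
	extent_t *fit = NULL;
	unsigned index;

	for (index = size2index(size); index <= max_index;) {
		extent_t *cur = szad_nsearch(extents, nextents,
		    index2size(index));
		if (cur == NULL)
			break;
		if (fit == NULL || cur->addr < fit->addr)
			fit = cur;
		assert(size2index(cur->size) + 1 > index);
		index = size2index(cur->size) + 1;
	}
	return (fit);
}

int
main(void)
{
	/* Sorted by (size, addr); a single szad nsearch would pick 0x9000. */
	extent_t extents[] = {
		{ 0x9000, 4096 },
		{ 0x1000, 8192 },
		{ 0x5000, 16384 },
	};
	extent_t *fit = first_fit(extents, 3, 4096, size2index(16384));

	/* Prints 0x1000: the lowest address among sufficiently large fits. */
	printf("first fit: addr=0x%lx, size=%zu\n", (unsigned long)fit->addr,
	    fit->size);
	return (0);
}

The skip to size2index(cur->size) + 1 at the bottom of the loop is what keeps this cheap: each iteration either terminates or jumps past at least one populated size class, so the cost is one tree search per populated class rather than a scan of every free region.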