From 04ca7580dbc409915de05cb1cee12a369e898590 Mon Sep 17 00:00:00 2001
From: Jason Evans
Date: Fri, 6 Mar 2015 23:25:13 -0800
Subject: [PATCH] Fix a chunk_recycle() regression.

This regression was introduced by
97c04a93838c4001688fe31bf018972b4696efe2 (Use first-fit rather than
first-best-fit run/chunk allocation.).
---
 src/chunk.c | 19 +++++++++++++++----
 1 file changed, 15 insertions(+), 4 deletions(-)

diff --git a/src/chunk.c b/src/chunk.c
index 875fa4cc..fb8cd413 100644
--- a/src/chunk.c
+++ b/src/chunk.c
@@ -64,13 +64,22 @@ chunk_deregister(const void *chunk, const extent_node_t *node)
 
 /* Do first-fit chunk selection. */
 static extent_node_t *
-chunk_first_fit(arena_t *arena, extent_tree_t *chunks_szad, size_t size)
+chunk_first_fit(arena_t *arena, extent_tree_t *chunks_szad,
+    extent_tree_t *chunks_ad, size_t size)
 {
 	extent_node_t *node;
 	index_t index;
 
 	assert(size == CHUNK_CEILING(size));
 
+	if (size == chunksize) {
+		/*
+		 * Any chunk will suffice, so simply select the one lowest in
+		 * memory.
+		 */
+		return (extent_tree_ad_first(chunks_ad));
+	}
+
 	/*
 	 * Iterate over all size classes that are at least large enough to
 	 * satisfy the request, search for the lowest chunk of each size class,
@@ -113,12 +122,14 @@ chunk_recycle(arena_t *arena, extent_tree_t *chunks_szad,
 	if (alloc_size < size)
 		return (NULL);
 	malloc_mutex_lock(&arena->chunks_mtx);
-	if (new_addr != NULL || size == chunksize) {
+	if (new_addr != NULL) {
 		extent_node_t key;
 		extent_node_init(&key, arena, new_addr, alloc_size, false);
 		node = extent_tree_ad_search(chunks_ad, &key);
-	} else
-		node = chunk_first_fit(arena, chunks_szad, alloc_size);
+	} else {
+		node = chunk_first_fit(arena, chunks_szad, chunks_ad,
+		    alloc_size);
+	}
 	if (node == NULL || (new_addr != NULL && extent_node_size_get(node) <
 	    size)) {
 		malloc_mutex_unlock(&arena->chunks_mtx);
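
Editorial note, not part of the original patch: before this fix, chunk_recycle()
routed single-chunk requests (size == chunksize) through the exact
address-ordered search, keyed on new_addr, which is NULL on that path, so the
lookup effectively never found a recyclable chunk. The fix moves the
single-chunk case into chunk_first_fit(), which now receives chunks_ad and
returns the lowest-addressed free chunk directly. The standalone C sketch
below illustrates that selection policy only; the extent_t struct, CHUNKSIZE
constant, and array-based free list are invented simplifications, whereas
jemalloc keeps free extents in red-black trees (chunks_ad ordered by address,
chunks_szad by size then address) and iterates over size classes rather than
scanning linearly.

#include <stddef.h>
#include <stdio.h>

/*
 * Hypothetical free-extent record for illustration; jemalloc's real
 * extent_node_t carries considerably more state.
 */
typedef struct {
	void	*addr;
	size_t	size;
} extent_t;

/* Assume 2 MiB chunks, purely for the sketch. */
#define	CHUNKSIZE	((size_t)2 << 20)

/*
 * First-fit selection over a free list kept in address order (the analogue
 * of jemalloc's chunks_ad tree): return the lowest-addressed extent that is
 * large enough, or NULL.  When exactly one chunk is requested, every free
 * extent qualifies, so the lowest-addressed extent is returned directly.
 * This mirrors the fast path the patch moves into chunk_first_fit().
 */
static extent_t *
first_fit(extent_t *free_ad, size_t nfree, size_t size)
{
	size_t i;

	if (nfree == 0)
		return (NULL);
	if (size == CHUNKSIZE) {
		/* Any chunk will suffice; take the one lowest in memory. */
		return (&free_ad[0]);
	}
	/*
	 * Because the list is address-ordered, the first sufficiently large
	 * extent is also the lowest-addressed one, i.e. first fit.
	 */
	for (i = 0; i < nfree; i++) {
		if (free_ad[i].size >= size)
			return (&free_ad[i]);
	}
	return (NULL);
}

int
main(void)
{
	/* Address-ordered free extents; the addresses are made up. */
	extent_t free_ad[] = {
		{(void *)0x10000000, 1 * CHUNKSIZE},
		{(void *)0x20000000, 4 * CHUNKSIZE},
		{(void *)0x30000000, 2 * CHUNKSIZE},
	};
	size_t nfree = sizeof(free_ad) / sizeof(free_ad[0]);
	extent_t *node;

	node = first_fit(free_ad, nfree, 2 * CHUNKSIZE);
	printf("2-chunk request -> %p\n", node != NULL ? node->addr : NULL);

	node = first_fit(free_ad, nfree, 1 * CHUNKSIZE);
	printf("1-chunk request -> %p\n", node != NULL ? node->addr : NULL);

	return (0);
}

In the real allocator the single-chunk fast path matters because the general
path walks chunks_szad one size class at a time; when size == chunksize every
free extent is a candidate, so extent_tree_ad_first(chunks_ad) yields the
answer in a single tree operation.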