Fix run leak.

Fix arena_run_first_best_fit() to search all potentially non-empty
runs_avail heaps, rather than ignoring the heap that contains runs
larger than large_maxclass, but less than chunksize.

This fixes a regression caused by
f193fd80cf (Refactor runs_avail.).

This resolves #493.
author Jason Evans 2016-11-07 09:37:12 -08:00
parent 9bef119b42
commit a4e83e8593
2 changed files with 8 additions and 6 deletions
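
Before the diff, a self-contained sketch of the failure mode (hypothetical sizes, names, and data structures; not jemalloc's code): insertion files a free run under its size class for any run smaller than chunksize, but the regressed best-fit search stopped at large_maxclass, so runs filed in the final heap could never be found again.

/*
 * Self-contained sketch of the regression (hypothetical sizes, names,
 * and data structures; not jemalloc's code).  Free runs are filed in
 * per-size-class heaps, runs_avail[0..NPSIZES).  Insertion uses a
 * run's class unconditionally, but the buggy search stopped at
 * large_maxclass, so the final heap -- classes larger than
 * large_maxclass but no larger than chunksize -- was never visited.
 */
#include <stddef.h>
#include <stdio.h>

#define NPSIZES 5
static const size_t class_size[NPSIZES] = {
    4096, 8192, 16384, 28672, 32768
};
static const size_t large_maxclass = 28672; /* largest "large" class */
static const size_t chunksize = 32768;      /* runs may be this large */

/* Count of free runs per class; a stand-in for the real heaps. */
static int runs_avail[NPSIZES];

/* Index of the smallest class that can hold size. */
static size_t
psz2ind(size_t size)
{
    size_t i;

    for (i = 0; i < NPSIZES && class_size[i] < size; i++)
        ;
    return (i);
}

/*
 * Return the first non-empty heap at or above size's class, subject to
 * the given upper bound on class size; -1 if none is visible.
 */
static int
first_best_fit(size_t size, size_t bound)
{
    size_t i;

    for (i = psz2ind(size); i < NPSIZES && class_size[i] <= bound; i++) {
        if (runs_avail[i] > 0)
            return ((int)i);
    }
    return (-1);
}

int
main(void)
{
    /* File one free run in the final heap, as insertion may. */
    runs_avail[psz2ind(chunksize)] = 1;

    /* Buggy bound: prints -1; the run is unreachable (the leak). */
    printf("bound large_maxclass: heap %d\n",
        first_best_fit(4096, large_maxclass));
    /* Fixed bound: prints 4; every insertable class is searchable. */
    printf("bound chunksize:      heap %d\n",
        first_best_fit(4096, chunksize));
    return (0);
}

Compiled and run, the first lookup reports no fit while the second finds the run; that difference is exactly the change of loop bound in arena_run_first_best_fit() below.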

src/arena.c

@@ -150,6 +150,8 @@ arena_avail_insert(arena_t *arena, arena_chunk_t *chunk, size_t pageind,
         arena_miscelm_get_const(chunk, pageind))));
     assert(npages == (arena_mapbits_unallocated_size_get(chunk, pageind) >>
         LG_PAGE));
+    assert((npages << LG_PAGE) < chunksize);
+    assert(pind2sz(pind) <= chunksize);
     arena_run_heap_insert(&arena->runs_avail[pind],
         arena_miscelm_get_mutable(chunk, pageind));
 }
@@ -162,6 +164,8 @@ arena_avail_remove(arena_t *arena, arena_chunk_t *chunk, size_t pageind,
         arena_miscelm_get_const(chunk, pageind))));
     assert(npages == (arena_mapbits_unallocated_size_get(chunk, pageind) >>
         LG_PAGE));
+    assert((npages << LG_PAGE) < chunksize);
+    assert(pind2sz(pind) <= chunksize);
     arena_run_heap_remove(&arena->runs_avail[pind],
         arena_miscelm_get_mutable(chunk, pageind));
 }
@@ -1046,7 +1050,7 @@ arena_run_first_best_fit(arena_t *arena, size_t size)
     pind = psz2ind(run_quantize_ceil(size));

-    for (i = pind; pind2sz(i) <= large_maxclass; i++) {
+    for (i = pind; pind2sz(i) <= chunksize; i++) {
         arena_chunk_map_misc_t *miscelm = arena_run_heap_first(
             &arena->runs_avail[i]);
         if (miscelm != NULL)
             return (&miscelm->run);
@@ -1922,8 +1926,7 @@ arena_reset(tsd_t *tsd, arena_t *arena)
     assert(!arena->purging);
     arena->nactive = 0;

-    for (i = 0; i < sizeof(arena->runs_avail) / sizeof(arena_run_heap_t);
-        i++)
+    for (i = 0; i < NPSIZES; i++)
         arena_run_heap_new(&arena->runs_avail[i]);

     malloc_mutex_unlock(tsd_tsdn(tsd), &arena->lock);
@@ -3514,8 +3517,7 @@ arena_new(tsdn_t *tsdn, unsigned ind)
     arena->nactive = 0;
     arena->ndirty = 0;

-    for (i = 0; i < sizeof(arena->runs_avail) / sizeof(arena_run_heap_t);
-        i++)
+    for (i = 0; i < NPSIZES; i++)
         arena_run_heap_new(&arena->runs_avail[i]);

     qr_new(&arena->runs_dirty, rd_link);
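
A side note on the last two hunks: assuming runs_avail is declared as a fixed array of NPSIZES heaps, the sizeof-derived bound and NPSIZES are equal, and the rewrite just states the heap count explicitly so that initialization visibly matches the bounds used by insertion and search. A minimal sketch of the pattern (C11; run_heap_t is an illustrative stand-in for arena_run_heap_t):

#include <stddef.h>

#define NPSIZES 5 /* hypothetical page-size-class count */

typedef struct {
    void *first;
} run_heap_t;

static run_heap_t runs_avail[NPSIZES];

static void
run_heap_new(run_heap_t *heap)
{
    heap->first = NULL; /* empty heap */
}

int
main(void)
{
    size_t i;

    /* The sizeof-derived count equals the explicit one... */
    _Static_assert(
        sizeof(runs_avail) / sizeof(run_heap_t) == NPSIZES,
        "heap count mismatch");

    /* ...but NPSIZES states directly: one heap per class. */
    for (i = 0; i < NPSIZES; i++)
        run_heap_new(&runs_avail[i]);
    return (0);
}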

test/unit/run_quantize.c

@@ -111,7 +111,7 @@ TEST_BEGIN(test_monotonic)
     floor_prev = 0;
     ceil_prev = 0;
-    for (i = 1; i <= large_maxclass >> LG_PAGE; i++) {
+    for (i = 1; i <= chunksize >> LG_PAGE; i++) {
         size_t run_size, floor, ceil;

         run_size = i << LG_PAGE;
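
The widened test now exercises quantization for every run size up to chunksize rather than stopping at large_maxclass. A runnable sketch of the property it checks, with a toy page-rounding quantizer standing in for jemalloc's run_quantize_floor()/run_quantize_ceil() and assumed page/chunk sizes:

/*
 * Monotonicity sketch (toy quantization that rounds to whole pages;
 * the real functions use coarser, spaced size classes).  For run sizes
 * increasing one page at a time up to chunksize, floor and ceil must
 * bracket the size and never decrease.
 */
#include <assert.h>
#include <stddef.h>

#define LG_PAGE 12                               /* assumed 4 KiB pages  */
static const size_t chunksize = (size_t)1 << 21; /* assumed 2 MiB chunks */

static size_t
quantize_floor(size_t size)
{
    return ((size >> LG_PAGE) << LG_PAGE);
}

static size_t
quantize_ceil(size_t size)
{
    return (quantize_floor(size + ((size_t)1 << LG_PAGE) - 1));
}

int
main(void)
{
    size_t i, floor_prev = 0, ceil_prev = 0;

    /* Iterate to chunksize, not large_maxclass, mirroring the fix. */
    for (i = 1; i <= chunksize >> LG_PAGE; i++) {
        size_t run_size = i << LG_PAGE;
        size_t floor = quantize_floor(run_size);
        size_t ceil = quantize_ceil(run_size);

        assert(floor <= run_size && run_size <= ceil);
        assert(floor >= floor_prev && ceil >= ceil_prev);
        floor_prev = floor;
        ceil_prev = ceil;
    }
    return (0);
}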