From 12ca91402bc5d5c5a1cca495957463bb8e71fdcf Mon Sep 17 00:00:00 2001
From: Jason Evans <jasone@canonware.com>
Date: Sun, 17 Oct 2010 19:56:09 -0700
Subject: [PATCH] Add assertions to run coalescing.

Assert that the chunk map bits at the ends of the runs that participate
in coalescing are self-consistent.
---
 jemalloc/src/arena.c | 24 +++++++++++++++++-------
 1 file changed, 17 insertions(+), 7 deletions(-)

diff --git a/jemalloc/src/arena.c b/jemalloc/src/arena.c
index 36957d97..6a84737e 100644
--- a/jemalloc/src/arena.c
+++ b/jemalloc/src/arena.c
@@ -934,19 +934,24 @@ arena_run_dalloc(arena_t *arena, arena_run_t *run, bool dirty)
 	    CHUNK_MAP_DIRTY) == flag_dirty) {
 		size_t nrun_size = chunk->map[run_ind+run_pages-map_bias].bits &
 		    ~PAGE_MASK;
+		size_t nrun_pages = nrun_size >> PAGE_SHIFT;
 
 		/*
 		 * Remove successor from runs_avail; the coalesced run is
 		 * inserted later.
 		 */
+		assert((chunk->map[run_ind+run_pages+nrun_pages-1-map_bias].bits
+		    & ~PAGE_MASK) == nrun_size);
+		assert((chunk->map[run_ind+run_pages+nrun_pages-1-map_bias].bits
+		    & CHUNK_MAP_ALLOCATED) == 0);
+		assert((chunk->map[run_ind+run_pages+nrun_pages-1-map_bias].bits
+		    & CHUNK_MAP_DIRTY) == flag_dirty);
 		arena_avail_tree_remove(runs_avail,
 		    &chunk->map[run_ind+run_pages-map_bias]);
 
 		size += nrun_size;
-		run_pages = size >> PAGE_SHIFT;
+		run_pages += nrun_pages;
 
-		assert((chunk->map[run_ind+run_pages-1-map_bias].bits &
-		    ~PAGE_MASK) == nrun_size);
 		chunk->map[run_ind-map_bias].bits = size |
 		    (chunk->map[run_ind-map_bias].bits & CHUNK_MAP_FLAGS_MASK);
 		chunk->map[run_ind+run_pages-1-map_bias].bits = size |
@@ -960,21 +965,26 @@ arena_run_dalloc(arena_t *arena, arena_run_t *run, bool dirty)
 	    CHUNK_MAP_DIRTY) == flag_dirty) {
 		size_t prun_size = chunk->map[run_ind-1-map_bias].bits &
 		    ~PAGE_MASK;
+		size_t prun_pages = prun_size >> PAGE_SHIFT;
 
-		run_ind -= prun_size >> PAGE_SHIFT;
+		run_ind -= prun_pages;
 
 		/*
 		 * Remove predecessor from runs_avail; the coalesced run is
 		 * inserted later.
 		 */
+		assert((chunk->map[run_ind-map_bias].bits & ~PAGE_MASK)
+		    == prun_size);
+		assert((chunk->map[run_ind-map_bias].bits & CHUNK_MAP_ALLOCATED)
+		    == 0);
+		assert((chunk->map[run_ind-map_bias].bits & CHUNK_MAP_DIRTY)
+		    == flag_dirty);
 		arena_avail_tree_remove(runs_avail,
 		    &chunk->map[run_ind-map_bias]);
 
 		size += prun_size;
-		run_pages = size >> PAGE_SHIFT;
+		run_pages += prun_pages;
 
-		assert((chunk->map[run_ind-map_bias].bits & ~PAGE_MASK) ==
-		    prun_size);
 		chunk->map[run_ind-map_bias].bits = size |
 		    (chunk->map[run_ind-map_bias].bits & CHUNK_MAP_FLAGS_MASK);
 		chunk->map[run_ind+run_pages-1-map_bias].bits = size |
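
Note for review: the standalone C sketch below models the invariant the new
assertions enforce, namely that a free neighbor's trailing chunk map entry
agrees with its leading entry on run size, allocation state, and dirtiness
before the runs are merged. It is a hedged illustration, not jemalloc source:
the PAGE_SHIFT value, the CHUNK_MAP_* flag values, the map_elm_t type, and
the coalesce_forward helper are assumptions made for this sketch, and the
map_bias offset is omitted for brevity.

/*
 * Simplified model of the forward (successor) coalescing path and the
 * boundary-consistency assertions added by this patch.
 */
#include <assert.h>
#include <stddef.h>
#include <stdio.h>

#define PAGE_SHIFT		12
#define PAGE_MASK		((1U << PAGE_SHIFT) - 1)
#define CHUNK_MAP_DIRTY		0x1U
#define CHUNK_MAP_ALLOCATED	0x2U

typedef struct { unsigned bits; } map_elm_t;

/* Merge the free successor into the run [run_ind, run_ind + *run_pages). */
static void
coalesce_forward(map_elm_t *map, size_t run_ind, size_t *run_pages,
    size_t *size, unsigned flag_dirty)
{
	size_t nrun_size = map[run_ind + *run_pages].bits & ~PAGE_MASK;
	size_t nrun_pages = nrun_size >> PAGE_SHIFT;
	size_t ntail = run_ind + *run_pages + nrun_pages - 1;

	/*
	 * The successor's trailing map entry must agree with its leading
	 * entry: same size, not allocated, matching dirty flag.
	 */
	assert((map[ntail].bits & ~PAGE_MASK) == nrun_size);
	assert((map[ntail].bits & CHUNK_MAP_ALLOCATED) == 0);
	assert((map[ntail].bits & CHUNK_MAP_DIRTY) == flag_dirty);

	*size += nrun_size;
	*run_pages += nrun_pages;	/* Mirrors run_pages += nrun_pages. */
}

int
main(void)
{
	map_elm_t map[8] = {{0}};
	size_t run_pages = 2;
	size_t size = run_pages << PAGE_SHIFT;

	/* A free, clean, 2-page successor: leading and trailing entries. */
	map[2].bits = 2U << PAGE_SHIFT;
	map[3].bits = 2U << PAGE_SHIFT;

	coalesce_forward(map, 0, &run_pages, &size, 0);
	printf("coalesced run: %zu pages, %zu bytes\n", run_pages, size);
	return (0);
}

Under these assumptions the program prints "coalesced run: 4 pages, 16384
bytes". Checking the trailing entry as well as the leading one catches map
corruption that the old size-only assertion at the boundary would miss,
since coalescing relies on both boundary entries staying in sync.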