From 004ed142a66529ecf4a55e8f4fa42ff2e535f586 Mon Sep 17 00:00:00 2001
From: Jason Evans
Date: Thu, 14 Oct 2010 00:28:31 -0700
Subject: [PATCH] Fix a regression in CHUNK_MAP_UNZEROED change.

Fix a regression added by revision:

    3377ffa1f4f8e67bce1e36624285e5baf5f9ecef
    Change CHUNK_MAP_ZEROED to CHUNK_MAP_UNZEROED.

A modified chunk->map dereference was missing the subtraction of
map_bias, which caused incorrect chunk map initialization, as well as
potential corruption of the first non-header page of memory within each
chunk.
---
 jemalloc/src/arena.c | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/jemalloc/src/arena.c b/jemalloc/src/arena.c
index 45e5fd19..7c99d436 100644
--- a/jemalloc/src/arena.c
+++ b/jemalloc/src/arena.c
@@ -395,7 +395,7 @@ arena_run_split(arena_t *arena, arena_run_t *run, size_t size, bool large,
 			chunk->map[run_ind+i-map_bias].bits = (i << PAGE_SHIFT)
 			    | CHUNK_MAP_ALLOCATED;
 		}
-		chunk->map[run_ind + need_pages-1-map_bias].bits = ((need_pages
+		chunk->map[run_ind+need_pages-1-map_bias].bits = ((need_pages
 		    - 1) << PAGE_SHIFT) | CHUNK_MAP_ALLOCATED | flag_dirty;
 	}
 }
@@ -457,7 +457,8 @@ arena_chunk_alloc(arena_t *arena)
 		for (i = map_bias+1; i < chunk_npages-1; i++)
 			chunk->map[i-map_bias].bits = unzeroed;
 	}
-	chunk->map[chunk_npages-1].bits = arena_maxclass | unzeroed;
+	chunk->map[chunk_npages-1-map_bias].bits = arena_maxclass |
+	    unzeroed;

 	/* Insert the run into the runs_avail_clean tree. */
 	arena_avail_tree_insert(&arena->runs_avail_clean,
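
Background on the indexing convention the patch relies on: chunk->map[] holds one
entry per page of the chunk, excluding the map_bias header pages, so a page index
must have map_bias subtracted before it can be used to index the map. The sketch
below is a hand-written illustration of that rule, not jemalloc source; the page
count, MAP_BIAS value, struct layout, and flag value are assumptions chosen only
for demonstration.

/* Illustrative sketch only -- sizes and flag values are hypothetical. */
#include <stdio.h>
#include <stddef.h>

#define CHUNK_NPAGES 256                  /* hypothetical pages per chunk */
#define MAP_BIAS     2                    /* hypothetical header pages with no map entry */
#define CHUNK_MAP_UNZEROED ((size_t)0x40) /* placeholder flag value */

typedef struct { size_t bits; } arena_chunk_map_t;

typedef struct {
	/* One entry per non-header page: indices 0 .. CHUNK_NPAGES-MAP_BIAS-1. */
	arena_chunk_map_t map[CHUNK_NPAGES - MAP_BIAS];
} arena_chunk_t;

int main(void) {
	static arena_chunk_t chunk;

	/*
	 * Correct: the chunk's last page, page CHUNK_NPAGES-1, lives at map
	 * index CHUNK_NPAGES-1-MAP_BIAS, the final element of map[].
	 */
	chunk.map[CHUNK_NPAGES - 1 - MAP_BIAS].bits = CHUNK_MAP_UNZEROED;

	/*
	 * The regression omitted the "- map_bias" term, indexing
	 * map[CHUNK_NPAGES-1], which is MAP_BIAS elements past the end of the
	 * map and lands in the memory that follows the chunk header -- hence
	 * the corruption of the first non-header page described above.
	 */
	printf("last valid map index: %d\n", CHUNK_NPAGES - 1 - MAP_BIAS);
	return 0;
}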