Remove mmap_unaligned.

Remove mmap_unaligned, which was used to heuristically decide whether to
optimistically call mmap() in a way that could reduce the total number of
system calls.  If I remember correctly, the intention of
mmap_unaligned was to avoid always executing the slow path in the
presence of ASLR.  However, that reasoning seems to have been based on a
flawed understanding of how ASLR actually works.  Although ASLR
apparently causes mmap() to ignore address requests, it does not cause
total placement randomness, so there is a reasonable expectation that
iterative mmap() calls will start returning chunk-aligned mappings once
the first chunk has been properly aligned.
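
For context, the mapping strategy in question is the usual two-path scheme:
try a bare mmap() and keep the result if it happens to be chunk-aligned,
otherwise fall back to over-allocating and trimming.  The sketch below is a
hypothetical illustration of that idea, not jemalloc's actual
chunk_alloc_mmap(); the name chunk_mmap_sketch is invented, and it assumes
alignment is a power of two and that both size and alignment are multiples
of the page size.

#include <stddef.h>
#include <stdint.h>
#include <sys/mman.h>

/*
 * Hypothetical sketch of optimistic chunk-aligned mapping; assumes
 * alignment is a power of two and a multiple of the page size, and
 * that size is a multiple of the page size.
 */
static void *
chunk_mmap_sketch(size_t size, size_t alignment)
{
    /*
     * Fast path: a single mmap(), kept only if the kernel's placement
     * happens to be aligned.
     */
    void *ret = mmap(NULL, size, PROT_READ | PROT_WRITE,
        MAP_PRIVATE | MAP_ANON, -1, 0);
    if (ret == MAP_FAILED)
        return (NULL);
    if (((uintptr_t)ret & (alignment - 1)) == 0)
        return (ret);

    /*
     * Slow path: over-allocate by alignment, then trim the leading and
     * trailing excess so the result is chunk-aligned.
     */
    munmap(ret, size);
    size_t alloc_size = size + alignment;
    void *base = mmap(NULL, alloc_size, PROT_READ | PROT_WRITE,
        MAP_PRIVATE | MAP_ANON, -1, 0);
    if (base == MAP_FAILED)
        return (NULL);
    uintptr_t addr = ((uintptr_t)base + alignment - 1) &
        ~(uintptr_t)(alignment - 1);
    size_t lead = addr - (uintptr_t)base;
    size_t trail = alloc_size - lead - size;
    if (lead != 0)
        munmap(base, lead);
    if (trail != 0)
        munmap((void *)(addr + size), trail);
    return ((void *)addr);
}

Under ASLR the requested address may be ignored, but because placement is
not fully random, the fast path tends to keep succeeding once the first
chunk has been aligned, which is the expectation the commit message
describes.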
Author: Jason Evans
Date:   2012-04-21 19:17:21 -07:00
Commit: a8f8d7540d
Parent: 7ad54c1c30
6 changed files with 29 additions and 101 deletions

include/jemalloc/internal/chunk.h

@@ -44,8 +44,7 @@ extern size_t arena_maxclass; /* Max size class for arenas. */
 void *chunk_alloc(size_t size, size_t alignment, bool base, bool *zero);
 void chunk_dealloc(void *chunk, size_t size, bool unmap);
-bool chunk_boot0(void);
-bool chunk_boot1(void);
+bool chunk_boot(void);
 #endif /* JEMALLOC_H_EXTERNS */
 /******************************************************************************/

include/jemalloc/internal/chunk_mmap.h

@@ -14,8 +14,6 @@ void pages_purge(void *addr, size_t length);
 void *chunk_alloc_mmap(size_t size, size_t alignment, bool *zero);
 bool chunk_dealloc_mmap(void *chunk, size_t size);
-bool chunk_mmap_boot(void);
 #endif /* JEMALLOC_H_EXTERNS */
 /******************************************************************************/
 #ifdef JEMALLOC_H_INLINES

include/jemalloc/internal/private_namespace.h

@@ -74,8 +74,7 @@
 #define chunk_alloc JEMALLOC_N(chunk_alloc)
 #define chunk_alloc_dss JEMALLOC_N(chunk_alloc_dss)
 #define chunk_alloc_mmap JEMALLOC_N(chunk_alloc_mmap)
-#define chunk_boot0 JEMALLOC_N(chunk_boot0)
-#define chunk_boot1 JEMALLOC_N(chunk_boot1)
+#define chunk_boot JEMALLOC_N(chunk_boot)
 #define chunk_dealloc JEMALLOC_N(chunk_dealloc)
 #define chunk_dealloc_mmap JEMALLOC_N(chunk_dealloc_mmap)
 #define chunk_dss_boot JEMALLOC_N(chunk_dss_boot)
@@ -83,7 +82,6 @@
 #define chunk_dss_postfork_parent JEMALLOC_N(chunk_dss_postfork_parent)
 #define chunk_dss_prefork JEMALLOC_N(chunk_dss_prefork)
 #define chunk_in_dss JEMALLOC_N(chunk_in_dss)
-#define chunk_mmap_boot JEMALLOC_N(chunk_mmap_boot)
 #define chunk_npages JEMALLOC_N(chunk_npages)
 #define chunks_mtx JEMALLOC_N(chunks_mtx)
 #define chunks_rtree JEMALLOC_N(chunks_rtree)
@@ -192,10 +190,6 @@
 #define malloc_write JEMALLOC_N(malloc_write)
 #define map_bias JEMALLOC_N(map_bias)
 #define mb_write JEMALLOC_N(mb_write)
-#define mmap_unaligned_tsd_boot JEMALLOC_N(mmap_unaligned_tsd_boot)
-#define mmap_unaligned_tsd_cleanup_wrapper JEMALLOC_N(mmap_unaligned_tsd_cleanup_wrapper)
-#define mmap_unaligned_tsd_get JEMALLOC_N(mmap_unaligned_tsd_get)
-#define mmap_unaligned_tsd_set JEMALLOC_N(mmap_unaligned_tsd_set)
 #define mutex_boot JEMALLOC_N(mutex_boot)
 #define narenas JEMALLOC_N(narenas)
 #define ncpus JEMALLOC_N(ncpus)