Rename index_t to szind_t to avoid an existing type on Solaris.
This resolves #256.
commit d01fd19755
parent 5ef33a9f2b
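Background on the rename, for readers of the diff below: jemalloc's internal headers declared their own size class index type named index_t, and Solaris system headers already declare a type with that name, so including both produces a conflicting-typedef error. The following is a minimal sketch of that situation; the `typedef short index_t;` stand-in is illustrative, assumed for the example, not the actual Solaris declaration.

/*
 * Sketch of the collision the rename avoids.  If a platform header has
 * already done something like:
 *
 *     typedef short index_t;        (assumed platform declaration)
 *
 * then jemalloc's old "typedef unsigned index_t;" is a conflicting
 * redefinition and the build fails.  Giving jemalloc's type a distinct
 * name sidesteps the clash:
 */
#include <stdio.h>

typedef short index_t;     /* stand-in for the platform's pre-existing type */
typedef unsigned szind_t;  /* jemalloc's size class index type, renamed */

int
main(void)
{
	szind_t binind = 3;          /* a size class index */
	index_t platform_value = -1; /* the unrelated platform type */

	printf("binind=%u platform_value=%d\n", (unsigned)binind,
	    (int)platform_value);
	return (0);
}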
@@ -9,6 +9,7 @@ brevity. Much more detail can be found in the git revision history:
 Bug fixes:
 - Don't bitshift by negative amounts when encoding/decoding run sizes in chunk
   header maps. This affected systems with page sizes greater than 8 KiB.
+- Rename index_t to szind_t to avoid an existing type on Solaris.
 
 * 4.0.0 (August 17, 2015)
 
@@ -39,7 +39,7 @@ typedef struct arena_s arena_t;
 #ifdef JEMALLOC_ARENA_STRUCTS_A
 struct arena_run_s {
 	/* Index of bin this run is associated with. */
-	index_t binind;
+	szind_t binind;
 
 	/* Number of free regions in run. */
 	unsigned nfree;
@@ -448,7 +448,7 @@ bool arena_lg_dirty_mult_set(arena_t *arena, ssize_t lg_dirty_mult);
 void arena_maybe_purge(arena_t *arena);
 void arena_purge_all(arena_t *arena);
 void arena_tcache_fill_small(arena_t *arena, tcache_bin_t *tbin,
-    index_t binind, uint64_t prof_accumbytes);
+    szind_t binind, uint64_t prof_accumbytes);
 void arena_alloc_junk_small(void *ptr, arena_bin_info_t *bin_info,
     bool zero);
 #ifdef JEMALLOC_JET
@@ -524,7 +524,7 @@ size_t arena_mapbits_unallocated_size_get(arena_chunk_t *chunk,
     size_t pageind);
 size_t arena_mapbits_large_size_get(arena_chunk_t *chunk, size_t pageind);
 size_t arena_mapbits_small_runind_get(arena_chunk_t *chunk, size_t pageind);
-index_t arena_mapbits_binind_get(arena_chunk_t *chunk, size_t pageind);
+szind_t arena_mapbits_binind_get(arena_chunk_t *chunk, size_t pageind);
 size_t arena_mapbits_dirty_get(arena_chunk_t *chunk, size_t pageind);
 size_t arena_mapbits_unzeroed_get(arena_chunk_t *chunk, size_t pageind);
 size_t arena_mapbits_decommitted_get(arena_chunk_t *chunk, size_t pageind);
@@ -541,17 +541,17 @@ void arena_mapbits_internal_set(arena_chunk_t *chunk, size_t pageind,
 void arena_mapbits_large_set(arena_chunk_t *chunk, size_t pageind,
     size_t size, size_t flags);
 void arena_mapbits_large_binind_set(arena_chunk_t *chunk, size_t pageind,
-    index_t binind);
+    szind_t binind);
 void arena_mapbits_small_set(arena_chunk_t *chunk, size_t pageind,
-    size_t runind, index_t binind, size_t flags);
+    size_t runind, szind_t binind, size_t flags);
 void arena_metadata_allocated_add(arena_t *arena, size_t size);
 void arena_metadata_allocated_sub(arena_t *arena, size_t size);
 size_t arena_metadata_allocated_get(arena_t *arena);
 bool arena_prof_accum_impl(arena_t *arena, uint64_t accumbytes);
 bool arena_prof_accum_locked(arena_t *arena, uint64_t accumbytes);
 bool arena_prof_accum(arena_t *arena, uint64_t accumbytes);
-index_t arena_ptr_small_binind_get(const void *ptr, size_t mapbits);
-index_t arena_bin_index(arena_t *arena, arena_bin_t *bin);
+szind_t arena_ptr_small_binind_get(const void *ptr, size_t mapbits);
+szind_t arena_bin_index(arena_t *arena, arena_bin_t *bin);
 unsigned arena_run_regind(arena_run_t *run, arena_bin_info_t *bin_info,
     const void *ptr);
 prof_tctx_t *arena_prof_tctx_get(const void *ptr);
@@ -701,11 +701,11 @@ arena_mapbits_small_runind_get(arena_chunk_t *chunk, size_t pageind)
 	return (mapbits >> CHUNK_MAP_RUNIND_SHIFT);
 }
 
-JEMALLOC_ALWAYS_INLINE index_t
+JEMALLOC_ALWAYS_INLINE szind_t
 arena_mapbits_binind_get(arena_chunk_t *chunk, size_t pageind)
 {
 	size_t mapbits;
-	index_t binind;
+	szind_t binind;
 
 	mapbits = arena_mapbits_get(chunk, pageind);
 	binind = (mapbits & CHUNK_MAP_BININD_MASK) >> CHUNK_MAP_BININD_SHIFT;
@@ -840,7 +840,7 @@ arena_mapbits_large_set(arena_chunk_t *chunk, size_t pageind, size_t size,
 
 JEMALLOC_ALWAYS_INLINE void
 arena_mapbits_large_binind_set(arena_chunk_t *chunk, size_t pageind,
-    index_t binind)
+    szind_t binind)
 {
 	size_t *mapbitsp = arena_mapbitsp_get(chunk, pageind);
 	size_t mapbits = arena_mapbitsp_read(mapbitsp);
@@ -854,7 +854,7 @@ arena_mapbits_large_binind_set(arena_chunk_t *chunk, size_t pageind,
 
 JEMALLOC_ALWAYS_INLINE void
 arena_mapbits_small_set(arena_chunk_t *chunk, size_t pageind, size_t runind,
-    index_t binind, size_t flags)
+    szind_t binind, size_t flags)
 {
 	size_t *mapbitsp = arena_mapbitsp_get(chunk, pageind);
 
@@ -931,10 +931,10 @@ arena_prof_accum(arena_t *arena, uint64_t accumbytes)
 	}
 }
 
-JEMALLOC_ALWAYS_INLINE index_t
+JEMALLOC_ALWAYS_INLINE szind_t
 arena_ptr_small_binind_get(const void *ptr, size_t mapbits)
 {
-	index_t binind;
+	szind_t binind;
 
 	binind = (mapbits & CHUNK_MAP_BININD_MASK) >> CHUNK_MAP_BININD_SHIFT;
 
@@ -946,7 +946,7 @@ arena_ptr_small_binind_get(const void *ptr, size_t mapbits)
 		size_t rpages_ind;
 		arena_run_t *run;
 		arena_bin_t *bin;
-		index_t run_binind, actual_binind;
+		szind_t run_binind, actual_binind;
 		arena_bin_info_t *bin_info;
 		arena_chunk_map_misc_t *miscelm;
 		void *rpages;
@@ -980,10 +980,10 @@ arena_ptr_small_binind_get(const void *ptr, size_t mapbits)
 # endif /* JEMALLOC_ARENA_INLINE_A */
 
 # ifdef JEMALLOC_ARENA_INLINE_B
-JEMALLOC_INLINE index_t
+JEMALLOC_INLINE szind_t
 arena_bin_index(arena_t *arena, arena_bin_t *bin)
 {
-	index_t binind = bin - arena->bins;
+	szind_t binind = bin - arena->bins;
 	assert(binind < NBINS);
 	return (binind);
 }
@@ -1161,7 +1161,7 @@ arena_salloc(const void *ptr, bool demote)
 	size_t ret;
 	arena_chunk_t *chunk;
 	size_t pageind;
-	index_t binind;
+	szind_t binind;
 
 	assert(ptr != NULL);
 
@@ -1220,7 +1220,7 @@ arena_dalloc(tsd_t *tsd, void *ptr, tcache_t *tcache)
 	if (likely((mapbits & CHUNK_MAP_LARGE) == 0)) {
 		/* Small allocation. */
 		if (likely(tcache != NULL)) {
-			index_t binind = arena_ptr_small_binind_get(ptr,
+			szind_t binind = arena_ptr_small_binind_get(ptr,
 			    mapbits);
 			tcache_dalloc_small(tsd, tcache, ptr, binind);
 		} else {
@@ -1272,7 +1272,7 @@ arena_sdalloc(tsd_t *tsd, void *ptr, size_t size, tcache_t *tcache)
 	if (likely(size <= SMALL_MAXCLASS)) {
 		/* Small allocation. */
 		if (likely(tcache != NULL)) {
-			index_t binind = size2index(size);
+			szind_t binind = size2index(size);
 			tcache_dalloc_small(tsd, tcache, ptr, binind);
 		} else {
 			size_t pageind = ((uintptr_t)ptr -
@@ -184,7 +184,7 @@ static const bool config_cache_oblivious =
 #include "jemalloc/internal/jemalloc_internal_macros.h"
 
 /* Size class index type. */
-typedef unsigned index_t;
+typedef unsigned szind_t;
 
 /*
  * Flags bits:
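For context on the type being renamed: a szind_t value is an index into jemalloc's size-class table; size2index() maps a requested size to the smallest class that can hold it, and index2size() maps an index back to that class's size (both appear in the hunks that follow). The sketch below illustrates that round trip with a small hypothetical table; it is not jemalloc's actual lookup implementation, which uses generated tables and bit arithmetic.

#include <assert.h>
#include <stddef.h>
#include <stdio.h>

typedef unsigned szind_t;

/* hypothetical size-class table; jemalloc generates its real one */
static const size_t size_tab[] = {8, 16, 32, 48, 64, 80, 96, 112, 128};
#define NCLASSES (sizeof(size_tab) / sizeof(size_tab[0]))

/* smallest class index whose size is >= the requested size */
static szind_t
size2index(size_t size)
{
	szind_t i;

	for (i = 0; i < NCLASSES; i++) {
		if (size_tab[i] >= size)
			return (i);
	}
	assert(0 && "size exceeds largest class in this sketch");
	return (NCLASSES - 1);
}

static size_t
index2size(szind_t index)
{
	assert(index < NCLASSES);
	return (size_tab[index]);
}

int
main(void)
{
	size_t usize = index2size(size2index(42));

	/* a request for 42 bytes rounds up to the 48-byte class */
	printf("42 -> class %u -> %zu bytes\n", size2index(42), usize);
	return (0);
}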
@@ -511,12 +511,12 @@ void jemalloc_postfork_child(void);
 #include "jemalloc/internal/huge.h"
 
 #ifndef JEMALLOC_ENABLE_INLINE
-index_t size2index_compute(size_t size);
-index_t size2index_lookup(size_t size);
-index_t size2index(size_t size);
-size_t index2size_compute(index_t index);
-size_t index2size_lookup(index_t index);
-size_t index2size(index_t index);
+szind_t size2index_compute(size_t size);
+szind_t size2index_lookup(size_t size);
+szind_t size2index(size_t size);
+size_t index2size_compute(szind_t index);
+size_t index2size_lookup(szind_t index);
+size_t index2size(szind_t index);
 size_t s2u_compute(size_t size);
 size_t s2u_lookup(size_t size);
 size_t s2u(size_t size);
@@ -527,7 +527,7 @@ arena_t *arena_get(tsd_t *tsd, unsigned ind, bool init_if_missing,
 #endif
 
 #if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_C_))
-JEMALLOC_INLINE index_t
+JEMALLOC_INLINE szind_t
 size2index_compute(size_t size)
 {
 
@@ -558,7 +558,7 @@ size2index_compute(size_t size)
 	}
 }
 
-JEMALLOC_ALWAYS_INLINE index_t
+JEMALLOC_ALWAYS_INLINE szind_t
 size2index_lookup(size_t size)
 {
 
@@ -571,7 +571,7 @@ size2index_lookup(size_t size)
 	}
 }
 
-JEMALLOC_ALWAYS_INLINE index_t
+JEMALLOC_ALWAYS_INLINE szind_t
 size2index(size_t size)
 {
 
@@ -582,7 +582,7 @@ size2index(size_t size)
 }
 
 JEMALLOC_INLINE size_t
-index2size_compute(index_t index)
+index2size_compute(szind_t index)
 {
 
 #if (NTBINS > 0)
@@ -609,7 +609,7 @@ index2size_compute(index_t index)
 }
 
 JEMALLOC_ALWAYS_INLINE size_t
-index2size_lookup(index_t index)
+index2size_lookup(szind_t index)
 {
 	size_t ret = (size_t)index2size_tab[index];
 	assert(ret == index2size_compute(index));
@@ -617,7 +617,7 @@ index2size_lookup(index_t index)
 }
 
 JEMALLOC_ALWAYS_INLINE size_t
-index2size(index_t index)
+index2size(szind_t index)
 {
 
 	assert(index < NSIZES);
@@ -976,7 +976,7 @@ u2rz(size_t usize)
 	size_t ret;
 
 	if (usize <= SMALL_MAXCLASS) {
-		index_t binind = size2index(usize);
+		szind_t binind = size2index(usize);
 		ret = arena_bin_info[binind].redzone_size;
 	} else
 		ret = 0;
@@ -77,7 +77,7 @@ struct tcache_s {
 	ql_elm(tcache_t) link;		/* Used for aggregating stats. */
 	uint64_t prof_accumbytes;	/* Cleared after arena_prof_accum(). */
 	unsigned ev_cnt;		/* Event count since incremental GC. */
-	index_t next_gc_bin;		/* Next bin to GC. */
+	szind_t next_gc_bin;		/* Next bin to GC. */
 	tcache_bin_t tbins[1];		/* Dynamically sized. */
 	/*
 	 * The pointer stacks associated with tbins follow as a contiguous
@@ -126,10 +126,10 @@ extern tcaches_t *tcaches;
 size_t tcache_salloc(const void *ptr);
 void tcache_event_hard(tsd_t *tsd, tcache_t *tcache);
 void *tcache_alloc_small_hard(tsd_t *tsd, arena_t *arena, tcache_t *tcache,
-    tcache_bin_t *tbin, index_t binind);
+    tcache_bin_t *tbin, szind_t binind);
 void tcache_bin_flush_small(tsd_t *tsd, tcache_t *tcache, tcache_bin_t *tbin,
-    index_t binind, unsigned rem);
-void tcache_bin_flush_large(tsd_t *tsd, tcache_bin_t *tbin, index_t binind,
+    szind_t binind, unsigned rem);
+void tcache_bin_flush_large(tsd_t *tsd, tcache_bin_t *tbin, szind_t binind,
     unsigned rem, tcache_t *tcache);
 void tcache_arena_associate(tcache_t *tcache, arena_t *arena);
 void tcache_arena_reassociate(tcache_t *tcache, arena_t *oldarena,
@@ -161,7 +161,7 @@ void *tcache_alloc_small(tsd_t *tsd, arena_t *arena, tcache_t *tcache,
 void *tcache_alloc_large(tsd_t *tsd, arena_t *arena, tcache_t *tcache,
     size_t size, bool zero);
 void tcache_dalloc_small(tsd_t *tsd, tcache_t *tcache, void *ptr,
-    index_t binind);
+    szind_t binind);
 void tcache_dalloc_large(tsd_t *tsd, tcache_t *tcache, void *ptr,
     size_t size);
 tcache_t *tcaches_get(tsd_t *tsd, unsigned ind);
@@ -267,7 +267,7 @@ tcache_alloc_small(tsd_t *tsd, arena_t *arena, tcache_t *tcache, size_t size,
     bool zero)
 {
 	void *ret;
-	index_t binind;
+	szind_t binind;
 	size_t usize;
 	tcache_bin_t *tbin;
 
@@ -312,7 +312,7 @@ tcache_alloc_large(tsd_t *tsd, arena_t *arena, tcache_t *tcache, size_t size,
     bool zero)
 {
 	void *ret;
-	index_t binind;
+	szind_t binind;
 	size_t usize;
 	tcache_bin_t *tbin;
 
@@ -360,7 +360,7 @@ tcache_alloc_large(tsd_t *tsd, arena_t *arena, tcache_t *tcache, size_t size,
 }
 
 JEMALLOC_ALWAYS_INLINE void
-tcache_dalloc_small(tsd_t *tsd, tcache_t *tcache, void *ptr, index_t binind)
+tcache_dalloc_small(tsd_t *tsd, tcache_t *tcache, void *ptr, szind_t binind)
 {
 	tcache_bin_t *tbin;
 	tcache_bin_info_t *tbin_info;
@@ -386,7 +386,7 @@ tcache_dalloc_small(tsd_t *tsd, tcache_t *tcache, void *ptr, index_t binind)
 JEMALLOC_ALWAYS_INLINE void
 tcache_dalloc_large(tsd_t *tsd, tcache_t *tcache, void *ptr, size_t size)
 {
-	index_t binind;
+	szind_t binind;
 	tcache_bin_t *tbin;
 	tcache_bin_info_t *tbin_info;
 
src/arena.c (46 changed lines)
@@ -314,7 +314,7 @@ arena_run_reg_dalloc(arena_run_t *run, void *ptr)
 	arena_chunk_t *chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(run);
 	size_t pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >> LG_PAGE;
 	size_t mapbits = arena_mapbits_get(chunk, pageind);
-	index_t binind = arena_ptr_small_binind_get(ptr, mapbits);
+	szind_t binind = arena_ptr_small_binind_get(ptr, mapbits);
 	arena_bin_info_t *bin_info = &arena_bin_info[binind];
 	unsigned regind = arena_run_regind(run, bin_info, ptr);
 
@@ -507,7 +507,7 @@ arena_run_init_large(arena_t *arena, arena_run_t *run, size_t size, bool zero)
 
 static bool
 arena_run_split_small(arena_t *arena, arena_run_t *run, size_t size,
-    index_t binind)
+    szind_t binind)
 {
 	arena_chunk_t *chunk;
 	arena_chunk_map_misc_t *miscelm;
@@ -779,7 +779,7 @@ arena_chunk_dalloc(arena_t *arena, arena_chunk_t *chunk)
 static void
 arena_huge_malloc_stats_update(arena_t *arena, size_t usize)
 {
-	index_t index = size2index(usize) - nlclasses - NBINS;
+	szind_t index = size2index(usize) - nlclasses - NBINS;
 
 	cassert(config_stats);
 
@@ -792,7 +792,7 @@ arena_huge_malloc_stats_update(arena_t *arena, size_t usize)
 static void
 arena_huge_malloc_stats_update_undo(arena_t *arena, size_t usize)
 {
-	index_t index = size2index(usize) - nlclasses - NBINS;
+	szind_t index = size2index(usize) - nlclasses - NBINS;
 
 	cassert(config_stats);
 
@@ -805,7 +805,7 @@ arena_huge_malloc_stats_update_undo(arena_t *arena, size_t usize)
 static void
 arena_huge_dalloc_stats_update(arena_t *arena, size_t usize)
 {
-	index_t index = size2index(usize) - nlclasses - NBINS;
+	szind_t index = size2index(usize) - nlclasses - NBINS;
 
 	cassert(config_stats);
 
@@ -818,7 +818,7 @@ arena_huge_dalloc_stats_update(arena_t *arena, size_t usize)
 static void
 arena_huge_dalloc_stats_update_undo(arena_t *arena, size_t usize)
 {
-	index_t index = size2index(usize) - nlclasses - NBINS;
+	szind_t index = size2index(usize) - nlclasses - NBINS;
 
 	cassert(config_stats);
 
@@ -1124,7 +1124,7 @@ arena_run_alloc_large(arena_t *arena, size_t size, bool zero)
 }
 
 static arena_run_t *
-arena_run_alloc_small_helper(arena_t *arena, size_t size, index_t binind)
+arena_run_alloc_small_helper(arena_t *arena, size_t size, szind_t binind)
 {
 	arena_run_t *run = arena_run_first_best_fit(arena, size);
 	if (run != NULL) {
@@ -1135,7 +1135,7 @@ arena_run_alloc_small_helper(arena_t *arena, size_t size, index_t binind)
 }
 
 static arena_run_t *
-arena_run_alloc_small(arena_t *arena, size_t size, index_t binind)
+arena_run_alloc_small(arena_t *arena, size_t size, szind_t binind)
 {
 	arena_chunk_t *chunk;
 	arena_run_t *run;
@@ -1888,7 +1888,7 @@ static arena_run_t *
 arena_bin_nonfull_run_get(arena_t *arena, arena_bin_t *bin)
 {
 	arena_run_t *run;
-	index_t binind;
+	szind_t binind;
 	arena_bin_info_t *bin_info;
 
 	/* Look for a usable run. */
@@ -1939,7 +1939,7 @@ static void *
 arena_bin_malloc_hard(arena_t *arena, arena_bin_t *bin)
 {
 	void *ret;
-	index_t binind;
+	szind_t binind;
 	arena_bin_info_t *bin_info;
 	arena_run_t *run;
 
@@ -1985,7 +1985,7 @@ arena_bin_malloc_hard(arena_t *arena, arena_bin_t *bin)
 }
 
 void
-arena_tcache_fill_small(arena_t *arena, tcache_bin_t *tbin, index_t binind,
+arena_tcache_fill_small(arena_t *arena, tcache_bin_t *tbin, szind_t binind,
     uint64_t prof_accumbytes)
 {
 	unsigned i, nfill;
@@ -2130,7 +2130,7 @@ arena_dalloc_junk_small_t *arena_dalloc_junk_small =
 void
 arena_quarantine_junk_small(void *ptr, size_t usize)
 {
-	index_t binind;
+	szind_t binind;
 	arena_bin_info_t *bin_info;
 	cassert(config_fill);
 	assert(opt_junk_free);
@@ -2148,7 +2148,7 @@ arena_malloc_small(arena_t *arena, size_t size, bool zero)
 	void *ret;
 	arena_bin_t *bin;
 	arena_run_t *run;
-	index_t binind;
+	szind_t binind;
 
 	binind = size2index(size);
 	assert(binind < NBINS);
@@ -2232,7 +2232,7 @@ arena_malloc_large(arena_t *arena, size_t size, bool zero)
 	ret = (void *)((uintptr_t)arena_miscelm_to_rpages(miscelm) +
 	    random_offset);
 	if (config_stats) {
-		index_t index = size2index(usize) - NBINS;
+		szind_t index = size2index(usize) - NBINS;
 
 		arena->stats.nmalloc_large++;
 		arena->stats.nrequests_large++;
@@ -2325,7 +2325,7 @@ arena_palloc_large(tsd_t *tsd, arena_t *arena, size_t usize, size_t alignment,
 	ret = arena_miscelm_to_rpages(miscelm);
 
 	if (config_stats) {
-		index_t index = size2index(usize) - NBINS;
+		szind_t index = size2index(usize) - NBINS;
 
 		arena->stats.nmalloc_large++;
 		arena->stats.nrequests_large++;
@@ -2384,7 +2384,7 @@ arena_prof_promoted(const void *ptr, size_t size)
 {
 	arena_chunk_t *chunk;
 	size_t pageind;
-	index_t binind;
+	szind_t binind;
 
 	cassert(config_prof);
 	assert(ptr != NULL);
@@ -2412,7 +2412,7 @@ arena_dissociate_bin_run(arena_chunk_t *chunk, arena_run_t *run,
 	if (run == bin->runcur)
 		bin->runcur = NULL;
 	else {
-		index_t binind = arena_bin_index(extent_node_arena_get(
+		szind_t binind = arena_bin_index(extent_node_arena_get(
 		    &chunk->node), bin);
 		arena_bin_info_t *bin_info = &arena_bin_info[binind];
 
@@ -2476,7 +2476,7 @@ arena_dalloc_bin_locked_impl(arena_t *arena, arena_chunk_t *chunk, void *ptr,
 	arena_run_t *run;
 	arena_bin_t *bin;
 	arena_bin_info_t *bin_info;
-	index_t binind;
+	szind_t binind;
 
 	pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >> LG_PAGE;
 	rpages_ind = pageind - arena_mapbits_small_runind_get(chunk, pageind);
@@ -2573,7 +2573,7 @@ arena_dalloc_large_locked_impl(arena_t *arena, arena_chunk_t *chunk,
 	if (!junked)
 		arena_dalloc_junk_large(ptr, usize);
 	if (config_stats) {
-		index_t index = size2index(usize) - NBINS;
+		szind_t index = size2index(usize) - NBINS;
 
 		arena->stats.ndalloc_large++;
 		arena->stats.allocated_large -= usize;
@@ -2620,8 +2620,8 @@ arena_ralloc_large_shrink(arena_t *arena, arena_chunk_t *chunk, void *ptr,
 	arena_run_trim_tail(arena, chunk, run, oldsize + large_pad, size +
 	    large_pad, true);
 	if (config_stats) {
-		index_t oldindex = size2index(oldsize) - NBINS;
-		index_t index = size2index(size) - NBINS;
+		szind_t oldindex = size2index(oldsize) - NBINS;
+		szind_t index = size2index(size) - NBINS;
 
 		arena->stats.ndalloc_large++;
 		arena->stats.allocated_large -= oldsize;
@@ -2699,8 +2699,8 @@ arena_ralloc_large_grow(arena_t *arena, arena_chunk_t *chunk, void *ptr,
 	    pageind+npages-1)));
 
 	if (config_stats) {
-		index_t oldindex = size2index(oldsize) - NBINS;
-		index_t index = size2index(size) - NBINS;
+		szind_t oldindex = size2index(oldsize) - NBINS;
+		szind_t index = size2index(size) - NBINS;
 
 		arena->stats.ndalloc_large++;
 		arena->stats.allocated_large -= oldsize;
@@ -32,7 +32,7 @@ size_t tcache_salloc(const void *ptr)
 void
 tcache_event_hard(tsd_t *tsd, tcache_t *tcache)
 {
-	index_t binind = tcache->next_gc_bin;
+	szind_t binind = tcache->next_gc_bin;
 	tcache_bin_t *tbin = &tcache->tbins[binind];
 	tcache_bin_info_t *tbin_info = &tcache_bin_info[binind];
 
@@ -72,7 +72,7 @@ tcache_event_hard(tsd_t *tsd, tcache_t *tcache)
 
 void *
 tcache_alloc_small_hard(tsd_t *tsd, arena_t *arena, tcache_t *tcache,
-    tcache_bin_t *tbin, index_t binind)
+    tcache_bin_t *tbin, szind_t binind)
 {
 	void *ret;
 
@@ -87,7 +87,7 @@ tcache_alloc_small_hard(tsd_t *tsd, arena_t *arena, tcache_t *tcache,
 
 void
 tcache_bin_flush_small(tsd_t *tsd, tcache_t *tcache, tcache_bin_t *tbin,
-    index_t binind, unsigned rem)
+    szind_t binind, unsigned rem)
 {
 	arena_t *arena;
 	void *ptr;
@@ -166,7 +166,7 @@ tcache_bin_flush_small(tsd_t *tsd, tcache_t *tcache, tcache_bin_t *tbin,
 }
 
 void
-tcache_bin_flush_large(tsd_t *tsd, tcache_bin_t *tbin, index_t binind,
+tcache_bin_flush_large(tsd_t *tsd, tcache_bin_t *tbin, szind_t binind,
     unsigned rem, tcache_t *tcache)
 {
 	arena_t *arena;
@@ -26,7 +26,7 @@ get_max_size_class(void)
 TEST_BEGIN(test_size_classes)
 {
 	size_t size_class, max_size_class;
-	index_t index, max_index;
+	szind_t index, max_index;
 
 	max_size_class = get_max_size_class();
 	max_index = size2index(max_size_class);