Rename index_t to szind_t to avoid an existing type on Solaris.
This resolves #256.
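
For context, a minimal standalone sketch of the kind of collision being avoided. It is illustrative only: it assumes Solaris' <sys/types.h> already declares an index_t type and that jemalloc's internal size-class index type is a plain unsigned integer; neither detail is taken from this diff.

/*
 * Hypothetical sketch, not jemalloc's actual headers.  If a system header
 * (assumed here: Solaris' <sys/types.h>) already defines index_t, an
 * internal "typedef unsigned index_t;" collides with it.  Renaming the
 * internal type to szind_t sidesteps the clash while keeping the same
 * underlying representation, which is why the diff below is a mechanical
 * type rename.
 */
#include <stdio.h>

typedef unsigned szind_t;	/* Size-class index, formerly index_t. */

int
main(void)
{
	szind_t binind = 0;	/* Previously declared as "index_t binind;". */

	printf("binind = %u\n", binind);
	return (0);
}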
src/arena.c
@@ -314,7 +314,7 @@ arena_run_reg_dalloc(arena_run_t *run, void *ptr)
 	arena_chunk_t *chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(run);
 	size_t pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >> LG_PAGE;
 	size_t mapbits = arena_mapbits_get(chunk, pageind);
-	index_t binind = arena_ptr_small_binind_get(ptr, mapbits);
+	szind_t binind = arena_ptr_small_binind_get(ptr, mapbits);
 	arena_bin_info_t *bin_info = &arena_bin_info[binind];
 	unsigned regind = arena_run_regind(run, bin_info, ptr);
 
@@ -507,7 +507,7 @@ arena_run_init_large(arena_t *arena, arena_run_t *run, size_t size, bool zero)
 
 static bool
 arena_run_split_small(arena_t *arena, arena_run_t *run, size_t size,
-    index_t binind)
+    szind_t binind)
 {
 	arena_chunk_t *chunk;
 	arena_chunk_map_misc_t *miscelm;
@@ -779,7 +779,7 @@ arena_chunk_dalloc(arena_t *arena, arena_chunk_t *chunk)
 static void
 arena_huge_malloc_stats_update(arena_t *arena, size_t usize)
 {
-	index_t index = size2index(usize) - nlclasses - NBINS;
+	szind_t index = size2index(usize) - nlclasses - NBINS;
 
 	cassert(config_stats);
 
@@ -792,7 +792,7 @@ arena_huge_malloc_stats_update(arena_t *arena, size_t usize)
 static void
 arena_huge_malloc_stats_update_undo(arena_t *arena, size_t usize)
 {
-	index_t index = size2index(usize) - nlclasses - NBINS;
+	szind_t index = size2index(usize) - nlclasses - NBINS;
 
 	cassert(config_stats);
 
@@ -805,7 +805,7 @@ arena_huge_malloc_stats_update_undo(arena_t *arena, size_t usize)
 static void
 arena_huge_dalloc_stats_update(arena_t *arena, size_t usize)
 {
-	index_t index = size2index(usize) - nlclasses - NBINS;
+	szind_t index = size2index(usize) - nlclasses - NBINS;
 
 	cassert(config_stats);
 
@@ -818,7 +818,7 @@ arena_huge_dalloc_stats_update(arena_t *arena, size_t usize)
 static void
 arena_huge_dalloc_stats_update_undo(arena_t *arena, size_t usize)
 {
-	index_t index = size2index(usize) - nlclasses - NBINS;
+	szind_t index = size2index(usize) - nlclasses - NBINS;
 
 	cassert(config_stats);
 
@@ -1124,7 +1124,7 @@ arena_run_alloc_large(arena_t *arena, size_t size, bool zero)
 }
 
 static arena_run_t *
-arena_run_alloc_small_helper(arena_t *arena, size_t size, index_t binind)
+arena_run_alloc_small_helper(arena_t *arena, size_t size, szind_t binind)
 {
 	arena_run_t *run = arena_run_first_best_fit(arena, size);
 	if (run != NULL) {
@@ -1135,7 +1135,7 @@ arena_run_alloc_small_helper(arena_t *arena, size_t size, index_t binind)
 }
 
 static arena_run_t *
-arena_run_alloc_small(arena_t *arena, size_t size, index_t binind)
+arena_run_alloc_small(arena_t *arena, size_t size, szind_t binind)
 {
 	arena_chunk_t *chunk;
 	arena_run_t *run;
@@ -1888,7 +1888,7 @@ static arena_run_t *
 arena_bin_nonfull_run_get(arena_t *arena, arena_bin_t *bin)
 {
 	arena_run_t *run;
-	index_t binind;
+	szind_t binind;
 	arena_bin_info_t *bin_info;
 
 	/* Look for a usable run. */
@@ -1939,7 +1939,7 @@ static void *
 arena_bin_malloc_hard(arena_t *arena, arena_bin_t *bin)
 {
 	void *ret;
-	index_t binind;
+	szind_t binind;
 	arena_bin_info_t *bin_info;
 	arena_run_t *run;
 
@@ -1985,7 +1985,7 @@ arena_bin_malloc_hard(arena_t *arena, arena_bin_t *bin)
 }
 
 void
-arena_tcache_fill_small(arena_t *arena, tcache_bin_t *tbin, index_t binind,
+arena_tcache_fill_small(arena_t *arena, tcache_bin_t *tbin, szind_t binind,
     uint64_t prof_accumbytes)
 {
 	unsigned i, nfill;
@@ -2130,7 +2130,7 @@ arena_dalloc_junk_small_t *arena_dalloc_junk_small =
 void
 arena_quarantine_junk_small(void *ptr, size_t usize)
 {
-	index_t binind;
+	szind_t binind;
 	arena_bin_info_t *bin_info;
 	cassert(config_fill);
 	assert(opt_junk_free);
@@ -2148,7 +2148,7 @@ arena_malloc_small(arena_t *arena, size_t size, bool zero)
 	void *ret;
 	arena_bin_t *bin;
 	arena_run_t *run;
-	index_t binind;
+	szind_t binind;
 
 	binind = size2index(size);
 	assert(binind < NBINS);
@@ -2232,7 +2232,7 @@ arena_malloc_large(arena_t *arena, size_t size, bool zero)
 	ret = (void *)((uintptr_t)arena_miscelm_to_rpages(miscelm) +
 	    random_offset);
 	if (config_stats) {
-		index_t index = size2index(usize) - NBINS;
+		szind_t index = size2index(usize) - NBINS;
 
 		arena->stats.nmalloc_large++;
 		arena->stats.nrequests_large++;
@@ -2325,7 +2325,7 @@ arena_palloc_large(tsd_t *tsd, arena_t *arena, size_t usize, size_t alignment,
 	ret = arena_miscelm_to_rpages(miscelm);
 
 	if (config_stats) {
-		index_t index = size2index(usize) - NBINS;
+		szind_t index = size2index(usize) - NBINS;
 
 		arena->stats.nmalloc_large++;
 		arena->stats.nrequests_large++;
@@ -2384,7 +2384,7 @@ arena_prof_promoted(const void *ptr, size_t size)
 {
 	arena_chunk_t *chunk;
 	size_t pageind;
-	index_t binind;
+	szind_t binind;
 
 	cassert(config_prof);
 	assert(ptr != NULL);
@@ -2412,7 +2412,7 @@ arena_dissociate_bin_run(arena_chunk_t *chunk, arena_run_t *run,
 	if (run == bin->runcur)
 		bin->runcur = NULL;
 	else {
-		index_t binind = arena_bin_index(extent_node_arena_get(
+		szind_t binind = arena_bin_index(extent_node_arena_get(
 		    &chunk->node), bin);
 		arena_bin_info_t *bin_info = &arena_bin_info[binind];
 
@@ -2476,7 +2476,7 @@ arena_dalloc_bin_locked_impl(arena_t *arena, arena_chunk_t *chunk, void *ptr,
 	arena_run_t *run;
 	arena_bin_t *bin;
 	arena_bin_info_t *bin_info;
-	index_t binind;
+	szind_t binind;
 
 	pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >> LG_PAGE;
 	rpages_ind = pageind - arena_mapbits_small_runind_get(chunk, pageind);
@@ -2573,7 +2573,7 @@ arena_dalloc_large_locked_impl(arena_t *arena, arena_chunk_t *chunk,
 		if (!junked)
 			arena_dalloc_junk_large(ptr, usize);
 		if (config_stats) {
-			index_t index = size2index(usize) - NBINS;
+			szind_t index = size2index(usize) - NBINS;
 
 			arena->stats.ndalloc_large++;
 			arena->stats.allocated_large -= usize;
@@ -2620,8 +2620,8 @@ arena_ralloc_large_shrink(arena_t *arena, arena_chunk_t *chunk, void *ptr,
 	arena_run_trim_tail(arena, chunk, run, oldsize + large_pad, size +
 	    large_pad, true);
 	if (config_stats) {
-		index_t oldindex = size2index(oldsize) - NBINS;
-		index_t index = size2index(size) - NBINS;
+		szind_t oldindex = size2index(oldsize) - NBINS;
+		szind_t index = size2index(size) - NBINS;
 
 		arena->stats.ndalloc_large++;
 		arena->stats.allocated_large -= oldsize;
@@ -2699,8 +2699,8 @@ arena_ralloc_large_grow(arena_t *arena, arena_chunk_t *chunk, void *ptr,
 		    pageind+npages-1)));
 
 		if (config_stats) {
-			index_t oldindex = size2index(oldsize) - NBINS;
-			index_t index = size2index(size) - NBINS;
+			szind_t oldindex = size2index(oldsize) - NBINS;
+			szind_t index = size2index(size) - NBINS;
 
 			arena->stats.ndalloc_large++;
 			arena->stats.allocated_large -= oldsize;
src/tcache.c
@@ -32,7 +32,7 @@ size_t tcache_salloc(const void *ptr)
 void
 tcache_event_hard(tsd_t *tsd, tcache_t *tcache)
 {
-	index_t binind = tcache->next_gc_bin;
+	szind_t binind = tcache->next_gc_bin;
 	tcache_bin_t *tbin = &tcache->tbins[binind];
 	tcache_bin_info_t *tbin_info = &tcache_bin_info[binind];
 
@@ -72,7 +72,7 @@ tcache_event_hard(tsd_t *tsd, tcache_t *tcache)
 
 void *
 tcache_alloc_small_hard(tsd_t *tsd, arena_t *arena, tcache_t *tcache,
-    tcache_bin_t *tbin, index_t binind)
+    tcache_bin_t *tbin, szind_t binind)
 {
 	void *ret;
 
@@ -87,7 +87,7 @@ tcache_alloc_small_hard(tsd_t *tsd, arena_t *arena, tcache_t *tcache,
 
 void
 tcache_bin_flush_small(tsd_t *tsd, tcache_t *tcache, tcache_bin_t *tbin,
-    index_t binind, unsigned rem)
+    szind_t binind, unsigned rem)
 {
 	arena_t *arena;
 	void *ptr;
@@ -166,7 +166,7 @@ tcache_bin_flush_small(tsd_t *tsd, tcache_t *tcache, tcache_bin_t *tbin,
 }
 
 void
-tcache_bin_flush_large(tsd_t *tsd, tcache_bin_t *tbin, index_t binind,
+tcache_bin_flush_large(tsd_t *tsd, tcache_bin_t *tbin, szind_t binind,
     unsigned rem, tcache_t *tcache)
 {
 	arena_t *arena;