Reduce variables scope

Dmitry-Me, 2015-09-04 13:15:28 +03:00, committed by Jason Evans
parent 0a116faf95
commit a306a60651


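Every hunk below applies the same mechanical refactoring: a variable declared at function scope but used in only one block is moved into that block, so its lifetime and visibility match its actual use. (Judging by the function names, the file is jemalloc's src/arena.c.) A minimal sketch of the before/after pattern, with hypothetical names that are not taken from the commit:

#include <stddef.h>
#include <stdio.h>

/* Before: `i` is declared at function scope even though only one
 * branch uses it, so it is in scope (and reusable by mistake)
 * everywhere in the function. */
static size_t
count_zeros_wide(const unsigned char *buf, size_t len, int check)
{
	size_t i;
	size_t nzeros = 0;

	if (check) {
		for (i = 0; i < len; i++) {
			if (buf[i] == 0)
				nzeros++;
		}
	}
	return (nzeros);
}

/* After: `i` is declared in the only block that uses it, so a reader
 * can see at a glance that it is dead everywhere else. */
static size_t
count_zeros_narrow(const unsigned char *buf, size_t len, int check)
{
	size_t nzeros = 0;

	if (check) {
		size_t i;

		for (i = 0; i < len; i++) {
			if (buf[i] == 0)
				nzeros++;
		}
	}
	return (nzeros);
}

int
main(void)
{
	const unsigned char buf[] = {0, 1, 0, 2};

	/* Both variants count two zero bytes; only the scope differs. */
	printf("%zu %zu\n", count_zeros_wide(buf, sizeof(buf), 1),
	    count_zeros_narrow(buf, sizeof(buf), 1));
	return (0);
}

Note that the moved declarations still sit at the top of their enclosing block (e.g. `size_t i;` just above its loop) rather than in the `for` initializer, matching the declaration style used throughout the diff.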
@@ -425,7 +425,7 @@ arena_run_split_large_helper(arena_t *arena, arena_run_t *run, size_t size,
 {
 	arena_chunk_t *chunk;
 	arena_chunk_map_misc_t *miscelm;
-	size_t flag_dirty, flag_decommitted, run_ind, need_pages, i;
+	size_t flag_dirty, flag_decommitted, run_ind, need_pages;
 	size_t flag_unzeroed_mask;
 
 	chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(run);
@@ -459,6 +459,7 @@ arena_run_split_large_helper(arena_t *arena, arena_run_t *run, size_t size,
 		 * The run is clean, so some pages may be zeroed (i.e.
 		 * never before touched).
 		 */
+		size_t i;
 		for (i = 0; i < need_pages; i++) {
 			if (arena_mapbits_unzeroed_get(chunk, run_ind+i)
 			    != 0)
@@ -1938,7 +1939,6 @@ arena_bin_nonfull_run_get(arena_t *arena, arena_bin_t *bin)
 static void *
 arena_bin_malloc_hard(arena_t *arena, arena_bin_t *bin)
 {
-	void *ret;
 	szind_t binind;
 	arena_bin_info_t *bin_info;
 	arena_run_t *run;
@@ -1952,6 +1952,7 @@ arena_bin_malloc_hard(arena_t *arena, arena_bin_t *bin)
 		 * Another thread updated runcur while this one ran without the
 		 * bin lock in arena_bin_nonfull_run_get().
 		 */
+		void *ret;
 		assert(bin->runcur->nfree > 0);
 		ret = arena_run_reg_alloc(bin->runcur, bin_info);
 		if (run != NULL) {
@@ -1990,8 +1991,6 @@ arena_tcache_fill_small(arena_t *arena, tcache_bin_t *tbin, szind_t binind,
 {
 	unsigned i, nfill;
 	arena_bin_t *bin;
-	arena_run_t *run;
-	void *ptr;
 
 	assert(tbin->ncached == 0);
 
@@ -2001,6 +2000,8 @@ arena_tcache_fill_small(arena_t *arena, tcache_bin_t *tbin, szind_t binind,
 	malloc_mutex_lock(&bin->lock);
 	for (i = 0, nfill = (tcache_bin_info[binind].ncached_max >>
 	    tbin->lg_fill_div); i < nfill; i++) {
+		arena_run_t *run;
+		void *ptr;
 		if ((run = bin->runcur) != NULL && run->nfree > 0)
 			ptr = arena_run_reg_alloc(run, &arena_bin_info[binind]);
 		else
@@ -2075,12 +2076,13 @@ arena_redzone_corruption_t *arena_redzone_corruption =
 static void
 arena_redzones_validate(void *ptr, arena_bin_info_t *bin_info, bool reset)
 {
-	size_t size = bin_info->reg_size;
-	size_t redzone_size = bin_info->redzone_size;
-	size_t i;
 	bool error = false;
 
 	if (opt_junk_alloc) {
+		size_t size = bin_info->reg_size;
+		size_t redzone_size = bin_info->redzone_size;
+		size_t i;
+
 		for (i = 1; i <= redzone_size; i++) {
 			uint8_t *byte = (uint8_t *)((uintptr_t)ptr - i);
 			if (*byte != 0xa5) {
@@ -3240,7 +3242,6 @@ small_run_size_init(void)
 bool
 arena_boot(void)
 {
-	size_t header_size;
 	unsigned i;
 
 	arena_lg_dirty_mult_default_set(opt_lg_dirty_mult);
@@ -3259,7 +3260,7 @@ arena_boot(void)
 	 */
 	map_bias = 0;
 	for (i = 0; i < 3; i++) {
-		header_size = offsetof(arena_chunk_t, map_bits) +
+		size_t header_size = offsetof(arena_chunk_t, map_bits) +
 		    ((sizeof(arena_chunk_map_bits_t) +
 		    sizeof(arena_chunk_map_misc_t)) * (chunk_npages-map_bias));
 		map_bias = (header_size + PAGE_MASK) >> LG_PAGE;