Avoid pointless chunk_recycle() call.

Avoid calling chunk_recycle() for mmap()ed chunks if config_munmap is
enabled, in which case there are never any recyclable chunks.

This resolves #164.
This commit is contained in:
Jason Evans 2015-01-25 17:31:24 -08:00
parent 77d597ebb2
commit 0fd663e9c5

View File

@@ -132,6 +132,19 @@ chunk_recycle(extent_tree_t *chunks_szad, extent_tree_t *chunks_ad,
 	return (ret);
 }
/*
 * Allocate a chunk from the dss.  Recycling an existing extent (via the dss
 * extent trees) is attempted first; chunk_alloc_dss() is consulted only when
 * recycling fails.  Returns NULL if neither strategy yields a chunk.
 */
static void *
chunk_alloc_core_dss(void *new_addr, size_t size, size_t alignment, bool base,
    bool *zero)
{
	void *recycled;

	recycled = chunk_recycle(&chunks_szad_dss, &chunks_ad_dss, new_addr,
	    size, alignment, base, zero);
	if (recycled != NULL)
		return (recycled);

	return (chunk_alloc_dss(new_addr, size, alignment, zero));
}
 /*
  * If the caller specifies (!*zero), it is still possible to receive zeroed
  * memory, in which case *zero is toggled to true. arena_chunk_alloc() takes
@@ -150,31 +163,26 @@ chunk_alloc_core(void *new_addr, size_t size, size_t alignment, bool base,
 	assert((alignment & chunksize_mask) == 0);
 
 	/* "primary" dss. */
-	if (have_dss && dss_prec == dss_prec_primary) {
-		if ((ret = chunk_recycle(&chunks_szad_dss, &chunks_ad_dss,
-		    new_addr, size, alignment, base, zero)) != NULL)
-			return (ret);
-		if ((ret = chunk_alloc_dss(new_addr, size, alignment, zero))
-		    != NULL)
-			return (ret);
-	}
+	if (have_dss && dss_prec == dss_prec_primary && (ret =
+	    chunk_alloc_core_dss(new_addr, size, alignment, base, zero)) !=
+	    NULL)
+		return (ret);
 	/* mmap. */
-	if ((ret = chunk_recycle(&chunks_szad_mmap, &chunks_ad_mmap, new_addr,
-	    size, alignment, base, zero)) != NULL)
+	if (!config_munmap && (ret = chunk_recycle(&chunks_szad_mmap,
+	    &chunks_ad_mmap, new_addr, size, alignment, base, zero)) != NULL)
 		return (ret);
-	/* Requesting an address not implemented for chunk_alloc_mmap(). */
-	if (new_addr == NULL &&
-	    (ret = chunk_alloc_mmap(size, alignment, zero)) != NULL)
+	/*
+	 * Requesting an address is not implemented for chunk_alloc_mmap(), so
+	 * only call it if (new_addr == NULL).
+	 */
+	if (new_addr == NULL && (ret = chunk_alloc_mmap(size, alignment, zero))
+	    != NULL)
 		return (ret);
 	/* "secondary" dss. */
-	if (have_dss && dss_prec == dss_prec_secondary) {
-		if ((ret = chunk_recycle(&chunks_szad_dss, &chunks_ad_dss,
-		    new_addr, size, alignment, base, zero)) != NULL)
-			return (ret);
-		if ((ret = chunk_alloc_dss(new_addr, size, alignment, zero))
-		    != NULL)
-			return (ret);
-	}
+	if (have_dss && dss_prec == dss_prec_secondary && (ret =
+	    chunk_alloc_core_dss(new_addr, size, alignment, base, zero)) !=
+	    NULL)
+		return (ret);
 
 	/* All strategies for allocation failed. */
 	return (NULL);