#include "jemalloc/internal/jemalloc_preamble.h"
#include "jemalloc/internal/jemalloc_internal_includes.h"

#include "jemalloc/internal/bit_util.h"

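/*
 * Record a bin's capacity.  The asserts below keep the derived stack size
 * representable in cache_bin_sz_t (and hence in the bin's low_bits_* fields).
 */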
void
cache_bin_info_init(cache_bin_info_t *info,
    cache_bin_sz_t ncached_max) {
	assert(ncached_max <= CACHE_BIN_NCACHED_MAX);
	size_t stack_size = (size_t)ncached_max * sizeof(void *);
	assert(stack_size < ((size_t)1 << (sizeof(cache_bin_sz_t) * 8)));
	info->ncached_max = (cache_bin_sz_t)ncached_max;
}

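/*
 * Size/alignment of the single allocation backing all bins of one tcache.
 * The two extra pointer slots reserved here are the ones later filled in by
 * cache_bin_preincrement() and cache_bin_postincrement().
 *
 * A sketch of how the routines below are meant to compose, inferred from the
 * way *cur_offset is threaded through them (names other than the functions
 * themselves are illustrative):
 *
 *	size_t size, alignment, cur_offset = 0;
 *	cache_bin_info_compute_alloc(infos, ninfos, &size, &alignment);
 *	void *mem = ...;	// `size` bytes, `alignment`-aligned
 *	cache_bin_preincrement(infos, ninfos, mem, &cur_offset);
 *	for (szind_t i = 0; i < ninfos; i++) {
 *		cache_bin_init(&bins[i], &infos[i], mem, &cur_offset);
 *	}
 *	cache_bin_postincrement(infos, ninfos, mem, &cur_offset);
 */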
void
cache_bin_info_compute_alloc(cache_bin_info_t *infos, szind_t ninfos,
    size_t *size, size_t *alignment) {
	/*
	 * For the total bin stack region (per tcache), reserve 2 more slots
	 * so that
	 * 1) the empty position can be safely read on the fast path before
	 *    checking "is_empty"; and
	 * 2) the cur_ptr can go beyond the empty position by 1 step safely
	 *    on the fast path (i.e. no overflow).
	 */
	*size = sizeof(void *) * 2;
	for (szind_t i = 0; i < ninfos; i++) {
		assert(infos[i].ncached_max > 0);
		*size += infos[i].ncached_max * sizeof(void *);
	}

	/*
	 * Align to at least PAGE, to minimize the # of TLBs needed by the
	 * smaller sizes; also helps if the larger sizes don't get used at all.
	 */
	*alignment = PAGE;
}

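/*
 * Write the "preceding junk" sentinel into the slot below the first bin stack
 * and advance *cur_offset past it.  In debug builds, first recompute the
 * expected alignment and check the provided allocation against it.
 */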
void
cache_bin_preincrement(cache_bin_info_t *infos, szind_t ninfos, void *alloc,
    size_t *cur_offset) {
	if (config_debug) {
		size_t computed_size;
		size_t computed_alignment;

		/* Pointer should be as aligned as we asked for. */
		cache_bin_info_compute_alloc(infos, ninfos, &computed_size,
		    &computed_alignment);
		assert(((uintptr_t)alloc & (computed_alignment - 1)) == 0);
	}

	*(uintptr_t *)((uintptr_t)alloc + *cur_offset) =
	    cache_bin_preceding_junk;
	*cur_offset += sizeof(void *);
}

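/*
 * Write the "trailing junk" sentinel into the slot just past the last bin
 * stack; together with the preceding sentinel it brackets the whole region.
 */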
void
cache_bin_postincrement(cache_bin_info_t *infos, szind_t ninfos, void *alloc,
    size_t *cur_offset) {
	*(uintptr_t *)((uintptr_t)alloc + *cur_offset) =
	    cache_bin_trailing_junk;
	*cur_offset += sizeof(void *);
}

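/*
 * Carve this bin's stack out of the shared allocation at *cur_offset and
 * initialize the bin to the empty state: stack_head, the low water mark, and
 * low_bits_empty all start at empty_position, with low_bits_full one
 * stack-size below it.
 */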
void
cache_bin_init(cache_bin_t *bin, cache_bin_info_t *info, void *alloc,
    size_t *cur_offset) {
	/*
	 * The full_position points to the lowest available space.  Allocations
	 * will access the slots toward higher addresses (for the benefit of
	 * adjacent prefetch).
	 */
	void *stack_cur = (void *)((uintptr_t)alloc + *cur_offset);
	void *full_position = stack_cur;
	uint16_t bin_stack_size = info->ncached_max * sizeof(void *);

	*cur_offset += bin_stack_size;
	void *empty_position = (void *)((uintptr_t)alloc + *cur_offset);

	/* Init to the empty position. */
	bin->stack_head = (void **)empty_position;
	bin->low_bits_low_water = (uint16_t)(uintptr_t)bin->stack_head;
	bin->low_bits_full = (uint16_t)(uintptr_t)full_position;
	bin->low_bits_empty = (uint16_t)(uintptr_t)empty_position;
	assert(cache_bin_diff(bin, bin->low_bits_full,
	    (uint16_t)(uintptr_t)bin->stack_head) == bin_stack_size);
	assert(cache_bin_ncached_get_local(bin, info) == 0);
	assert(cache_bin_empty_position_get(bin) == empty_position);
	assert(bin_stack_size > 0 || empty_position == full_position);
}

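/*
 * cache_bin_init() always leaves stack_head pointing into the allocation, so
 * a NULL stack_head means this bin has not been initialized yet.
 */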
bool
cache_bin_still_zero_initialized(cache_bin_t *bin) {
	return bin->stack_head == NULL;
}