Refactor rtree to always use base_alloc() for node allocation.

commit 8c9be3e837 (parent db72272bef)
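Summary of the caller-visible change: rtree_new() no longer takes caller-supplied node alloc/dalloc hooks; the rtree allocates its nodes internally via base_alloc(), and a tsdn_t is threaded through the rtree API (and everything layered on it: chunk_lookup(), iealloc(), iaalloc(), etc.) so the allocation context reaches the allocator. An illustrative before/after sketch of a call site (compare the src/chunk.c hunks below):

    /* Before: caller supplied node allocation hooks. */
    rtree_new(&chunks_rtree, bits, chunks_rtree_node_alloc, NULL);
    extent_t *e = rtree_read(&chunks_rtree, key, dependent);

    /* After: hooks are gone; a tsdn_t threads through every rtree call. */
    rtree_new(&chunks_rtree, bits);
    extent_t *e = rtree_read(tsdn, &chunks_rtree, key, dependent);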
@@ -633,7 +633,8 @@ size_t arena_metadata_allocated_get(arena_t *arena);
 bool arena_prof_accum_impl(arena_t *arena, uint64_t accumbytes);
 bool arena_prof_accum_locked(arena_t *arena, uint64_t accumbytes);
 bool arena_prof_accum(tsdn_t *tsdn, arena_t *arena, uint64_t accumbytes);
-szind_t arena_ptr_small_binind_get(const void *ptr, size_t mapbits);
+szind_t arena_ptr_small_binind_get(tsdn_t *tsdn, const void *ptr,
+    size_t mapbits);
 szind_t arena_bin_index(arena_t *arena, arena_bin_t *bin);
 size_t arena_run_regind(arena_run_t *run, const arena_bin_info_t *bin_info,
     const void *ptr);
@@ -647,7 +648,7 @@ void arena_decay_ticks(tsdn_t *tsdn, arena_t *arena, unsigned nticks);
 void arena_decay_tick(tsdn_t *tsdn, arena_t *arena);
 void *arena_malloc(tsdn_t *tsdn, arena_t *arena, size_t size, szind_t ind,
     bool zero, tcache_t *tcache, bool slow_path);
-arena_t *arena_aalloc(const void *ptr);
+arena_t *arena_aalloc(tsdn_t *tsdn, const void *ptr);
 size_t arena_salloc(tsdn_t *tsdn, const extent_t *extent, const void *ptr,
     bool demote);
 void arena_dalloc(tsdn_t *tsdn, extent_t *extent, void *ptr,
@@ -1049,7 +1050,7 @@ arena_prof_accum(tsdn_t *tsdn, arena_t *arena, uint64_t accumbytes)
 
 # ifdef JEMALLOC_ARENA_INLINE_B
 JEMALLOC_ALWAYS_INLINE szind_t
-arena_ptr_small_binind_get(const void *ptr, size_t mapbits)
+arena_ptr_small_binind_get(tsdn_t *tsdn, const void *ptr, size_t mapbits)
 {
     szind_t binind;
 
@@ -1071,7 +1072,7 @@ arena_ptr_small_binind_get(const void *ptr, size_t mapbits)
 
         assert(binind != BININD_INVALID);
         assert(binind < NBINS);
-        extent = iealloc(ptr);
+        extent = iealloc(tsdn, ptr);
         chunk = (arena_chunk_t *)extent_addr_get(extent);
         arena = extent_arena_get(extent);
         pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >> LG_PAGE;
@@ -1314,10 +1315,10 @@ arena_malloc(tsdn_t *tsdn, arena_t *arena, size_t size, szind_t ind, bool zero,
 }
 
 JEMALLOC_ALWAYS_INLINE arena_t *
-arena_aalloc(const void *ptr)
+arena_aalloc(tsdn_t *tsdn, const void *ptr)
 {
 
-    return (extent_arena_get(iealloc(ptr)));
+    return (extent_arena_get(iealloc(tsdn, ptr)));
 }
 
 /* Return the size of the allocation pointed to by ptr. */
@@ -1361,7 +1362,7 @@ arena_salloc(tsdn_t *tsdn, const extent_t *extent, const void *ptr, bool demote)
      * object).
      */
     assert(arena_mapbits_large_get(chunk, pageind) != 0 ||
-        arena_ptr_small_binind_get(ptr,
+        arena_ptr_small_binind_get(tsdn, ptr,
         arena_mapbits_get(chunk, pageind)) == binind);
     ret = index2size(binind);
 }
@@ -1389,7 +1390,8 @@ arena_dalloc(tsdn_t *tsdn, extent_t *extent, void *ptr, tcache_t *tcache,
     if (likely((mapbits & CHUNK_MAP_LARGE) == 0)) {
         /* Small allocation. */
         if (likely(tcache != NULL)) {
-            szind_t binind = arena_ptr_small_binind_get(ptr,
+            szind_t binind =
+                arena_ptr_small_binind_get(tsdn, ptr,
                 mapbits);
             tcache_dalloc_small(tsdn_tsd(tsdn), tcache, ptr,
                 binind, slow_path);
@@ -53,7 +53,8 @@ chunk_hooks_t chunk_hooks_set(tsdn_t *tsdn, arena_t *arena,
     const chunk_hooks_t *chunk_hooks);
 
 bool chunk_register(tsdn_t *tsdn, const void *chunk, const extent_t *extent);
-void chunk_deregister(const void *chunk, const extent_t *extent);
+void chunk_deregister(tsdn_t *tsdn, const void *chunk,
+    const extent_t *extent);
 void chunk_reregister(tsdn_t *tsdn, const void *chunk,
     const extent_t *extent);
 void *chunk_alloc_base(size_t size);
@@ -81,15 +82,15 @@ void chunk_postfork_child(tsdn_t *tsdn);
 #ifdef JEMALLOC_H_INLINES
 
 #ifndef JEMALLOC_ENABLE_INLINE
-extent_t *chunk_lookup(const void *chunk, bool dependent);
+extent_t *chunk_lookup(tsdn_t *tsdn, const void *chunk, bool dependent);
 #endif
 
 #if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_CHUNK_C_))
 JEMALLOC_INLINE extent_t *
-chunk_lookup(const void *ptr, bool dependent)
+chunk_lookup(tsdn_t *tsdn, const void *ptr, bool dependent)
 {
 
-    return (rtree_read(&chunks_rtree, (uintptr_t)ptr, dependent));
+    return (rtree_read(tsdn, &chunks_rtree, (uintptr_t)ptr, dependent));
 }
 #endif
 
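Because chunk_lookup() (and therefore iealloc()) now requires a tsdn_t, every caller either passes its existing tsdn down or fetches one; the bootstrap paths in src/jemalloc.c pass NULL (see the a0dalloc()/bootstrap_free() hunks below). An illustrative call sequence, following the "Typical usage" comment updated in this commit:

    /* Illustrative: a caller with thread state passes its tsdn through. */
    tsdn_t *tsdn = tsd_tsdn(tsd);
    extent_t *extent = iealloc(tsdn, ptr);  /* rtree lookup via chunk_lookup() */
    size_t sz = isalloc(tsdn, extent, ptr, config_prof);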
@@ -961,15 +961,15 @@ decay_ticker_get(tsd_t *tsd, unsigned ind)
 #undef JEMALLOC_ARENA_INLINE_A
 
 #ifndef JEMALLOC_ENABLE_INLINE
-extent_t *iealloc(const void *ptr);
+extent_t *iealloc(tsdn_t *tsdn, const void *ptr);
 #endif
 
 #if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_C_))
 JEMALLOC_ALWAYS_INLINE extent_t *
-iealloc(const void *ptr)
+iealloc(tsdn_t *tsdn, const void *ptr)
 {
 
-    return (chunk_lookup(ptr, true));
+    return (chunk_lookup(tsdn, ptr, true));
 }
 #endif
 
@@ -980,8 +980,7 @@ iealloc(const void *ptr)
 #include "jemalloc/internal/hash.h"
 
 #ifndef JEMALLOC_ENABLE_INLINE
-extent_t *iealloc(const void *ptr);
-arena_t *iaalloc(const void *ptr);
+arena_t *iaalloc(tsdn_t *tsdn, const void *ptr);
 size_t isalloc(tsdn_t *tsdn, const extent_t *extent, const void *ptr,
     bool demote);
 void *iallocztm(tsdn_t *tsdn, size_t size, szind_t ind, bool zero,
@@ -1012,19 +1011,19 @@ bool ixalloc(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t oldsize,
 
 #if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_C_))
 JEMALLOC_ALWAYS_INLINE arena_t *
-iaalloc(const void *ptr)
+iaalloc(tsdn_t *tsdn, const void *ptr)
 {
 
     assert(ptr != NULL);
 
-    return (arena_aalloc(ptr));
+    return (arena_aalloc(tsdn, ptr));
 }
 
 /*
  * Typical usage:
  *   tsdn_t *tsdn = [...]
  *   void *ptr = [...]
- *   extent_t *extent = iealloc(ptr);
+ *   extent_t *extent = iealloc(tsdn, ptr);
  *   size_t sz = isalloc(tsdn, extent, ptr, config_prof);
  */
 JEMALLOC_ALWAYS_INLINE size_t
@@ -1050,8 +1049,8 @@ iallocztm(tsdn_t *tsdn, size_t size, szind_t ind, bool zero, tcache_t *tcache,
 
     ret = arena_malloc(tsdn, arena, size, ind, zero, tcache, slow_path);
     if (config_stats && is_metadata && likely(ret != NULL)) {
-        arena_metadata_allocated_add(iaalloc(ret), isalloc(tsdn,
-            iealloc(ret), ret, config_prof));
+        arena_metadata_allocated_add(iaalloc(tsdn, ret), isalloc(tsdn,
+            iealloc(tsdn, ret), ret, config_prof));
     }
     return (ret);
 }
@@ -1078,8 +1077,8 @@ ipallocztm(tsdn_t *tsdn, size_t usize, size_t alignment, bool zero,
     ret = arena_palloc(tsdn, arena, usize, alignment, zero, tcache);
     assert(ALIGNMENT_ADDR2BASE(ret, alignment) == ret);
     if (config_stats && is_metadata && likely(ret != NULL)) {
-        arena_metadata_allocated_add(iaalloc(ret), isalloc(tsdn,
-            iealloc(ret), ret, config_prof));
+        arena_metadata_allocated_add(iaalloc(tsdn, ret), isalloc(tsdn,
+            iealloc(tsdn, ret), ret, config_prof));
     }
     return (ret);
 }
@@ -1106,7 +1105,7 @@ ivsalloc(tsdn_t *tsdn, const void *ptr, bool demote)
     extent_t *extent;
 
     /* Return 0 if ptr is not within a chunk managed by jemalloc. */
-    extent = chunk_lookup(ptr, false);
+    extent = chunk_lookup(tsdn, ptr, false);
     if (extent == NULL)
         return (0);
     /* Only arena chunks should be looked up via interior pointers. */
@@ -1123,10 +1122,10 @@ idalloctm(tsdn_t *tsdn, extent_t *extent, void *ptr, tcache_t *tcache,
 
     assert(ptr != NULL);
     assert(!is_metadata || tcache == NULL);
-    assert(!is_metadata || iaalloc(ptr)->ind < narenas_auto);
+    assert(!is_metadata || iaalloc(tsdn, ptr)->ind < narenas_auto);
     if (config_stats && is_metadata) {
-        arena_metadata_allocated_sub(iaalloc(ptr), isalloc(tsdn, extent,
-            ptr, config_prof));
+        arena_metadata_allocated_sub(iaalloc(tsdn, ptr), isalloc(tsdn,
+            extent, ptr, config_prof));
     }
 
     arena_dalloc(tsdn, extent, ptr, tcache, slow_path);
@@ -460,6 +460,8 @@ rtree_child_tryread
 rtree_clear
 rtree_delete
 rtree_new
+rtree_node_alloc
+rtree_node_dalloc
 rtree_node_valid
 rtree_elm_acquire
 rtree_elm_lookup
@@ -23,13 +23,6 @@ typedef struct rtree_s rtree_t;
 /* Used for two-stage lock-free node initialization. */
 #define RTREE_NODE_INITIALIZING ((rtree_elm_t *)0x1)
 
-/*
- * The node allocation callback function's argument is the number of contiguous
- * rtree_elm_t structures to allocate, and the resulting memory must be zeroed.
- */
-typedef rtree_elm_t *(rtree_node_alloc_t)(size_t);
-typedef void (rtree_node_dalloc_t)(rtree_elm_t *);
-
 #endif /* JEMALLOC_H_TYPES */
 /******************************************************************************/
 #ifdef JEMALLOC_H_STRUCTS
@@ -79,8 +72,6 @@ struct rtree_level_s {
 };
 
 struct rtree_s {
-    rtree_node_alloc_t *alloc;
-    rtree_node_dalloc_t *dalloc;
     unsigned height;
     /*
      * Precomputed table used to convert from the number of leading 0 key
@@ -94,12 +85,18 @@ struct rtree_s {
 /******************************************************************************/
 #ifdef JEMALLOC_H_EXTERNS
 
-bool rtree_new(rtree_t *rtree, unsigned bits, rtree_node_alloc_t *alloc,
-    rtree_node_dalloc_t *dalloc);
-void rtree_delete(rtree_t *rtree);
-rtree_elm_t *rtree_subtree_read_hard(rtree_t *rtree, unsigned level);
-rtree_elm_t *rtree_child_read_hard(rtree_t *rtree, rtree_elm_t *elm,
+bool rtree_new(rtree_t *rtree, unsigned bits);
+#ifdef JEMALLOC_JET
+typedef rtree_elm_t *(rtree_node_alloc_t)(tsdn_t *, rtree_t *, size_t);
+extern rtree_node_alloc_t *rtree_node_alloc;
+typedef void (rtree_node_dalloc_t)(tsdn_t *, rtree_t *, rtree_elm_t *);
+extern rtree_node_dalloc_t *rtree_node_dalloc;
+void rtree_delete(tsdn_t *tsdn, rtree_t *rtree);
+#endif
+rtree_elm_t *rtree_subtree_read_hard(tsdn_t *tsdn, rtree_t *rtree,
     unsigned level);
+rtree_elm_t *rtree_child_read_hard(tsdn_t *tsdn, rtree_t *rtree,
+    rtree_elm_t *elm, unsigned level);
 
 #endif /* JEMALLOC_H_EXTERNS */
 /******************************************************************************/
@@ -111,25 +108,27 @@ uintptr_t rtree_subkey(rtree_t *rtree, uintptr_t key, unsigned level);
 
 bool rtree_node_valid(rtree_elm_t *node);
 rtree_elm_t *rtree_child_tryread(rtree_elm_t *elm, bool dependent);
-rtree_elm_t *rtree_child_read(rtree_t *rtree, rtree_elm_t *elm,
+rtree_elm_t *rtree_child_read(tsdn_t *tsdn, rtree_t *rtree, rtree_elm_t *elm,
     unsigned level, bool dependent);
 extent_t *rtree_elm_read(rtree_elm_t *elm, bool dependent);
 void rtree_elm_write(rtree_elm_t *elm, const extent_t *extent);
 rtree_elm_t *rtree_subtree_tryread(rtree_t *rtree, unsigned level,
     bool dependent);
-rtree_elm_t *rtree_subtree_read(rtree_t *rtree, unsigned level,
-    bool dependent);
-rtree_elm_t *rtree_elm_lookup(rtree_t *rtree, uintptr_t key,
+rtree_elm_t *rtree_subtree_read(tsdn_t *tsdn, rtree_t *rtree,
+    unsigned level, bool dependent);
+rtree_elm_t *rtree_elm_lookup(tsdn_t *tsdn, rtree_t *rtree, uintptr_t key,
     bool dependent, bool init_missing);
 
-bool rtree_write(rtree_t *rtree, uintptr_t key, const extent_t *extent);
-extent_t *rtree_read(rtree_t *rtree, uintptr_t key, bool dependent);
-rtree_elm_t *rtree_elm_acquire(rtree_t *rtree, uintptr_t key,
+bool rtree_write(tsdn_t *tsdn, rtree_t *rtree, uintptr_t key,
+    const extent_t *extent);
+extent_t *rtree_read(tsdn_t *tsdn, rtree_t *rtree, uintptr_t key,
+    bool dependent);
+rtree_elm_t *rtree_elm_acquire(tsdn_t *tsdn, rtree_t *rtree, uintptr_t key,
    bool dependent, bool init_missing);
 extent_t *rtree_elm_read_acquired(rtree_elm_t *elm);
 void rtree_elm_write_acquired(rtree_elm_t *elm, const extent_t *extent);
 void rtree_elm_release(rtree_elm_t *elm);
-void rtree_clear(rtree_t *rtree, uintptr_t key);
+void rtree_clear(tsdn_t *tsdn, rtree_t *rtree, uintptr_t key);
 #endif
 
 #if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_RTREE_C_))
@@ -177,14 +176,14 @@ rtree_child_tryread(rtree_elm_t *elm, bool dependent)
 }
 
 JEMALLOC_ALWAYS_INLINE rtree_elm_t *
-rtree_child_read(rtree_t *rtree, rtree_elm_t *elm, unsigned level,
+rtree_child_read(tsdn_t *tsdn, rtree_t *rtree, rtree_elm_t *elm, unsigned level,
     bool dependent)
 {
     rtree_elm_t *child;
 
     child = rtree_child_tryread(elm, dependent);
     if (!dependent && unlikely(!rtree_node_valid(child)))
-        child = rtree_child_read_hard(rtree, elm, level);
+        child = rtree_child_read_hard(tsdn, rtree, elm, level);
     assert(!dependent || child != NULL);
     return (child);
 }
@@ -238,19 +237,19 @@ rtree_subtree_tryread(rtree_t *rtree, unsigned level, bool dependent)
 }
 
 JEMALLOC_ALWAYS_INLINE rtree_elm_t *
-rtree_subtree_read(rtree_t *rtree, unsigned level, bool dependent)
+rtree_subtree_read(tsdn_t *tsdn, rtree_t *rtree, unsigned level, bool dependent)
 {
     rtree_elm_t *subtree;
 
     subtree = rtree_subtree_tryread(rtree, level, dependent);
     if (!dependent && unlikely(!rtree_node_valid(subtree)))
-        subtree = rtree_subtree_read_hard(rtree, level);
+        subtree = rtree_subtree_read_hard(tsdn, rtree, level);
     assert(!dependent || subtree != NULL);
     return (subtree);
 }
 
 JEMALLOC_ALWAYS_INLINE rtree_elm_t *
-rtree_elm_lookup(rtree_t *rtree, uintptr_t key, bool dependent,
+rtree_elm_lookup(tsdn_t *tsdn, rtree_t *rtree, uintptr_t key, bool dependent,
     bool init_missing)
 {
     uintptr_t subkey;
@@ -261,8 +260,8 @@ rtree_elm_lookup(rtree_t *rtree, uintptr_t key, bool dependent,
 
     start_level = rtree_start_level(rtree, key);
 
-    node = init_missing ? rtree_subtree_read(rtree, start_level, dependent)
-        : rtree_subtree_tryread(rtree, start_level, dependent);
+    node = init_missing ? rtree_subtree_read(tsdn, rtree, start_level,
+        dependent) : rtree_subtree_tryread(rtree, start_level, dependent);
 #define RTREE_GET_BIAS (RTREE_HEIGHT_MAX - rtree->height)
     switch (start_level + RTREE_GET_BIAS) {
 #define RTREE_GET_SUBTREE(level) \
@@ -272,7 +271,7 @@ rtree_elm_lookup(rtree_t *rtree, uintptr_t key, bool dependent,
             return (NULL); \
         subkey = rtree_subkey(rtree, key, level - \
             RTREE_GET_BIAS); \
-        node = init_missing ? rtree_child_read(rtree, \
+        node = init_missing ? rtree_child_read(tsdn, rtree, \
             &node[subkey], level - RTREE_GET_BIAS, dependent) : \
             rtree_child_tryread(&node[subkey], dependent); \
         /* Fall through. */
@@ -346,14 +345,14 @@ rtree_elm_lookup(rtree_t *rtree, uintptr_t key, bool dependent,
 }
 
 JEMALLOC_INLINE bool
-rtree_write(rtree_t *rtree, uintptr_t key, const extent_t *extent)
+rtree_write(tsdn_t *tsdn, rtree_t *rtree, uintptr_t key, const extent_t *extent)
 {
     rtree_elm_t *elm;
 
     assert(extent != NULL); /* Use rtree_clear() for this case. */
     assert(((uintptr_t)extent & (uintptr_t)0x1) == (uintptr_t)0x0);
 
-    elm = rtree_elm_lookup(rtree, key, false, true);
+    elm = rtree_elm_lookup(tsdn, rtree, key, false, true);
     if (elm == NULL)
         return (true);
     assert(rtree_elm_read(elm, false) == NULL);
@@ -363,11 +362,11 @@ rtree_write(rtree_t *rtree, uintptr_t key, const extent_t *extent)
 }
 
 JEMALLOC_ALWAYS_INLINE extent_t *
-rtree_read(rtree_t *rtree, uintptr_t key, bool dependent)
+rtree_read(tsdn_t *tsdn, rtree_t *rtree, uintptr_t key, bool dependent)
 {
     rtree_elm_t *elm;
 
-    elm = rtree_elm_lookup(rtree, key, dependent, false);
+    elm = rtree_elm_lookup(tsdn, rtree, key, dependent, false);
     if (elm == NULL)
         return (NULL);
 
@@ -375,12 +374,12 @@ rtree_read(rtree_t *rtree, uintptr_t key, bool dependent)
 }
 
 JEMALLOC_INLINE rtree_elm_t *
-rtree_elm_acquire(rtree_t *rtree, uintptr_t key, bool dependent,
+rtree_elm_acquire(tsdn_t *tsdn, rtree_t *rtree, uintptr_t key, bool dependent,
    bool init_missing)
 {
     rtree_elm_t *elm;
 
-    elm = rtree_elm_lookup(rtree, key, dependent, init_missing);
+    elm = rtree_elm_lookup(tsdn, rtree, key, dependent, init_missing);
     if (!dependent && elm == NULL)
         return (NULL);
     {
@@ -427,11 +426,11 @@ rtree_elm_release(rtree_elm_t *elm)
 }
 
 JEMALLOC_INLINE void
-rtree_clear(rtree_t *rtree, uintptr_t key)
+rtree_clear(tsdn_t *tsdn, rtree_t *rtree, uintptr_t key)
 {
     rtree_elm_t *elm;
 
-    elm = rtree_elm_acquire(rtree, key, true, false);
+    elm = rtree_elm_acquire(tsdn, rtree, key, true, false);
     rtree_elm_write_acquired(elm, NULL);
     rtree_elm_release(elm);
 }
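With the alloc/dalloc function pointers removed from struct rtree_s, node allocation presumably moves into rtree.c itself and routes through base_alloc(); rtree_node_alloc/rtree_node_dalloc survive only as JEMALLOC_JET-mutable test hooks (per the typedefs above and the private_symbols.txt additions). rtree.c is not part of this excerpt, so the following is only a sketch of what the default allocator plausibly looks like; the name rtree_node_alloc_impl is an assumption:

    /* Sketch (assumption): default node allocator inside rtree.c. */
    static rtree_elm_t *
    rtree_node_alloc_impl(tsdn_t *tsdn, rtree_t *rtree, size_t nelms)
    {
        /*
         * base_alloc() hands back demand-zero pages, satisfying the
         * rtree's requirement that new nodes be zeroed.
         */
        return ((rtree_elm_t *)base_alloc(tsdn, nelms *
            sizeof(rtree_elm_t)));
    }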
@@ -370,8 +370,8 @@ tcache_alloc_large(tsd_t *tsd, arena_t *arena, tcache_t *tcache, size_t size,
     }
 
     if (config_prof && usize == LARGE_MINCLASS) {
-        arena_chunk_t *chunk =
-            (arena_chunk_t *)extent_addr_get(iealloc(ret));
+        arena_chunk_t *chunk = (arena_chunk_t *)extent_addr_get(
+            iealloc(tsd_tsdn(tsd), ret));
         size_t pageind = (((uintptr_t)ret - (uintptr_t)chunk) >>
             LG_PAGE);
         arena_mapbits_large_binind_set(chunk, pageind,
diff --git a/src/arena.c b/src/arena.c
@@ -264,12 +264,13 @@ arena_run_reg_alloc(arena_run_t *run, const arena_bin_info_t *bin_info)
 }
 
 JEMALLOC_INLINE_C void
-arena_run_reg_dalloc(arena_run_t *run, extent_t *extent, void *ptr)
+arena_run_reg_dalloc(tsdn_t *tsdn, arena_run_t *run, extent_t *extent,
+    void *ptr)
 {
     arena_chunk_t *chunk = (arena_chunk_t *)extent_addr_get(extent);
     size_t pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >> LG_PAGE;
     size_t mapbits = arena_mapbits_get(chunk, pageind);
-    szind_t binind = arena_ptr_small_binind_get(ptr, mapbits);
+    szind_t binind = arena_ptr_small_binind_get(tsdn, ptr, mapbits);
     const arena_bin_info_t *bin_info = &arena_bin_info[binind];
     size_t regind = arena_run_regind(run, bin_info, ptr);
 
@@ -665,7 +666,7 @@ arena_chunk_discard(tsdn_t *tsdn, arena_t *arena, arena_chunk_t *chunk)
     bool committed;
     chunk_hooks_t chunk_hooks = CHUNK_HOOKS_INITIALIZER;
 
-    chunk_deregister(chunk, &chunk->extent);
+    chunk_deregister(tsdn, chunk, &chunk->extent);
 
     committed = (arena_mapbits_decommitted_get(chunk, map_bias) == 0);
     if (!committed) {
@@ -1037,11 +1038,13 @@ arena_run_first_best_fit(arena_t *arena, size_t size)
 }
 
 static arena_run_t *
-arena_run_alloc_large_helper(arena_t *arena, size_t size, bool zero)
+arena_run_alloc_large_helper(tsdn_t *tsdn, arena_t *arena, size_t size,
+    bool zero)
 {
     arena_run_t *run = arena_run_first_best_fit(arena, s2u(size));
     if (run != NULL) {
-        if (arena_run_split_large(arena, iealloc(run), run, size, zero))
+        if (arena_run_split_large(arena, iealloc(tsdn, run), run, size,
+            zero))
             run = NULL;
     }
     return (run);
@@ -1057,7 +1060,7 @@ arena_run_alloc_large(tsdn_t *tsdn, arena_t *arena, size_t size, bool zero)
     assert(size == PAGE_CEILING(size));
 
     /* Search the arena's chunks for the lowest best fit. */
-    run = arena_run_alloc_large_helper(arena, size, zero);
+    run = arena_run_alloc_large_helper(tsdn, arena, size, zero);
     if (run != NULL)
         return (run);
 
@@ -1067,7 +1070,8 @@ arena_run_alloc_large(tsdn_t *tsdn, arena_t *arena, size_t size, bool zero)
     chunk = arena_chunk_alloc(tsdn, arena);
     if (chunk != NULL) {
         run = &arena_miscelm_get_mutable(chunk, map_bias)->run;
-        if (arena_run_split_large(arena, iealloc(run), run, size, zero))
+        if (arena_run_split_large(arena, iealloc(tsdn, run), run, size,
+            zero))
             run = NULL;
         return (run);
     }
@@ -1077,15 +1081,16 @@ arena_run_alloc_large(tsdn_t *tsdn, arena_t *arena, size_t size, bool zero)
      * sufficient memory available while this one dropped arena->lock in
      * arena_chunk_alloc(), so search one more time.
      */
-    return (arena_run_alloc_large_helper(arena, size, zero));
+    return (arena_run_alloc_large_helper(tsdn, arena, size, zero));
 }
 
 static arena_run_t *
-arena_run_alloc_small_helper(arena_t *arena, size_t size, szind_t binind)
+arena_run_alloc_small_helper(tsdn_t *tsdn, arena_t *arena, size_t size,
+    szind_t binind)
 {
     arena_run_t *run = arena_run_first_best_fit(arena, size);
     if (run != NULL) {
-        if (arena_run_split_small(arena, iealloc(run), run, size,
+        if (arena_run_split_small(arena, iealloc(tsdn, run), run, size,
             binind))
             run = NULL;
     }
@@ -1103,7 +1108,7 @@ arena_run_alloc_small(tsdn_t *tsdn, arena_t *arena, size_t size, szind_t binind)
     assert(binind != BININD_INVALID);
 
     /* Search the arena's chunks for the lowest best fit. */
-    run = arena_run_alloc_small_helper(arena, size, binind);
+    run = arena_run_alloc_small_helper(tsdn, arena, size, binind);
     if (run != NULL)
         return (run);
 
@@ -1113,7 +1118,7 @@ arena_run_alloc_small(tsdn_t *tsdn, arena_t *arena, size_t size, szind_t binind)
     chunk = arena_chunk_alloc(tsdn, arena);
     if (chunk != NULL) {
         run = &arena_miscelm_get_mutable(chunk, map_bias)->run;
-        if (arena_run_split_small(arena, iealloc(run), run, size,
+        if (arena_run_split_small(arena, iealloc(tsdn, run), run, size,
             binind))
             run = NULL;
         return (run);
@@ -1124,7 +1129,7 @@ arena_run_alloc_small(tsdn_t *tsdn, arena_t *arena, size_t size, szind_t binind)
      * sufficient memory available while this one dropped arena->lock in
      * arena_chunk_alloc(), so search one more time.
      */
-    return (arena_run_alloc_small_helper(arena, size, binind));
+    return (arena_run_alloc_small_helper(tsdn, arena, size, binind));
 }
 
 static bool
@@ -1426,7 +1431,7 @@ arena_maybe_purge(tsdn_t *tsdn, arena_t *arena)
 }
 
 static size_t
-arena_dirty_count(arena_t *arena)
+arena_dirty_count(tsdn_t *tsdn, arena_t *arena)
 {
     size_t ndirty = 0;
     arena_runs_dirty_link_t *rdelm;
@@ -1441,7 +1446,7 @@ arena_dirty_count(arena_t *arena)
             npages = extent_size_get(chunkselm) >> LG_PAGE;
             chunkselm = qr_next(chunkselm, cc_link);
         } else {
-            extent_t *extent = iealloc(rdelm);
+            extent_t *extent = iealloc(tsdn, rdelm);
             arena_chunk_t *chunk =
                 (arena_chunk_t *)extent_addr_get(extent);
             arena_chunk_map_misc_t *miscelm =
@@ -1504,7 +1509,7 @@ arena_stash_dirty(tsdn_t *tsdn, arena_t *arena, chunk_hooks_t *chunk_hooks,
                 LG_PAGE));
             chunkselm = chunkselm_next;
         } else {
-            extent_t *extent = iealloc(rdelm);
+            extent_t *extent = iealloc(tsdn, rdelm);
             arena_chunk_t *chunk =
                 (arena_chunk_t *)extent_addr_get(extent);
             arena_chunk_map_misc_t *miscelm =
@@ -1586,7 +1591,7 @@ arena_purge_stashed(tsdn_t *tsdn, arena_t *arena, chunk_hooks_t *chunk_hooks,
         } else {
             size_t pageind, run_size, flag_unzeroed, flags, i;
             bool decommitted;
-            extent_t *extent = iealloc(rdelm);
+            extent_t *extent = iealloc(tsdn, rdelm);
             arena_chunk_t *chunk =
                 (arena_chunk_t *)extent_addr_get(extent);
             arena_chunk_map_misc_t *miscelm =
@@ -1671,7 +1676,7 @@ arena_unstash_purged(tsdn_t *tsdn, arena_t *arena, chunk_hooks_t *chunk_hooks,
             chunk_dalloc_wrapper(tsdn, arena, chunk_hooks, addr,
                 size, zeroed, committed);
         } else {
-            extent_t *extent = iealloc(rdelm);
+            extent_t *extent = iealloc(tsdn, rdelm);
             arena_chunk_t *chunk =
                 (arena_chunk_t *)extent_addr_get(extent);
             arena_chunk_map_misc_t *miscelm =
@@ -1711,7 +1716,7 @@ arena_purge_to_limit(tsdn_t *tsdn, arena_t *arena, size_t ndirty_limit)
      * because overhead grows nonlinearly as memory usage increases.
      */
     if (false && config_debug) {
-        size_t ndirty = arena_dirty_count(arena);
+        size_t ndirty = arena_dirty_count(tsdn, arena);
         assert(ndirty == arena->ndirty);
     }
     assert(opt_purge != purge_mode_ratio || (arena->nactive >>
@@ -2276,7 +2281,7 @@ arena_bin_malloc_hard(tsdn_t *tsdn, arena_t *arena, arena_bin_t *bin)
          * arena_bin_lower_run() must be called, as if a region
          * were just deallocated from the run.
          */
-        extent = iealloc(run);
+        extent = iealloc(tsdn, run);
         chunk = (arena_chunk_t *)extent_addr_get(extent);
         if (run->nfree == bin_info->nregs) {
             arena_dalloc_bin_run(tsdn, arena, chunk, extent,
@@ -2537,7 +2542,7 @@ arena_palloc_large(tsdn_t *tsdn, arena_t *arena, size_t usize, size_t alignment,
         malloc_mutex_unlock(tsdn, &arena->lock);
         return (NULL);
     }
-    extent = iealloc(run);
+    extent = iealloc(tsdn, run);
     chunk = (arena_chunk_t *)extent_addr_get(extent);
     miscelm = arena_run_to_miscelm(run);
     rpages = arena_miscelm_to_rpages(miscelm);
@@ -2555,7 +2560,7 @@ arena_palloc_large(tsdn_t *tsdn, arena_t *arena, size_t usize, size_t alignment,
             arena_miscelm_to_pageind(head_miscelm) + (leadsize >>
             LG_PAGE));
         run = &miscelm->run;
-        extent = iealloc(run);
+        extent = iealloc(tsdn, run);
 
         arena_run_trim_head(tsdn, arena, chunk, head_extent, head_run,
             alloc_size, alloc_size - leadsize);
@@ -2745,7 +2750,7 @@ arena_dalloc_bin_locked_impl(tsdn_t *tsdn, arena_t *arena, arena_chunk_t *chunk,
     if (!junked && config_fill && unlikely(opt_junk_free))
         arena_dalloc_junk_small(ptr, bin_info);
 
-    arena_run_reg_dalloc(run, extent, ptr);
+    arena_run_reg_dalloc(tsdn, run, extent, ptr);
     if (run->nfree == bin_info->nregs) {
         arena_dissociate_bin_run(extent, run, bin);
         arena_dalloc_bin_run(tsdn, arena, chunk, extent, run, bin);
@@ -2793,8 +2798,8 @@ arena_dalloc_small(tsdn_t *tsdn, arena_t *arena, arena_chunk_t *chunk,
 
     if (config_debug) {
         /* arena_ptr_small_binind_get() does extra sanity checking. */
-        assert(arena_ptr_small_binind_get(ptr, arena_mapbits_get(chunk,
-            pageind)) != BININD_INVALID);
+        assert(arena_ptr_small_binind_get(tsdn, ptr,
+            arena_mapbits_get(chunk, pageind)) != BININD_INVALID);
     }
     bitselm = arena_bitselm_get_mutable(chunk, pageind);
     arena_dalloc_bin(tsdn, arena, chunk, extent, ptr, pageind, bitselm);
@@ -2939,8 +2944,8 @@ arena_ralloc_large_grow(tsdn_t *tsdn, arena_t *arena, arena_chunk_t *chunk,
         goto label_fail;
 
     run = &arena_miscelm_get_mutable(chunk, pageind+npages)->run;
-    if (arena_run_split_large(arena, iealloc(run), run, splitsize,
-        zero))
+    if (arena_run_split_large(arena, iealloc(tsdn, run), run,
+        splitsize, zero))
         goto label_fail;
 
     if (config_cache_oblivious && zero) {
diff --git a/src/chunk.c b/src/chunk.c
@@ -146,8 +146,9 @@ chunk_register(tsdn_t *tsdn, const void *chunk, const extent_t *extent)
 
     assert(extent_addr_get(extent) == chunk);
 
-    if (rtree_write(&chunks_rtree, (uintptr_t)chunk, extent))
+    if (rtree_write(tsdn, &chunks_rtree, (uintptr_t)chunk, extent))
         return (true);
+
     if (config_prof && opt_prof) {
         size_t size = extent_size_get(extent);
         size_t nadd = (size == 0) ? 1 : size / chunksize;
@@ -168,10 +169,10 @@ chunk_register(tsdn_t *tsdn, const void *chunk, const extent_t *extent)
 }
 
 void
-chunk_deregister(const void *chunk, const extent_t *extent)
+chunk_deregister(tsdn_t *tsdn, const void *chunk, const extent_t *extent)
 {
 
-    rtree_clear(&chunks_rtree, (uintptr_t)chunk);
+    rtree_clear(tsdn, &chunks_rtree, (uintptr_t)chunk);
     if (config_prof && opt_prof) {
         size_t size = extent_size_get(extent);
         size_t nsub = (size == 0) ? 1 : size / chunksize;
@@ -691,14 +692,6 @@ chunk_merge_default(void *chunk_a, size_t size_a, void *chunk_b, size_t size_b,
     return (false);
 }
 
-static rtree_elm_t *
-chunks_rtree_node_alloc(size_t nelms)
-{
-
-    return ((rtree_elm_t *)base_alloc(tsdn_fetch(), nelms *
-        sizeof(rtree_elm_t)));
-}
-
 bool
 chunk_boot(void)
 {
@@ -735,7 +728,7 @@ chunk_boot(void)
     if (have_dss && chunk_dss_boot())
         return (true);
     if (rtree_new(&chunks_rtree, (unsigned)((ZU(1) << (LG_SIZEOF_PTR+3)) -
-        opt_lg_chunk), chunks_rtree_node_alloc, NULL))
+        opt_lg_chunk)))
         return (true);
 
     return (false);
diff --git a/src/ckh.c b/src/ckh.c
@@ -283,12 +283,14 @@ ckh_grow(tsdn_t *tsdn, ckh_t *ckh)
         ckh->lg_curbuckets = lg_curcells - LG_CKH_BUCKET_CELLS;
 
         if (!ckh_rebuild(ckh, tab)) {
-            idalloctm(tsdn, iealloc(tab), tab, NULL, true, true);
+            idalloctm(tsdn, iealloc(tsdn, tab), tab, NULL, true,
+                true);
             break;
         }
 
         /* Rebuilding failed, so back out partially rebuilt table. */
-        idalloctm(tsdn, iealloc(ckh->tab), ckh->tab, NULL, true, true);
+        idalloctm(tsdn, iealloc(tsdn, ckh->tab), ckh->tab, NULL, true,
+            true);
         ckh->tab = tab;
         ckh->lg_curbuckets = lg_prevbuckets;
     }
@@ -330,7 +332,7 @@ ckh_shrink(tsdn_t *tsdn, ckh_t *ckh)
     ckh->lg_curbuckets = lg_curcells - LG_CKH_BUCKET_CELLS;
 
     if (!ckh_rebuild(ckh, tab)) {
-        idalloctm(tsdn, iealloc(tab), tab, NULL, true, true);
+        idalloctm(tsdn, iealloc(tsdn, tab), tab, NULL, true, true);
 #ifdef CKH_COUNT
         ckh->nshrinks++;
 #endif
@@ -338,7 +340,7 @@ ckh_shrink(tsdn_t *tsdn, ckh_t *ckh)
     }
 
     /* Rebuilding failed, so back out partially rebuilt table. */
-    idalloctm(tsdn, iealloc(ckh->tab), ckh->tab, NULL, true, true);
+    idalloctm(tsdn, iealloc(tsdn, ckh->tab), ckh->tab, NULL, true, true);
     ckh->tab = tab;
     ckh->lg_curbuckets = lg_prevbuckets;
 #ifdef CKH_COUNT
@@ -421,7 +423,7 @@ ckh_delete(tsdn_t *tsdn, ckh_t *ckh)
         (unsigned long long)ckh->nrelocs);
 #endif
 
-    idalloctm(tsdn, iealloc(ckh->tab), ckh->tab, NULL, true, true);
+    idalloctm(tsdn, iealloc(tsdn, ckh->tab), ckh->tab, NULL, true, true);
     if (config_debug)
         memset(ckh, JEMALLOC_FREE_JUNK, sizeof(ckh_t));
 }
diff --git a/src/huge.c b/src/huge.c
@@ -45,7 +45,8 @@ huge_palloc(tsdn_t *tsdn, arena_t *arena, size_t usize, size_t alignment,
     arena = arena_choose(tsdn_tsd(tsdn), arena);
     if (unlikely(arena == NULL) || (ret = arena_chunk_alloc_huge(tsdn,
         arena, usize, alignment, &is_zeroed)) == NULL) {
-        idalloctm(tsdn, iealloc(extent), extent, NULL, true, true);
+        idalloctm(tsdn, iealloc(tsdn, extent), extent, NULL, true,
+            true);
         return (NULL);
     }
 
@@ -53,7 +54,8 @@ huge_palloc(tsdn_t *tsdn, arena_t *arena, size_t usize, size_t alignment,
 
     if (chunk_register(tsdn, ret, extent)) {
         arena_chunk_dalloc_huge(tsdn, arena, ret, usize);
-        idalloctm(tsdn, iealloc(extent), extent, NULL, true, true);
+        idalloctm(tsdn, iealloc(tsdn, extent), extent, NULL, true,
+            true);
         return (NULL);
     }
 
@@ -194,7 +196,7 @@ huge_ralloc_no_move_shrink(tsdn_t *tsdn, extent_t *extent, void *ptr,
         post_zeroed = pre_zeroed;
 
     /* Update the size of the huge allocation. */
-    chunk_deregister(ptr, extent);
+    chunk_deregister(tsdn, ptr, extent);
     malloc_mutex_lock(tsdn, &arena->huge_mtx);
     extent_size_set(extent, usize);
     /* Update zeroed. */
@@ -231,7 +233,7 @@ huge_ralloc_no_move_expand(tsdn_t *tsdn, extent_t *extent, void *ptr,
         return (true);
 
     /* Update the size of the huge allocation. */
-    chunk_deregister(ptr, extent);
+    chunk_deregister(tsdn, ptr, extent);
     malloc_mutex_lock(tsdn, &arena->huge_mtx);
     extent_size_set(extent, usize);
     malloc_mutex_unlock(tsdn, &arena->huge_mtx);
@@ -353,7 +355,7 @@ huge_dalloc(tsdn_t *tsdn, extent_t *extent, void *ptr)
     arena_t *arena;
 
     arena = extent_arena_get(extent);
-    chunk_deregister(ptr, extent);
+    chunk_deregister(tsdn, ptr, extent);
     malloc_mutex_lock(tsdn, &arena->huge_mtx);
     ql_remove(&arena->huge, extent, ql_link);
     malloc_mutex_unlock(tsdn, &arena->huge_mtx);
@@ -362,7 +364,7 @@ huge_dalloc(tsdn_t *tsdn, extent_t *extent, void *ptr)
         extent_size_get(extent));
     arena_chunk_dalloc_huge(tsdn, extent_arena_get(extent),
         extent_addr_get(extent), extent_size_get(extent));
-    idalloctm(tsdn, iealloc(extent), extent, NULL, true, true);
+    idalloctm(tsdn, iealloc(tsdn, extent), extent, NULL, true, true);
 
     arena_decay_tick(tsdn, arena);
 }
@@ -387,7 +389,7 @@ huge_prof_tctx_get(tsdn_t *tsdn, const extent_t *extent, const void *ptr)
     prof_tctx_t *tctx;
     arena_t *arena;
 
-    assert(extent == iealloc(ptr));
+    assert(extent == iealloc(tsdn, ptr));
 
     arena = extent_arena_get(extent);
     malloc_mutex_lock(tsdn, &arena->huge_mtx);
@@ -403,7 +405,7 @@ huge_prof_tctx_set(tsdn_t *tsdn, extent_t *extent, const void *ptr,
 {
     arena_t *arena;
 
-    assert(extent == iealloc(ptr));
+    assert(extent == iealloc(tsdn, ptr));
 
     arena = extent_arena_get(extent);
     malloc_mutex_lock(tsdn, &arena->huge_mtx);
@@ -325,7 +325,7 @@ void
 a0dalloc(void *ptr)
 {
 
-    a0idalloc(iealloc(ptr), ptr, true);
+    a0idalloc(iealloc(NULL, ptr), ptr, true);
 }
 
 /*
@@ -365,7 +365,7 @@ bootstrap_free(void *ptr)
     if (unlikely(ptr == NULL))
         return;
 
-    a0idalloc(iealloc(ptr), ptr, false);
+    a0idalloc(iealloc(NULL, ptr), ptr, false);
 }
 
 static void
@@ -1401,7 +1401,8 @@ ialloc_prof_sample(tsd_t *tsd, size_t usize, szind_t ind, bool zero,
         p = ialloc(tsd, LARGE_MINCLASS, ind_large, zero, slow_path);
         if (p == NULL)
             return (NULL);
-        arena_prof_promoted(tsd_tsdn(tsd), iealloc(p), p, usize);
+        arena_prof_promoted(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd), p), p,
+            usize);
     } else
         p = ialloc(tsd, usize, ind, zero, slow_path);
 
@@ -1423,7 +1424,7 @@ ialloc_prof(tsd_t *tsd, size_t usize, szind_t ind, bool zero, bool slow_path)
         prof_alloc_rollback(tsd, tctx, true);
         return (NULL);
     }
-    prof_malloc(tsd_tsdn(tsd), iealloc(p), p, usize, tctx);
+    prof_malloc(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd), p), p, usize, tctx);
 
     return (p);
 }
@@ -1482,7 +1483,8 @@ ialloc_post_check(void *ret, tsdn_t *tsdn, size_t usize, const char *func,
         set_errno(ENOMEM);
     }
     if (config_stats && likely(ret != NULL)) {
-        assert(usize == isalloc(tsdn, iealloc(ret), ret, config_prof));
+        assert(usize == isalloc(tsdn, iealloc(tsdn, ret), ret,
+            config_prof));
         *tsd_thread_allocatedp_get(tsdn_tsd(tsdn)) += usize;
     }
     witness_assert_lockless(tsdn);
@@ -1525,7 +1527,8 @@ imemalign_prof_sample(tsd_t *tsd, size_t alignment, size_t usize,
         p = ipalloc(tsd, LARGE_MINCLASS, alignment, false);
         if (p == NULL)
             return (NULL);
-        arena_prof_promoted(tsd_tsdn(tsd), iealloc(p), p, usize);
+        arena_prof_promoted(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd), p), p,
+            usize);
     } else
        p = ipalloc(tsd, usize, alignment, false);
 
@@ -1547,7 +1550,7 @@ imemalign_prof(tsd_t *tsd, size_t alignment, size_t usize)
         prof_alloc_rollback(tsd, tctx, true);
         return (NULL);
     }
-    prof_malloc(tsd_tsdn(tsd), iealloc(p), p, usize, tctx);
+    prof_malloc(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd), p), p, usize, tctx);
 
     return (p);
 }
@@ -1604,8 +1607,8 @@ imemalign(void **memptr, size_t alignment, size_t size, size_t min_alignment)
     ret = 0;
 label_return:
     if (config_stats && likely(result != NULL)) {
-        assert(usize == isalloc(tsd_tsdn(tsd), iealloc(result), result,
-            config_prof));
+        assert(usize == isalloc(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd),
+            result), result, config_prof));
         *tsd_thread_allocatedp_get(tsd) += usize;
     }
     UTRACE(0, size, result);
@@ -1696,7 +1699,8 @@ irealloc_prof_sample(tsd_t *tsd, extent_t *extent, void *old_ptr,
             false);
         if (p == NULL)
             return (NULL);
-        arena_prof_promoted(tsd_tsdn(tsd), iealloc(p), p, usize);
+        arena_prof_promoted(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd), p), p,
+            usize);
     } else
         p = iralloc(tsd, extent, old_ptr, old_usize, usize, 0, false);
 
@@ -1724,7 +1728,7 @@ irealloc_prof(tsd_t *tsd, extent_t *extent, void *old_ptr, size_t old_usize,
         prof_alloc_rollback(tsd, tctx, true);
         return (NULL);
     }
-    e = (p == old_ptr) ? extent : iealloc(p);
+    e = (p == old_ptr) ? extent : iealloc(tsd_tsdn(tsd), p);
     prof_realloc(tsd, e, p, usize, tctx, prof_active, true,
         old_ptr, old_usize, old_tctx);
 
@@ -1742,7 +1746,7 @@ ifree(tsd_t *tsd, void *ptr, tcache_t *tcache, bool slow_path)
     assert(ptr != NULL);
     assert(malloc_initialized() || IS_INITIALIZER);
 
-    extent = iealloc(ptr);
+    extent = iealloc(tsd_tsdn(tsd), ptr);
     if (config_prof && opt_prof) {
         usize = isalloc(tsd_tsdn(tsd), extent, ptr, config_prof);
         prof_free(tsd, extent, ptr, usize);
@@ -1810,9 +1814,8 @@ je_realloc(void *ptr, size_t size)
 
         witness_assert_lockless(tsd_tsdn(tsd));
 
-        extent = iealloc(ptr);
-
+        extent = iealloc(tsd_tsdn(tsd), ptr);
         old_usize = isalloc(tsd_tsdn(tsd), extent, ptr, config_prof);
 
         if (config_prof && opt_prof) {
             usize = s2u(size);
             ret = unlikely(usize == 0 || usize > HUGE_MAXCLASS) ?
@@ -1845,7 +1848,8 @@ je_realloc(void *ptr, size_t size)
     if (config_stats && likely(ret != NULL)) {
         tsd_t *tsd;
 
-        assert(usize == isalloc(tsdn, iealloc(ret), ret, config_prof));
+        assert(usize == isalloc(tsdn, iealloc(tsdn, ret), ret,
+            config_prof));
         tsd = tsdn_tsd(tsdn);
         *tsd_thread_allocatedp_get(tsd) += usize;
         *tsd_thread_deallocatedp_get(tsd) += old_usize;
@@ -1999,7 +2003,7 @@ imallocx_prof_sample(tsdn_t *tsdn, size_t usize, size_t alignment, bool zero,
             tcache, arena, slow_path);
         if (p == NULL)
             return (NULL);
-        arena_prof_promoted(tsdn, iealloc(p), p, usize);
+        arena_prof_promoted(tsdn, iealloc(tsdn, p), p, usize);
     } else
         p = imallocx_flags(tsdn, usize, alignment, zero, tcache, arena,
             slow_path);
@@ -2033,7 +2037,7 @@ imallocx_prof(tsd_t *tsd, size_t size, int flags, size_t *usize, bool slow_path)
         prof_alloc_rollback(tsd, tctx, true);
         return (NULL);
     }
-    prof_malloc(tsd_tsdn(tsd), iealloc(p), p, *usize, tctx);
+    prof_malloc(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd), p), p, *usize, tctx);
 
     assert(alignment == 0 || ((uintptr_t)p & (alignment - 1)) == ZU(0));
     return (p);
@@ -2134,7 +2138,7 @@ irallocx_prof_sample(tsdn_t *tsdn, extent_t *extent, void *old_ptr,
             alignment, zero, tcache, arena);
         if (p == NULL)
             return (NULL);
-        arena_prof_promoted(tsdn, iealloc(p), p, usize);
+        arena_prof_promoted(tsdn, iealloc(tsdn, p), p, usize);
     } else {
         p = iralloct(tsdn, extent, old_ptr, old_usize, usize, alignment,
             zero, tcache, arena);
@@ -2180,7 +2184,7 @@ irallocx_prof(tsd_t *tsd, extent_t *extent, void *old_ptr, size_t old_usize,
         e = extent;
         *usize = isalloc(tsd_tsdn(tsd), e, p, config_prof);
     } else
-        e = iealloc(p);
+        e = iealloc(tsd_tsdn(tsd), p);
     prof_realloc(tsd, e, p, *usize, tctx, prof_active, true, old_ptr,
         old_usize, old_tctx);
 
@@ -2207,7 +2211,7 @@ je_rallocx(void *ptr, size_t size, int flags)
     assert(malloc_initialized() || IS_INITIALIZER);
     tsd = tsd_fetch();
     witness_assert_lockless(tsd_tsdn(tsd));
-    extent = iealloc(ptr);
+    extent = iealloc(tsd_tsdn(tsd), ptr);
 
     if (unlikely((flags & MALLOCX_ARENA_MASK) != 0)) {
         unsigned arena_ind = MALLOCX_ARENA_GET(flags);
@@ -2241,8 +2245,8 @@ je_rallocx(void *ptr, size_t size, int flags)
         if (unlikely(p == NULL))
             goto label_oom;
         if (config_stats) {
-            usize = isalloc(tsd_tsdn(tsd), iealloc(p), p,
-                config_prof);
+            usize = isalloc(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd),
+                p), p, config_prof);
         }
     }
     assert(alignment == 0 || ((uintptr_t)p & (alignment - 1)) == ZU(0));
@@ -2357,7 +2361,7 @@ je_xallocx(void *ptr, size_t size, size_t extra, int flags)
     assert(malloc_initialized() || IS_INITIALIZER);
     tsd = tsd_fetch();
     witness_assert_lockless(tsd_tsdn(tsd));
-    extent = iealloc(ptr);
+    extent = iealloc(tsd_tsdn(tsd), ptr);
 
     old_usize = isalloc(tsd_tsdn(tsd), extent, ptr, config_prof);
 
@@ -2412,7 +2416,7 @@ je_sallocx(const void *ptr, int flags)
     if (config_ivsalloc)
         usize = ivsalloc(tsdn, ptr, config_prof);
     else
-        usize = isalloc(tsdn, iealloc(ptr), ptr, config_prof);
+        usize = isalloc(tsdn, iealloc(tsdn, ptr), ptr, config_prof);
 
     witness_assert_lockless(tsdn);
||||||
return (usize);
|
return (usize);
|
||||||
@ -2471,7 +2475,7 @@ je_sdallocx(void *ptr, size_t size, int flags)
|
|||||||
assert(ptr != NULL);
|
assert(ptr != NULL);
|
||||||
assert(malloc_initialized() || IS_INITIALIZER);
|
assert(malloc_initialized() || IS_INITIALIZER);
|
||||||
tsd = tsd_fetch();
|
tsd = tsd_fetch();
|
||||||
extent = iealloc(ptr);
|
extent = iealloc(tsd_tsdn(tsd), ptr);
|
||||||
usize = inallocx(tsd_tsdn(tsd), size, flags);
|
usize = inallocx(tsd_tsdn(tsd), size, flags);
|
||||||
assert(usize == isalloc(tsd_tsdn(tsd), extent, ptr, config_prof));
|
assert(usize == isalloc(tsd_tsdn(tsd), extent, ptr, config_prof));
|
||||||
|
|
||||||
@ -2591,7 +2595,7 @@ je_malloc_usable_size(JEMALLOC_USABLE_SIZE_CONST void *ptr)
|
|||||||
if (config_ivsalloc)
|
if (config_ivsalloc)
|
||||||
ret = ivsalloc(tsdn, ptr, config_prof);
|
ret = ivsalloc(tsdn, ptr, config_prof);
|
||||||
else {
|
else {
|
||||||
ret = (ptr == NULL) ? 0 : isalloc(tsdn, iealloc(ptr), ptr,
|
ret = (ptr == NULL) ? 0 : isalloc(tsdn, iealloc(tsdn, ptr), ptr,
|
||||||
config_prof);
|
config_prof);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
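Every hunk above makes the same mechanical change: iealloc() now takes a tsdn_t *, so call sites that hold a full tsd_t * unwrap it with tsd_tsdn(), and call sites already on a tsdn path forward their handle unchanged. The following is a minimal compilable sketch of that calling convention; the types and the stubbed lookup body are illustrative stand-ins, not the real jemalloc definitions.

#include <stddef.h>

typedef struct tsdn_s { int unused; } tsdn_t;   /* stand-in: "tsd or NULL" handle */
typedef struct tsd_s { tsdn_t tsdn; } tsd_t;    /* stand-in: full thread state */
typedef struct extent_s { int unused; } extent_t;

static tsdn_t *
tsd_tsdn(tsd_t *tsd)
{
    return (&tsd->tsdn);
}

static extent_t dummy_extent;

/* The lookup now threads the handle through; in the real code the rtree walk
 * may need it, e.g. to allocate nodes via base_alloc(). */
static extent_t *
iealloc_sketch(tsdn_t *tsdn, const void *ptr)
{
    (void)tsdn;
    (void)ptr;
    return (&dummy_extent);
}

/* tsd-level caller: unwrap with tsd_tsdn(). */
static extent_t *
caller_with_tsd(tsd_t *tsd, const void *ptr)
{
    return (iealloc_sketch(tsd_tsdn(tsd), ptr));
}

/* tsdn-level caller: pass the handle through unchanged. */
static extent_t *
caller_with_tsdn(tsdn_t *tsdn, const void *ptr)
{
    return (iealloc_sketch(tsdn, ptr));
}

int
main(void)
{
    tsd_t tsd = {{0}};

    return (caller_with_tsd(&tsd, NULL) ==
        caller_with_tsdn(tsd_tsdn(&tsd), NULL) ? 0 : 1);
}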
src/prof.c (30 lines changed)

@ -596,7 +596,8 @@ prof_gctx_try_destroy(tsd_t *tsd, prof_tdata_t *tdata_self, prof_gctx_t *gctx,
         prof_leave(tsd, tdata_self);
         /* Destroy gctx. */
         malloc_mutex_unlock(tsd_tsdn(tsd), gctx->lock);
-        idalloctm(tsd_tsdn(tsd), iealloc(gctx), gctx, NULL, true, true);
+        idalloctm(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd), gctx), gctx,
+            NULL, true, true);
     } else {
         /*
          * Compensate for increment in prof_tctx_destroy() or
@ -707,7 +708,8 @@ prof_tctx_destroy(tsd_t *tsd, prof_tctx_t *tctx)
         prof_tdata_destroy(tsd_tsdn(tsd), tdata, false);
 
     if (destroy_tctx)
-        idalloctm(tsd_tsdn(tsd), iealloc(tctx), tctx, NULL, true, true);
+        idalloctm(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd), tctx), tctx,
+            NULL, true, true);
 }
 
 static bool
@ -736,8 +738,8 @@ prof_lookup_global(tsd_t *tsd, prof_bt_t *bt, prof_tdata_t *tdata,
         if (ckh_insert(tsd_tsdn(tsd), &bt2gctx, btkey.v, gctx.v)) {
             /* OOM. */
             prof_leave(tsd, tdata);
-            idalloctm(tsd_tsdn(tsd), iealloc(gctx.v), gctx.v, NULL,
-                true, true);
+            idalloctm(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd), gctx.v),
+                gctx.v, NULL, true, true);
             return (true);
         }
         new_gctx = true;
@ -817,8 +819,8 @@ prof_lookup(tsd_t *tsd, prof_bt_t *bt)
         if (error) {
             if (new_gctx)
                 prof_gctx_try_destroy(tsd, tdata, gctx, tdata);
-            idalloctm(tsd_tsdn(tsd), iealloc(ret.v), ret.v, NULL,
-                true, true);
+            idalloctm(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd), ret.v),
+                ret.v, NULL, true, true);
             return (NULL);
         }
         malloc_mutex_lock(tsd_tsdn(tsd), gctx->lock);
@ -1241,8 +1243,8 @@ prof_gctx_finish(tsd_t *tsd, prof_gctx_tree_t *gctxs)
                 tctx_tree_remove(&gctx->tctxs,
                     to_destroy);
                 idalloctm(tsd_tsdn(tsd),
-                    iealloc(to_destroy), to_destroy,
-                    NULL, true, true);
+                    iealloc(tsd_tsdn(tsd), to_destroy),
+                    to_destroy, NULL, true, true);
             } else
                 next = NULL;
         } while (next != NULL);
@ -1818,7 +1820,7 @@ prof_tdata_init_impl(tsdn_t *tsdn, uint64_t thr_uid, uint64_t thr_discrim,
 
     if (ckh_new(tsdn, &tdata->bt2tctx, PROF_CKH_MINITEMS,
         prof_bt_hash, prof_bt_keycomp)) {
-        idalloctm(tsdn, iealloc(tdata), tdata, NULL, true, true);
+        idalloctm(tsdn, iealloc(tsdn, tdata), tdata, NULL, true, true);
         return (NULL);
     }
 
@ -1882,11 +1884,11 @@ prof_tdata_destroy_locked(tsdn_t *tsdn, prof_tdata_t *tdata,
     assert(prof_tdata_should_destroy_unlocked(tdata, even_if_attached));
 
     if (tdata->thread_name != NULL) {
-        idalloctm(tsdn, iealloc(tdata->thread_name), tdata->thread_name,
-            NULL, true, true);
+        idalloctm(tsdn, iealloc(tsdn, tdata->thread_name),
+            tdata->thread_name, NULL, true, true);
     }
     ckh_delete(tsdn, &tdata->bt2tctx);
-    idalloctm(tsdn, iealloc(tdata), tdata, NULL, true, true);
+    idalloctm(tsdn, iealloc(tsdn, tdata), tdata, NULL, true, true);
 }
 
 static void
@ -2080,8 +2082,8 @@ prof_thread_name_set(tsd_t *tsd, const char *thread_name)
         return (EAGAIN);
 
     if (tdata->thread_name != NULL) {
-        idalloctm(tsd_tsdn(tsd), iealloc(tdata->thread_name),
-            tdata->thread_name, NULL, true, true);
+        idalloctm(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd),
+            tdata->thread_name), tdata->thread_name, NULL, true, true);
         tdata->thread_name = NULL;
     }
     if (strlen(s) > 0)
src/rtree.c (71 lines changed)

@ -13,8 +13,7 @@ hmin(unsigned ha, unsigned hb)
  * used.
  */
 bool
-rtree_new(rtree_t *rtree, unsigned bits, rtree_node_alloc_t *alloc,
-    rtree_node_dalloc_t *dalloc)
+rtree_new(rtree_t *rtree, unsigned bits)
 {
     unsigned bits_in_leaf, height, i;
 
@ -32,8 +31,6 @@ rtree_new(rtree_t *rtree, unsigned bits, rtree_node_alloc_t *alloc,
         height = 1;
     assert((height-1) * RTREE_BITS_PER_LEVEL + bits_in_leaf == bits);
 
-    rtree->alloc = alloc;
-    rtree->dalloc = dalloc;
     rtree->height = height;
 
     /* Root level. */
@ -64,8 +61,43 @@ rtree_new(rtree_t *rtree, unsigned bits, rtree_node_alloc_t *alloc,
     return (false);
 }
 
+#ifdef JEMALLOC_JET
+#undef rtree_node_alloc
+#define rtree_node_alloc JEMALLOC_N(rtree_node_alloc_impl)
+#endif
+static rtree_elm_t *
+rtree_node_alloc(tsdn_t *tsdn, rtree_t *rtree, size_t nelms)
+{
+
+    return ((rtree_elm_t *)base_alloc(tsdn, nelms * sizeof(rtree_elm_t)));
+}
+#ifdef JEMALLOC_JET
+#undef rtree_node_alloc
+#define rtree_node_alloc JEMALLOC_N(rtree_node_alloc)
+rtree_node_alloc_t *rtree_node_alloc = JEMALLOC_N(rtree_node_alloc_impl);
+#endif
+
+#ifdef JEMALLOC_JET
+#undef rtree_node_dalloc
+#define rtree_node_dalloc JEMALLOC_N(rtree_node_dalloc_impl)
+#endif
+UNUSED static void
+rtree_node_dalloc(tsdn_t *tsdn, rtree_t *rtree, rtree_elm_t *node)
+{
+
+    /* Nodes are never deleted during normal operation. */
+    not_reached();
+}
+#ifdef JEMALLOC_JET
+#undef rtree_node_dalloc
+#define rtree_node_dalloc JEMALLOC_N(rtree_node_dalloc)
+rtree_node_dalloc_t *rtree_node_dalloc = JEMALLOC_N(rtree_node_dalloc_impl);
+#endif
+
+#ifdef JEMALLOC_JET
 static void
-rtree_delete_subtree(rtree_t *rtree, rtree_elm_t *node, unsigned level)
+rtree_delete_subtree(tsdn_t *tsdn, rtree_t *rtree, rtree_elm_t *node,
+    unsigned level)
 {
 
     if (level + 1 < rtree->height) {
@ -74,27 +106,31 @@ rtree_delete_subtree(rtree_t *rtree, rtree_elm_t *node, unsigned level)
         nchildren = ZU(1) << rtree->levels[level].bits;
         for (i = 0; i < nchildren; i++) {
             rtree_elm_t *child = node[i].child;
-            if (child != NULL)
-                rtree_delete_subtree(rtree, child, level + 1);
+            if (child != NULL) {
+                rtree_delete_subtree(tsdn, rtree, child, level +
+                    1);
+            }
         }
     }
-    rtree->dalloc(node);
+    rtree_node_dalloc(tsdn, rtree, node);
 }
 
 void
-rtree_delete(rtree_t *rtree)
+rtree_delete(tsdn_t *tsdn, rtree_t *rtree)
 {
     unsigned i;
 
     for (i = 0; i < rtree->height; i++) {
         rtree_elm_t *subtree = rtree->levels[i].subtree;
         if (subtree != NULL)
-            rtree_delete_subtree(rtree, subtree, i);
+            rtree_delete_subtree(tsdn, rtree, subtree, i);
     }
 }
+#endif
 
 static rtree_elm_t *
-rtree_node_init(rtree_t *rtree, unsigned level, rtree_elm_t **elmp)
+rtree_node_init(tsdn_t *tsdn, rtree_t *rtree, unsigned level,
+    rtree_elm_t **elmp)
 {
     rtree_elm_t *node;
 
@ -108,7 +144,8 @@ rtree_node_init(rtree_t *rtree, unsigned level, rtree_elm_t **elmp)
             node = atomic_read_p((void **)elmp);
         } while (node == RTREE_NODE_INITIALIZING);
     } else {
-        node = rtree->alloc(ZU(1) << rtree->levels[level].bits);
+        node = rtree_node_alloc(tsdn, rtree, ZU(1) <<
+            rtree->levels[level].bits);
         if (node == NULL)
             return (NULL);
         atomic_write_p((void **)elmp, node);
@ -118,15 +155,17 @@ rtree_node_init(rtree_t *rtree, unsigned level, rtree_elm_t **elmp)
 }
 
 rtree_elm_t *
-rtree_subtree_read_hard(rtree_t *rtree, unsigned level)
+rtree_subtree_read_hard(tsdn_t *tsdn, rtree_t *rtree, unsigned level)
 {
 
-    return (rtree_node_init(rtree, level, &rtree->levels[level].subtree));
+    return (rtree_node_init(tsdn, rtree, level,
+        &rtree->levels[level].subtree));
 }
 
 rtree_elm_t *
-rtree_child_read_hard(rtree_t *rtree, rtree_elm_t *elm, unsigned level)
+rtree_child_read_hard(tsdn_t *tsdn, rtree_t *rtree, rtree_elm_t *elm,
+    unsigned level)
 {
 
-    return (rtree_node_init(rtree, level, &elm->child));
+    return (rtree_node_init(tsdn, rtree, level, &elm->child));
 }
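The JEMALLOC_JET blocks above follow jemalloc's usual testing-hook idiom: in production builds rtree_node_alloc()/rtree_node_dalloc() are plain static functions (the allocator a thin wrapper over base_alloc(), the deallocator not_reached() since nodes are never freed during normal operation), while in JET builds the same bodies are compiled under *_impl names and call sites resolve to like-named, overwritable function pointers. Below is a standalone sketch of the idiom; TEST_HOOKS, elm_t, and node_alloc are illustrative names for this sketch, not jemalloc's.

/* hook_sketch.c: build normally, or with the hookable variant enabled via
 * cc -DTEST_HOOKS hook_sketch.c */
#include <stdio.h>
#include <stdlib.h>

typedef struct elm_s { struct elm_s *child; } elm_t;
typedef elm_t *(node_alloc_t)(size_t);

#ifdef TEST_HOOKS
#define node_alloc node_alloc_impl  /* compile the real body under an alias */
#endif
static elm_t *
node_alloc(size_t nelms)
{
    /* Stands in for base_alloc(tsdn, nelms * sizeof(rtree_elm_t)). */
    return ((elm_t *)calloc(nelms, sizeof(elm_t)));
}
#ifdef TEST_HOOKS
#undef node_alloc
/* Call sites below now resolve to this pointer; a test may overwrite it. */
node_alloc_t *node_alloc = node_alloc_impl;
#endif

int
main(void)
{
    /* Direct call in production builds; indirect through the pointer in
     * TEST_HOOKS builds, which is what lets tests intercept allocation. */
    elm_t *node = node_alloc(4);

    printf("allocated 4 elms at %p\n", (void *)node);
    free(node);
    return (0);
}

This also suggests why rtree_delete()/rtree_delete_subtree() moved inside #ifdef JEMALLOC_JET: only tests, which intercept node allocation with calloc(), can meaningfully free nodes, whereas base_alloc()-backed memory is presumably never released individually.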
src/tcache.c (13 lines changed)

@ -27,7 +27,7 @@ size_t
 tcache_salloc(tsdn_t *tsdn, const void *ptr)
 {
 
-    return (arena_salloc(tsdn, iealloc(ptr), ptr, false));
+    return (arena_salloc(tsdn, iealloc(tsdn, ptr), ptr, false));
 }
 
 void
@ -101,7 +101,7 @@ tcache_bin_flush_small(tsd_t *tsd, tcache_t *tcache, tcache_bin_t *tbin,
     assert(arena != NULL);
     for (nflush = tbin->ncached - rem; nflush > 0; nflush = ndeferred) {
         /* Lock the arena bin associated with the first object. */
-        extent_t *extent = iealloc(*(tbin->avail - 1));
+        extent_t *extent = iealloc(tsd_tsdn(tsd), *(tbin->avail - 1));
         arena_t *bin_arena = extent_arena_get(extent);
         arena_bin_t *bin = &bin_arena->bins[binind];
 
@ -125,7 +125,7 @@ tcache_bin_flush_small(tsd_t *tsd, tcache_t *tcache, tcache_bin_t *tbin,
             ptr = *(tbin->avail - 1 - i);
             assert(ptr != NULL);
 
-            extent = iealloc(ptr);
+            extent = iealloc(tsd_tsdn(tsd), ptr);
             if (extent_arena_get(extent) == bin_arena) {
                 arena_chunk_t *chunk =
                     (arena_chunk_t *)extent_addr_get(extent);
@ -185,7 +185,7 @@ tcache_bin_flush_large(tsd_t *tsd, tcache_bin_t *tbin, szind_t binind,
     assert(arena != NULL);
     for (nflush = tbin->ncached - rem; nflush > 0; nflush = ndeferred) {
         /* Lock the arena associated with the first object. */
-        extent_t *extent = iealloc(*(tbin->avail - 1));
+        extent_t *extent = iealloc(tsd_tsdn(tsd), *(tbin->avail - 1));
         arena_t *locked_arena = extent_arena_get(extent);
         UNUSED bool idump;
 
@ -211,7 +211,7 @@ tcache_bin_flush_large(tsd_t *tsd, tcache_bin_t *tbin, szind_t binind,
         for (i = 0; i < nflush; i++) {
             ptr = *(tbin->avail - 1 - i);
             assert(ptr != NULL);
-            extent = iealloc(ptr);
+            extent = iealloc(tsd_tsdn(tsd), ptr);
             if (extent_arena_get(extent) == locked_arena) {
                 arena_chunk_t *chunk =
                     (arena_chunk_t *)extent_addr_get(extent);
@ -394,7 +394,8 @@ tcache_destroy(tsd_t *tsd, tcache_t *tcache)
         arena_prof_accum(tsd_tsdn(tsd), arena, tcache->prof_accumbytes))
         prof_idump(tsd_tsdn(tsd));
 
-    idalloctm(tsd_tsdn(tsd), iealloc(tcache), tcache, NULL, true, true);
+    idalloctm(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd), tcache), tcache, NULL,
+        true, true);
 }
 
 void
test/unit/rtree.c

@ -1,10 +1,18 @@
 #include "test/jemalloc_test.h"
 
+rtree_node_alloc_t *rtree_node_alloc_orig;
+rtree_node_dalloc_t *rtree_node_dalloc_orig;
+
+rtree_t *test_rtree;
+
 static rtree_elm_t *
-node_alloc(size_t nelms)
+rtree_node_alloc_intercept(tsdn_t *tsdn, rtree_t *rtree, size_t nelms)
 {
     rtree_elm_t *node;
 
+    if (rtree != test_rtree)
+        return rtree_node_alloc_orig(tsdn, rtree, nelms);
+
     node = (rtree_elm_t *)calloc(nelms, sizeof(rtree_elm_t));
     assert_ptr_not_null(node, "Unexpected calloc() failure");
 
@ -12,23 +20,33 @@ node_alloc(size_t nelms)
 }
 
 static void
-node_dalloc(rtree_elm_t *node)
+rtree_node_dalloc_intercept(tsdn_t *tsdn, rtree_t *rtree, rtree_elm_t *node)
 {
 
+    if (rtree != test_rtree) {
+        rtree_node_dalloc_orig(tsdn, rtree, node);
+        return;
+    }
+
     free(node);
 }
 
 TEST_BEGIN(test_rtree_read_empty)
 {
+    tsdn_t *tsdn;
     unsigned i;
 
+    tsdn = tsdn_fetch();
+
     for (i = 1; i <= (sizeof(uintptr_t) << 3); i++) {
         rtree_t rtree;
-        assert_false(rtree_new(&rtree, i, node_alloc, node_dalloc),
+        test_rtree = &rtree;
+        assert_false(rtree_new(&rtree, i),
             "Unexpected rtree_new() failure");
-        assert_ptr_null(rtree_read(&rtree, 0, false),
+        assert_ptr_null(rtree_read(tsdn, &rtree, 0, false),
             "rtree_read() should return NULL for empty tree");
-        rtree_delete(&rtree);
+        rtree_delete(tsdn, &rtree);
+        test_rtree = NULL;
     }
 }
 TEST_END
@ -50,30 +68,34 @@ thd_start(void *varg)
     thd_start_arg_t *arg = (thd_start_arg_t *)varg;
     sfmt_t *sfmt;
     extent_t *extent;
+    tsdn_t *tsdn;
     unsigned i;
 
     sfmt = init_gen_rand(arg->seed);
     extent = (extent_t *)malloc(sizeof(extent));
     assert_ptr_not_null(extent, "Unexpected malloc() failure");
+    tsdn = tsdn_fetch();
 
     for (i = 0; i < NITERS; i++) {
         uintptr_t key = (uintptr_t)gen_rand64(sfmt);
         if (i % 2 == 0) {
             rtree_elm_t *elm;
 
-            elm = rtree_elm_acquire(&arg->rtree, key, false, true);
+            elm = rtree_elm_acquire(tsdn, &arg->rtree, key, false,
+                true);
             assert_ptr_not_null(elm,
                 "Unexpected rtree_elm_acquire() failure");
             rtree_elm_write_acquired(elm, extent);
             rtree_elm_release(elm);
 
-            elm = rtree_elm_acquire(&arg->rtree, key, true, false);
+            elm = rtree_elm_acquire(tsdn, &arg->rtree, key, true,
+                false);
             assert_ptr_not_null(elm,
                 "Unexpected rtree_elm_acquire() failure");
             rtree_elm_read_acquired(elm);
             rtree_elm_release(elm);
         } else
-            rtree_read(&arg->rtree, key, false);
+            rtree_read(tsdn, &arg->rtree, key, false);
     }
 
     free(extent);
@ -86,19 +108,23 @@ TEST_BEGIN(test_rtree_concurrent)
     thd_start_arg_t arg;
     thd_t thds[NTHREADS];
     sfmt_t *sfmt;
+    tsdn_t *tsdn;
     unsigned i, j;
 
     sfmt = init_gen_rand(SEED);
+    tsdn = tsdn_fetch();
     for (i = 1; i < MAX_NBITS; i++) {
         arg.nbits = i;
-        assert_false(rtree_new(&arg.rtree, arg.nbits, node_alloc,
-            node_dalloc), "Unexpected rtree_new() failure");
+        test_rtree = &arg.rtree;
+        assert_false(rtree_new(&arg.rtree, arg.nbits),
+            "Unexpected rtree_new() failure");
         arg.seed = gen_rand32(sfmt);
         for (j = 0; j < NTHREADS; j++)
             thd_create(&thds[j], thd_start, (void *)&arg);
         for (j = 0; j < NTHREADS; j++)
             thd_join(thds[j], NULL);
-        rtree_delete(&arg.rtree);
+        rtree_delete(tsdn, &arg.rtree);
+        test_rtree = NULL;
     }
     fini_gen_rand(sfmt);
 }
@ -113,60 +139,70 @@ TEST_BEGIN(test_rtree_extrema)
 {
     unsigned i;
     extent_t extent_a, extent_b;
+    tsdn_t *tsdn;
+
+    tsdn = tsdn_fetch();
 
     for (i = 1; i <= (sizeof(uintptr_t) << 3); i++) {
         rtree_t rtree;
-        assert_false(rtree_new(&rtree, i, node_alloc, node_dalloc),
+        test_rtree = &rtree;
+        assert_false(rtree_new(&rtree, i),
            "Unexpected rtree_new() failure");
 
-        assert_false(rtree_write(&rtree, 0, &extent_a),
+        assert_false(rtree_write(tsdn, &rtree, 0, &extent_a),
            "Unexpected rtree_write() failure, i=%u", i);
-        assert_ptr_eq(rtree_read(&rtree, 0, true), &extent_a,
+        assert_ptr_eq(rtree_read(tsdn, &rtree, 0, true), &extent_a,
            "rtree_read() should return previously set value, i=%u", i);
 
-        assert_false(rtree_write(&rtree, ~((uintptr_t)0), &extent_b),
-           "Unexpected rtree_write() failure, i=%u", i);
+        assert_false(rtree_write(tsdn, &rtree, ~((uintptr_t)0),
+           &extent_b), "Unexpected rtree_write() failure, i=%u", i);
-        assert_ptr_eq(rtree_read(&rtree, ~((uintptr_t)0), true),
+        assert_ptr_eq(rtree_read(tsdn, &rtree, ~((uintptr_t)0), true),
            &extent_b,
           "rtree_read() should return previously set value, i=%u", i);
 
-        rtree_delete(&rtree);
+        rtree_delete(tsdn, &rtree);
+        test_rtree = NULL;
     }
 }
 TEST_END
 
 TEST_BEGIN(test_rtree_bits)
 {
+    tsdn_t *tsdn;
     unsigned i, j, k;
 
+    tsdn = tsdn_fetch();
+
     for (i = 1; i < (sizeof(uintptr_t) << 3); i++) {
         uintptr_t keys[] = {0, 1,
             (((uintptr_t)1) << (sizeof(uintptr_t)*8-i)) - 1};
         extent_t extent;
         rtree_t rtree;
 
-        assert_false(rtree_new(&rtree, i, node_alloc, node_dalloc),
+        test_rtree = &rtree;
+        assert_false(rtree_new(&rtree, i),
            "Unexpected rtree_new() failure");
 
         for (j = 0; j < sizeof(keys)/sizeof(uintptr_t); j++) {
-            assert_false(rtree_write(&rtree, keys[j], &extent),
-                "Unexpected rtree_write() failure");
+            assert_false(rtree_write(tsdn, &rtree, keys[j],
+                &extent), "Unexpected rtree_write() failure");
             for (k = 0; k < sizeof(keys)/sizeof(uintptr_t); k++) {
-                assert_ptr_eq(rtree_read(&rtree, keys[k], true),
-                    &extent, "rtree_read() should return "
-                    "previously set value and ignore "
+                assert_ptr_eq(rtree_read(tsdn, &rtree, keys[k],
+                    true), &extent, "rtree_read() should "
+                    "return previously set value and ignore "
                    "insignificant key bits; i=%u, j=%u, k=%u, "
                    "set key=%#"FMTxPTR", get key=%#"FMTxPTR, i,
                    j, k, keys[j], keys[k]);
             }
-            assert_ptr_null(rtree_read(&rtree,
+            assert_ptr_null(rtree_read(tsdn, &rtree,
                (((uintptr_t)1) << (sizeof(uintptr_t)*8-i)), false),
                "Only leftmost rtree leaf should be set; "
                "i=%u, j=%u", i, j);
-            rtree_clear(&rtree, keys[j]);
+            rtree_clear(tsdn, &rtree, keys[j]);
         }
 
-        rtree_delete(&rtree);
+        rtree_delete(tsdn, &rtree);
+        test_rtree = NULL;
     }
 }
 TEST_END
@ -175,10 +211,12 @@ TEST_BEGIN(test_rtree_random)
 {
     unsigned i;
     sfmt_t *sfmt;
+    tsdn_t *tsdn;
 #define NSET 16
 #define SEED 42
 
     sfmt = init_gen_rand(SEED);
+    tsdn = tsdn_fetch();
     for (i = 1; i <= (sizeof(uintptr_t) << 3); i++) {
         uintptr_t keys[NSET];
         extent_t extent;
@ -186,37 +224,40 @@ TEST_BEGIN(test_rtree_random)
         rtree_t rtree;
         rtree_elm_t *elm;
 
-        assert_false(rtree_new(&rtree, i, node_alloc, node_dalloc),
+        test_rtree = &rtree;
+        assert_false(rtree_new(&rtree, i),
            "Unexpected rtree_new() failure");
 
         for (j = 0; j < NSET; j++) {
             keys[j] = (uintptr_t)gen_rand64(sfmt);
-            elm = rtree_elm_acquire(&rtree, keys[j], false, true);
+            elm = rtree_elm_acquire(tsdn, &rtree, keys[j], false,
+                true);
             assert_ptr_not_null(elm,
                "Unexpected rtree_elm_acquire() failure");
             rtree_elm_write_acquired(elm, &extent);
             rtree_elm_release(elm);
-            assert_ptr_eq(rtree_read(&rtree, keys[j], true),
+            assert_ptr_eq(rtree_read(tsdn, &rtree, keys[j], true),
                &extent,
               "rtree_read() should return previously set value");
         }
         for (j = 0; j < NSET; j++) {
-            assert_ptr_eq(rtree_read(&rtree, keys[j], true),
+            assert_ptr_eq(rtree_read(tsdn, &rtree, keys[j], true),
                &extent, "rtree_read() should return previously "
               "set value, j=%u", j);
         }
 
         for (j = 0; j < NSET; j++) {
-            rtree_clear(&rtree, keys[j]);
-            assert_ptr_null(rtree_read(&rtree, keys[j], true),
+            rtree_clear(tsdn, &rtree, keys[j]);
+            assert_ptr_null(rtree_read(tsdn, &rtree, keys[j], true),
               "rtree_read() should return previously set value");
         }
         for (j = 0; j < NSET; j++) {
-            assert_ptr_null(rtree_read(&rtree, keys[j], true),
+            assert_ptr_null(rtree_read(tsdn, &rtree, keys[j], true),
               "rtree_read() should return previously set value");
         }
 
-        rtree_delete(&rtree);
+        rtree_delete(tsdn, &rtree);
+        test_rtree = NULL;
     }
     fini_gen_rand(sfmt);
 #undef NSET
@ -228,6 +269,12 @@ int
 main(void)
 {
 
+    rtree_node_alloc_orig = rtree_node_alloc;
+    rtree_node_alloc = rtree_node_alloc_intercept;
+    rtree_node_dalloc_orig = rtree_node_dalloc;
+    rtree_node_dalloc = rtree_node_dalloc_intercept;
+    test_rtree = NULL;
+
     return (test(
         test_rtree_read_empty,
         test_rtree_concurrent,
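Taken together with the hooks in src/rtree.c: main() installs both intercepts once, before any test runs, and each test case points test_rtree at the tree it is exercising, so only that tree's nodes come from calloc()/free(). Any other rtree created in a JEMALLOC_JET build falls through the `rtree != test_rtree` check and still reaches the original base_alloc()-backed implementations saved in rtree_node_alloc_orig/rtree_node_dalloc_orig.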