/*
 * From commit 99d68445ef: Expand and restructure the rtree API such that all
 * common operations can be achieved with minimal work, regardless of whether
 * the rtree leaf fields are independent versus packed into a single atomic
 * pointer.
 */
#ifndef JEMALLOC_INTERNAL_LARGE_EXTERNS_H
|
|
#define JEMALLOC_INTERNAL_LARGE_EXTERNS_H
|
|
|
|
void *large_malloc(tsdn_t *tsdn, arena_t *arena, size_t usize, bool zero);
|
|
void *large_palloc(tsdn_t *tsdn, arena_t *arena, size_t usize,
|
|
size_t alignment, bool zero);
|
|
bool large_ralloc_no_move(tsdn_t *tsdn, extent_t *extent, size_t usize_min,
|
|
size_t usize_max, bool zero);
|
|
void *large_ralloc(tsdn_t *tsdn, arena_t *arena, extent_t *extent,
|
|
size_t usize, size_t alignment, bool zero, tcache_t *tcache);
|
|
#ifdef JEMALLOC_JET
|
|
typedef void (large_dalloc_junk_t)(void *, size_t);
|
|
extern large_dalloc_junk_t *large_dalloc_junk;
|
|
typedef void (large_dalloc_maybe_junk_t)(void *, size_t);
|
|
extern large_dalloc_maybe_junk_t *large_dalloc_maybe_junk;
|
|
#else
|
|
void large_dalloc_junk(void *ptr, size_t size);
|
|
void large_dalloc_maybe_junk(void *ptr, size_t size);
|
|
#endif
|
|
void large_dalloc_prep_junked_locked(tsdn_t *tsdn, extent_t *extent);
|
|
void large_dalloc_finish(tsdn_t *tsdn, extent_t *extent);
|
|
void large_dalloc(tsdn_t *tsdn, extent_t *extent);
|
|
size_t large_salloc(tsdn_t *tsdn, const extent_t *extent);
|
|
prof_tctx_t *large_prof_tctx_get(tsdn_t *tsdn, const extent_t *extent);
|
|
void large_prof_tctx_set(tsdn_t *tsdn, extent_t *extent, prof_tctx_t *tctx);
|
|
void large_prof_tctx_reset(tsdn_t *tsdn, extent_t *extent);
|
|
|
|
#endif /* JEMALLOC_INTERNAL_LARGE_EXTERNS_H */
|