2017-04-11 09:17:55 +08:00
|
|
|
#ifndef JEMALLOC_INTERNAL_INLINES_B_H
|
|
|
|
#define JEMALLOC_INTERNAL_INLINES_B_H
|
|
|
|
|
2020-01-28 05:55:46 +08:00
|
|
|
#include "jemalloc/internal/emap.h"
|
2019-12-17 03:05:07 +08:00
|
|
|
#include "jemalloc/internal/extent.h"
|
2017-05-24 05:26:31 +08:00
|
|
|
#include "jemalloc/internal/rtree.h"
|
|
|
|
|
2017-04-11 09:17:55 +08:00
|
|
|
/* Choose an arena based on a per-thread value. */
|
2017-04-22 00:37:34 +08:00
|
|
|
static inline arena_t *
arena_choose_impl(tsd_t *tsd, arena_t *arena, bool internal) {
	arena_t *ret;

	/* An explicitly requested arena always wins. */
	if (arena != NULL) {
		return arena;
	}

	/* During reentrancy, arena 0 is the safest bet. */
	if (unlikely(tsd_reentrancy_level_get(tsd) > 0)) {
		return arena_get(tsd_tsdn(tsd), 0, true);
	}

	/*
	 * internal selects the iarena (internal-metadata arena) slot rather
	 * than the application arena slot in TSD.
	 */
	ret = internal ? tsd_iarena_get(tsd) : tsd_arena_get(tsd);
	if (unlikely(ret == NULL)) {
		/* First use on this thread: pick an arena the slow way. */
		ret = arena_choose_hard(tsd, internal);
		assert(ret);
		if (tcache_available(tsd)) {
			tcache_t *tcache = tcache_get(tsd);
			if (tcache->arena != NULL) {
				/* See comments in tsd_tcache_data_init().*/
				assert(tcache->arena ==
				    arena_get(tsd_tsdn(tsd), 0, false));
				if (tcache->arena != ret) {
					tcache_arena_reassociate(tsd_tsdn(tsd),
					    tcache, ret);
				}
			} else {
				tcache_arena_associate(tsd_tsdn(tsd), tcache,
				    ret);
			}
		}
	}

	/*
	 * Note that for percpu arena, if the current arena is outside of the
	 * auto percpu arena range, (i.e. thread is assigned to a manually
	 * managed arena), then percpu arena is skipped.
	 */
	if (have_percpu_arena && PERCPU_ARENA_ENABLED(opt_percpu_arena) &&
	    !internal && (arena_ind_get(ret) <
	    percpu_arena_ind_limit(opt_percpu_arena)) && (ret->last_thd !=
	    tsd_tsdn(tsd))) {
		/*
		 * The thread may have migrated CPUs since the last choice;
		 * rebind it to the arena matching its current CPU.
		 */
		unsigned ind = percpu_arena_choose();
		if (arena_ind_get(ret) != ind) {
			percpu_arena_update(tsd, ind);
			ret = tsd_arena_get(tsd);
		}
		/* Remember who touched this arena last to skip rework. */
		ret->last_thd = tsd_tsdn(tsd);
	}

	return ret;
}
|
|
|
|
|
2017-04-22 00:37:34 +08:00
|
|
|
static inline arena_t *
|
2017-04-11 09:17:55 +08:00
|
|
|
arena_choose(tsd_t *tsd, arena_t *arena) {
|
|
|
|
return arena_choose_impl(tsd, arena, false);
|
|
|
|
}
|
|
|
|
|
2017-04-22 00:37:34 +08:00
|
|
|
static inline arena_t *
|
2017-04-11 09:17:55 +08:00
|
|
|
arena_ichoose(tsd_t *tsd, arena_t *arena) {
|
|
|
|
return arena_choose_impl(tsd, arena, true);
|
|
|
|
}
|
|
|
|
|
2017-04-22 00:37:34 +08:00
|
|
|
static inline bool
|
2017-04-21 06:19:02 +08:00
|
|
|
arena_is_auto(arena_t *arena) {
|
|
|
|
assert(narenas_auto > 0);
|
2018-05-22 04:33:48 +08:00
|
|
|
|
2018-06-02 06:06:36 +08:00
|
|
|
return (arena_ind_get(arena) < manual_arena_base);
|
2017-04-21 06:19:02 +08:00
|
|
|
}
|
|
|
|
|
2019-12-10 06:36:45 +08:00
|
|
|
/* Look up the edata_t describing the extent that contains ptr. */
JEMALLOC_ALWAYS_INLINE edata_t *
iealloc(tsdn_t *tsdn, const void *ptr) {
	rtree_ctx_t fallback;
	rtree_ctx_t *ctx = tsdn_rtree_ctx(tsdn, &fallback);

	return rtree_edata_read(tsdn, &emap_global.rtree, ctx, (uintptr_t)ptr,
	    true);
}
|
|
|
|
|
|
|
|
#endif /* JEMALLOC_INTERNAL_INLINES_B_H */
|