Remove --disable-tcache.
Simplify configuration by removing the --disable-tcache option; testing of that configuration is instead covered by --with-malloc-conf=tcache:false. Fix the thread.arena and thread.tcache.flush mallctls to work correctly when the tcache is disabled. This partially resolves #580.
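For context, a minimal consumer-side sketch (not part of this commit; names and error handling are illustrative) of the runtime controls that replace the removed build option: opt.tcache set through the application-defined malloc_conf string, plus the thread.arena and thread.tcache.flush mallctls mentioned above.

#include <stdio.h>
#include <stdlib.h>
#include <jemalloc/jemalloc.h>

/* Same effect as running with MALLOC_CONF=tcache:false in the environment. */
const char *malloc_conf = "tcache:false";

int
main(void) {
	unsigned arena_ind;
	size_t sz = sizeof(arena_ind);

	/* thread.arena: read (or switch) the calling thread's arena. */
	if (mallctl("thread.arena", &arena_ind, &sz, NULL, 0) != 0) {
		fprintf(stderr, "thread.arena mallctl failed\n");
		return 1;
	}
	printf("current arena: %u\n", arena_ind);

	/*
	 * thread.tcache.flush: with the tcache disabled this may report an
	 * error rather than flush anything; check the return value either way.
	 */
	if (mallctl("thread.tcache.flush", NULL, NULL, NULL, 0) != 0) {
		fprintf(stderr, "thread.tcache.flush unavailable\n");
	}

	void *p = malloc(64);
	free(p);
	return 0;
}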
@@ -58,7 +58,7 @@ percpu_arena_update(tsd_t *tsd, unsigned cpu) {
 		/* Set new arena/tcache associations. */
 		arena_migrate(tsd, oldind, newind);
 		tcache_t *tcache = tcache_get(tsd);
-		if (config_tcache && tcache) {
+		if (tcache != NULL) {
 			tcache_arena_reassociate(tsd_tsdn(tsd), tcache,
 			    newarena);
 		}
@@ -154,13 +154,6 @@
 /* Use gcc intrinsics for profile backtracing if defined. */
 #undef JEMALLOC_PROF_GCC
 
-/*
- * JEMALLOC_TCACHE enables a thread-specific caching layer for small objects.
- * This makes it possible to allocate/deallocate objects without any locking
- * when the cache is in the steady state.
- */
-#undef JEMALLOC_TCACHE
-
 /*
  * JEMALLOC_DSS enables use of sbrk(2) to allocate extents from the data storage
  * segment (DSS).
@@ -323,7 +323,8 @@ malloc_getcpu(void) {
 JEMALLOC_ALWAYS_INLINE unsigned
 percpu_arena_choose(void) {
 	unsigned arena_ind;
-	assert(have_percpu_arena && (percpu_arena_mode != percpu_arena_disabled));
+	assert(have_percpu_arena && (percpu_arena_mode !=
+	    percpu_arena_disabled));
 
 	malloc_cpuid_t cpuid = malloc_getcpu();
 	assert(cpuid >= 0);
@@ -420,19 +421,16 @@ tcache_large_bin_get(tcache_t *tcache, szind_t binind) {
 
 JEMALLOC_ALWAYS_INLINE bool
 tcache_available(tsd_t *tsd) {
-	cassert(config_tcache);
-
 	/*
 	 * Thread specific auto tcache might be unavailable if: 1) during tcache
 	 * initialization, or 2) disabled through thread.tcache.enabled mallctl
 	 * or config options. This check covers all cases.
 	 */
-	if (likely(tsd_tcache_enabled_get(tsd) == true)) {
-		/* Associated arena == null implies tcache init in progress. */
-		if (tsd_tcachep_get(tsd)->arena != NULL) {
-			assert(tcache_small_bin_get(tsd_tcachep_get(tsd),
-			    0)->avail != NULL);
-		}
+	if (likely(tsd_tcache_enabled_get(tsd))) {
+		/* Associated arena == NULL implies tcache init in progress. */
+		assert(tsd_tcachep_get(tsd)->arena == NULL ||
+		    tcache_small_bin_get(tsd_tcachep_get(tsd), 0)->avail !=
+		    NULL);
 		return true;
 	}
 
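The rewrite above folds a conditional assertion into a single implication-style assert. A standalone sketch of that equivalence, using toy stand-in types rather than jemalloc's:

#include <assert.h>
#include <stddef.h>

/* Illustrative stand-ins for the jemalloc structures involved. */
typedef struct { void **avail; } toy_bin_t;
typedef struct { void *arena; toy_bin_t bin0; } toy_tcache_t;

/* Old shape: only assert when an arena is already associated. */
static void
check_old(toy_tcache_t *tc) {
	if (tc->arena != NULL) {
		assert(tc->bin0.avail != NULL);
	}
}

/* New shape: one assert reading "arena set implies avail initialized". */
static void
check_new(toy_tcache_t *tc) {
	assert(tc->arena == NULL || tc->bin0.avail != NULL);
}

int
main(void) {
	void *slot;
	toy_tcache_t tc = { .arena = &tc, .bin0 = { .avail = &slot } };
	check_old(&tc);
	check_new(&tc);	/* Same condition, expressed as A implies B. */
	return 0;
}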
@@ -441,9 +439,6 @@ tcache_available(tsd_t *tsd) {
 
 JEMALLOC_ALWAYS_INLINE tcache_t *
 tcache_get(tsd_t *tsd) {
-	if (!config_tcache) {
-		return NULL;
-	}
 	if (!tcache_available(tsd)) {
 		return NULL;
 	}
@@ -24,7 +24,7 @@ arena_choose_impl(tsd_t *tsd, arena_t *arena, bool internal) {
 	if (unlikely(ret == NULL)) {
 		ret = arena_choose_hard(tsd, internal);
 		assert(ret);
-		if (config_tcache && tcache_available(tsd)) {
+		if (tcache_available(tsd)) {
 			tcache_t *tcache = tcache_get(tsd);
 			if (tcache->arena != NULL) {
 				/* See comments in tcache_data_init().*/
@@ -111,13 +111,6 @@ static const bool config_stats =
     false
 #endif
     ;
-static const bool config_tcache =
-#ifdef JEMALLOC_TCACHE
-    true
-#else
-    false
-#endif
-    ;
 static const bool config_tls =
 #ifdef JEMALLOC_TLS
     true
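For context, the config_* constants follow jemalloc's pattern of turning a build-time define into a static const bool so that feature checks are ordinary if statements; with config_tcache gone, tcache code paths no longer need such a guard. A minimal sketch of the pattern, using a made-up flag name rather than a real jemalloc option:

#include <stdbool.h>
#include <stdio.h>

/* Made-up build flag MYLIB_FEATURE_FOO stands in for defines like JEMALLOC_STATS. */
static const bool config_foo =
#ifdef MYLIB_FEATURE_FOO
    true
#else
    false
#endif
    ;

int
main(void) {
	/*
	 * Unlike an #ifdef, the guarded branch is still parsed and type-checked
	 * in every build; the compiler folds the constant condition and drops
	 * the dead branch.
	 */
	if (config_foo) {
		printf("foo enabled\n");
	} else {
		printf("foo disabled\n");
	}
	return 0;
}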
@@ -6,7 +6,6 @@
 
 #ifndef JEMALLOC_ENABLE_INLINE
 void	tcache_event(tsd_t *tsd, tcache_t *tcache);
 void	tcache_flush(void);
 bool	tcache_enabled_get(tsd_t *tsd);
 tcache_t *tcache_get(tsd_t *tsd);
 void	tcache_enabled_set(tsd_t *tsd, bool enabled);
@@ -25,15 +24,11 @@ tcache_t *tcaches_get(tsd_t *tsd, unsigned ind);
 #if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_TCACHE_C_))
 JEMALLOC_INLINE bool
 tcache_enabled_get(tsd_t *tsd) {
-	cassert(config_tcache);
-
 	return tsd_tcache_enabled_get(tsd);
 }
 
 JEMALLOC_INLINE void
 tcache_enabled_set(tsd_t *tsd, bool enabled) {
-	cassert(config_tcache);
-
 	bool was_enabled = tsd_tcache_enabled_get(tsd);
 
 	if (!was_enabled && enabled) {
@@ -40,23 +40,14 @@ struct tcache_s {
 	 * element of tbins is initialized to point to the proper offset within
	 * this array.
	 */
-#ifdef JEMALLOC_TCACHE
 	tcache_bin_t	tbins_small[NBINS];
-#else
-	tcache_bin_t	tbins_small[0];
-#endif
 	/* Data accessed less often below. */
 	ql_elm(tcache_t) link;		/* Used for aggregating stats. */
 	arena_t		*arena;		/* Associated arena. */
 	szind_t		next_gc_bin;	/* Next bin to GC. */
-#ifdef JEMALLOC_TCACHE
 	/* For small bins, fill (ncached_max >> lg_fill_div). */
 	uint8_t		lg_fill_div[NBINS];
 	tcache_bin_t	tbins_large[NSIZES-NBINS];
-#else
-	uint8_t		lg_fill_div[0];
-	tcache_bin_t	tbins_large[0];
-#endif
 };
 
 /* Linkage for list of available (previously used) explicit tcache IDs. */
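The struct change above drops the zero-length-array fallback that let the tcache bins occupy no space in --disable-tcache builds; the arrays are now unconditionally sized. A reduced sketch of the layout difference, using toy names rather than jemalloc's (zero-length arrays are a GNU extension, as in the original code):

#include <stdio.h>

#define NBINS_TOY 4	/* Illustrative bin count, not jemalloc's NBINS. */
typedef struct { void **avail; unsigned ncached; } toy_bin_t;

/* Before: with the feature compiled out, the bins contribute no size. */
struct toy_tcache_disabled {
	void		*arena;
	toy_bin_t	bins[0];	/* GNU zero-length array. */
};

/* After: bins are always present, sized by the bin count. */
struct toy_tcache {
	void		*arena;
	toy_bin_t	bins[NBINS_TOY];
};

int
main(void) {
	printf("disabled-layout size: %zu\n", sizeof(struct toy_tcache_disabled));
	printf("always-on size:       %zu\n", sizeof(struct toy_tcache));
	return 0;
}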