#ifndef JEMALLOC_INTERNAL_EDATA_CACHE_H
#define JEMALLOC_INTERNAL_EDATA_CACHE_H

#include "jemalloc/internal/base.h"
/*
 * Public for tests.  When the small cache is empty and we go to the fallback,
 * we grab up to 8 items (grabbing fewer only if the fallback is exhausted).
 * When we exceed 16 items, we flush.  This caps the maximum memory lost per
 * cache to 16 * sizeof(edata_t), a max of 2k on architectures where edata_t
 * is 128 bytes.
 */
#define EDATA_CACHE_SMALL_MAX 16
#define EDATA_CACHE_SMALL_FILL 8
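/*
 * Illustrative only (not part of the interface): a worked version of the
 * bound above, assuming a 128-byte edata_t (the size varies by
 * configuration).  A full small cache retains at most
 * EDATA_CACHE_SMALL_MAX * sizeof(edata_t) == 16 * 128 == 2048 bytes before a
 * put flushes it back to the fallback, and a fill pulls at most
 * EDATA_CACHE_SMALL_FILL items into an empty cache, so a fill alone never
 * reaches the flush threshold.  The check below encodes that last
 * relationship at compile time.
 */
#if EDATA_CACHE_SMALL_FILL > EDATA_CACHE_SMALL_MAX
#  error "EDATA_CACHE_SMALL_FILL must not exceed EDATA_CACHE_SMALL_MAX"
#endif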
/*
 * A cache of edata_t structures allocated via base_alloc_edata (as opposed to
 * the underlying extents they describe).  The contents of returned edata_t
 * objects are garbage and cannot be relied upon.
 */

typedef struct edata_cache_s edata_cache_t;
struct edata_cache_s {
	/* Cached edata_t structs, ready to be handed back out. */
	edata_avail_t avail;
	/* Number of structs currently cached. */
	atomic_zu_t count;
	/* Serializes access to the cache. */
	malloc_mutex_t mtx;
	/* Source of fresh edata_t allocations when the cache is empty. */
	base_t *base;
};
bool edata_cache_init(edata_cache_t *edata_cache, base_t *base);
edata_t *edata_cache_get(tsdn_t *tsdn, edata_cache_t *edata_cache);
void edata_cache_put(tsdn_t *tsdn, edata_cache_t *edata_cache, edata_t *edata);

void edata_cache_prefork(tsdn_t *tsdn, edata_cache_t *edata_cache);
void edata_cache_postfork_parent(tsdn_t *tsdn, edata_cache_t *edata_cache);
void edata_cache_postfork_child(tsdn_t *tsdn, edata_cache_t *edata_cache);
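/*
 * A rough usage sketch (illustrative, not from the library's docs).  "tsdn"
 * and "base" are assumed to be supplied by the caller; edata_cache_init is
 * assumed to follow the usual jemalloc convention of returning true on error.
 *
 *	edata_cache_t ec;
 *	if (edata_cache_init(&ec, base)) {
 *		// Initialization failed; do not use the cache.
 *	}
 *	edata_t *edata = edata_cache_get(tsdn, &ec);
 *	if (edata != NULL) {
 *		// Contents are garbage; initialize the edata_t before use.
 *		edata_cache_put(tsdn, &ec, edata);
 *	}
 */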
/*
 * An edata_cache_small is like an edata_cache, but it relies on external
 * synchronization and avoids first-fit strategies.
 */

typedef struct edata_cache_small_s edata_cache_small_t;
struct edata_cache_small_s {
	/* Locally cached edata_t structs; access is unsynchronized. */
	edata_list_inactive_t list;
	/* Number of structs on list. */
	size_t count;
	/* Shared cache used to refill from and flush to. */
	edata_cache_t *fallback;
	/* Once disabled, the local list is no longer used. */
	bool disabled;
};
void edata_cache_small_init(edata_cache_small_t *ecs, edata_cache_t *fallback);
edata_t *edata_cache_small_get(tsdn_t *tsdn, edata_cache_small_t *ecs);
void edata_cache_small_put(tsdn_t *tsdn, edata_cache_small_t *ecs,
    edata_t *edata);
void edata_cache_small_disable(tsdn_t *tsdn, edata_cache_small_t *ecs);
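/*
 * A rough usage sketch (illustrative, not from the library's docs).  "tsdn"
 * is assumed to be supplied by the caller and "ec" to be an initialized
 * edata_cache_t; the caller provides whatever synchronization is needed.
 *
 *	edata_cache_small_t ecs;
 *	edata_cache_small_init(&ecs, &ec);
 *	edata_t *edata = edata_cache_small_get(tsdn, &ecs);
 *	if (edata != NULL) {
 *		edata_cache_small_put(tsdn, &ecs, edata);
 *	}
 *	edata_cache_small_disable(tsdn, &ecs);	// Stop using the local list.
 */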
#endif /* JEMALLOC_INTERNAL_EDATA_CACHE_H */