#ifndef JEMALLOC_INTERNAL_EXTERNS_H
#define JEMALLOC_INTERNAL_EXTERNS_H

#include "jemalloc/internal/atomic.h"
#include "jemalloc/internal/size_classes.h"
#include "jemalloc/internal/tsd_types.h"
|
2017-04-11 10:04:40 +08:00
|
|
|
|
2017-04-12 14:13:45 +08:00
|
|
|
/* TSD checks this to set thread local slow state accordingly. */
extern bool malloc_slow;

/* Run-time options. */
extern bool opt_abort;
extern bool opt_abort_conf;
extern const char *opt_junk;
extern bool opt_junk_alloc;
extern bool opt_junk_free;
extern bool opt_utrace;
extern bool opt_xmalloc;
extern bool opt_zero;
extern unsigned opt_narenas;

/* Number of CPUs. */
extern unsigned ncpus;

/* Number of arenas used for automatic multiplexing of threads and arenas. */
extern unsigned narenas_auto;

/*
 * Arenas that are used to service external requests. Not all elements of the
 * arenas array are necessarily used; arenas are created lazily as needed.
 */
extern atomic_p_t arenas[];
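
/*
 * Illustrative sketch, not part of the original header: a reader of arenas[]
 * is expected to load the slot atomically and fall back to creating the arena
 * if the slot is still NULL. The memory order and the extent_hooks argument
 * below are assumptions for illustration; arena_init() is declared later in
 * this header.
 *
 *   arena_t *arena = (arena_t *)atomic_load_p(&arenas[ind], ATOMIC_ACQUIRE);
 *   if (arena == NULL) {
 *       arena = arena_init(tsdn, ind, extent_hooks);
 *   }
 */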

/*
 * pind2sz_tab encodes the same information as could be computed by
 * pind2sz_compute().
 */
extern size_t const pind2sz_tab[NPSIZES+1];

/*
 * index2size_tab encodes the same information as could be computed (at
 * unacceptable cost in some code paths) by index2size_compute().
 */
extern size_t const index2size_tab[NSIZES];
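
/*
 * Illustrative sketch, not part of the original header: the point of both
 * pind2sz_tab and index2size_tab is to turn a size-class computation into a
 * single bounds-checked array read on hot paths. The helper below is
 * hypothetical; the real accessors live elsewhere in the jemalloc internals.
 *
 *   static inline size_t
 *   index2size_lookup_sketch(unsigned index) {
 *       assert(index < NSIZES);
 *       return index2size_tab[index];
 *   }
 */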

/*
 * size2index_tab is a compact lookup table that rounds request sizes up to
 * size classes. In order to reduce cache footprint, the table is compressed,
 * and all accesses are via size2index().
 */
extern uint8_t const size2index_tab[];
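
/*
 * Illustrative sketch, not part of the original header: "compressed" is
 * assumed here to mean that one table slot covers a whole quantum of request
 * sizes, so the size is shifted down before indexing. The helper name, the
 * LG_TINY_MIN shift, and the LOOKUP_MAXCLASS bound are assumptions for
 * illustration only.
 *
 *   static inline unsigned
 *   size2index_lookup_sketch(size_t size) {
 *       assert(size > 0 && size <= LOOKUP_MAXCLASS);
 *       return size2index_tab[(size - 1) >> LG_TINY_MIN];
 *   }
 */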

void *a0malloc(size_t size);
void a0dalloc(void *ptr);
void *bootstrap_malloc(size_t size);
void *bootstrap_calloc(size_t num, size_t size);
void bootstrap_free(void *ptr);
void arena_set(unsigned ind, arena_t *arena);
unsigned narenas_total_get(void);
arena_t *arena_init(tsdn_t *tsdn, unsigned ind, extent_hooks_t *extent_hooks);
arena_tdata_t *arena_tdata_get_hard(tsd_t *tsd, unsigned ind);
arena_t *arena_choose_hard(tsd_t *tsd, bool internal);
void arena_migrate(tsd_t *tsd, unsigned oldind, unsigned newind);
void iarena_cleanup(tsd_t *tsd);
void arena_cleanup(tsd_t *tsd);
void arenas_tdata_cleanup(tsd_t *tsd);
void jemalloc_prefork(void);
void jemalloc_postfork_parent(void);
void jemalloc_postfork_child(void);
bool malloc_initialized(void);
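
/*
 * Illustrative sketch, not part of the original header: the prefork/postfork
 * trio above has the shape expected by pthread_atfork(), which is the usual
 * way to keep allocator mutexes consistent across fork(). Whether the library
 * registers the handlers exactly like this is an assumption.
 *
 *   pthread_atfork(jemalloc_prefork, jemalloc_postfork_parent,
 *       jemalloc_postfork_child);
 */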
#endif /* JEMALLOC_INTERNAL_EXTERNS_H */