Rename "dirty chunks" to "cached chunks".

Rename "dirty chunks" to "cached chunks", in order to avoid overloading
the term "dirty".

Fix the regression caused by 339c2b23b2
(Fix chunk_unmap() to propagate dirty state.), and actually address what
that change attempted: purge chunks only once, and propagate into
chunk_record() whether the purged pages ended up zeroed.
Jason Evans
2015-02-18 01:15:50 -08:00
parent 339c2b23b2
commit 738e089a2e
7 changed files with 91 additions and 86 deletions
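
Note (illustration only, not part of this commit's diff): the intent is that the purge path decides exactly once whether purging left a chunk's pages zero-filled, and chunk_unmap() merely forwards that result so chunk_record() can remember it. A hedged sketch of a caller, assuming pages_purge() returns true when the pages may be non-zero (its meaning elsewhere in jemalloc):

	/* Purge exactly once; remember whether the pages are now zero-filled. */
	bool zeroed = !pages_purge(chunk, chunksize);
	/* New signature: no "dirty" flag; the zeroed result is passed through. */
	chunk_unmap(arena, chunk, chunksize, zeroed);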

include/jemalloc/internal/arena.h

@@ -318,14 +318,14 @@ struct arena_s {
 	/*
 	 * Unused dirty memory this arena manages. Dirty memory is conceptually
-	 * tracked as an arbitrarily interleaved LRU of runs and chunks, but the
-	 * list linkage is actually semi-duplicated in order to avoid extra
-	 * arena_chunk_map_misc_t space overhead.
+	 * tracked as an arbitrarily interleaved LRU of dirty runs and cached
+	 * chunks, but the list linkage is actually semi-duplicated in order to
+	 * avoid extra arena_chunk_map_misc_t space overhead.
 	 *
 	 *   LRU-----------------------------------------------------------MRU
 	 *
 	 *        ______________         ___                    ___
-	 *   ...-->|chunks_dirty|<--------->|c|<-------------------->|c|<--...
+	 *   ...-->|chunks_cache|<--------->|c|<-------------------->|c|<--...
 	 *        --------------           |h|                    |h|
 	 *        ____________      _____  |u|     _____    _____  |u|
 	 *   ...-->|runs_dirty|<-->|run|<-->|n|<-->|run|<-->|run|<-->|n|<--...
@@ -333,7 +333,7 @@ struct arena_s {
 	 *                            ---                    ---
 	 */
 	arena_chunk_map_misc_t	runs_dirty;
-	extent_node_t		chunks_dirty;
+	extent_node_t		chunks_cache;
 
 	/* Extant huge allocations. */
 	ql_head(extent_node_t)	huge;
@@ -347,8 +347,8 @@ struct arena_s {
 	 * orderings are needed, which is why there are two trees with the same
 	 * contents.
 	 */
-	extent_tree_t		chunks_szad_dirty;
-	extent_tree_t		chunks_ad_dirty;
+	extent_tree_t		chunks_szad_cache;
+	extent_tree_t		chunks_ad_cache;
 	extent_tree_t		chunks_szad_mmap;
 	extent_tree_t		chunks_ad_mmap;
 	extent_tree_t		chunks_szad_dss;
@@ -384,10 +384,10 @@ extern size_t arena_maxclass; /* Max size class for arenas. */
 extern unsigned	nlclasses; /* Number of large size classes. */
 extern unsigned	nhclasses; /* Number of huge size classes. */
 
-void	arena_chunk_dirty_maybe_insert(arena_t *arena, extent_node_t *node,
-    bool dirty);
-void	arena_chunk_dirty_maybe_remove(arena_t *arena, extent_node_t *node,
-    bool dirty);
+void	arena_chunk_cache_maybe_insert(arena_t *arena, extent_node_t *node,
+    bool cache);
+void	arena_chunk_cache_maybe_remove(arena_t *arena, extent_node_t *node,
+    bool cache);
 extent_node_t	*arena_node_alloc(arena_t *arena);
 void	arena_node_dalloc(arena_t *arena, extent_node_t *node);
 void	*arena_chunk_alloc_huge(arena_t *arena, size_t usize, size_t alignment,
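
Note (hedged sketch, not part of this view): the renamed arena_chunk_cache_maybe_insert() is expected to splice a chunk's extent node into the interleaved LRU described in the comment above only when the chunk is actually being cached, using the extent_node_dirty_*() helpers added in extent.h further down; the real body lives in arena.c.

	/* Sketch of the insert side; the remove side mirrors it. */
	void
	arena_chunk_cache_maybe_insert(arena_t *arena, extent_node_t *node, bool cache)
	{

		if (cache) {
			/* Link the node into the runs_dirty and chunks_cache rings. */
			extent_node_dirty_linkage_init(node);
			extent_node_dirty_insert(node, &arena->runs_dirty,
			    &arena->chunks_cache);
			/* Account for the cached pages as purgeable (sketch only). */
			arena->ndirty += extent_node_size_get(node) >> LG_PAGE;
		}
	}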

include/jemalloc/internal/chunk.h

@@ -45,9 +45,10 @@ void *chunk_alloc_arena(chunk_alloc_t *chunk_alloc,
 void	*chunk_alloc_default(void *new_addr, size_t size, size_t alignment,
     bool *zero, unsigned arena_ind);
 void	chunk_record(arena_t *arena, extent_tree_t *chunks_szad,
-    extent_tree_t *chunks_ad, bool dirty, void *chunk, size_t size);
+    extent_tree_t *chunks_ad, bool cache, void *chunk, size_t size,
+    bool zeroed);
 bool	chunk_dalloc_default(void *chunk, size_t size, unsigned arena_ind);
-void	chunk_unmap(arena_t *arena, bool dirty, void *chunk, size_t size);
+void	chunk_unmap(arena_t *arena, void *chunk, size_t size, bool zeroed);
 bool	chunk_boot(void);
 void	chunk_prefork(void);
 void	chunk_postfork_parent(void);
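
Note (hedged sketch, consistent with the new prototype but not shown in this diff): chunk_unmap() takes the zeroed state from its caller and passes it through to chunk_record() for the appropriate retained trees, so the chunk does not have to be purged again just to reestablish that state. chunk_in_dss(), chunk_dalloc_mmap() and have_dss are existing jemalloc facilities.

	void
	chunk_unmap(arena_t *arena, void *chunk, size_t size, bool zeroed)
	{

		if (have_dss && chunk_in_dss(chunk)) {
			chunk_record(arena, &arena->chunks_szad_dss,
			    &arena->chunks_ad_dss, false, chunk, size, zeroed);
		} else if (chunk_dalloc_mmap(chunk, size)) {
			chunk_record(arena, &arena->chunks_szad_mmap,
			    &arena->chunks_ad_mmap, false, chunk, size, zeroed);
		}
	}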

include/jemalloc/internal/extent.h

@@ -33,9 +33,9 @@ struct extent_node_s {
 	/* Profile counters, used for huge objects. */
 	prof_tctx_t		*en_prof_tctx;
 
-	/* Linkage for arena's runs_dirty and chunks_dirty rings. */
-	qr(extent_node_t)	cd_link;
+	/* Linkage for arena's runs_dirty and chunks_cache rings. */
 	arena_chunk_map_misc_t	runs_dirty;
+	qr(extent_node_t)	cc_link;
 
 	union {
 		/* Linkage for the size/address-ordered tree. */
@@ -78,6 +78,9 @@ void extent_node_prof_tctx_set(extent_node_t *node, prof_tctx_t *tctx);
 void	extent_node_init(extent_node_t *node, arena_t *arena, void *addr,
     size_t size, bool zeroed);
 void	extent_node_dirty_linkage_init(extent_node_t *node);
+void	extent_node_dirty_insert(extent_node_t *node,
+    arena_chunk_map_misc_t *runs_dirty, extent_node_t *chunks_dirty);
+void	extent_node_dirty_remove(extent_node_t *node);
 #endif
 
 #if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_EXTENT_C_))
@@ -183,9 +186,27 @@ JEMALLOC_INLINE void
 extent_node_dirty_linkage_init(extent_node_t *node)
 {
 
-	qr_new(node, cd_link);
+	qr_new(&node->runs_dirty, rd_link);
+	qr_new(node, cc_link);
+}
+
+JEMALLOC_INLINE void
+extent_node_dirty_insert(extent_node_t *node,
+    arena_chunk_map_misc_t *runs_dirty, extent_node_t *chunks_dirty)
+{
+
+	qr_meld(runs_dirty, &node->runs_dirty, rd_link);
+	qr_meld(chunks_dirty, node, cc_link);
+}
+
+JEMALLOC_INLINE void
+extent_node_dirty_remove(extent_node_t *node)
+{
+
+	qr_remove(&node->runs_dirty, rd_link);
+	qr_remove(node, cc_link);
 }
 
 #endif
 
 #endif /* JEMALLOC_H_INLINES */
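
Note (hedged usage sketch, hypothetical caller): these inline helpers are meant to be used together; a cached chunk's node is spliced into both the arena's runs_dirty ring (via its embedded runs_dirty element) and the chunks_cache ring, and unspliced from both in one step when the chunk is reused or purged.

	extent_node_init(node, arena, chunk, size, zeroed);
	extent_node_dirty_linkage_init(node);
	extent_node_dirty_insert(node, &arena->runs_dirty, &arena->chunks_cache);
	/* ... the chunk now participates in the interleaved LRU ... */
	extent_node_dirty_remove(node);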

include/jemalloc/internal/private_symbols.txt

@@ -12,9 +12,9 @@ arena_boot
 arena_choose
 arena_choose_hard
 arena_chunk_alloc_huge
+arena_chunk_cache_maybe_insert
+arena_chunk_cache_maybe_remove
 arena_chunk_dalloc_huge
-arena_chunk_dirty_maybe_insert
-arena_chunk_dirty_maybe_remove
 arena_chunk_ralloc_huge_expand
 arena_chunk_ralloc_huge_shrink
 arena_chunk_ralloc_huge_similar
@@ -182,7 +182,9 @@ extent_node_addr_get
 extent_node_addr_set
 extent_node_arena_get
 extent_node_arena_set
+extent_node_dirty_insert
 extent_node_dirty_linkage_init
+extent_node_dirty_remove
 extent_node_init
 extent_node_prof_tctx_get
 extent_node_prof_tctx_set