Normalize *_link and link_* fields to all be *_link.

Jason Evans 2015-02-15 16:43:52 -08:00
parent b01186cebd
commit 2195ba4e1f
4 changed files with 12 additions and 13 deletions

@@ -34,14 +34,14 @@ struct extent_node_s {
 	union {
 		/* Linkage for the size/address-ordered tree. */
-		rb_node(extent_node_t)	link_szad;
+		rb_node(extent_node_t)	szad_link;

 		/* Linkage for huge allocations and cached chunks nodes. */
-		ql_elm(extent_node_t)	link_ql;
+		ql_elm(extent_node_t)	ql_link;
 	};

 	/* Linkage for the address-ordered tree. */
-	rb_node(extent_node_t)	link_ad;
+	rb_node(extent_node_t)	ad_link;
 };
 typedef rb_tree(extent_node_t) extent_tree_t;
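The union is why the rename comes in matched pairs: at any given time a node is linked into either the size/address-ordered tree (szad_link) or a ql list (ql_link), never both, so the two linkage fields can share storage, while ad_link sits outside the union because address-tree membership can coexist with either. A minimal sketch of that space-sharing idea, assuming C11 anonymous unions; node_t and the pointer-only linkage shapes below are illustrative stand-ins, not jemalloc's real rb_node()/ql_elm() expansions:

#include <stdio.h>

typedef struct node_s node_t;
struct node_s {
	union {
		/* Tree linkage: live while the node is in the szad tree. */
		struct { node_t *left, *right; } szad_link;
		/* List linkage: live while the node is cached or huge. */
		struct { node_t *next, *prev; } ql_link;
	};
	/* Address-tree linkage; may be live alongside either of the above. */
	struct { node_t *left, *right; } ad_link;
};

int
main(void)
{
	/* The union overlays the two mutually exclusive linkages. */
	printf("sizeof(node_t) = %zu\n", sizeof(node_t));
	return (0);
}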

@@ -607,12 +607,12 @@ arena_node_alloc(arena_t *arena)
 	extent_node_t *node;

 	malloc_mutex_lock(&arena->node_cache_mtx);
-	node = ql_last(&arena->node_cache, link_ql);
+	node = ql_last(&arena->node_cache, ql_link);
 	if (node == NULL) {
 		malloc_mutex_unlock(&arena->node_cache_mtx);
 		return (base_alloc(sizeof(extent_node_t)));
 	}
-	ql_tail_remove(&arena->node_cache, extent_node_t, link_ql);
+	ql_tail_remove(&arena->node_cache, extent_node_t, ql_link);
 	malloc_mutex_unlock(&arena->node_cache_mtx);
 	return (node);
 }

@@ -622,8 +622,8 @@ arena_node_dalloc(arena_t *arena, extent_node_t *node)
 {

 	malloc_mutex_lock(&arena->node_cache_mtx);
-	ql_elm_new(node, link_ql);
-	ql_tail_insert(&arena->node_cache, node, link_ql);
+	ql_elm_new(node, ql_link);
+	ql_tail_insert(&arena->node_cache, node, ql_link);
 	malloc_mutex_unlock(&arena->node_cache_mtx);
 }
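arena_node_alloc() and arena_node_dalloc() treat arena->node_cache as a LIFO free list (insert at the tail, reuse from the tail), and every ql_* macro names the linkage field explicitly, which is why all four call sites change in this hunk. A self-contained sketch of that call-site pattern; the list_* macros and single-pointer linkage are stand-ins for ql.h, not its real implementation:

#include <stdio.h>

typedef struct node_s node_t;
struct node_s {
	int	id;
	/* Embedded linkage named ql_link, as in extent_node_t. */
	struct { node_t *next; } ql_link;
};

/* Stand-ins for ql_tail_insert()/ql_last(): the field name is a parameter. */
#define list_push(head, elm, field) do {				\
	(elm)->field.next = (head);					\
	(head) = (elm);							\
} while (0)
#define list_pop(head, field)						\
	((head) = (head)->field.next)

int
main(void)
{
	node_t a = {1, {NULL}}, b = {2, {NULL}}, *cache = NULL;

	list_push(cache, &a, ql_link);	/* dalloc: return node to cache */
	list_push(cache, &b, ql_link);
	printf("reused node id: %d\n", cache->id);	/* alloc: reuse */
	list_pop(cache, ql_link);
	return (0);
}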

@@ -22,7 +22,7 @@ extent_szad_comp(extent_node_t *a, extent_node_t *b)
 }

 /* Generate red-black tree functions. */
-rb_gen(, extent_tree_szad_, extent_tree_t, extent_node_t, link_szad,
+rb_gen(, extent_tree_szad_, extent_tree_t, extent_node_t, szad_link,
     extent_szad_comp)

 JEMALLOC_INLINE_C int

@@ -35,5 +35,4 @@ extent_ad_comp(extent_node_t *a, extent_node_t *b)
 }

 /* Generate red-black tree functions. */
-rb_gen(, extent_tree_ad_, extent_tree_t, extent_node_t, link_ad,
-    extent_ad_comp)
+rb_gen(, extent_tree_ad_, extent_tree_t, extent_node_t, ad_link, extent_ad_comp)
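rb_gen() stamps out a whole family of tree functions (prefixed extent_tree_szad_ and extent_tree_ad_ here) whose bodies are hard-wired to the named linkage field, so the field name appears once per tree rather than at every tree operation. A rough sketch of that code-generation idea; tree_gen, the unbalanced insert, the value-returning signature, and the comparator folded into < are all simplifications of what jemalloc's real rb.h emits:

#include <stdio.h>

typedef struct node_s node_t;
struct node_s {
	int	key;
	/* Embedded tree linkage, analogous to szad_link/ad_link. */
	struct { node_t *left, *right; } szad_link;
};

/*
 * Stand-in for rb_gen(): generate a prefixed function whose body is
 * hard-wired to one linkage field.
 */
#define tree_gen(prefix, field)					\
static node_t *							\
prefix##insert(node_t *root, node_t *elm)			\
{								\
	if (root == NULL)					\
		return (elm);					\
	if (elm->key < root->key)				\
		root->field.left =				\
		    prefix##insert(root->field.left, elm);	\
	else							\
		root->field.right =				\
		    prefix##insert(root->field.right, elm);	\
	return (root);						\
}

tree_gen(extent_tree_szad_, szad_link)

int
main(void)
{
	node_t a = {2, {NULL, NULL}}, b = {1, {NULL, NULL}};
	node_t *root = NULL;

	root = extent_tree_szad_insert(root, &a);
	root = extent_tree_szad_insert(root, &b);
	printf("root=%d left=%d\n", root->key, root->szad_link.left->key);
	return (0);
}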

@@ -86,8 +86,8 @@ huge_palloc(tsd_t *tsd, arena_t *arena, size_t usize, size_t alignment,
 	/* Insert node into huge. */
 	malloc_mutex_lock(&arena->huge_mtx);
-	ql_elm_new(node, link_ql);
-	ql_tail_insert(&arena->huge, node, link_ql);
+	ql_elm_new(node, ql_link);
+	ql_tail_insert(&arena->huge, node, ql_link);
 	malloc_mutex_unlock(&arena->huge_mtx);

 	if (zero || (config_fill && unlikely(opt_zero))) {

@@ -361,7 +361,7 @@ huge_dalloc(tsd_t *tsd, void *ptr, tcache_t *tcache)
 	arena = node->arena;
 	huge_node_unset(ptr, node);
 	malloc_mutex_lock(&arena->huge_mtx);
-	ql_remove(&arena->huge, node, link_ql);
+	ql_remove(&arena->huge, node, ql_link);
 	malloc_mutex_unlock(&arena->huge_mtx);

 	huge_dalloc_junk(node->addr, node->size);
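A leftover link_* reference in code would fail to compile, since the struct fields no longer exist, but the old names can still linger in comments and documentation. A quick search (paths illustrative) should print nothing once the normalization is complete:

grep -rn 'link_szad\|link_ql\|link_ad' include/ src/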