Rewrite red-black trees.

Use left-leaning 2-3 red-black trees instead of left-leaning 2-3-4
red-black trees.  This reduces maximum tree height from (3 lg n) to
(2 lg n).
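
For intuition: in a 2-3 left-leaning red-black tree, red links lean
left and never chain, so each root-to-leaf path carries at most one
red link per black link, giving the (2 lg n) bound.  A minimal
invariant-check sketch, not code from this commit; node_t and its
fields are hypothetical names, and perfect black balance is omitted
for brevity:

#include <stdbool.h>
#include <stddef.h>

typedef struct node_s node_t;
struct node_s {
	node_t	*left;
	node_t	*right;
	bool	red;	/* Color of the link from this node's parent. */
};

/* Verify the 2-3 LLRB shape invariants for the subtree rooted at n. */
static bool
llrb_23_invariant(const node_t *n)
{

	if (n == NULL)
		return (true);
	/* Red links lean left: a right child is never red. */
	if (n->right != NULL && n->right->red)
		return (false);
	/* 2-3 (no 4-nodes): a red node never has a red left child. */
	if (n->red && n->left != NULL && n->left->red)
		return (false);
	return (llrb_23_invariant(n->left) && llrb_23_invariant(n->right));
}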

Do lazy balance fixup, rather than transforming the tree during the down
pass.  This improves insert/remove speed by ~30%.
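
Concretely, "lazy" means the down pass is a plain binary search that
does no restructuring; violations are repaired afterward, and only on
the search path where they actually occur.  A sketch of the classic
bottom-up repair step for LLRB trees (after Sedgewick); the actual
rb.h implementation is non-recursive and macro-generated, so these
helpers are illustrative only and reuse node_t from the sketch above:

static bool
is_red(const node_t *n)
{

	return (n != NULL && n->red);
}

static node_t *
rotate_left(node_t *n)
{
	node_t *r = n->right;

	n->right = r->left;
	r->left = n;
	r->red = n->red;
	n->red = true;
	return (r);
}

static node_t *
rotate_right(node_t *n)
{
	node_t *l = n->left;

	n->left = l->right;
	l->right = n;
	l->red = n->red;
	n->red = true;
	return (l);
}

/* Repair any violation introduced below n, while unwinding. */
static node_t *
fixup(node_t *n)
{

	if (is_red(n->right) && !is_red(n->left))
		n = rotate_left(n);	/* Lean a right-leaning red left. */
	if (is_red(n->left) && is_red(n->left->left))
		n = rotate_right(n);	/* Straighten two reds in a row. */
	if (is_red(n->left) && is_red(n->right)) {
		/* Split the transient 4-node; pass the red link up. */
		n->left->red = false;
		n->right->red = false;
		n->red = true;
	}
	return (n);
}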

Use callback-based iteration rather than macros.
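
The generated iterators visit nodes in order, passing each to a
caller-supplied callback along with an opaque argument; the callback
returns NULL to continue, or a non-NULL pointer to terminate the
traversal (the iterator then returns that pointer).  A self-contained
sketch of the new style: the widget names are hypothetical, while
rb_node/rb_tree/rb_gen and the generated calls are rb.h interfaces as
used in the diffs below.  (The diffs use "static JEMALLOC_ATTR(unused)"
as the attribute argument to silence unused-function warnings; plain
"static" suffices for a sketch.)

#include <stddef.h>
#include <stdio.h>

#define RB_COMPACT
#include "jemalloc/internal/rb.h"

typedef struct widget_s widget_t;
struct widget_s {
	int			key;
	rb_node(widget_t)	link;
};
typedef rb_tree(widget_t) widget_tree_t;

static int
widget_comp(widget_t *a, widget_t *b)
{

	return ((a->key > b->key) - (a->key < b->key));
}

rb_gen(static, widget_tree_, widget_tree_t, widget_t, link, widget_comp)

static widget_t *
count_cb(widget_tree_t *tree, widget_t *widget, void *arg)
{

	(*(size_t *)arg)++;
	return (NULL);	/* NULL keeps the iteration going. */
}

int
main(void)
{
	widget_tree_t tree;
	widget_t w[3];
	size_t nwidgets = 0;
	unsigned i;

	widget_tree_new(&tree);
	for (i = 0; i < 3; i++) {
		w[i].key = (int)i;
		widget_tree_insert(&tree, &w[i]);
	}
	/* NULL start node means "iterate from the first node". */
	widget_tree_iter(&tree, NULL, count_cb, (void *)&nwidgets);
	printf("%zu\n", nwidgets);	/* Prints 3. */
	return (0);
}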
Author: Jason Evans
Date:   2010-02-28 15:00:18 -08:00
Parent: fbb504def6
Commit: f3ff75289b

4 changed files with 892 additions and 853 deletions


@@ -31,6 +31,7 @@
 #include <dlfcn.h>
 #endif
 
+#define RB_COMPACT
 #include "jemalloc/internal/rb.h"
 #include "jemalloc/internal/qr.h"
 #include "jemalloc/internal/ql.h"

File diff suppressed because it is too large.


@@ -200,8 +200,8 @@ arena_chunk_comp(arena_chunk_t *a, arena_chunk_t *b)
 	return ((a_chunk > b_chunk) - (a_chunk < b_chunk));
 }
 
-/* Wrap red-black tree macros in functions. */
-rb_wrap(static JEMALLOC_ATTR(unused), arena_chunk_tree_dirty_,
+/* Generate red-black tree functions. */
+rb_gen(static JEMALLOC_ATTR(unused), arena_chunk_tree_dirty_,
     arena_chunk_tree_t, arena_chunk_t, link_dirty, arena_chunk_comp)
 
 static inline int
@@ -216,8 +216,8 @@ arena_run_comp(arena_chunk_map_t *a, arena_chunk_map_t *b)
 	return ((a_mapelm > b_mapelm) - (a_mapelm < b_mapelm));
 }
 
-/* Wrap red-black tree macros in functions. */
-rb_wrap(static JEMALLOC_ATTR(unused), arena_run_tree_, arena_run_tree_t,
+/* Generate red-black tree functions. */
+rb_gen(static JEMALLOC_ATTR(unused), arena_run_tree_, arena_run_tree_t,
     arena_chunk_map_t, link, arena_run_comp)
 
 static inline int
@@ -248,8 +248,8 @@ arena_avail_comp(arena_chunk_map_t *a, arena_chunk_map_t *b)
 	return (ret);
 }
 
-/* Wrap red-black tree macros in functions. */
-rb_wrap(static JEMALLOC_ATTR(unused), arena_avail_tree_, arena_avail_tree_t,
+/* Generate red-black tree functions. */
+rb_gen(static JEMALLOC_ATTR(unused), arena_avail_tree_, arena_avail_tree_t,
     arena_chunk_map_t, link, arena_avail_comp)
 
 static inline void
@@ -689,6 +689,18 @@ arena_run_alloc(arena_t *arena, size_t size, bool large, bool zero)
 	return (run);
 }
 
+#ifdef JEMALLOC_DEBUG
+static arena_chunk_t *
+chunks_dirty_iter_cb(arena_chunk_tree_t *tree, arena_chunk_t *chunk, void *arg)
+{
+	size_t *ndirty = (size_t *)arg;
+
+	assert(chunk->dirtied);
+	*ndirty += chunk->ndirty;
+
+	return (NULL);
+}
+#endif
+
 static void
 arena_purge(arena_t *arena)
 {
@@ -697,11 +709,8 @@ arena_purge(arena_t *arena)
 #ifdef JEMALLOC_DEBUG
 	size_t ndirty = 0;
 
-	rb_foreach_begin(arena_chunk_t, link_dirty, &arena->chunks_dirty,
-	    chunk) {
-		assert(chunk->dirtied);
-		ndirty += chunk->ndirty;
-	} rb_foreach_end(arena_chunk_t, link_dirty, &arena->chunks_dirty, chunk)
+	arena_chunk_tree_dirty_iter(&arena->chunks_dirty, NULL,
+	    chunks_dirty_iter_cb, (void *)&ndirty);
 	assert(ndirty == arena->ndirty);
 #endif
 	assert((arena->nactive >> opt_lg_dirty_mult) < arena->ndirty);
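
A note on the convention visible in this hunk: chunks_dirty_iter_cb
always returns NULL, so the traversal runs to completion.  Returning
a non-NULL pointer instead stops the walk early and becomes the
iterator's return value, which replaces the break-out-of-macro
gymnastics the old rb_foreach_begin/rb_foreach_end pair required.  A
sketch of early termination, reusing the hypothetical widget types
from the example in the commit message above:

static widget_t *
find_ge_cb(widget_tree_t *tree, widget_t *widget, void *arg)
{
	int threshold = *(int *)arg;

	/* First in-order widget with key >= threshold ends the walk. */
	return ((widget->key >= threshold) ? widget : NULL);
}

static widget_t *
first_key_ge(widget_tree_t *tree, int threshold)
{

	/* Returns NULL if no widget's key reaches the threshold. */
	return (widget_tree_iter(tree, NULL, find_ge_cb,
	    (void *)&threshold));
}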


@@ -22,8 +22,8 @@ extent_szad_comp(extent_node_t *a, extent_node_t *b)
 	return (ret);
 }
 
-/* Wrap red-black tree macros in functions. */
-rb_wrap(, extent_tree_szad_, extent_tree_t, extent_node_t, link_szad,
+/* Generate red-black tree functions. */
+rb_gen(, extent_tree_szad_, extent_tree_t, extent_node_t, link_szad,
     extent_szad_comp)
 
 #endif
@@ -36,6 +36,6 @@ extent_ad_comp(extent_node_t *a, extent_node_t *b)
 	return ((a_addr > b_addr) - (a_addr < b_addr));
 }
 
-/* Wrap red-black tree macros in functions. */
-rb_wrap(, extent_tree_ad_, extent_tree_t, extent_node_t, link_ad,
+/* Generate red-black tree functions. */
+rb_gen(, extent_tree_ad_, extent_tree_t, extent_node_t, link_ad,
     extent_ad_comp)