Rewrite red-black trees to do lazy balance fixup. This improves
insert/remove speed by ~30%.
Jason Evans 2010-02-28 22:57:13 +00:00
parent 78fffe9d7b
commit 15f8d49756
Notes: svn2git 2020-12-20 02:59:44 +00:00
svn path=/head/; revision=204493
2 changed files with 890 additions and 825 deletions


@@ -194,6 +194,7 @@ __FBSDID("$FreeBSD$");
 #include "un-namespace.h"
 
+#define RB_COMPACT
 #include "rb.h"
 #if (defined(MALLOC_TCACHE) && defined(MALLOC_STATS))
 #include "qr.h"
@@ -1746,7 +1747,7 @@ extent_szad_comp(extent_node_t *a, extent_node_t *b)
 }
 
 /* Wrap red-black tree macros in functions. */
-rb_wrap(__unused static, extent_tree_szad_, extent_tree_t, extent_node_t,
+rb_gen(__unused static, extent_tree_szad_, extent_tree_t, extent_node_t,
     link_szad, extent_szad_comp)
 #endif
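For context, the rb_gen() calls introduced here replace rb_wrap(): rather than wrapping a separate macro implementation in functions, rb_gen() emits a complete set of tree operations directly. Below is a minimal sketch of the pattern, assuming the rewritten rb.h is on the include path; node_t, node_cmp, and the tree_ prefix are illustrative names, not part of this commit.

#include "rb.h"

typedef struct node_s node_t;
struct node_s {
	rb_node(node_t)	link;	/* Embedded red-black tree linkage. */
	int		key;
};
typedef rb_tree(node_t) tree_t;

static int
node_cmp(node_t *a, node_t *b)
{

	/* Negative, zero, or positive, as for qsort(3) comparators. */
	return ((a->key > b->key) - (a->key < b->key));
}

/* Emits static tree_new(), tree_insert(), tree_remove(), tree_search(),
 * tree_first(), tree_next(), tree_iter(), and friends. */
rb_gen(static, tree_, tree_t, node_t, link, node_cmp)

In this scheme tree_new(&t) initializes an empty tree, and tree_insert()/tree_remove() link and unlink caller-allocated nodes; the tree itself never allocates memory.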
@@ -1760,7 +1761,7 @@ extent_ad_comp(extent_node_t *a, extent_node_t *b)
 }
 
 /* Wrap red-black tree macros in functions. */
-rb_wrap(__unused static, extent_tree_ad_, extent_tree_t, extent_node_t, link_ad,
+rb_gen(__unused static, extent_tree_ad_, extent_tree_t, extent_node_t, link_ad,
     extent_ad_comp)
 
 /*
@@ -2346,7 +2347,7 @@ arena_chunk_comp(arena_chunk_t *a, arena_chunk_t *b)
 }
 
 /* Wrap red-black tree macros in functions. */
-rb_wrap(__unused static, arena_chunk_tree_dirty_, arena_chunk_tree_t,
+rb_gen(__unused static, arena_chunk_tree_dirty_, arena_chunk_tree_t,
     arena_chunk_t, link_dirty, arena_chunk_comp)
 
 static inline int
@@ -2362,7 +2363,7 @@ arena_run_comp(arena_chunk_map_t *a, arena_chunk_map_t *b)
 }
 
 /* Wrap red-black tree macros in functions. */
-rb_wrap(__unused static, arena_run_tree_, arena_run_tree_t, arena_chunk_map_t,
+rb_gen(__unused static, arena_run_tree_, arena_run_tree_t, arena_chunk_map_t,
     link, arena_run_comp)
 
 static inline int
@@ -2394,7 +2395,7 @@ arena_avail_comp(arena_chunk_map_t *a, arena_chunk_map_t *b)
 }
 
 /* Wrap red-black tree macros in functions. */
-rb_wrap(__unused static, arena_avail_tree_, arena_avail_tree_t,
+rb_gen(__unused static, arena_avail_tree_, arena_avail_tree_t,
     arena_chunk_map_t, link, arena_avail_comp)
 
 static inline void
@@ -2824,6 +2825,18 @@ arena_run_alloc(arena_t *arena, size_t size, bool large, bool zero)
 	return (run);
 }
 
+#ifdef MALLOC_DEBUG
+static arena_chunk_t *
+chunks_dirty_iter_cb(arena_chunk_tree_t *tree, arena_chunk_t *chunk, void *arg)
+{
+	size_t *ndirty = (size_t *)arg;
+
+	assert(chunk->dirtied);
+	*ndirty += chunk->ndirty;
+	return (NULL);
+}
+#endif
+
 static void
 arena_purge(arena_t *arena)
 {
@@ -2832,11 +2845,8 @@ arena_purge(arena_t *arena)
 #ifdef MALLOC_DEBUG
 	size_t ndirty = 0;
 
-	rb_foreach_begin(arena_chunk_t, link_dirty, &arena->chunks_dirty,
-	    chunk) {
-		assert(chunk->dirtied);
-		ndirty += chunk->ndirty;
-	} rb_foreach_end(arena_chunk_t, link_dirty, &arena->chunks_dirty, chunk)
+	arena_chunk_tree_dirty_iter(&arena->chunks_dirty, NULL,
+	    chunks_dirty_iter_cb, (void *)&ndirty);
 	assert(ndirty == arena->ndirty);
 #endif
 	assert((arena->nactive >> opt_lg_dirty_mult) < arena->ndirty);
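This hunk shows the new traversal idiom: the rb_foreach_begin()/rb_foreach_end() macro pair is replaced by the generated _iter() function, which invokes a callback once per node in order and stops early if the callback returns non-NULL. Continuing the hypothetical tree_/node_t sketch above, a summation in the style of the ndirty check would look like:

/* Return NULL to keep iterating; returning a node pointer would stop
 * the traversal at that node, mirroring chunks_dirty_iter_cb() above. */
static node_t *
sum_cb(tree_t *tree, node_t *node, void *arg)
{
	long *sum = (long *)arg;

	*sum += node->key;
	return (NULL);
}

static long
tree_sum(tree_t *tree)
{
	long sum = 0;

	/* NULL as the start node means begin at the leftmost node. */
	tree_iter(tree, NULL, sum_cb, (void *)&sum);
	return (sum);
}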

File diff suppressed because it is too large.