Use regular arena allocation for huge tree nodes.
This avoids grabbing the base mutex, as a step towards fine-grained locking for huge allocations. The thread cache also provides a tiny (~3%) improvement for serial huge allocations.
commit f22214a29d
parent 8bb3198f72
committed by Jason Evans
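
To make the locking claim concrete: below is a minimal before/after sketch of the allocation path for a huge allocation's tree node. This is an illustration only, not jemalloc's code; the _sketch names, the mutex, and the malloc() stand-ins are all hypothetical.

/*
 * Illustrative sketch only; names with a _sketch suffix are hypothetical
 * stand-ins for jemalloc internals.  Before this change, every huge
 * allocation's tree node came from a shared base allocator serialized by
 * one mutex; after it, nodes come from the regular allocation path, which
 * a per-thread cache can serve without taking any shared lock.
 */
#include <pthread.h>
#include <stdlib.h>

typedef struct extent_node_s extent_node_t;
struct extent_node_s {
	extent_node_t	*next;	/* placeholder payload */
};

static pthread_mutex_t	base_mtx_sketch = PTHREAD_MUTEX_INITIALIZER;

/* Old path: all threads contend on one mutex for every huge allocation. */
static extent_node_t *
base_node_alloc_sketch(void)
{
	extent_node_t *node;

	pthread_mutex_lock(&base_mtx_sketch);
	node = malloc(sizeof(*node));	/* stand-in for base bump allocation */
	pthread_mutex_unlock(&base_mtx_sketch);
	return (node);
}

/* New path: an ordinary allocation, eligible for the thread cache; no
 * shared lock is taken on the common path. */
static extent_node_t *
arena_node_alloc_sketch(void)
{

	return (malloc(sizeof(extent_node_t)));
}

int
main(void)
{
	extent_node_t *a = base_node_alloc_sketch();
	extent_node_t *b = arena_node_alloc_sketch();

	free(a);
	free(b);
	return (0);
}
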
@@ -8,7 +8,16 @@ const char *malloc_conf =
 static arena_dalloc_junk_small_t *arena_dalloc_junk_small_orig;
 static arena_dalloc_junk_large_t *arena_dalloc_junk_large_orig;
 static huge_dalloc_junk_t *huge_dalloc_junk_orig;
-static void *most_recently_junked;
+static void *watch_for_junking;
+static bool saw_junking;
+
+static void
+watch_junking(void *p)
+{
+
+	watch_for_junking = p;
+	saw_junking = false;
+}
 
 static void
 arena_dalloc_junk_small_intercept(void *ptr, arena_bin_info_t *bin_info)
@@ -21,7 +30,8 @@ arena_dalloc_junk_small_intercept(void *ptr, arena_bin_info_t *bin_info)
 		    "Missing junk fill for byte %zu/%zu of deallocated region",
 		    i, bin_info->reg_size);
 	}
-	most_recently_junked = ptr;
+	if (ptr == watch_for_junking)
+		saw_junking = true;
 }
 
 static void
@@ -35,7 +45,8 @@ arena_dalloc_junk_large_intercept(void *ptr, size_t usize)
 		    "Missing junk fill for byte %zu/%zu of deallocated region",
 		    i, usize);
 	}
-	most_recently_junked = ptr;
+	if (ptr == watch_for_junking)
+		saw_junking = true;
 }
 
 static void
@@ -48,7 +59,8 @@ huge_dalloc_junk_intercept(void *ptr, size_t usize)
 	 * enough that it doesn't make sense to duplicate the decision logic in
 	 * test code, so don't actually check that the region is junk-filled.
 	 */
-	most_recently_junked = ptr;
+	if (ptr == watch_for_junking)
+		saw_junking = true;
 }
 
 static void
@@ -87,18 +99,19 @@ test_junk(size_t sz_min, size_t sz_max)
 		}
 
 		if (xallocx(s, sz+1, 0, 0) == sz) {
-			void *junked = (void *)s;
+			watch_junking(s);
 			s = (char *)rallocx(s, sz+1, 0);
 			assert_ptr_not_null((void *)s,
 			    "Unexpected rallocx() failure");
-			assert_ptr_eq(most_recently_junked, junked,
+			assert_true(saw_junking,
 			    "Expected region of size %zu to be junk-filled",
 			    sz);
 		}
 	}
 
+	watch_junking(s);
 	dallocx(s, 0);
-	assert_ptr_eq(most_recently_junked, (void *)s,
+	assert_true(saw_junking,
 	    "Expected region of size %zu to be junk-filled", sz);
 
 	arena_dalloc_junk_small = arena_dalloc_junk_small_orig;
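
The test-side change above follows from the allocation change: once tree nodes are ordinary allocations, freeing a huge region can junk-fill more than one region (the user region, then its node), so comparing against a most_recently_junked pointer becomes brittle, whereas watching a specific pointer and latching a flag does not. Below is a standalone sketch of that pattern, with a hypothetical junk_hook() standing in for the test's interceptors.

/*
 * Standalone sketch of the watch/saw pattern above.  junk_hook() is a
 * hypothetical stand-in for the arena_dalloc_junk_*_intercept() and
 * huge_dalloc_junk_intercept() hooks in the test; assume it is invoked
 * once per junk-filled region.
 */
#include <stdbool.h>
#include <stdio.h>
#include <stdlib.h>

static void	*watch_for_junking;
static bool	saw_junking;

static void
watch_junking(void *p)
{

	watch_for_junking = p;
	saw_junking = false;
}

static void
junk_hook(void *ptr)
{

	if (ptr == watch_for_junking)
		saw_junking = true;
}

int
main(void)
{
	void *user = malloc(4096);
	void *node = malloc(64);	/* stands in for a huge tree node */

	watch_junking(user);
	junk_hook(user);	/* the user region is junked... */
	junk_hook(node);	/* ...and then an internal node is too */

	/*
	 * A "most recently junked" comparison against user would fail
	 * here even though user was junked; the latched flag still passes.
	 */
	printf("saw_junking: %s\n", saw_junking ? "true" : "false");

	free(node);
	free(user);
	return (0);
}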