mark huge allocations as unlikely
This cleans up the fast path a bit more by moving more code out of it.
committed by Jason Evans
parent c93ed81cd0
commit 809b0ac391
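As an aside (not part of the commit): on GCC/Clang, likely()/unlikely() hints of the kind added below are conventionally thin wrappers around __builtin_expect(). The sketch that follows uses that conventional definition; the demo_maxclass threshold and demo_* functions are illustrative stand-ins, not jemalloc's actual code.

#include <stdlib.h>

/* Conventional GCC/Clang definitions; jemalloc's own may differ in detail. */
#define likely(x)	__builtin_expect(!!(x), 1)
#define unlikely(x)	__builtin_expect(!!(x), 0)

/* Hypothetical threshold standing in for arena_maxclass. */
static const size_t demo_maxclass = (size_t)2 << 20;

static void *
demo_small_alloc(size_t size)
{
	return (malloc(size));	/* stand-in for the arena fast path */
}

static void *
demo_huge_alloc(size_t size)
{
	return (malloc(size));	/* stand-in for the huge slow path */
}

void *
demo_alloc(size_t size)
{
	/*
	 * Hinting the huge case as unlikely lets the compiler emit the
	 * small-allocation path as straight-line, fall-through code.
	 */
	if (unlikely(size > demo_maxclass))
		return (demo_huge_alloc(size));
	return (demo_small_alloc(size));
}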
@@ -2095,7 +2095,7 @@ arena_ralloc_large(void *ptr, size_t oldsize, size_t size, size_t extra,
 	size_t usize;
 
 	/* Make sure extra can't cause size_t overflow. */
-	if (extra >= arena_maxclass)
+	if (unlikely(extra >= arena_maxclass))
 		return (true);
 
 	usize = s2u(size + extra);
@@ -2142,7 +2142,7 @@ arena_ralloc_no_move(void *ptr, size_t oldsize, size_t size, size_t extra,
 	/*
 	 * Avoid moving the allocation if the size class can be left the same.
 	 */
-	if (oldsize <= arena_maxclass) {
+	if (likely(oldsize <= arena_maxclass)) {
 		if (oldsize <= SMALL_MAXCLASS) {
 			assert(arena_bin_info[size2index(oldsize)].reg_size
 			    == oldsize);
@@ -264,7 +264,7 @@ a0alloc(size_t size, bool zero)
 	if (size == 0)
 		size = 1;
 
-	if (size <= arena_maxclass)
+	if (likely(size <= arena_maxclass))
 		ret = arena_malloc(NULL, a0get(), size, zero, false);
 	else
 		ret = huge_malloc(NULL, a0get(), size, zero, false);
@@ -295,7 +295,7 @@ a0free(void *ptr)
 		return;
 
 	chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
-	if (chunk != ptr)
+	if (likely(chunk != ptr))
 		arena_dalloc(NULL, chunk, ptr, false);
 	else
 		huge_dalloc(NULL, ptr, false);
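For context on the a0free() hunk: the chunk != ptr test relies on chunks being aligned to the chunk size, so CHUNK_ADDR2BASE() can recover a pointer's chunk base by masking. A huge allocation starts at its chunk base, while small/large allocations sit at a nonzero offset inside an arena chunk, which is why chunk != ptr is the likely case. The sketch below illustrates that masking trick with made-up names (DEMO_CHUNK_SIZE, demo_addr2base, demo_is_huge); it is not jemalloc's implementation.

#include <stdbool.h>
#include <stdint.h>

/* Hypothetical 4 MiB chunk size; jemalloc's default may differ. */
#define DEMO_CHUNK_SIZE	((uintptr_t)4 << 20)

/* Round an address down to the base of its (power-of-two-aligned) chunk. */
static inline void *
demo_addr2base(const void *addr)
{
	return ((void *)((uintptr_t)addr & ~(DEMO_CHUNK_SIZE - 1)));
}

/*
 * Huge allocations occupy whole chunks and therefore start at a chunk
 * base; small/large allocations live inside an arena chunk at a nonzero
 * offset from that base.
 */
static inline bool
demo_is_huge(const void *ptr)
{
	return (demo_addr2base(ptr) == ptr);
}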