Implement the *allocx() API.

Implement the *allocx() API, which is a successor to the *allocm() API.
The *allocx() functions are slightly simpler to use because they have
fewer parameters, they directly return the results of primary interest,
and mallocx()/rallocx() avoid the strict aliasing pitfall that
allocm()/rallocm() share with posix_memalign().  The following code
violates strict aliasing rules:

    foo_t *foo;
    allocm((void **)&foo, NULL, 42, 0);

whereas the following is safe:

    foo_t *foo;
    void *p;
    allocm(&p, NULL, 42, 0);
    foo = (foo_t *)p;

mallocx() does not have this problem:

    foo_t *foo = (foo_t *)mallocx(42, 0);
This commit is contained in:
Jason Evans 2013-12-12 22:35:52 -08:00
parent 0ac396a06a
commit d82a5e6a34
19 changed files with 954 additions and 365 deletions

View File

@ -112,13 +112,16 @@ TESTS_UNIT := $(srcroot)test/unit/bitmap.c $(srcroot)test/unit/math.c \
$(srcroot)test/unit/SFMT.c $(srcroot)test/unit/tsd.c $(srcroot)test/unit/SFMT.c $(srcroot)test/unit/tsd.c
TESTS_INTEGRATION := $(srcroot)test/integration/aligned_alloc.c \ TESTS_INTEGRATION := $(srcroot)test/integration/aligned_alloc.c \
$(srcroot)test/integration/allocated.c \ $(srcroot)test/integration/allocated.c \
$(srcroot)test/integration/ALLOCM_ARENA.c \ $(srcroot)test/integration/mallocx.c \
$(srcroot)test/integration/mremap.c \ $(srcroot)test/integration/mremap.c \
$(srcroot)test/integration/posix_memalign.c \ $(srcroot)test/integration/posix_memalign.c \
$(srcroot)test/integration/rallocx.c \
$(srcroot)test/integration/thread_arena.c \ $(srcroot)test/integration/thread_arena.c \
$(srcroot)test/integration/thread_tcache_enabled.c $(srcroot)test/integration/thread_tcache_enabled.c \
$(srcroot)test/integration/xallocx.c
ifeq ($(enable_experimental), 1) ifeq ($(enable_experimental), 1)
TESTS_INTEGRATION += $(srcroot)test/integration/allocm.c \ TESTS_INTEGRATION += $(srcroot)test/integration/allocm.c \
$(srcroot)test/integration/ALLOCM_ARENA.c \
$(srcroot)test/integration/rallocm.c $(srcroot)test/integration/rallocm.c
endif endif
TESTS_STRESS := TESTS_STRESS :=

View File

@ -417,7 +417,7 @@ AC_PROG_RANLIB
AC_PATH_PROG([LD], [ld], [false], [$PATH]) AC_PATH_PROG([LD], [ld], [false], [$PATH])
AC_PATH_PROG([AUTOCONF], [autoconf], [false], [$PATH]) AC_PATH_PROG([AUTOCONF], [autoconf], [false], [$PATH])
public_syms="malloc_conf malloc_message malloc calloc posix_memalign aligned_alloc realloc free malloc_usable_size malloc_stats_print mallctl mallctlnametomib mallctlbymib" public_syms="malloc_conf malloc_message malloc calloc posix_memalign aligned_alloc realloc free mallocx rallocx xallocx sallocx dallocx nallocx mallctl mallctlnametomib mallctlbymib malloc_stats_print malloc_usable_size"
dnl Check for allocator-related functions that should be wrapped. dnl Check for allocator-related functions that should be wrapped.
AC_CHECK_FUNC([memalign], AC_CHECK_FUNC([memalign],

View File

@ -33,11 +33,17 @@
<refname>aligned_alloc</refname> <refname>aligned_alloc</refname>
<refname>realloc</refname> <refname>realloc</refname>
<refname>free</refname> <refname>free</refname>
<refname>malloc_usable_size</refname> <refname>mallocx</refname>
<refname>malloc_stats_print</refname> <refname>rallocx</refname>
<refname>xallocx</refname>
<refname>sallocx</refname>
<refname>dallocx</refname>
<refname>nallocx</refname>
<refname>mallctl</refname> <refname>mallctl</refname>
<refname>mallctlnametomib</refname> <refname>mallctlnametomib</refname>
<refname>mallctlbymib</refname> <refname>mallctlbymib</refname>
<refname>malloc_stats_print</refname>
<refname>malloc_usable_size</refname>
<refname>allocm</refname> <refname>allocm</refname>
<refname>rallocm</refname> <refname>rallocm</refname>
<refname>sallocm</refname> <refname>sallocm</refname>
@ -92,16 +98,37 @@
<refsect2> <refsect2>
<title>Non-standard API</title> <title>Non-standard API</title>
<funcprototype> <funcprototype>
<funcdef>size_t <function>malloc_usable_size</function></funcdef> <funcdef>void *<function>mallocx</function></funcdef>
<paramdef>const void *<parameter>ptr</parameter></paramdef> <paramdef>size_t <parameter>size</parameter></paramdef>
<paramdef>int <parameter>flags</parameter></paramdef>
</funcprototype> </funcprototype>
<funcprototype> <funcprototype>
<funcdef>void <function>malloc_stats_print</function></funcdef> <funcdef>void *<function>rallocx</function></funcdef>
<paramdef>void <parameter>(*write_cb)</parameter> <paramdef>void *<parameter>ptr</parameter></paramdef>
<funcparams>void *, const char *</funcparams> <paramdef>size_t <parameter>size</parameter></paramdef>
</paramdef> <paramdef>int <parameter>flags</parameter></paramdef>
<paramdef>void *<parameter>cbopaque</parameter></paramdef> </funcprototype>
<paramdef>const char *<parameter>opts</parameter></paramdef> <funcprototype>
<funcdef>size_t <function>xallocx</function></funcdef>
<paramdef>void *<parameter>ptr</parameter></paramdef>
<paramdef>size_t <parameter>size</parameter></paramdef>
<paramdef>size_t <parameter>extra</parameter></paramdef>
<paramdef>int <parameter>flags</parameter></paramdef>
</funcprototype>
<funcprototype>
<funcdef>size_t <function>sallocx</function></funcdef>
<paramdef>void *<parameter>ptr</parameter></paramdef>
<paramdef>int <parameter>flags</parameter></paramdef>
</funcprototype>
<funcprototype>
<funcdef>void <function>dallocx</function></funcdef>
<paramdef>void *<parameter>ptr</parameter></paramdef>
<paramdef>int <parameter>flags</parameter></paramdef>
</funcprototype>
<funcprototype>
<funcdef>size_t <function>nallocx</function></funcdef>
<paramdef>size_t <parameter>size</parameter></paramdef>
<paramdef>int <parameter>flags</parameter></paramdef>
</funcprototype> </funcprototype>
<funcprototype> <funcprototype>
<funcdef>int <function>mallctl</function></funcdef> <funcdef>int <function>mallctl</function></funcdef>
@ -126,6 +153,18 @@
<paramdef>void *<parameter>newp</parameter></paramdef> <paramdef>void *<parameter>newp</parameter></paramdef>
<paramdef>size_t <parameter>newlen</parameter></paramdef> <paramdef>size_t <parameter>newlen</parameter></paramdef>
</funcprototype> </funcprototype>
<funcprototype>
<funcdef>void <function>malloc_stats_print</function></funcdef>
<paramdef>void <parameter>(*write_cb)</parameter>
<funcparams>void *, const char *</funcparams>
</paramdef>
<paramdef>void *<parameter>cbopaque</parameter></paramdef>
<paramdef>const char *<parameter>opts</parameter></paramdef>
</funcprototype>
<funcprototype>
<funcdef>size_t <function>malloc_usable_size</function></funcdef>
<paramdef>const void *<parameter>ptr</parameter></paramdef>
</funcprototype>
<funcprototype> <funcprototype>
<funcdef>void <function>(*malloc_message)</function></funcdef> <funcdef>void <function>(*malloc_message)</function></funcdef>
<paramdef>void *<parameter>cbopaque</parameter></paramdef> <paramdef>void *<parameter>cbopaque</parameter></paramdef>
@ -225,41 +264,99 @@
</refsect2> </refsect2>
<refsect2> <refsect2>
<title>Non-standard API</title> <title>Non-standard API</title>
<para>The <function>mallocx<parameter/></function>,
<function>rallocx<parameter/></function>,
<function>xallocx<parameter/></function>,
<function>sallocx<parameter/></function>,
<function>dallocx<parameter/></function>, and
<function>nallocx<parameter/></function> functions all have a
<parameter>flags</parameter> argument that can be used to specify
options. The functions only check the options that are contextually
relevant. Use bitwise or (<code language="C">|</code>) operations to
specify one or more of the following:
<variablelist>
<varlistentry>
<term><constant>MALLOCX_LG_ALIGN(<parameter>la</parameter>)
</constant></term>
<para>The <function>malloc_usable_size<parameter/></function> function <listitem><para>Align the memory allocation to start at an address
returns the usable size of the allocation pointed to by that is a multiple of <code language="C">(1 &lt;&lt;
<parameter>ptr</parameter>. The return value may be larger than the size <parameter>la</parameter>)</code>. This macro does not validate
that was requested during allocation. The that <parameter>la</parameter> is within the valid
<function>malloc_usable_size<parameter/></function> function is not a range.</para></listitem>
mechanism for in-place <function>realloc<parameter/></function>; rather </varlistentry>
it is provided solely as a tool for introspection purposes. Any <varlistentry>
discrepancy between the requested allocation size and the size reported <term><constant>MALLOCX_ALIGN(<parameter>a</parameter>)
by <function>malloc_usable_size<parameter/></function> should not be </constant></term>
depended on, since such behavior is entirely implementation-dependent.
<listitem><para>Align the memory allocation to start at an address
that is a multiple of <parameter>a</parameter>, where
<parameter>a</parameter> is a power of two. This macro does not
validate that <parameter>a</parameter> is a power of 2.
</para></listitem>
</varlistentry>
<varlistentry>
<term><constant>MALLOCX_ZERO</constant></term>
<listitem><para>Initialize newly allocated memory to contain zero
bytes. In the growing reallocation case, the real size prior to
reallocation defines the boundary between untouched bytes and those
that are initialized to contain zero bytes. If this macro is
absent, newly allocated memory is uninitialized.</para></listitem>
</varlistentry>
<varlistentry>
<term><constant>MALLOCX_ARENA(<parameter>a</parameter>)
</constant></term>
<listitem><para>Use the arena specified by the index
<parameter>a</parameter> (and by necessity bypass the thread
cache). This macro has no effect for huge regions, nor for regions
that were allocated via an arena other than the one specified.
This macro does not validate that <parameter>a</parameter>
specifies an arena index in the valid range.</para></listitem>
</varlistentry>
</variablelist>
</para> </para>
<para>The <function>malloc_stats_print<parameter/></function> function <para>The <function>mallocx<parameter/></function> function allocates at
writes human-readable summary statistics via the least <parameter>size</parameter> bytes of memory, and returns a pointer
<parameter>write_cb</parameter> callback function pointer and to the base address of the allocation. Behavior is undefined if
<parameter>cbopaque</parameter> data passed to <parameter>size</parameter> is <constant>0</constant>.</para>
<parameter>write_cb</parameter>, or
<function>malloc_message<parameter/></function> if <para>The <function>rallocx<parameter/></function> function resizes the
<parameter>write_cb</parameter> is <constant>NULL</constant>. This allocation at <parameter>ptr</parameter> to be at least
function can be called repeatedly. General information that never <parameter>size</parameter> bytes, and returns a pointer to the base
changes during execution can be omitted by specifying "g" as a character address of the resulting allocation, which may or may not have moved from
within the <parameter>opts</parameter> string. Note that its original location. Behavior is undefined if
<function>malloc_message<parameter/></function> uses the <parameter>size</parameter> is <constant>0</constant>.</para>
<function>mallctl*<parameter/></function> functions internally, so
inconsistent statistics can be reported if multiple threads use these <para>The <function>xallocx<parameter/></function> function resizes the
functions simultaneously. If <option>--enable-stats</option> is allocation at <parameter>ptr</parameter> in place to be at least
specified during configuration, &ldquo;m&rdquo; and &ldquo;a&rdquo; can <parameter>size</parameter> bytes, and returns the real size of the
be specified to omit merged arena and per arena statistics, respectively; allocation. If <parameter>extra</parameter> is non-zero, an attempt is
&ldquo;b&rdquo; and &ldquo;l&rdquo; can be specified to omit per size made to resize the allocation to be at least <code
class statistics for bins and large objects, respectively. Unrecognized language="C">(<parameter>size</parameter> +
characters are silently ignored. Note that thread caching may prevent <parameter>extra</parameter>)</code> bytes, though inability to allocate
some statistics from being completely up to date, since extra locking the extra byte(s) will not by itself result in failure to resize.
would be required to merge counters that track thread cache operations. Behavior is undefined if <parameter>size</parameter> is
</para> <constant>0</constant>, or if <code
language="C">(<parameter>size</parameter> + <parameter>extra</parameter>
&gt; <constant>SIZE_T_MAX</constant>)</code>.</para>
<para>The <function>sallocx<parameter/></function> function returns the
real size of the allocation at <parameter>ptr</parameter>.</para>
<para>The <function>dallocx<parameter/></function> function causes the
memory referenced by <parameter>ptr</parameter> to be made available for
future allocations.</para>
<para>The <function>nallocx<parameter/></function> function allocates no
memory, but it performs the same size computation as the
<function>mallocx<parameter/></function> function, and returns the real
size of the allocation that would result from the equivalent
<function>mallocx<parameter/></function> function call. Behavior is
undefined if <parameter>size</parameter> is
<constant>0</constant>.</para>
<para>The <function>mallctl<parameter/></function> function provides a <para>The <function>mallctl<parameter/></function> function provides a
general interface for introspecting the memory allocator, as well as general interface for introspecting the memory allocator, as well as
@ -314,6 +411,41 @@ for (i = 0; i < nbins; i++) {
mallctlbymib(mib, miblen, &bin_size, &len, NULL, 0); mallctlbymib(mib, miblen, &bin_size, &len, NULL, 0);
/* Do something with bin_size... */ /* Do something with bin_size... */
}]]></programlisting></para> }]]></programlisting></para>
<para>The <function>malloc_stats_print<parameter/></function> function
writes human-readable summary statistics via the
<parameter>write_cb</parameter> callback function pointer and
<parameter>cbopaque</parameter> data passed to
<parameter>write_cb</parameter>, or
<function>malloc_message<parameter/></function> if
<parameter>write_cb</parameter> is <constant>NULL</constant>. This
function can be called repeatedly. General information that never
changes during execution can be omitted by specifying "g" as a character
within the <parameter>opts</parameter> string. Note that
<function>malloc_message<parameter/></function> uses the
<function>mallctl*<parameter/></function> functions internally, so
inconsistent statistics can be reported if multiple threads use these
functions simultaneously. If <option>--enable-stats</option> is
specified during configuration, &ldquo;m&rdquo; and &ldquo;a&rdquo; can
be specified to omit merged arena and per arena statistics, respectively;
&ldquo;b&rdquo; and &ldquo;l&rdquo; can be specified to omit per size
class statistics for bins and large objects, respectively. Unrecognized
characters are silently ignored. Note that thread caching may prevent
some statistics from being completely up to date, since extra locking
would be required to merge counters that track thread cache operations.
</para>
<para>The <function>malloc_usable_size<parameter/></function> function
returns the usable size of the allocation pointed to by
<parameter>ptr</parameter>. The return value may be larger than the size
that was requested during allocation. The
<function>malloc_usable_size<parameter/></function> function is not a
mechanism for in-place <function>realloc<parameter/></function>; rather
it is provided solely as a tool for introspection purposes. Any
discrepancy between the requested allocation size and the size reported
by <function>malloc_usable_size<parameter/></function> should not be
depended on, since such behavior is entirely implementation-dependent.
</para>
</refsect2> </refsect2>
<refsect2> <refsect2>
<title>Experimental API</title> <title>Experimental API</title>
@ -398,7 +530,7 @@ for (i = 0; i < nbins; i++) {
<parameter>rsize</parameter> is not <constant>NULL</constant>. If <parameter>rsize</parameter> is not <constant>NULL</constant>. If
<parameter>extra</parameter> is non-zero, an attempt is made to resize <parameter>extra</parameter> is non-zero, an attempt is made to resize
the allocation to be at least <code the allocation to be at least <code
language="C"><parameter>size</parameter> + language="C">(<parameter>size</parameter> +
<parameter>extra</parameter>)</code> bytes, though inability to allocate <parameter>extra</parameter>)</code> bytes, though inability to allocate
the extra byte(s) will not by itself result in failure. Behavior is the extra byte(s) will not by itself result in failure. Behavior is
undefined if <parameter>size</parameter> is <constant>0</constant>, or if undefined if <parameter>size</parameter> is <constant>0</constant>, or if
@ -936,7 +1068,8 @@ for (i = 0; i < nbins; i++) {
<listitem><para>Zero filling enabled/disabled. If enabled, each byte <listitem><para>Zero filling enabled/disabled. If enabled, each byte
of uninitialized allocated memory will be initialized to 0. Note that of uninitialized allocated memory will be initialized to 0. Note that
this initialization only happens once for each byte, so this initialization only happens once for each byte, so
<function>realloc<parameter/></function> and <function>realloc<parameter/></function>,
<function>rallocx<parameter/></function> and
<function>rallocm<parameter/></function> calls do not zero memory that <function>rallocm<parameter/></function> calls do not zero memory that
was previously allocated. This is intended for debugging and will was previously allocated. This is intended for debugging and will
impact performance negatively. This option is disabled by default. impact performance negatively. This option is disabled by default.
@ -2039,9 +2172,26 @@ malloc_conf = "xmalloc:true";]]></programlisting>
</refsect2> </refsect2>
<refsect2> <refsect2>
<title>Non-standard API</title> <title>Non-standard API</title>
<para>The <function>malloc_usable_size<parameter/></function> function <para>The <function>mallocx<parameter/></function> and
returns the usable size of the allocation pointed to by <function>rallocx<parameter/></function> functions return a pointer to
<parameter>ptr</parameter>. </para> the allocated memory if successful; otherwise a <constant>NULL</constant>
pointer is returned to indicate insufficient contiguous memory was
available to service the allocation request. </para>
<para>The <function>xallocx<parameter/></function> function returns the
real size of the resulting resized allocation pointed to by
<parameter>ptr</parameter>, which is a value less than
<parameter>size</parameter> if the allocation could not be adequately
grown in place. </para>
<para>The <function>sallocx<parameter/></function> function returns the
real size of the allocation pointed to by <parameter>ptr</parameter>.
</para>
<para>The <function>nallocx<parameter/></function> function returns the real size
that would result from a successful equivalent
<function>mallocx<parameter/></function> function call, or zero if
insufficient memory is available to perform the size computation. </para>
<para>The <function>mallctl<parameter/></function>, <para>The <function>mallctl<parameter/></function>,
<function>mallctlnametomib<parameter/></function>, and <function>mallctlnametomib<parameter/></function>, and
@ -2092,6 +2242,10 @@ malloc_conf = "xmalloc:true";]]></programlisting>
</varlistentry> </varlistentry>
</variablelist> </variablelist>
</para> </para>
<para>The <function>malloc_usable_size<parameter/></function> function
returns the usable size of the allocation pointed to by
<parameter>ptr</parameter>. </para>
</refsect2> </refsect2>
<refsect2> <refsect2>
<title>Experimental API</title> <title>Experimental API</title>

View File

@ -228,6 +228,7 @@ static const bool config_ivsalloc =
#include "jemalloc/internal/jemalloc_internal_macros.h" #include "jemalloc/internal/jemalloc_internal_macros.h"
#define MALLOCX_LG_ALIGN_MASK ((int)0x3f)
#define ALLOCM_LG_ALIGN_MASK ((int)0x3f) #define ALLOCM_LG_ALIGN_MASK ((int)0x3f)
/* Smallest size class to support. */ /* Smallest size class to support. */
@ -731,22 +732,22 @@ choose_arena(arena_t *arena)
#include "jemalloc/internal/quarantine.h" #include "jemalloc/internal/quarantine.h"
#ifndef JEMALLOC_ENABLE_INLINE #ifndef JEMALLOC_ENABLE_INLINE
void *imallocx(size_t size, bool try_tcache, arena_t *arena); void *imalloct(size_t size, bool try_tcache, arena_t *arena);
void *imalloc(size_t size); void *imalloc(size_t size);
void *icallocx(size_t size, bool try_tcache, arena_t *arena); void *icalloct(size_t size, bool try_tcache, arena_t *arena);
void *icalloc(size_t size); void *icalloc(size_t size);
void *ipallocx(size_t usize, size_t alignment, bool zero, bool try_tcache, void *ipalloct(size_t usize, size_t alignment, bool zero, bool try_tcache,
arena_t *arena); arena_t *arena);
void *ipalloc(size_t usize, size_t alignment, bool zero); void *ipalloc(size_t usize, size_t alignment, bool zero);
size_t isalloc(const void *ptr, bool demote); size_t isalloc(const void *ptr, bool demote);
size_t ivsalloc(const void *ptr, bool demote); size_t ivsalloc(const void *ptr, bool demote);
size_t u2rz(size_t usize); size_t u2rz(size_t usize);
size_t p2rz(const void *ptr); size_t p2rz(const void *ptr);
void idallocx(void *ptr, bool try_tcache); void idalloct(void *ptr, bool try_tcache);
void idalloc(void *ptr); void idalloc(void *ptr);
void iqallocx(void *ptr, bool try_tcache); void iqalloct(void *ptr, bool try_tcache);
void iqalloc(void *ptr); void iqalloc(void *ptr);
void *irallocx(void *ptr, size_t size, size_t extra, size_t alignment, void *iralloct(void *ptr, size_t size, size_t extra, size_t alignment,
bool zero, bool no_move, bool try_tcache_alloc, bool try_tcache_dalloc, bool zero, bool no_move, bool try_tcache_alloc, bool try_tcache_dalloc,
arena_t *arena); arena_t *arena);
void *iralloc(void *ptr, size_t size, size_t extra, size_t alignment, void *iralloc(void *ptr, size_t size, size_t extra, size_t alignment,
@ -756,7 +757,7 @@ malloc_tsd_protos(JEMALLOC_ATTR(unused), thread_allocated, thread_allocated_t)
#if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_C_)) #if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_C_))
JEMALLOC_ALWAYS_INLINE void * JEMALLOC_ALWAYS_INLINE void *
imallocx(size_t size, bool try_tcache, arena_t *arena) imalloct(size_t size, bool try_tcache, arena_t *arena)
{ {
assert(size != 0); assert(size != 0);
@ -771,11 +772,11 @@ JEMALLOC_ALWAYS_INLINE void *
imalloc(size_t size) imalloc(size_t size)
{ {
return (imallocx(size, true, NULL)); return (imalloct(size, true, NULL));
} }
JEMALLOC_ALWAYS_INLINE void * JEMALLOC_ALWAYS_INLINE void *
icallocx(size_t size, bool try_tcache, arena_t *arena) icalloct(size_t size, bool try_tcache, arena_t *arena)
{ {
if (size <= arena_maxclass) if (size <= arena_maxclass)
@ -788,11 +789,11 @@ JEMALLOC_ALWAYS_INLINE void *
icalloc(size_t size) icalloc(size_t size)
{ {
return (icallocx(size, true, NULL)); return (icalloct(size, true, NULL));
} }
JEMALLOC_ALWAYS_INLINE void * JEMALLOC_ALWAYS_INLINE void *
ipallocx(size_t usize, size_t alignment, bool zero, bool try_tcache, ipalloct(size_t usize, size_t alignment, bool zero, bool try_tcache,
arena_t *arena) arena_t *arena)
{ {
void *ret; void *ret;
@ -820,7 +821,7 @@ JEMALLOC_ALWAYS_INLINE void *
ipalloc(size_t usize, size_t alignment, bool zero) ipalloc(size_t usize, size_t alignment, bool zero)
{ {
return (ipallocx(usize, alignment, zero, true, NULL)); return (ipalloct(usize, alignment, zero, true, NULL));
} }
/* /*
@ -881,7 +882,7 @@ p2rz(const void *ptr)
} }
JEMALLOC_ALWAYS_INLINE void JEMALLOC_ALWAYS_INLINE void
idallocx(void *ptr, bool try_tcache) idalloct(void *ptr, bool try_tcache)
{ {
arena_chunk_t *chunk; arena_chunk_t *chunk;
@ -898,28 +899,28 @@ JEMALLOC_ALWAYS_INLINE void
idalloc(void *ptr) idalloc(void *ptr)
{ {
idallocx(ptr, true); idalloct(ptr, true);
} }
JEMALLOC_ALWAYS_INLINE void JEMALLOC_ALWAYS_INLINE void
iqallocx(void *ptr, bool try_tcache) iqalloct(void *ptr, bool try_tcache)
{ {
if (config_fill && opt_quarantine) if (config_fill && opt_quarantine)
quarantine(ptr); quarantine(ptr);
else else
idallocx(ptr, try_tcache); idalloct(ptr, try_tcache);
} }
JEMALLOC_ALWAYS_INLINE void JEMALLOC_ALWAYS_INLINE void
iqalloc(void *ptr) iqalloc(void *ptr)
{ {
iqallocx(ptr, true); iqalloct(ptr, true);
} }
JEMALLOC_ALWAYS_INLINE void * JEMALLOC_ALWAYS_INLINE void *
irallocx(void *ptr, size_t size, size_t extra, size_t alignment, bool zero, iralloct(void *ptr, size_t size, size_t extra, size_t alignment, bool zero,
bool no_move, bool try_tcache_alloc, bool try_tcache_dalloc, arena_t *arena) bool no_move, bool try_tcache_alloc, bool try_tcache_dalloc, arena_t *arena)
{ {
void *ret; void *ret;
@ -943,7 +944,7 @@ irallocx(void *ptr, size_t size, size_t extra, size_t alignment, bool zero,
usize = sa2u(size + extra, alignment); usize = sa2u(size + extra, alignment);
if (usize == 0) if (usize == 0)
return (NULL); return (NULL);
ret = ipallocx(usize, alignment, zero, try_tcache_alloc, arena); ret = ipalloct(usize, alignment, zero, try_tcache_alloc, arena);
if (ret == NULL) { if (ret == NULL) {
if (extra == 0) if (extra == 0)
return (NULL); return (NULL);
@ -951,7 +952,7 @@ irallocx(void *ptr, size_t size, size_t extra, size_t alignment, bool zero,
usize = sa2u(size, alignment); usize = sa2u(size, alignment);
if (usize == 0) if (usize == 0)
return (NULL); return (NULL);
ret = ipallocx(usize, alignment, zero, try_tcache_alloc, ret = ipalloct(usize, alignment, zero, try_tcache_alloc,
arena); arena);
if (ret == NULL) if (ret == NULL)
return (NULL); return (NULL);
@ -963,7 +964,7 @@ irallocx(void *ptr, size_t size, size_t extra, size_t alignment, bool zero,
*/ */
copysize = (size < oldsize) ? size : oldsize; copysize = (size < oldsize) ? size : oldsize;
memcpy(ret, ptr, copysize); memcpy(ret, ptr, copysize);
iqallocx(ptr, try_tcache_dalloc); iqalloct(ptr, try_tcache_dalloc);
return (ret); return (ret);
} }
@ -992,7 +993,7 @@ iralloc(void *ptr, size_t size, size_t extra, size_t alignment, bool zero,
bool no_move) bool no_move)
{ {
return (irallocx(ptr, size, extra, alignment, zero, no_move, true, true, return (iralloct(ptr, size, extra, alignment, zero, no_move, true, true,
NULL)); NULL));
} }

View File

@ -207,17 +207,17 @@ huge_ralloc_no_move
huge_salloc huge_salloc
iallocm iallocm
icalloc icalloc
icallocx icalloct
idalloc idalloc
idallocx idalloct
imalloc imalloc
imallocx imalloct
ipalloc ipalloc
ipallocx ipalloct
iqalloc iqalloc
iqallocx iqalloct
iralloc iralloc
irallocx iralloct
isalloc isalloc
isthreaded isthreaded
ivsalloc ivsalloc

View File

@ -6,11 +6,17 @@ posix_memalign
aligned_alloc aligned_alloc
realloc realloc
free free
malloc_usable_size mallocx
malloc_stats_print rallocx
xallocx
sallocx
dallocx
nallocx
mallctl mallctl
mallctlnametomib mallctlnametomib
mallctlbymib mallctlbymib
malloc_stats_print
malloc_usable_size
memalign memalign
valloc valloc
allocm allocm

View File

@ -8,6 +8,17 @@
#define JEMALLOC_VERSION_NREV @jemalloc_version_nrev@ #define JEMALLOC_VERSION_NREV @jemalloc_version_nrev@
#define JEMALLOC_VERSION_GID "@jemalloc_version_gid@" #define JEMALLOC_VERSION_GID "@jemalloc_version_gid@"
# define MALLOCX_LG_ALIGN(la) (la)
# if LG_SIZEOF_PTR == 2
# define MALLOCX_ALIGN(a) (ffs(a)-1)
# else
# define MALLOCX_ALIGN(a) \
((a < (size_t)INT_MAX) ? ffs(a)-1 : ffs(a>>32)+31)
# endif
# define MALLOCX_ZERO ((int)0x40)
/* Bias arena index bits so that 0 encodes "MALLOCX_ARENA() unspecified". */
# define MALLOCX_ARENA(a) ((int)(((a)+1) << 8))
#ifdef JEMALLOC_EXPERIMENTAL #ifdef JEMALLOC_EXPERIMENTAL
# define ALLOCM_LG_ALIGN(la) (la) # define ALLOCM_LG_ALIGN(la) (la)
# if LG_SIZEOF_PTR == 2 # if LG_SIZEOF_PTR == 2
@ -39,11 +50,17 @@
# undef je_aligned_alloc # undef je_aligned_alloc
# undef je_realloc # undef je_realloc
# undef je_free # undef je_free
# undef je_malloc_usable_size # undef je_mallocx
# undef je_malloc_stats_print # undef je_rallocx
# undef je_xallocx
# undef je_sallocx
# undef je_dallocx
# undef je_nallocx
# undef je_mallctl # undef je_mallctl
# undef je_mallctlnametomib # undef je_mallctlnametomib
# undef je_mallctlbymib # undef je_mallctlbymib
# undef je_malloc_stats_print
# undef je_malloc_usable_size
# undef je_memalign # undef je_memalign
# undef je_valloc # undef je_valloc
# undef je_allocm # undef je_allocm

View File

@ -17,11 +17,17 @@
# define aligned_alloc je_aligned_alloc # define aligned_alloc je_aligned_alloc
# define realloc je_realloc # define realloc je_realloc
# define free je_free # define free je_free
# define malloc_usable_size je_malloc_usable_size # define mallocx je_mallocx
# define malloc_stats_print je_malloc_stats_print # define rallocx je_rallocx
# define xallocx je_xallocx
# define sallocx je_sallocx
# define dallocx je_dallocx
# define nallocx je_nallocx
# define mallctl je_mallctl # define mallctl je_mallctl
# define mallctlnametomib je_mallctlnametomib # define mallctlnametomib je_mallctlnametomib
# define mallctlbymib je_mallctlbymib # define mallctlbymib je_mallctlbymib
# define malloc_stats_print je_malloc_stats_print
# define malloc_usable_size je_malloc_usable_size
# define memalign je_memalign # define memalign je_memalign
# define valloc je_valloc # define valloc je_valloc
# ifdef JEMALLOC_EXPERIMENTAL # ifdef JEMALLOC_EXPERIMENTAL
@ -56,6 +62,12 @@
# undef je_mallctlbymib # undef je_mallctlbymib
# undef je_memalign # undef je_memalign
# undef je_valloc # undef je_valloc
# undef je_mallocx
# undef je_rallocx
# undef je_xallocx
# undef je_sallocx
# undef je_dallocx
# undef je_nallocx
# ifdef JEMALLOC_EXPERIMENTAL # ifdef JEMALLOC_EXPERIMENTAL
# undef je_allocm # undef je_allocm
# undef je_rallocm # undef je_rallocm

View File

@ -17,6 +17,25 @@ JEMALLOC_EXPORT void *@je_@aligned_alloc(size_t alignment, size_t size)
JEMALLOC_EXPORT void *@je_@realloc(void *ptr, size_t size); JEMALLOC_EXPORT void *@je_@realloc(void *ptr, size_t size);
JEMALLOC_EXPORT void @je_@free(void *ptr); JEMALLOC_EXPORT void @je_@free(void *ptr);
JEMALLOC_EXPORT void *@je_@mallocx(size_t size, int flags);
JEMALLOC_EXPORT void *@je_@rallocx(void *ptr, size_t size, int flags);
JEMALLOC_EXPORT size_t @je_@xallocx(void *ptr, size_t size, size_t extra,
int flags);
JEMALLOC_EXPORT size_t @je_@sallocx(const void *ptr, int flags);
JEMALLOC_EXPORT void @je_@dallocx(void *ptr, int flags);
JEMALLOC_EXPORT size_t @je_@nallocx(size_t size, int flags);
JEMALLOC_EXPORT int @je_@mallctl(const char *name, void *oldp,
size_t *oldlenp, void *newp, size_t newlen);
JEMALLOC_EXPORT int @je_@mallctlnametomib(const char *name, size_t *mibp,
size_t *miblenp);
JEMALLOC_EXPORT int @je_@mallctlbymib(const size_t *mib, size_t miblen,
void *oldp, size_t *oldlenp, void *newp, size_t newlen);
JEMALLOC_EXPORT void @je_@malloc_stats_print(void (*write_cb)(void *,
const char *), void *@je_@cbopaque, const char *opts);
JEMALLOC_EXPORT size_t @je_@malloc_usable_size(
JEMALLOC_USABLE_SIZE_CONST void *ptr);
#ifdef JEMALLOC_OVERRIDE_MEMALIGN #ifdef JEMALLOC_OVERRIDE_MEMALIGN
JEMALLOC_EXPORT void * @je_@memalign(size_t alignment, size_t size) JEMALLOC_EXPORT void * @je_@memalign(size_t alignment, size_t size)
JEMALLOC_ATTR(malloc); JEMALLOC_ATTR(malloc);
@ -26,17 +45,6 @@ JEMALLOC_EXPORT void * @je_@memalign(size_t alignment, size_t size)
JEMALLOC_EXPORT void * @je_@valloc(size_t size) JEMALLOC_ATTR(malloc); JEMALLOC_EXPORT void * @je_@valloc(size_t size) JEMALLOC_ATTR(malloc);
#endif #endif
JEMALLOC_EXPORT size_t @je_@malloc_usable_size(
JEMALLOC_USABLE_SIZE_CONST void *ptr);
JEMALLOC_EXPORT void @je_@malloc_stats_print(void (*write_cb)(void *,
const char *), void *@je_@cbopaque, const char *opts);
JEMALLOC_EXPORT int @je_@mallctl(const char *name, void *oldp,
size_t *oldlenp, void *newp, size_t newlen);
JEMALLOC_EXPORT int @je_@mallctlnametomib(const char *name, size_t *mibp,
size_t *miblenp);
JEMALLOC_EXPORT int @je_@mallctlbymib(const size_t *mib, size_t miblen,
void *oldp, size_t *oldlenp, void *newp, size_t newlen);
#ifdef JEMALLOC_EXPERIMENTAL #ifdef JEMALLOC_EXPERIMENTAL
JEMALLOC_EXPORT int @je_@allocm(void **ptr, size_t *rsize, size_t size, JEMALLOC_EXPORT int @je_@allocm(void **ptr, size_t *rsize, size_t size,
int flags) JEMALLOC_ATTR(nonnull(1)); int flags) JEMALLOC_ATTR(nonnull(1));

View File

@ -2031,7 +2031,7 @@ arena_ralloc(arena_t *arena, void *ptr, size_t oldsize, size_t size,
size_t usize = sa2u(size + extra, alignment); size_t usize = sa2u(size + extra, alignment);
if (usize == 0) if (usize == 0)
return (NULL); return (NULL);
ret = ipallocx(usize, alignment, zero, try_tcache_alloc, arena); ret = ipalloct(usize, alignment, zero, try_tcache_alloc, arena);
} else } else
ret = arena_malloc(arena, size + extra, zero, try_tcache_alloc); ret = arena_malloc(arena, size + extra, zero, try_tcache_alloc);
@ -2043,7 +2043,7 @@ arena_ralloc(arena_t *arena, void *ptr, size_t oldsize, size_t size,
size_t usize = sa2u(size, alignment); size_t usize = sa2u(size, alignment);
if (usize == 0) if (usize == 0)
return (NULL); return (NULL);
ret = ipallocx(usize, alignment, zero, try_tcache_alloc, ret = ipalloct(usize, alignment, zero, try_tcache_alloc,
arena); arena);
} else } else
ret = arena_malloc(arena, size, zero, try_tcache_alloc); ret = arena_malloc(arena, size, zero, try_tcache_alloc);
@ -2061,7 +2061,7 @@ arena_ralloc(arena_t *arena, void *ptr, size_t oldsize, size_t size,
copysize = (size < oldsize) ? size : oldsize; copysize = (size < oldsize) ? size : oldsize;
VALGRIND_MAKE_MEM_UNDEFINED(ret, copysize); VALGRIND_MAKE_MEM_UNDEFINED(ret, copysize);
memcpy(ret, ptr, copysize); memcpy(ret, ptr, copysize);
iqallocx(ptr, try_tcache_dalloc); iqalloct(ptr, try_tcache_dalloc);
return (ret); return (ret);
} }

View File

@ -181,7 +181,7 @@ huge_ralloc(void *ptr, size_t oldsize, size_t size, size_t extra,
#endif #endif
{ {
memcpy(ret, ptr, copysize); memcpy(ret, ptr, copysize);
iqallocx(ptr, try_tcache_dalloc); iqalloct(ptr, try_tcache_dalloc);
} }
return (ret); return (ret);
} }

View File

@ -1337,28 +1337,363 @@ JEMALLOC_EXPORT void *(* __memalign_hook)(size_t alignment, size_t size) =
* Begin non-standard functions. * Begin non-standard functions.
*/ */
size_t JEMALLOC_ALWAYS_INLINE_C void *
je_malloc_usable_size(JEMALLOC_USABLE_SIZE_CONST void *ptr) imallocx(size_t usize, size_t alignment, bool zero, bool try_tcache,
arena_t *arena)
{ {
size_t ret;
assert(usize == ((alignment == 0) ? s2u(usize) : sa2u(usize,
alignment)));
if (alignment != 0)
return (ipalloct(usize, alignment, zero, try_tcache, arena));
else if (zero)
return (icalloct(usize, try_tcache, arena));
else
return (imalloct(usize, try_tcache, arena));
}
/*
 * mallocx(size, flags): allocate at least size bytes, honoring the
 * MALLOCX_LG_ALIGN, MALLOCX_ZERO, and MALLOCX_ARENA components encoded in
 * flags.  Returns the new pointer, or NULL on failure (aborts instead if
 * opt_xmalloc is enabled).
 */
void *
je_mallocx(size_t size, int flags)
{
	void *p;
	size_t usize;
	/* Low MALLOCX_LG_ALIGN_MASK bits encode lg(alignment); 0 -> natural. */
	size_t alignment = (ZU(1) << (flags & MALLOCX_LG_ALIGN_MASK)
	    & (SIZE_T_MAX-1));
	bool zero = flags & MALLOCX_ZERO;
	/* Bits 8.. encode (arena index + 1); 0 means no arena specified. */
	unsigned arena_ind = ((unsigned)(flags >> 8)) - 1;
	arena_t *arena;
	bool try_tcache;

	assert(size != 0);

	if (malloc_init())
		goto label_oom;

	if (arena_ind != UINT_MAX) {
		/* Explicit arena request: bypass the thread cache. */
		arena = arenas[arena_ind];
		try_tcache = false;
	} else {
		arena = NULL;
		try_tcache = true;
	}

	/* Canonical usable size; 0 signals size/alignment overflow. */
	usize = (alignment == 0) ? s2u(size) : sa2u(size, alignment);
	if (usize == 0)
		goto label_oom;

	if (config_prof && opt_prof) {
		prof_thr_cnt_t *cnt;

		PROF_ALLOC_PREP(1, usize, cnt);
		if (cnt == NULL)
			goto label_oom;
		if (prof_promote && (uintptr_t)cnt != (uintptr_t)1U && usize <=
		    SMALL_MAXCLASS) {
			/*
			 * Sampled small allocations are promoted to the
			 * smallest large size class so they can be tracked
			 * individually.
			 */
			size_t usize_promoted = (alignment == 0) ?
			    s2u(SMALL_MAXCLASS+1) : sa2u(SMALL_MAXCLASS+1,
			    alignment);
			assert(usize_promoted != 0);
			p = imallocx(usize_promoted, alignment, zero,
			    try_tcache, arena);
			if (p == NULL)
				goto label_oom;
			arena_prof_promoted(p, usize);
		} else {
			p = imallocx(usize, alignment, zero, try_tcache, arena);
			if (p == NULL)
				goto label_oom;
		}
		prof_malloc(p, usize, cnt);
	} else {
		p = imallocx(usize, alignment, zero, try_tcache, arena);
		if (p == NULL)
			goto label_oom;
	}

	if (config_stats) {
		assert(usize == isalloc(p, config_prof));
		thread_allocated_tsd_get()->allocated += usize;
	}
	UTRACE(0, size, p);
	JEMALLOC_VALGRIND_MALLOC(true, p, usize, zero);
	return (p);
label_oom:
	if (config_xmalloc && opt_xmalloc) {
		malloc_write("<jemalloc>: Error in mallocx(): out of memory\n");
		abort();
	}
	UTRACE(0, size, 0);
	return (NULL);
}
/*
 * rallocx(ptr, size, flags): reallocate ptr to at least size bytes, honoring
 * the MALLOCX_* components of flags.  The allocation may move; on failure the
 * original allocation is left intact and NULL is returned (or the process
 * aborts if opt_xmalloc is enabled).
 */
void *
je_rallocx(void *ptr, size_t size, int flags)
{
	void *p;
	size_t usize;
	size_t old_size;
	UNUSED size_t old_rzsize JEMALLOC_CC_SILENCE_INIT(0);
	/* Low MALLOCX_LG_ALIGN_MASK bits encode lg(alignment); 0 -> none. */
	size_t alignment = (ZU(1) << (flags & MALLOCX_LG_ALIGN_MASK)
	    & (SIZE_T_MAX-1));
	bool zero = flags & MALLOCX_ZERO;
	/* Bits 8.. encode (arena index + 1); 0 means no arena specified. */
	unsigned arena_ind = ((unsigned)(flags >> 8)) - 1;
	bool try_tcache_alloc, try_tcache_dalloc;
	arena_t *arena;

	assert(ptr != NULL);
	assert(size != 0);
	assert(malloc_initialized || IS_INITIALIZER);
	malloc_thread_init();

	if (arena_ind != UINT_MAX) {
		arena_chunk_t *chunk;
		try_tcache_alloc = false;
		/*
		 * Only use the tcache for deallocation when ptr does not
		 * already belong to the explicitly requested arena.
		 */
		chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
		try_tcache_dalloc = (chunk == ptr || chunk->arena !=
		    arenas[arena_ind]);
		arena = arenas[arena_ind];
	} else {
		try_tcache_alloc = true;
		try_tcache_dalloc = true;
		arena = NULL;
	}

	if (config_prof && opt_prof) {
		prof_thr_cnt_t *cnt;

		/*
		 * NOTE(review): usize may be 0 here if s2u()/sa2u() overflows;
		 * unlike mallocx() this is not checked before
		 * PROF_ALLOC_PREP() — presumably iralloct() fails in that
		 * case; confirm.
		 */
		usize = (alignment == 0) ? s2u(size) : sa2u(size,
		    alignment);
		prof_ctx_t *old_ctx = prof_ctx_get(ptr);
		old_size = isalloc(ptr, true);
		if (config_valgrind && opt_valgrind)
			old_rzsize = p2rz(ptr);
		PROF_ALLOC_PREP(1, usize, cnt);
		if (cnt == NULL)
			goto label_oom;
		if (prof_promote && (uintptr_t)cnt != (uintptr_t)1U && usize <=
		    SMALL_MAXCLASS) {
			/*
			 * Sampled small allocation: allocate the smallest
			 * large class (extra covering any remainder of the
			 * request) so it can be tracked individually.
			 */
			p = iralloct(ptr, SMALL_MAXCLASS+1, (SMALL_MAXCLASS+1 >=
			    size) ? 0 : size - (SMALL_MAXCLASS+1), alignment,
			    zero, false, try_tcache_alloc, try_tcache_dalloc,
			    arena);
			if (p == NULL)
				goto label_oom;
			if (usize < PAGE)
				arena_prof_promoted(p, usize);
		} else {
			p = iralloct(ptr, size, 0, alignment, zero, false,
			    try_tcache_alloc, try_tcache_dalloc, arena);
			if (p == NULL)
				goto label_oom;
		}
		prof_realloc(p, usize, cnt, old_size, old_ctx);
	} else {
		/* old_size/old_rzsize are only needed for stats/valgrind. */
		if (config_stats) {
			old_size = isalloc(ptr, false);
			if (config_valgrind && opt_valgrind)
				old_rzsize = u2rz(old_size);
		} else if (config_valgrind && opt_valgrind) {
			old_size = isalloc(ptr, false);
			old_rzsize = u2rz(old_size);
		}
		p = iralloct(ptr, size, 0, alignment, zero, false,
		    try_tcache_alloc, try_tcache_dalloc, arena);
		if (p == NULL)
			goto label_oom;
		if (config_stats || (config_valgrind && opt_valgrind))
			usize = isalloc(p, config_prof);
	}

	if (config_stats) {
		thread_allocated_t *ta;
		ta = thread_allocated_tsd_get();
		ta->allocated += usize;
		ta->deallocated += old_size;
	}
	UTRACE(ptr, size, p);
	JEMALLOC_VALGRIND_REALLOC(p, usize, ptr, old_size, old_rzsize, zero);
	return (p);
label_oom:
	if (config_xmalloc && opt_xmalloc) {
		malloc_write("<jemalloc>: Error in rallocx(): out of memory\n");
		abort();
	}
	UTRACE(ptr, size, 0);
	return (NULL);
}
/*
 * xallocx(ptr, size, extra, flags): resize the allocation at ptr *in place*
 * to at least size bytes (best-effort up to size+extra), honoring the
 * MALLOCX_* components of flags.  The allocation is never moved and never
 * freed; the return value is the resulting usable size, which equals the old
 * usable size when no in-place resize was possible.
 *
 * Fix: in the profiling path, both iralloct() failure branches previously
 * jumped to label_not_moved without initializing usize, so the function
 * returned an uninitialized value (undefined behavior).  They now mirror the
 * cnt == NULL and non-profiling paths by loading the current usable size
 * first.
 */
size_t
je_xallocx(void *ptr, size_t size, size_t extra, int flags)
{
	size_t usize;
	size_t old_size;
	UNUSED size_t old_rzsize JEMALLOC_CC_SILENCE_INIT(0);
	/* Low MALLOCX_LG_ALIGN_MASK bits encode lg(alignment); 0 -> none. */
	size_t alignment = (ZU(1) << (flags & MALLOCX_LG_ALIGN_MASK)
	    & (SIZE_T_MAX-1));
	bool zero = flags & MALLOCX_ZERO;
	/* Bits 8.. encode (arena index + 1); 0 means no arena specified. */
	unsigned arena_ind = ((unsigned)(flags >> 8)) - 1;
	bool try_tcache_alloc, try_tcache_dalloc;
	arena_t *arena;

	assert(ptr != NULL);
	assert(size != 0);
	assert(SIZE_T_MAX - size >= extra);
	assert(malloc_initialized || IS_INITIALIZER);
	malloc_thread_init();

	if (arena_ind != UINT_MAX) {
		arena_chunk_t *chunk;
		try_tcache_alloc = false;
		/*
		 * Only use the tcache for deallocation when ptr does not
		 * already belong to the explicitly requested arena.
		 */
		chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
		try_tcache_dalloc = (chunk == ptr || chunk->arena !=
		    arenas[arena_ind]);
		arena = arenas[arena_ind];
	} else {
		try_tcache_alloc = true;
		try_tcache_dalloc = true;
		arena = NULL;
	}

	if (config_prof && opt_prof) {
		prof_thr_cnt_t *cnt;

		/*
		 * usize isn't knowable before iralloc() returns when extra is
		 * non-zero.  Therefore, compute its maximum possible value and
		 * use that in PROF_ALLOC_PREP() to decide whether to capture a
		 * backtrace.  prof_realloc() will use the actual usize to
		 * decide whether to sample.
		 */
		size_t max_usize = (alignment == 0) ? s2u(size+extra) :
		    sa2u(size+extra, alignment);
		prof_ctx_t *old_ctx = prof_ctx_get(ptr);
		old_size = isalloc(ptr, true);
		if (config_valgrind && opt_valgrind)
			old_rzsize = p2rz(ptr);
		PROF_ALLOC_PREP(1, max_usize, cnt);
		if (cnt == NULL) {
			usize = isalloc(ptr, config_prof);
			goto label_not_moved;
		}
		/*
		 * Use minimum usize to determine whether promotion may happen.
		 */
		if (prof_promote && (uintptr_t)cnt != (uintptr_t)1U
		    && ((alignment == 0) ? s2u(size) : sa2u(size, alignment))
		    <= SMALL_MAXCLASS) {
			if (iralloct(ptr, SMALL_MAXCLASS+1, (SMALL_MAXCLASS+1 >=
			    size+extra) ? 0 : size+extra - (SMALL_MAXCLASS+1),
			    alignment, zero, true, try_tcache_alloc,
			    try_tcache_dalloc, arena) == NULL) {
				/* In-place resize failed; size unchanged. */
				usize = isalloc(ptr, config_prof);
				goto label_not_moved;
			}
			if (max_usize < PAGE) {
				usize = max_usize;
				arena_prof_promoted(ptr, usize);
			} else
				usize = isalloc(ptr, config_prof);
		} else {
			if (iralloct(ptr, size, extra, alignment, zero, true,
			    try_tcache_alloc, try_tcache_dalloc, arena) ==
			    NULL) {
				/* In-place resize failed; size unchanged. */
				usize = isalloc(ptr, config_prof);
				goto label_not_moved;
			}
			usize = isalloc(ptr, config_prof);
		}
		prof_realloc(ptr, usize, cnt, old_size, old_ctx);
	} else {
		/* old_size/old_rzsize are only needed for stats/valgrind. */
		if (config_stats) {
			old_size = isalloc(ptr, false);
			if (config_valgrind && opt_valgrind)
				old_rzsize = u2rz(old_size);
		} else if (config_valgrind && opt_valgrind) {
			old_size = isalloc(ptr, false);
			old_rzsize = u2rz(old_size);
		}
		if (iralloct(ptr, size, extra, alignment, zero, true,
		    try_tcache_alloc, try_tcache_dalloc, arena) == NULL) {
			usize = isalloc(ptr, config_prof);
			goto label_not_moved;
		}
		usize = isalloc(ptr, config_prof);
	}

	if (config_stats) {
		thread_allocated_t *ta;
		ta = thread_allocated_tsd_get();
		ta->allocated += usize;
		ta->deallocated += old_size;
	}
	JEMALLOC_VALGRIND_REALLOC(ptr, usize, ptr, old_size, old_rzsize, zero);
label_not_moved:
	UTRACE(ptr, size, ptr);
	return (usize);
}
size_t
je_sallocx(const void *ptr, int flags)
{
size_t usize;
assert(malloc_initialized || IS_INITIALIZER); assert(malloc_initialized || IS_INITIALIZER);
malloc_thread_init(); malloc_thread_init();
if (config_ivsalloc) if (config_ivsalloc)
ret = ivsalloc(ptr, config_prof); usize = ivsalloc(ptr, config_prof);
else else {
ret = (ptr != NULL) ? isalloc(ptr, config_prof) : 0; assert(ptr != NULL);
usize = isalloc(ptr, config_prof);
}
return (ret); return (usize);
} }
void void
je_malloc_stats_print(void (*write_cb)(void *, const char *), void *cbopaque, je_dallocx(void *ptr, int flags)
const char *opts)
{ {
size_t usize;
UNUSED size_t rzsize JEMALLOC_CC_SILENCE_INIT(0);
unsigned arena_ind = ((unsigned)(flags >> 8)) - 1;
bool try_tcache;
stats_print(write_cb, cbopaque, opts); assert(ptr != NULL);
assert(malloc_initialized || IS_INITIALIZER);
if (arena_ind != UINT_MAX) {
arena_chunk_t *chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
try_tcache = (chunk == ptr || chunk->arena !=
arenas[arena_ind]);
} else
try_tcache = true;
UTRACE(ptr, 0, 0);
if (config_stats || config_valgrind)
usize = isalloc(ptr, config_prof);
if (config_prof && opt_prof) {
if (config_stats == false && config_valgrind == false)
usize = isalloc(ptr, config_prof);
prof_free(ptr, usize);
}
if (config_stats)
thread_allocated_tsd_get()->deallocated += usize;
if (config_valgrind && opt_valgrind)
rzsize = p2rz(ptr);
iqalloct(ptr, try_tcache);
JEMALLOC_VALGRIND_FREE(ptr, rzsize);
}
/*
 * nallocx(size, flags): compute the usable size that mallocx(size, flags)
 * would return, without allocating.  Returns 0 on error (overflow of the
 * size computation, or failed allocator initialization).
 */
size_t
je_nallocx(size_t size, int flags)
{
	size_t usize;
	/* Low MALLOCX_LG_ALIGN_MASK bits encode lg(alignment); 0 -> none. */
	size_t alignment = (ZU(1) << (flags & MALLOCX_LG_ALIGN_MASK)
	    & (SIZE_T_MAX-1));

	assert(size != 0);

	if (malloc_init())
		return (0);

	/* s2u()/sa2u() return 0 on overflow, matching this API's error value. */
	usize = (alignment == 0) ? s2u(size) : sa2u(size, alignment);
	return (usize);
} }
int int
@ -1393,6 +1728,30 @@ je_mallctlbymib(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
return (ctl_bymib(mib, miblen, oldp, oldlenp, newp, newlen)); return (ctl_bymib(mib, miblen, oldp, oldlenp, newp, newlen));
} }
/*
 * Public entry point for printing allocator statistics; forwards directly to
 * the internal stats_print() implementation.
 */
void
je_malloc_stats_print(void (*write_cb)(void *, const char *), void *cbopaque,
    const char *opts)
{
	stats_print(write_cb, cbopaque, opts);
}
/*
 * Report the number of usable bytes backing ptr.  Without ivsalloc support,
 * a NULL ptr reports 0.
 */
size_t
je_malloc_usable_size(JEMALLOC_USABLE_SIZE_CONST void *ptr)
{
	size_t usize;

	assert(malloc_initialized || IS_INITIALIZER);
	malloc_thread_init();

	if (config_ivsalloc) {
		/* Validated lookup tolerates arbitrary pointers. */
		usize = ivsalloc(ptr, config_prof);
	} else if (ptr == NULL) {
		usize = 0;
	} else {
		usize = isalloc(ptr, config_prof);
	}

	return (usize);
}
/* /*
* End non-standard functions. * End non-standard functions.
*/ */
@ -1402,284 +1761,65 @@ je_mallctlbymib(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
*/ */
#ifdef JEMALLOC_EXPERIMENTAL #ifdef JEMALLOC_EXPERIMENTAL
/*
 * Dispatch helper for the experimental allocm() API: choose the aligned,
 * zeroed, or plain allocation path.  usize must already be the canonical
 * usable size for the request.
 */
JEMALLOC_ALWAYS_INLINE_C void *
iallocm(size_t usize, size_t alignment, bool zero, bool try_tcache,
    arena_t *arena)
{
	/* Caller must canonicalize usize via s2u()/sa2u() first. */
	assert(usize == ((alignment == 0) ? s2u(usize) : sa2u(usize,
	    alignment)));

	if (alignment != 0)
		return (ipallocx(usize, alignment, zero, try_tcache, arena));
	else if (zero)
		return (icallocx(usize, try_tcache, arena));
	else
		return (imallocx(usize, try_tcache, arena));
}
int int
je_allocm(void **ptr, size_t *rsize, size_t size, int flags) je_allocm(void **ptr, size_t *rsize, size_t size, int flags)
{ {
void *p; void *p;
size_t usize;
size_t alignment = (ZU(1) << (flags & ALLOCM_LG_ALIGN_MASK)
& (SIZE_T_MAX-1));
bool zero = flags & ALLOCM_ZERO;
unsigned arena_ind = ((unsigned)(flags >> 8)) - 1;
arena_t *arena;
bool try_tcache;
assert(ptr != NULL); assert(ptr != NULL);
assert(size != 0);
if (malloc_init()) p = je_mallocx(size, flags);
goto label_oom;
if (arena_ind != UINT_MAX) {
arena = arenas[arena_ind];
try_tcache = false;
} else {
arena = NULL;
try_tcache = true;
}
usize = (alignment == 0) ? s2u(size) : sa2u(size, alignment);
if (usize == 0)
goto label_oom;
if (config_prof && opt_prof) {
prof_thr_cnt_t *cnt;
PROF_ALLOC_PREP(1, usize, cnt);
if (cnt == NULL)
goto label_oom;
if (prof_promote && (uintptr_t)cnt != (uintptr_t)1U && usize <=
SMALL_MAXCLASS) {
size_t usize_promoted = (alignment == 0) ?
s2u(SMALL_MAXCLASS+1) : sa2u(SMALL_MAXCLASS+1,
alignment);
assert(usize_promoted != 0);
p = iallocm(usize_promoted, alignment, zero,
try_tcache, arena);
if (p == NULL) if (p == NULL)
goto label_oom;
arena_prof_promoted(p, usize);
} else {
p = iallocm(usize, alignment, zero, try_tcache, arena);
if (p == NULL)
goto label_oom;
}
prof_malloc(p, usize, cnt);
} else {
p = iallocm(usize, alignment, zero, try_tcache, arena);
if (p == NULL)
goto label_oom;
}
if (rsize != NULL)
*rsize = usize;
*ptr = p;
if (config_stats) {
assert(usize == isalloc(p, config_prof));
thread_allocated_tsd_get()->allocated += usize;
}
UTRACE(0, size, p);
JEMALLOC_VALGRIND_MALLOC(true, p, usize, zero);
return (ALLOCM_SUCCESS);
label_oom:
if (config_xmalloc && opt_xmalloc) {
malloc_write("<jemalloc>: Error in allocm(): "
"out of memory\n");
abort();
}
*ptr = NULL;
UTRACE(0, size, 0);
return (ALLOCM_ERR_OOM); return (ALLOCM_ERR_OOM);
if (rsize != NULL)
*rsize = isalloc(p, config_prof);
*ptr = p;
return (ALLOCM_SUCCESS);
} }
int int
je_rallocm(void **ptr, size_t *rsize, size_t size, size_t extra, int flags) je_rallocm(void **ptr, size_t *rsize, size_t size, size_t extra, int flags)
{ {
void *p, *q; int ret;
size_t usize;
size_t old_size;
UNUSED size_t old_rzsize JEMALLOC_CC_SILENCE_INIT(0);
size_t alignment = (ZU(1) << (flags & ALLOCM_LG_ALIGN_MASK)
& (SIZE_T_MAX-1));
bool zero = flags & ALLOCM_ZERO;
bool no_move = flags & ALLOCM_NO_MOVE; bool no_move = flags & ALLOCM_NO_MOVE;
unsigned arena_ind = ((unsigned)(flags >> 8)) - 1;
bool try_tcache_alloc, try_tcache_dalloc;
arena_t *arena;
assert(ptr != NULL); assert(ptr != NULL);
assert(*ptr != NULL); assert(*ptr != NULL);
assert(size != 0); assert(size != 0);
assert(SIZE_T_MAX - size >= extra); assert(SIZE_T_MAX - size >= extra);
assert(malloc_initialized || IS_INITIALIZER);
malloc_thread_init();
if (arena_ind != UINT_MAX) { if (no_move) {
arena_chunk_t *chunk; size_t usize = je_xallocx(*ptr, size, extra, flags);
try_tcache_alloc = false; ret = (usize >= size) ? ALLOCM_SUCCESS : ALLOCM_ERR_NOT_MOVED;
chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(*ptr);
try_tcache_dalloc = (chunk == *ptr || chunk->arena !=
arenas[arena_ind]);
arena = arenas[arena_ind];
} else {
try_tcache_alloc = true;
try_tcache_dalloc = true;
arena = NULL;
}
p = *ptr;
if (config_prof && opt_prof) {
prof_thr_cnt_t *cnt;
/*
* usize isn't knowable before iralloc() returns when extra is
* non-zero. Therefore, compute its maximum possible value and
* use that in PROF_ALLOC_PREP() to decide whether to capture a
* backtrace. prof_realloc() will use the actual usize to
* decide whether to sample.
*/
size_t max_usize = (alignment == 0) ? s2u(size+extra) :
sa2u(size+extra, alignment);
prof_ctx_t *old_ctx = prof_ctx_get(p);
old_size = isalloc(p, true);
if (config_valgrind && opt_valgrind)
old_rzsize = p2rz(p);
PROF_ALLOC_PREP(1, max_usize, cnt);
if (cnt == NULL)
goto label_oom;
/*
* Use minimum usize to determine whether promotion may happen.
*/
if (prof_promote && (uintptr_t)cnt != (uintptr_t)1U
&& ((alignment == 0) ? s2u(size) : sa2u(size, alignment))
<= SMALL_MAXCLASS) {
q = irallocx(p, SMALL_MAXCLASS+1, (SMALL_MAXCLASS+1 >=
size+extra) ? 0 : size+extra - (SMALL_MAXCLASS+1),
alignment, zero, no_move, try_tcache_alloc,
try_tcache_dalloc, arena);
if (q == NULL)
goto label_err;
if (max_usize < PAGE) {
usize = max_usize;
arena_prof_promoted(q, usize);
} else
usize = isalloc(q, config_prof);
} else {
q = irallocx(p, size, extra, alignment, zero, no_move,
try_tcache_alloc, try_tcache_dalloc, arena);
if (q == NULL)
goto label_err;
usize = isalloc(q, config_prof);
}
prof_realloc(q, usize, cnt, old_size, old_ctx);
if (rsize != NULL) if (rsize != NULL)
*rsize = usize; *rsize = usize;
} else { } else {
if (config_stats) { void *p = je_rallocx(*ptr, size+extra, flags);
old_size = isalloc(p, false); if (p != NULL) {
if (config_valgrind && opt_valgrind) *ptr = p;
old_rzsize = u2rz(old_size); ret = ALLOCM_SUCCESS;
} else if (config_valgrind && opt_valgrind) { } else
old_size = isalloc(p, false); ret = ALLOCM_ERR_OOM;
old_rzsize = u2rz(old_size); if (rsize != NULL)
*rsize = isalloc(*ptr, config_prof);
} }
q = irallocx(p, size, extra, alignment, zero, no_move, return (ret);
try_tcache_alloc, try_tcache_dalloc, arena);
if (q == NULL)
goto label_err;
if (config_stats)
usize = isalloc(q, config_prof);
if (rsize != NULL) {
if (config_stats == false)
usize = isalloc(q, config_prof);
*rsize = usize;
}
}
*ptr = q;
if (config_stats) {
thread_allocated_t *ta;
ta = thread_allocated_tsd_get();
ta->allocated += usize;
ta->deallocated += old_size;
}
UTRACE(p, size, q);
JEMALLOC_VALGRIND_REALLOC(q, usize, p, old_size, old_rzsize, zero);
return (ALLOCM_SUCCESS);
label_err:
if (no_move) {
UTRACE(p, size, q);
return (ALLOCM_ERR_NOT_MOVED);
}
label_oom:
if (config_xmalloc && opt_xmalloc) {
malloc_write("<jemalloc>: Error in rallocm(): "
"out of memory\n");
abort();
}
UTRACE(p, size, 0);
return (ALLOCM_ERR_OOM);
} }
int int
je_sallocm(const void *ptr, size_t *rsize, int flags) je_sallocm(const void *ptr, size_t *rsize, int flags)
{ {
size_t sz;
assert(malloc_initialized || IS_INITIALIZER);
malloc_thread_init();
if (config_ivsalloc)
sz = ivsalloc(ptr, config_prof);
else {
assert(ptr != NULL);
sz = isalloc(ptr, config_prof);
}
assert(rsize != NULL); assert(rsize != NULL);
*rsize = sz; *rsize = je_sallocx(ptr, flags);
return (ALLOCM_SUCCESS); return (ALLOCM_SUCCESS);
} }
int int
je_dallocm(void *ptr, int flags) je_dallocm(void *ptr, int flags)
{ {
size_t usize;
UNUSED size_t rzsize JEMALLOC_CC_SILENCE_INIT(0);
unsigned arena_ind = ((unsigned)(flags >> 8)) - 1;
bool try_tcache;
assert(ptr != NULL);
assert(malloc_initialized || IS_INITIALIZER);
if (arena_ind != UINT_MAX) {
arena_chunk_t *chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
try_tcache = (chunk == ptr || chunk->arena !=
arenas[arena_ind]);
} else
try_tcache = true;
UTRACE(ptr, 0, 0);
if (config_stats || config_valgrind)
usize = isalloc(ptr, config_prof);
if (config_prof && opt_prof) {
if (config_stats == false && config_valgrind == false)
usize = isalloc(ptr, config_prof);
prof_free(ptr, usize);
}
if (config_stats)
thread_allocated_tsd_get()->deallocated += usize;
if (config_valgrind && opt_valgrind)
rzsize = p2rz(ptr);
iqallocx(ptr, try_tcache);
JEMALLOC_VALGRIND_FREE(ptr, rzsize);
je_dallocx(ptr, flags);
return (ALLOCM_SUCCESS); return (ALLOCM_SUCCESS);
} }
@ -1687,18 +1827,10 @@ int
je_nallocm(size_t *rsize, size_t size, int flags) je_nallocm(size_t *rsize, size_t size, int flags)
{ {
size_t usize; size_t usize;
size_t alignment = (ZU(1) << (flags & ALLOCM_LG_ALIGN_MASK)
& (SIZE_T_MAX-1));
assert(size != 0); usize = je_nallocx(size, flags);
if (malloc_init())
return (ALLOCM_ERR_OOM);
usize = (alignment == 0) ? s2u(size) : sa2u(size, alignment);
if (usize == 0) if (usize == 0)
return (ALLOCM_ERR_OOM); return (ALLOCM_ERR_OOM);
if (rsize != NULL) if (rsize != NULL)
*rsize = usize; *rsize = usize;
return (ALLOCM_SUCCESS); return (ALLOCM_SUCCESS);

View File

@ -292,7 +292,7 @@ tcache_create(arena_t *arena)
else if (size <= tcache_maxclass) else if (size <= tcache_maxclass)
tcache = (tcache_t *)arena_malloc_large(arena, size, true); tcache = (tcache_t *)arena_malloc_large(arena, size, true);
else else
tcache = (tcache_t *)icallocx(size, false, arena); tcache = (tcache_t *)icalloct(size, false, arena);
if (tcache == NULL) if (tcache == NULL)
return (NULL); return (NULL);
@ -366,7 +366,7 @@ tcache_destroy(tcache_t *tcache)
arena_dalloc_large(arena, chunk, tcache); arena_dalloc_large(arena, chunk, tcache);
} else } else
idallocx(tcache, false); idalloct(tcache, false);
} }
void void

View File

@ -21,7 +21,7 @@ void
malloc_tsd_dalloc(void *wrapper) malloc_tsd_dalloc(void *wrapper)
{ {
idallocx(wrapper, false); idalloct(wrapper, false);
} }
void void

149
test/integration/mallocx.c Normal file
View File

@ -0,0 +1,149 @@
#include "test/jemalloc_test.h"
#define CHUNK 0x400000
/* #define MAXALIGN ((size_t)UINT64_C(0x80000000000)) */
#define MAXALIGN ((size_t)0x2000000LU)
#define NITER 4
TEST_BEGIN(test_basic)
{
size_t nsz, rsz, sz;
void *p;
sz = 42;
nsz = nallocx(sz, 0);
assert_zu_ne(nsz, 0, "Unexpected nallocx() error");
p = mallocx(sz, 0);
assert_ptr_not_null(p, "Unexpected mallocx() error");
rsz = sallocx(p, 0);
assert_zu_ge(rsz, sz, "Real size smaller than expected");
assert_zu_eq(nsz, rsz, "nallocx()/sallocx() size mismatch");
dallocx(p, 0);
p = mallocx(sz, 0);
assert_ptr_not_null(p, "Unexpected mallocx() error");
dallocx(p, 0);
nsz = nallocx(sz, MALLOCX_ZERO);
assert_zu_ne(nsz, 0, "Unexpected nallocx() error");
p = mallocx(sz, MALLOCX_ZERO);
assert_ptr_not_null(p, "Unexpected mallocx() error");
rsz = sallocx(p, 0);
assert_zu_eq(nsz, rsz, "nallocx()/sallocx() rsize mismatch");
dallocx(p, 0);
}
TEST_END
/*
 * Verify that nallocx()/mallocx() report errors for alignment/size
 * combinations that cannot be satisfied.
 *
 * Fix: the third case previously issued the identical
 * nallocx()+assert_zu_eq() pair twice back to back; the duplicate is
 * removed.
 */
TEST_BEGIN(test_alignment_errors)
{
	void *p;
	size_t nsz, sz, alignment;

	/* alignment == sz == high bit of size_t: size computation overflows. */
#if LG_SIZEOF_PTR == 3
	alignment = UINT64_C(0x8000000000000000);
	sz = UINT64_C(0x8000000000000000);
#else
	alignment = 0x80000000LU;
	sz = 0x80000000LU;
#endif
	nsz = nallocx(sz, MALLOCX_ALIGN(alignment));
	assert_zu_eq(nsz, 0, "Expected error for nallocx(%zu, %#x)", sz,
	    MALLOCX_ALIGN(alignment));
	p = mallocx(sz, MALLOCX_ALIGN(alignment));
	assert_ptr_null(p, "Expected error for mallocx(%zu, %#x)", sz,
	    MALLOCX_ALIGN(alignment));

	/*
	 * Representable but enormous request: nallocx() can compute a size,
	 * yet the actual allocation must fail.
	 */
#if LG_SIZEOF_PTR == 3
	alignment = UINT64_C(0x4000000000000000);
	sz = UINT64_C(0x8400000000000001);
#else
	alignment = 0x40000000LU;
	sz = 0x84000001LU;
#endif
	nsz = nallocx(sz, MALLOCX_ALIGN(alignment));
	assert_zu_ne(nsz, 0, "Unexpected nallocx() error");
	p = mallocx(sz, MALLOCX_ALIGN(alignment));
	assert_ptr_null(p, "Expected error for mallocx(%zu, %#x)", sz,
	    MALLOCX_ALIGN(alignment));

	/* Rounding sz up to alignment overflows size_t. */
	alignment = 0x10LU;
#if LG_SIZEOF_PTR == 3
	sz = UINT64_C(0xfffffffffffffff0);
#else
	sz = 0xfffffff0LU;
#endif
	nsz = nallocx(sz, MALLOCX_ALIGN(alignment));
	assert_zu_eq(nsz, 0, "Expected error for nallocx(%zu, %#x)", sz,
	    MALLOCX_ALIGN(alignment));
	p = mallocx(sz, MALLOCX_ALIGN(alignment));
	assert_ptr_null(p, "Expected error for mallocx(%zu, %#x)", sz,
	    MALLOCX_ALIGN(alignment));
}
TEST_END
/*
 * Sweep alignments (8..MAXALIGN, doubling) and sizes around each alignment,
 * checking that nallocx() agrees with sallocx(), that the real size covers
 * the request, and that the returned pointers honor the alignment.
 */
TEST_BEGIN(test_alignment_and_size)
{
	size_t nsz, rsz, sz, alignment, total;
	unsigned i;
	void *ps[NITER];

	for (i = 0; i < NITER; i++)
		ps[i] = NULL;

	for (alignment = 8;
	    alignment <= MAXALIGN;
	    alignment <<= 1) {
		total = 0;
		for (sz = 1;
		    sz < 3 * alignment && sz < (1U << 31);
		    sz += (alignment >> (LG_SIZEOF_PTR-1)) - 1) {
			for (i = 0; i < NITER; i++) {
				nsz = nallocx(sz, MALLOCX_ALIGN(alignment) |
				    MALLOCX_ZERO);
				assert_zu_ne(nsz, 0,
				    "nallocx() error for alignment=%zu, "
				    "size=%zu (%#zx)", alignment, sz, sz);
				ps[i] = mallocx(sz, MALLOCX_ALIGN(alignment) |
				    MALLOCX_ZERO);
				assert_ptr_not_null(ps[i],
				    "mallocx() error for alignment=%zu, "
				    "size=%zu (%#zx)", alignment, sz, sz);
				rsz = sallocx(ps[i], 0);
				assert_zu_ge(rsz, sz,
				    "Real size smaller than expected for "
				    "alignment=%zu, size=%zu", alignment, sz);
				assert_zu_eq(nsz, rsz,
				    "nallocx()/sallocx() size mismatch for "
				    "alignment=%zu, size=%zu", alignment, sz);
				/* Low alignment bits of the address must be 0. */
				assert_ptr_null(
				    (void *)((uintptr_t)ps[i] & (alignment-1)),
				    "%p inadequately aligned for"
				    " alignment=%zu, size=%zu", ps[i],
				    alignment, sz);
				total += rsz;
				/* Cap memory held live at any one time. */
				if (total >= (MAXALIGN << 1))
					break;
			}
			for (i = 0; i < NITER; i++) {
				if (ps[i] != NULL) {
					dallocx(ps[i], 0);
					ps[i] = NULL;
				}
			}
		}
	}
}
TEST_END
/* Run the mallocx() integration tests. */
int
main(void)
{
	return (test(
	    test_basic,
	    test_alignment_errors,
	    test_alignment_and_size));
}

View File

@ -1,5 +1,3 @@
#include <unistd.h>
#include "test/jemalloc_test.h" #include "test/jemalloc_test.h"
TEST_BEGIN(test_same_size) TEST_BEGIN(test_same_size)

View File

@ -0,0 +1,51 @@
#include "test/jemalloc_test.h"
/*
 * Repeatedly grow an allocation one byte past its current usable size, then
 * shrink back down through the recorded sizes, verifying rallocx() honors
 * the requested sizes in both directions.
 *
 * Fix: the grow-phase check used assert_zu_ne() (mere inequality) while its
 * message claimed "at least"; rallocx() guarantees the usable size covers
 * the request, so assert that directly with assert_zu_ge() and correct the
 * message grammar.
 */
TEST_BEGIN(test_grow_and_shrink)
{
	void *p, *q;
	size_t tsz;
#define NCYCLES 3
	unsigned i, j;
#define NSZS 2500
	size_t szs[NSZS];
#define MAXSZ ZU(12 * 1024 * 1024)

	p = mallocx(1, 0);
	assert_ptr_not_null(p, "Unexpected mallocx() error");
	szs[0] = sallocx(p, 0);

	for (i = 0; i < NCYCLES; i++) {
		/* Grow: request one byte past the last usable size. */
		for (j = 1; j < NSZS && szs[j-1] < MAXSZ; j++) {
			q = rallocx(p, szs[j-1]+1, 0);
			assert_ptr_not_null(q,
			    "Unexpected rallocx() error for size=%zu-->%zu",
			    szs[j-1], szs[j-1]+1);
			szs[j] = sallocx(q, 0);
			assert_zu_ge(szs[j], szs[j-1]+1,
			    "Expected size to be at least: %zu", szs[j-1]+1);
			p = q;
		}

		/* Shrink back through the recorded usable sizes. */
		for (j--; j > 0; j--) {
			q = rallocx(p, szs[j-1], 0);
			assert_ptr_not_null(q,
			    "Unexpected rallocx() error for size=%zu-->%zu",
			    szs[j], szs[j-1]);
			tsz = sallocx(q, 0);
			assert_zu_eq(tsz, szs[j-1],
			    "Expected size=%zu, got size=%zu", szs[j-1], tsz);
			p = q;
		}
	}

	dallocx(p, 0);
}
TEST_END
/* Run the rallocx() integration tests. */
int
main(void)
{
	return (test(
	    test_grow_and_shrink));
}

View File

@ -0,0 +1,59 @@
#include "test/jemalloc_test.h"
TEST_BEGIN(test_same_size)
{
void *p;
size_t sz, tsz;
p = mallocx(42, 0);
assert_ptr_not_null(p, "Unexpected mallocx() error");
sz = sallocx(p, 0);
tsz = xallocx(p, sz, 0, 0);
assert_zu_eq(tsz, sz, "Unexpected size change: %zu --> %zu", sz, tsz);
dallocx(p, 0);
}
TEST_END
/*
 * xallocx() with base size sz (the current usable size) plus best-effort
 * extra must not move the allocation; the test expects the usable size to
 * stay sz — presumably because in-place growth past this size class is not
 * possible for a 42-byte allocation (extra is best-effort only).
 */
TEST_BEGIN(test_extra_no_move)
{
	void *p;
	size_t sz, tsz;

	p = mallocx(42, 0);
	assert_ptr_not_null(p, "Unexpected mallocx() error");
	sz = sallocx(p, 0);

	tsz = xallocx(p, sz, sz-42, 0);
	assert_zu_eq(tsz, sz, "Unexpected size change: %zu --> %zu", sz, tsz);

	dallocx(p, 0);
}
TEST_END
/*
 * Requesting growth beyond the current usable size must fail for xallocx()
 * (which never moves an allocation); the reported size must remain the old
 * usable size — assumes sz+5 cannot be satisfied in place for a 42-byte
 * allocation.
 */
TEST_BEGIN(test_no_move_fail)
{
	void *p;
	size_t sz, tsz;

	p = mallocx(42, 0);
	assert_ptr_not_null(p, "Unexpected mallocx() error");
	sz = sallocx(p, 0);

	tsz = xallocx(p, sz + 5, 0, 0);
	assert_zu_eq(tsz, sz, "Unexpected size change: %zu --> %zu", sz, tsz);

	dallocx(p, 0);
}
TEST_END
/* Run the xallocx() integration tests. */
int
main(void)
{
	return (test(
	    test_same_size,
	    test_extra_no_move,
	    test_no_move_fail));
}

View File

@ -39,8 +39,7 @@ thd_receiver_start(void *arg)
for (i = 0; i < (NSENDERS * NMSGS); i++) { for (i = 0; i < (NSENDERS * NMSGS); i++) {
mq_msg_t *msg = mq_get(mq); mq_msg_t *msg = mq_get(mq);
assert_ptr_not_null(msg, "mq_get() should never return NULL"); assert_ptr_not_null(msg, "mq_get() should never return NULL");
assert_d_eq(jet_dallocm(msg, 0), ALLOCM_SUCCESS, jet_dallocx(msg, 0);
"Unexpected dallocm() failure");
} }
return (NULL); return (NULL);
} }
@ -54,8 +53,8 @@ thd_sender_start(void *arg)
for (i = 0; i < NMSGS; i++) { for (i = 0; i < NMSGS; i++) {
mq_msg_t *msg; mq_msg_t *msg;
void *p; void *p;
assert_d_eq(jet_allocm(&p, NULL, sizeof(mq_msg_t), 0), p = jet_mallocx(sizeof(mq_msg_t), 0);
ALLOCM_SUCCESS, "Unexpected allocm() failure"); assert_ptr_not_null(p, "Unexpected allocm() failure");
msg = (mq_msg_t *)p; msg = (mq_msg_t *)p;
mq_put(mq, msg); mq_put(mq, msg);
} }