From c451831264885b84f54a05e0894ad88bb30bd5df Mon Sep 17 00:00:00 2001
From: Jason Evans
Date: Thu, 7 May 2015 22:35:40 -0700
Subject: [PATCH] Fix type punning in calls to atomic operation functions.

---
 include/jemalloc/internal/arena.h |  9 ++++++---
 include/jemalloc/internal/rtree.h | 14 +++++++++-----
 2 files changed, 15 insertions(+), 8 deletions(-)

diff --git a/include/jemalloc/internal/arena.h b/include/jemalloc/internal/arena.h
index fba1b81f..58601954 100644
--- a/include/jemalloc/internal/arena.h
+++ b/include/jemalloc/internal/arena.h
@@ -145,7 +145,10 @@ struct arena_chunk_map_misc_s {
 	arena_runs_dirty_link_t		rd;
 
 	/* Profile counters, used for large object runs. */
-	prof_tctx_t			*prof_tctx;
+	union {
+		void			*prof_tctx_pun;
+		prof_tctx_t		*prof_tctx;
+	};
 
 	/* Small region run metadata. */
 	arena_run_t			run;
@@ -1025,7 +1028,7 @@ arena_prof_tctx_get(const void *ptr)
 		else {
 			arena_chunk_map_misc_t *elm = arena_miscelm_get(chunk,
 			    pageind);
-			ret = atomic_read_p((void **)&elm->prof_tctx);
+			ret = atomic_read_p(&elm->prof_tctx_pun);
 		}
 	} else
 		ret = huge_prof_tctx_get(ptr);
@@ -1049,7 +1052,7 @@ arena_prof_tctx_set(const void *ptr, prof_tctx_t *tctx)
 		if (unlikely(arena_mapbits_large_get(chunk, pageind) != 0)) {
 			arena_chunk_map_misc_t *elm = arena_miscelm_get(chunk,
 			    pageind);
-			atomic_write_p((void **)&elm->prof_tctx, tctx);
+			atomic_write_p(&elm->prof_tctx_pun, tctx);
 		}
 	} else
 		huge_prof_tctx_set(ptr, tctx);
diff --git a/include/jemalloc/internal/rtree.h b/include/jemalloc/internal/rtree.h
index c1fb90c4..7a8ebfd5 100644
--- a/include/jemalloc/internal/rtree.h
+++ b/include/jemalloc/internal/rtree.h
@@ -36,6 +36,7 @@ typedef void (rtree_node_dalloc_t)(rtree_node_elm_t *);
 
 struct rtree_node_elm_s {
 	union {
+		void			*pun;
 		rtree_node_elm_t	*child;
 		extent_node_t		*val;
 	};
@@ -64,7 +65,10 @@ struct rtree_level_s {
 	 * lower 47 bits of virtual address space in userland, thus leaving
 	 * subtrees[0] unused and avoiding a level of tree traversal.
 	 */
-	rtree_node_elm_t	*subtree;
+	union {
+		void			*subtree_pun;
+		rtree_node_elm_t	*subtree;
+	};
 	/* Number of key bits distinguished by this level. */
 	unsigned		bits;
 	/*
@@ -159,7 +163,7 @@ rtree_child_tryread(rtree_node_elm_t *elm)
 	/* Double-checked read (first read may be stale. */
 	child = elm->child;
 	if (!rtree_node_valid(child))
-		child = atomic_read_p((void **)&elm->child);
+		child = atomic_read_p(&elm->pun);
 	return (child);
 }
 
@@ -178,14 +182,14 @@ JEMALLOC_INLINE extent_node_t *
 rtree_val_read(rtree_t *rtree, rtree_node_elm_t *elm)
 {
 
-	return (atomic_read_p((void **)&elm->val));
+	return (atomic_read_p(&elm->pun));
 }
 
 JEMALLOC_INLINE void
 rtree_val_write(rtree_t *rtree, rtree_node_elm_t *elm, const extent_node_t *val)
 {
 
-	atomic_write_p((void **)&elm->val, val);
+	atomic_write_p(&elm->pun, val);
 }
 
 JEMALLOC_INLINE rtree_node_elm_t *
@@ -196,7 +200,7 @@ rtree_subtree_tryread(rtree_t *rtree, unsigned level)
 	/* Double-checked read (first read may be stale. */
 	subtree = rtree->levels[level].subtree;
 	if (!rtree_node_valid(subtree))
-		subtree = atomic_read_p((void **)&rtree->levels[level].subtree);
+		subtree = atomic_read_p(&rtree->levels[level].subtree_pun);
 	return (subtree);
 }
 
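Why the pun members: atomic_read_p() and atomic_write_p() operate on a void **
location, so the old calls had to cast &elm->prof_tctx (a prof_tctx_t **) to
void **. Reading and writing a typed pointer object through a void * lvalue is
type punning that C's aliasing rules do not allow, and a strict-aliasing
optimizer is free to miscompile it. Overlaying a void * member on the same
storage with a union hands the atomic functions a genuine void **, while
non-atomic code keeps using the typed member.

Below is a minimal, self-contained sketch of the pattern. It is illustrative
only, not jemalloc code: the atomic_read_p()/atomic_write_p() stand-ins are
simplified, non-atomic assumptions, and elm_t is a made-up stand-in for
arena_chunk_map_misc_t.

#include <stdio.h>

typedef struct prof_tctx_s { int dummy; } prof_tctx_t;

/*
 * Simplified stand-ins for jemalloc's atomic_read_p()/atomic_write_p()
 * (the real ones issue atomic loads/stores; these only show the types).
 */
static void *
atomic_read_p(void **p)
{
	return (*p);
}

static void
atomic_write_p(void **p, const void *x)
{
	*p = (void *)x;
}

/*
 * Made-up element type: the void * pun member and the typed member overlay
 * the same storage, so the atomic calls receive a real void ** and all other
 * code can keep using the typed pointer without casts.
 */
typedef struct {
	union {
		void		*prof_tctx_pun;
		prof_tctx_t	*prof_tctx;	/* typed view of the same bytes */
	};
} elm_t;

int
main(void)
{
	static prof_tctx_t tctx;
	elm_t elm;

	atomic_write_p(&elm.prof_tctx_pun, &tctx);	/* no (void **) cast */
	printf("stored %p, read back %p\n", (void *)&tctx,
	    atomic_read_p(&elm.prof_tctx_pun));
	return (0);
}

The anonymous union needs C11 or the common GNU extension; both members name
the same bytes, so the double-checked fast paths above can keep reading the
typed pointer directly while only the atomic fallback goes through the
void * view.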