Fix type punning in calls to atomic operation functions.

Jason Evans 2015-05-07 22:35:40 -07:00
parent 8a03cf039c
commit c451831264
2 changed files with 15 additions and 8 deletions
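
A minimal sketch of the pattern these hunks adopt (the "widget" names below are illustrative, not the commit's): passing (void **)&elm->prof_tctx to atomic_read_p()/atomic_write_p() accesses a prof_tctx_t * (or rtree_node_elm_t *) object through an incompatible pointer-to-pointer type, i.e. type punning that strict-aliasing-aware compilers may warn about or miscompile. Placing the typed pointer in a union next to a void * member lets the atomic calls operate on the void * member directly, while ordinary typed accesses keep using the typed member. The sketch assumes jemalloc's atomic prototypes are roughly void *atomic_read_p(void **p) and void atomic_write_p(void **p, const void *x).

/* Illustrative only; the "widget" names are hypothetical. */
typedef struct widget_s widget_t;

struct widget_holder_s {
	union {			/* anonymous union (C11/GNU): members share storage */
		void		*widget_pun;
		widget_t	*widget;
	};
};

/*
 * Old pattern (type punning through a cast):
 *	w = atomic_read_p((void **)&holder->widget);
 * New pattern: the atomics touch only the void * member.
 */
static widget_t *
widget_holder_get(struct widget_holder_s *holder)
{

	return ((widget_t *)atomic_read_p(&holder->widget_pun));
}

static void
widget_holder_set(struct widget_holder_s *holder, widget_t *w)
{

	atomic_write_p(&holder->widget_pun, w);
}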

include/jemalloc/internal/arena.h

@@ -145,7 +145,10 @@ struct arena_chunk_map_misc_s {
 	arena_runs_dirty_link_t		rd;
 
 	/* Profile counters, used for large object runs. */
-	prof_tctx_t			*prof_tctx;
+	union {
+		void			*prof_tctx_pun;
+		prof_tctx_t		*prof_tctx;
+	};
 
 	/* Small region run metadata. */
 	arena_run_t			run;
@@ -1025,7 +1028,7 @@ arena_prof_tctx_get(const void *ptr)
 		else {
 			arena_chunk_map_misc_t *elm = arena_miscelm_get(chunk,
 			    pageind);
-			ret = atomic_read_p((void **)&elm->prof_tctx);
+			ret = atomic_read_p(&elm->prof_tctx_pun);
 		}
 	} else
 		ret = huge_prof_tctx_get(ptr);
@@ -1049,7 +1052,7 @@ arena_prof_tctx_set(const void *ptr, prof_tctx_t *tctx)
 		if (unlikely(arena_mapbits_large_get(chunk, pageind) != 0)) {
 			arena_chunk_map_misc_t *elm = arena_miscelm_get(chunk,
 			    pageind);
-			atomic_write_p((void **)&elm->prof_tctx, tctx);
+			atomic_write_p(&elm->prof_tctx_pun, tctx);
 		}
 	} else
 		huge_prof_tctx_set(ptr, tctx);

include/jemalloc/internal/rtree.h

@@ -36,6 +36,7 @@ typedef void (rtree_node_dalloc_t)(rtree_node_elm_t *);
 
 struct rtree_node_elm_s {
 	union {
+		void			*pun;
 		rtree_node_elm_t	*child;
 		extent_node_t		*val;
 	};
@@ -64,7 +65,10 @@ struct rtree_level_s {
 	 * lower 47 bits of virtual address space in userland, thus leaving
 	 * subtrees[0] unused and avoiding a level of tree traversal.
 	 */
-	rtree_node_elm_t	*subtree;
+	union {
+		void			*subtree_pun;
+		rtree_node_elm_t	*subtree;
+	};
 	/* Number of key bits distinguished by this level. */
 	unsigned		bits;
 	/*
@@ -159,7 +163,7 @@ rtree_child_tryread(rtree_node_elm_t *elm)
 	/* Double-checked read (first read may be stale. */
 	child = elm->child;
 	if (!rtree_node_valid(child))
-		child = atomic_read_p((void **)&elm->child);
+		child = atomic_read_p(&elm->pun);
 	return (child);
 }
 
@@ -178,14 +182,14 @@ JEMALLOC_INLINE extent_node_t *
 rtree_val_read(rtree_t *rtree, rtree_node_elm_t *elm)
 {
 
-	return (atomic_read_p((void **)&elm->val));
+	return (atomic_read_p(&elm->pun));
 }
 
 JEMALLOC_INLINE void
 rtree_val_write(rtree_t *rtree, rtree_node_elm_t *elm, const extent_node_t *val)
 {
 
-	atomic_write_p((void **)&elm->val, val);
+	atomic_write_p(&elm->pun, val);
 }
 
 JEMALLOC_INLINE rtree_node_elm_t *
@@ -196,7 +200,7 @@ rtree_subtree_tryread(rtree_t *rtree, unsigned level)
 	/* Double-checked read (first read may be stale. */
 	subtree = rtree->levels[level].subtree;
 	if (!rtree_node_valid(subtree))
-		subtree = atomic_read_p((void **)&rtree->levels[level].subtree);
+		subtree = atomic_read_p(&rtree->levels[level].subtree_pun);
 	return (subtree);
 }
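
A self-contained illustration (not part of the commit) of why routing the atomics through a union member works: both members occupy the same storage, so a store made through the void * view is observed through the typed view without ever forming a void ** that aliases a differently typed pointer object. Reading a union member other than the one last written is the type-punning idiom that C99 TC3/C11 permit, and it additionally assumes, as this change does, that void * and object pointers share a representation.

#include <assert.h>
#include <stdio.h>

typedef struct extent_node_s { int dummy; } extent_node_t;

/* Same shape as the rtree_node_elm_s union above, as a named union. */
union elm_u {
	void		*pun;
	extent_node_t	*val;
};

int
main(void)
{
	static extent_node_t node;
	union elm_u elm;

	elm.pun = &node;		/* store through the void * view */
	assert(elm.val == &node);	/* observe through the typed view */
	printf("elm.val == &node: %s\n", (elm.val == &node) ? "true" : "false");
	return (0);
}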