Directly embed prof_ctx_t's bt.

This commit is contained in:
Jason Evans 2014-08-15 15:05:12 -07:00
parent b41ccdb125
commit ab532e9799
2 changed files with 26 additions and 56 deletions

View File

@@ -115,9 +115,6 @@ struct prof_thr_cnt_s {
 };

 struct prof_ctx_s {
-	/* Associated backtrace. */
-	prof_bt_t		*bt;
-
 	/* Protects nlimbo, cnt_merged, and cnts_ql. */
 	malloc_mutex_t		*lock;

@@ -146,6 +143,12 @@ struct prof_ctx_s {
 	/* Linkage for list of contexts to be dumped. */
 	ql_elm(prof_ctx_t)	dump_link;
+
+	/* Associated backtrace. */
+	prof_bt_t		bt;
+
+	/* Backtrace vector, variable size, referred to by bt. */
+	void			*vec[1];
 };

 typedef ql_head(prof_ctx_t) prof_ctx_list_t;
@@ -425,7 +428,7 @@ prof_realloc(const void *ptr, size_t usize, prof_thr_cnt_t *cnt,
 	}

 	if ((uintptr_t)old_ctx > (uintptr_t)1U) {
-		told_cnt = prof_lookup(old_ctx->bt);
+		told_cnt = prof_lookup(&old_ctx->bt);
 		if (told_cnt == NULL) {
 			/*
 			 * It's too late to propagate OOM for this realloc(),
@@ -483,7 +486,7 @@ prof_free(const void *ptr, size_t size)
 	if ((uintptr_t)ctx > (uintptr_t)1) {
 		prof_thr_cnt_t *tcnt;
 		assert(size == isalloc(ptr, true));
-		tcnt = prof_lookup(ctx->bt);
+		tcnt = prof_lookup(&ctx->bt);
 		if (tcnt != NULL) {
 			tcnt->epoch++;

View File

@@ -87,41 +87,6 @@ bt_init(prof_bt_t *bt, void **vec)
 	bt->len = 0;
 }

-static void
-bt_destroy(prof_bt_t *bt)
-{
-
-	cassert(config_prof);
-
-	idalloc(bt);
-}
-
-static prof_bt_t *
-bt_dup(prof_bt_t *bt)
-{
-	prof_bt_t *ret;
-
-	cassert(config_prof);
-
-	/*
-	 * Create a single allocation that has space for vec immediately
-	 * following the prof_bt_t structure.  The backtraces that get
-	 * stored in the backtrace caches are copied from stack-allocated
-	 * temporary variables, so size is known at creation time.  Making this
-	 * a contiguous object improves cache locality.
-	 */
-	ret = (prof_bt_t *)imalloc(QUANTUM_CEILING(sizeof(prof_bt_t)) +
-	    (bt->len * sizeof(void *)));
-	if (ret == NULL)
-		return (NULL);
-	ret->vec = (void **)((uintptr_t)ret +
-	    QUANTUM_CEILING(sizeof(prof_bt_t)));
-	memcpy(ret->vec, bt->vec, bt->len * sizeof(void *));
-	ret->len = bt->len;
-
-	return (ret);
-}
-
 static inline void
 prof_enter(prof_tdata_t *prof_tdata)
 {
@@ -388,11 +353,16 @@ prof_ctx_mutex_choose(void)
 	return (&ctx_locks[(nctxs - 1) % PROF_NCTX_LOCKS]);
 }

-static void
-prof_ctx_init(prof_ctx_t *ctx, prof_bt_t *bt)
+static prof_ctx_t *
+prof_ctx_create(prof_bt_t *bt)
 {
-
-	ctx->bt = bt;
+	/*
+	 * Create a single allocation that has space for vec of length bt->len.
+	 */
+	prof_ctx_t *ctx = (prof_ctx_t *)imalloc(offsetof(prof_ctx_t, vec) +
+	    (bt->len * sizeof(void *)));
+	if (ctx == NULL)
+		return (NULL);
 	ctx->lock = prof_ctx_mutex_choose();
 	/*
 	 * Set nlimbo to 1, in order to avoid a race condition with
@@ -402,6 +372,11 @@ prof_ctx_init(prof_ctx_t *ctx, prof_bt_t *bt)
 	ql_elm_new(ctx, dump_link);
 	memset(&ctx->cnt_merged, 0, sizeof(prof_cnt_t));
 	ql_new(&ctx->cnts_ql);
+	/* Duplicate bt. */
+	memcpy(ctx->vec, bt->vec, bt->len * sizeof(void *));
+	ctx->bt.vec = ctx->vec;
+	ctx->bt.len = bt->len;
+	return (ctx);
 }

 static void
@@ -428,12 +403,11 @@ prof_ctx_destroy(prof_ctx_t *ctx)
 		assert(ctx->cnt_merged.accumobjs == 0);
 		assert(ctx->cnt_merged.accumbytes == 0);
 		/* Remove ctx from bt2ctx. */
-		if (ckh_remove(&bt2ctx, ctx->bt, NULL, NULL))
+		if (ckh_remove(&bt2ctx, &ctx->bt, NULL, NULL))
 			not_reached();
 		prof_leave(prof_tdata);
 		/* Destroy ctx. */
 		malloc_mutex_unlock(ctx->lock);
-		bt_destroy(ctx->bt);
 		idalloc(ctx);
 	} else {
 		/*
@@ -501,22 +475,15 @@ prof_lookup_global(prof_bt_t *bt, prof_tdata_t *prof_tdata, void **p_btkey,
 	prof_enter(prof_tdata);
 	if (ckh_search(&bt2ctx, bt, &btkey.v, &ctx.v)) {
 		/* bt has never been seen before.  Insert it. */
-		ctx.v = imalloc(sizeof(prof_ctx_t));
+		ctx.p = prof_ctx_create(bt);
 		if (ctx.v == NULL) {
 			prof_leave(prof_tdata);
 			return (true);
 		}
-		btkey.p = bt_dup(bt);
-		if (btkey.v == NULL) {
-			prof_leave(prof_tdata);
-			idalloc(ctx.v);
-			return (true);
-		}
-		prof_ctx_init(ctx.p, btkey.p);
+		btkey.p = &ctx.p->bt;
 		if (ckh_insert(&bt2ctx, btkey.v, ctx.v)) {
 			/* OOM. */
 			prof_leave(prof_tdata);
-			idalloc(btkey.v);
 			idalloc(ctx.v);
 			return (true);
 		}
@@ -1039,7 +1006,7 @@ prof_dump(bool propagate_err, const char *filename, bool leakcheck)
 	/* Dump per ctx profile stats. */
 	while ((ctx.p = ql_first(&ctx_ql)) != NULL) {
-		if (prof_dump_ctx(propagate_err, ctx.p, ctx.p->bt, &ctx_ql))
+		if (prof_dump_ctx(propagate_err, ctx.p, &ctx.p->bt, &ctx_ql))
 			goto label_write_error;
 	}