Enabled -Wstrict-prototypes and fixed warnings.

commit 602edd7566
parent ebd7e99f5c
Author:    Qi Wang
Date:      2023-07-05 13:33:34 -07:00
Committed: Qi Wang
42 changed files with 82 additions and 80 deletions
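
For context: in C prior to C23, an empty parameter list such as bool hook_boot() declares a function with unspecified parameters, not a function taking none; only the (void) form is a true prototype, which is what -Wstrict-prototypes enforces. A minimal sketch of the difference (illustrative, not taken from the diff itself):

/* Old style: "()" leaves the parameters unchecked, so a bogus call
 * such as hook_boot(1, 2) compiles without any diagnostic. */
bool hook_boot();

/* Prototype: "(void)" states that the function takes no arguments,
 * so the same bogus call is rejected at compile time. */
bool hook_boot(void);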


@@ -159,7 +159,7 @@ ehooks_default_purge_forced(extent_hooks_t *extent_hooks, void *addr,
 #endif
 
 bool
-ehooks_default_split_impl() {
+ehooks_default_split_impl(void) {
 	if (!maps_coalesce) {
 		/*
 		 * Without retain, only whole regions can be purged (required by


@@ -19,7 +19,7 @@ static seq_hooks_t hooks[HOOK_MAX];
 static malloc_mutex_t hooks_mu;
 
 bool
-hook_boot() {
+hook_boot(void) {
 	return malloc_mutex_init(&hooks_mu, "hooks", WITNESS_RANK_HOOK,
 	    malloc_mutex_rank_exclusive);
 }
@@ -100,7 +100,7 @@ for (int for_each_hook_counter = 0; \
 }
 
 static bool *
-hook_reentrantp() {
+hook_reentrantp(void) {
 	/*
 	 * We prevent user reentrancy within hooks. This is basically just a
 	 * thread-local bool that triggers an early-exit.


@@ -24,7 +24,7 @@ static void hpa_dalloc_batch(tsdn_t *tsdn, pai_t *self,
 static uint64_t hpa_time_until_deferred_work(tsdn_t *tsdn, pai_t *self);
 
 bool
-hpa_supported() {
+hpa_supported(void) {
 #ifdef _WIN32
 	/*
 	 * At least until the API and implementation is somewhat settled, we


@@ -766,7 +766,7 @@ malloc_ncpus(void) {
  * Since otherwise tricky things is possible with percpu arenas in use.
  */
 static bool
-malloc_cpu_count_is_deterministic()
+malloc_cpu_count_is_deterministic(void)
 {
 #ifdef _WIN32
 	return true;
@@ -1807,7 +1807,7 @@ malloc_init_hard_needed(void) {
 }
 
 static bool
-malloc_init_hard_a0_locked() {
+malloc_init_hard_a0_locked(void) {
 	malloc_initializer = INITIALIZER;
 
 	JEMALLOC_DIAGNOSTIC_PUSH


@@ -66,7 +66,7 @@ static int madvise_dont_need_zeros_is_faulty = -1;
  *
  * [1]: https://patchwork.kernel.org/patch/10576637/
  */
-static int madvise_MADV_DONTNEED_zeroes_pages()
+static int madvise_MADV_DONTNEED_zeroes_pages(void)
 {
 	size_t size = PAGE;
 


@@ -562,7 +562,7 @@ prof_backtrace_hook_set(prof_backtrace_hook_t hook) {
 }
 
 prof_backtrace_hook_t
-prof_backtrace_hook_get() {
+prof_backtrace_hook_get(void) {
 	return (prof_backtrace_hook_t)atomic_load_p(&prof_backtrace_hook,
 	    ATOMIC_ACQUIRE);
 }
@@ -573,7 +573,7 @@ prof_dump_hook_set(prof_dump_hook_t hook) {
 }
 
 prof_dump_hook_t
-prof_dump_hook_get() {
+prof_dump_hook_get(void) {
 	return (prof_dump_hook_t)atomic_load_p(&prof_dump_hook,
 	    ATOMIC_ACQUIRE);
 }
@@ -584,7 +584,7 @@ prof_sample_hook_set(prof_sample_hook_t hook) {
 }
 
 prof_sample_hook_t
-prof_sample_hook_get() {
+prof_sample_hook_get(void) {
 	return (prof_sample_hook_t)atomic_load_p(&prof_sample_hook,
 	    ATOMIC_ACQUIRE);
 }
@@ -595,7 +595,7 @@ prof_sample_free_hook_set(prof_sample_free_hook_t hook) {
 }
 
 prof_sample_free_hook_t
-prof_sample_free_hook_get() {
+prof_sample_free_hook_get(void) {
 	return (prof_sample_free_hook_t)atomic_load_p(&prof_sample_free_hook,
 	    ATOMIC_ACQUIRE);
 }


@@ -503,7 +503,7 @@ prof_double_uint64_cast(double d) {
 }
 #endif
 
-void prof_unbias_map_init() {
+void prof_unbias_map_init(void) {
 	/* See the comment in prof_sample_new_event_wait */
 #ifdef JEMALLOC_PROF
 	for (szind_t i = 0; i < SC_NSIZES; i++) {


@@ -16,13 +16,13 @@ prof_recent_list_t prof_recent_alloc_list;
 malloc_mutex_t prof_recent_dump_mtx; /* Protects dumping. */
 
 static void
-prof_recent_alloc_max_init() {
+prof_recent_alloc_max_init(void) {
 	atomic_store_zd(&prof_recent_alloc_max, opt_prof_recent_alloc_max,
 	    ATOMIC_RELAXED);
 }
 
 static inline ssize_t
-prof_recent_alloc_max_get_no_lock() {
+prof_recent_alloc_max_get_no_lock(void) {
 	return atomic_load_zd(&prof_recent_alloc_max, ATOMIC_RELAXED);
 }
 
@@ -403,7 +403,7 @@ label_rollback:
 }
 
 ssize_t
-prof_recent_alloc_max_ctl_read() {
+prof_recent_alloc_max_ctl_read(void) {
 	cassert(config_prof);
 	/* Don't bother to acquire the lock. */
 	return prof_recent_alloc_max_get_no_lock();
@@ -582,7 +582,7 @@ prof_recent_alloc_dump(tsd_t *tsd, write_cb_t *write_cb, void *cbopaque) {
 #undef PROF_RECENT_PRINT_BUFSIZE
 
 bool
-prof_recent_init() {
+prof_recent_init(void) {
 	cassert(config_prof);
 	prof_recent_alloc_max_init();
 


@@ -428,7 +428,7 @@ prof_backtrace(tsd_t *tsd, prof_bt_t *bt) {
 }
 
 void
-prof_hooks_init() {
+prof_hooks_init(void) {
 	prof_backtrace_hook_set(&prof_backtrace_impl);
 	prof_dump_hook_set(NULL);
 	prof_sample_hook_set(NULL);
@@ -436,7 +436,7 @@ prof_hooks_init() {
 }
 
 void
-prof_unwind_init() {
+prof_unwind_init(void) {
 #ifdef JEMALLOC_PROF_LIBGCC
 	/*
 	 * Cause the backtracing machinery to allocate its internal
@@ -596,7 +596,7 @@ prof_open_maps_internal(const char *format, ...) {
 #endif
 
 static int
-prof_dump_open_maps_impl() {
+prof_dump_open_maps_impl(void) {
 	int mfd;
 
 	cassert(config_prof);


@@ -6,7 +6,7 @@
  * from outside the generated library, so that we can use them in test code.
  */
 JEMALLOC_EXPORT
-void (*test_hooks_arena_new_hook)() = NULL;
+void (*test_hooks_arena_new_hook)(void) = NULL;
 
 JEMALLOC_EXPORT
-void (*test_hooks_libc_hook)() = NULL;
+void (*test_hooks_libc_hook)(void) = NULL;
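
The two declarations above are function pointers rather than functions, but the same rule applies: an empty parameter list leaves the pointee's parameters unspecified. A small sketch of why that is risky (takes_two is a hypothetical function, not from the diff):

/* A function that clearly takes two arguments. */
void takes_two(int a, int b);

void (*hook)() = takes_two;    /* accepted: no prototype to check against,
                                * so hook() could later be called with the
                                * wrong number of arguments, silently. */
void (*hook_strict)(void);     /* a prototype: assigning takes_two here is
                                * a compile-time type error. */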


@@ -148,7 +148,7 @@ tsd_local_slow(tsd_t *tsd) {
 }
 
 bool
-tsd_global_slow() {
+tsd_global_slow(void) {
 	return atomic_load_u32(&tsd_global_slow_count, ATOMIC_RELAXED) > 0;
 }