From 746e77d000d6da9bfbd824c849fc8c4e35ad9a44 Mon Sep 17 00:00:00 2001
From: Willy Tarreau
Date: Mon, 11 Aug 2025 18:46:28 +0200
Subject: [PATCH] MINOR: tools: implement ha_aligned_zalloc()

This one is exactly ha_aligned_alloc() followed by a memset(0). It will
be convenient in a number of call places as a replacement for calloc().

Note that ideally we should also have a calloc-like version performing
basic multiplication overflow checks, but these allocations are
essentially a number of threads times small structs, so that's fine,
and we already do the same everywhere with malloc() calls.
---
 include/haproxy/bug.h   | 38 ++++++++++++++++++++++++++++++++++++++
 include/haproxy/tools.h | 20 ++++++++++++++++++++
 2 files changed, 58 insertions(+)

diff --git a/include/haproxy/bug.h b/include/haproxy/bug.h
index 5bbe47a51..e294e377e 100644
--- a/include/haproxy/bug.h
+++ b/include/haproxy/bug.h
@@ -639,6 +639,24 @@ struct mem_stats {
 	_ha_aligned_alloc(__a, __s);				\
 })
 
+#undef ha_aligned_zalloc
+#define ha_aligned_zalloc(a,s) ({				\
+	size_t __a = (a);					\
+	size_t __s = (s);					\
+	static struct mem_stats _ __attribute__((used,__section__("mem_stats"),__aligned__(sizeof(void*)))) = { \
+		.caller = {					\
+			.file = __FILE__, .line = __LINE__,	\
+			.what = MEM_STATS_TYPE_MALLOC,		\
+			.func = __func__,			\
+		},						\
+	};							\
+	HA_WEAK(__start_mem_stats);				\
+	HA_WEAK(__stop_mem_stats);				\
+	_HA_ATOMIC_INC(&_.calls);				\
+	_HA_ATOMIC_ADD(&_.size, __s);				\
+	_ha_aligned_zalloc(__a, __s);				\
+})
+
 #undef ha_aligned_alloc_safe
 #define ha_aligned_alloc_safe(a,s) ({				\
 	size_t __a = (a);					\
@@ -657,6 +675,24 @@ struct mem_stats {
 	_ha_aligned_alloc_safe(__a, __s);			\
 })
 
+#undef ha_aligned_zalloc_safe
+#define ha_aligned_zalloc_safe(a,s) ({				\
+	size_t __a = (a);					\
+	size_t __s = (s);					\
+	static struct mem_stats _ __attribute__((used,__section__("mem_stats"),__aligned__(sizeof(void*)))) = { \
+		.caller = {					\
+			.file = __FILE__, .line = __LINE__,	\
+			.what = MEM_STATS_TYPE_MALLOC,		\
+			.func = __func__,			\
+		},						\
+	};							\
+	HA_WEAK(__start_mem_stats);				\
+	HA_WEAK(__stop_mem_stats);				\
+	_HA_ATOMIC_INC(&_.calls);				\
+	_HA_ATOMIC_ADD(&_.size, __s);				\
+	_ha_aligned_zalloc_safe(__a, __s);			\
+})
+
 #undef ha_aligned_free
 #define ha_aligned_free(x) ({					\
 	typeof(x) __x = (x);					\
@@ -703,7 +739,9 @@ struct mem_stats {
 #define will_free(x, y)			do { } while (0)
 
 #define ha_aligned_alloc(a,s)		_ha_aligned_alloc(a, s)
+#define ha_aligned_zalloc(a,s)		_ha_aligned_zalloc(a, s)
 #define ha_aligned_alloc_safe(a,s)	_ha_aligned_alloc_safe(a, s)
+#define ha_aligned_zalloc_safe(a,s)	_ha_aligned_zalloc_safe(a, s)
 #define ha_aligned_free(p)		_ha_aligned_free(p)
 #define ha_aligned_free_size(p,s)	_ha_aligned_free(p)
 
diff --git a/include/haproxy/tools.h b/include/haproxy/tools.h
index a8bab3e65..160d5f3ac 100644
--- a/include/haproxy/tools.h
+++ b/include/haproxy/tools.h
@@ -1212,6 +1212,16 @@ static inline void *_ha_aligned_alloc(size_t alignment, size_t size)
 #endif
 }
 
+/* Like above but zeroing the area */
+static inline void *_ha_aligned_zalloc(size_t alignment, size_t size)
+{
+	void *ret = _ha_aligned_alloc(alignment, size);
+
+	if (ret)
+		memset(ret, 0, size);
+	return ret;
+}
+
 /* portable memalign(): tries to accommodate OS specificities, and may fall
  * back to plain malloc() if not supported, meaning that alignment guarantees
  * are only a performance bonus but not granted. The size will automatically be
@@ -1239,6 +1249,16 @@ static inline void *_ha_aligned_alloc_safe(size_t alignment, size_t size)
 	return _ha_aligned_alloc(alignment, size);
 }
 
+/* Like above but zeroing the area */
+static inline void *_ha_aligned_zalloc_safe(size_t alignment, size_t size)
+{
+	void *ret = _ha_aligned_alloc_safe(alignment, size);
+
+	if (ret)
+		memset(ret, 0, size);
+	return ret;
+}
+
 /* To be used to free a pointer returned by _ha_aligned_alloc() or
  * _ha_aligned_alloc_safe(). Please use ha_aligned_free() instead
  * (which does perform accounting).
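
Example usage (illustration only, not part of the commit): a minimal
sketch of the intended calloc() replacement pattern, here for a
per-thread array. The struct, the function names and the 64-byte
alignment are hypothetical; only ha_aligned_zalloc() and
ha_aligned_free() come from this patch (exposed via haproxy/bug.h), and
as noted above the size multiplication is deliberately not
overflow-checked since it is a thread count times a small struct.

#include <stddef.h>
#include <haproxy/bug.h>	/* ha_aligned_zalloc(), ha_aligned_free() */

/* hypothetical per-thread context; small enough that
 * nbthread * sizeof(*ctx) cannot realistically overflow
 */
struct thread_ctx {
	unsigned long long counters[8];
};

static struct thread_ctx *ctx;

static int init_ctx(int nbthread)
{
	/* before: ctx = calloc(nbthread, sizeof(*ctx));
	 * after: cache-line aligned and zero-filled in one call,
	 * accounted in the "mem_stats" section when stats are enabled
	 */
	ctx = ha_aligned_zalloc(64, nbthread * sizeof(*ctx));
	return ctx != NULL;
}

static void deinit_ctx(void)
{
	/* must be released with the matching aligned free, not free() */
	ha_aligned_free(ctx);
	ctx = NULL;
}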