diff --git a/src/zmalloc.c b/src/zmalloc.c
index 1cb01ee88c..e18fa8bac2 100644
--- a/src/zmalloc.c
+++ b/src/zmalloc.c
@@ -90,6 +90,7 @@ void zlibc_free(void *ptr) {
 #define thread_local _Thread_local
+#define PADDING_ELEMENT_NUM (CACHE_LINE_SIZE / sizeof(size_t) - 1)
 #define MAX_THREADS_NUM (IO_THREADS_MAX_NUM + 3 + 1)
 /* A thread-local storage which keep the current thread's index in the used_memory_thread array. */
 static thread_local int thread_index = -1;
@@ -101,10 +102,11 @@ static thread_local int thread_index = -1;
  * For the other architecture, lets fall back to the atomic operation to keep safe. */
 #if defined(__i386__) || defined(__x86_64__) || defined(__amd64__) || defined(__POWERPC__) || defined(__arm__) || \
     defined(__arm64__)
-static __attribute__((aligned(sizeof(size_t)))) size_t used_memory_thread[MAX_THREADS_NUM];
+static __attribute__((aligned(CACHE_LINE_SIZE))) size_t used_memory_thread_padded[MAX_THREADS_NUM + PADDING_ELEMENT_NUM];
 #else
-static _Atomic size_t used_memory_thread[MAX_THREADS_NUM];
+static __attribute__((aligned(CACHE_LINE_SIZE))) _Atomic size_t used_memory_thread_padded[MAX_THREADS_NUM + PADDING_ELEMENT_NUM];
 #endif
+static size_t *used_memory_thread = &used_memory_thread_padded[PADDING_ELEMENT_NUM];
 static atomic_int total_active_threads = 0;
 /* This is a simple protection. It's used only if some modules create a lot of threads. */
 static atomic_size_t used_memory_for_additional_threads = 0;
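
The arithmetic behind the patch: with 64-byte cache lines and 8-byte size_t, PADDING_ELEMENT_NUM is 7, so the array is cache-line aligned and used_memory_thread is pointed at element 7. That leaves used_memory_thread[0] as the only live slot in the first aligned line, with used_memory_thread[1] starting a fresh line, which presumably keeps the most frequently updated counter (index 0) from sharing a cache line with the counters the other threads bump. Below is a minimal, self-contained sketch (not part of the patch) that prints the resulting byte offsets; CACHE_LINE_SIZE = 64 and MAX_THREADS_NUM = 16 are assumed stand-ins for the real values taken from config.h and IO_THREADS_MAX_NUM.

```c
/* Illustration only: reproduces the padded-array layout from the diff and
 * prints which cache line each exported element lands on. The constants are
 * assumptions, not the values used in zmalloc.c. */
#include <stdio.h>
#include <stddef.h>

#define CACHE_LINE_SIZE 64  /* assumption: 64-byte cache lines */
#define MAX_THREADS_NUM 16  /* stand-in for IO_THREADS_MAX_NUM + 3 + 1 */
#define PADDING_ELEMENT_NUM (CACHE_LINE_SIZE / sizeof(size_t) - 1)

static __attribute__((aligned(CACHE_LINE_SIZE))) size_t
    used_memory_thread_padded[MAX_THREADS_NUM + PADDING_ELEMENT_NUM];
/* The exported view skips the leading padding slots, so index 0 is the last
 * slot of the first (aligned) cache line and index 1 begins the next line. */
static size_t *used_memory_thread = &used_memory_thread_padded[PADDING_ELEMENT_NUM];

int main(void) {
    for (int i = 0; i < 4; i++) {
        size_t off = (size_t)((char *)&used_memory_thread[i] -
                              (char *)used_memory_thread_padded);
        printf("used_memory_thread[%d]: byte offset %zu, cache line %zu\n",
               i, off, off / CACHE_LINE_SIZE);
    }
    return 0;
}
```

With the assumed constants this prints offset 56 / line 0 for index 0 and offsets 64, 72, 80 / line 1 for indices 1 through 3, showing that only the padding slots share a line with index 0.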