36#ifdef WITH_MEM_VALGRIND
37# include "valgrind/memcheck.h"
43# define POISON_REDZONE_SIZE 32
45# define POISON_REDZONE_SIZE 0
/* Pack 4 byte values into an `int`, with `a` in the most-significant byte.
 * NOTE(review): this appears to be the big-endian branch of an endianness
 * `#if` whose condition lies outside this view — confirm against the
 * byte-swapped counterpart definition. */
# define MAKE_ID(a, b, c, d) ((int)(a) << 24 | (int)(b) << 16 | (c) << 8 | (d))
/* Pack 8 byte values into an `int64_t`, with `a` in the most-significant byte.
 * NOTE(review): presumably the big-endian branch — the surrounding `#if` is
 * not visible here; confirm. */
# define MAKE_ID_8(a, b, c, d, e, f, g, h) \
  ((int64_t)(a) << 56 | (int64_t)(b) << 48 | (int64_t)(c) << 40 | (int64_t)(d) << 32 | \
   (int64_t)(e) << 24 | (int64_t)(f) << 16 | (int64_t)(g) << 8 | (h))
/* Pack 4 byte values into an `int`, with `a` in the least-significant byte
 * (byte-swapped relative to the other variant, so the in-memory byte sequence
 * matches). Uses C++ functional casts, so this file is C++.
 * NOTE(review): presumably the little-endian branch of an endianness `#if`
 * outside this view — confirm. */
# define MAKE_ID(a, b, c, d) (int(d) << 24 | int(c) << 16 | (b) << 8 | (a))
/* Pack 8 byte values into an `int64_t`, with `a` in the least-significant
 * byte (byte-swapped relative to the other variant).
 * NOTE(review): presumably the little-endian branch — confirm against the
 * unseen surrounding `#if`. */
# define MAKE_ID_8(a, b, c, d, e, f, g, h) \
  (int64_t(h) << 56 | int64_t(g) << 48 | int64_t(f) << 40 | int64_t(e) << 32 | \
   int64_t(d) << 24 | int64_t(c) << 16 | int64_t(b) << 8 | (a))
70 ((sizeof(void *) > sizeof(int32_t)) ? MAKE_ID_8('e', 'e', 'r', 'f', 'f', 'r', 'e', 'e') : \
71 MAKE_ID('e', 'f', 'f', 'e'))
/* The ASCII word "used" packed into an `int`: sentinel value for tagging
 * element headers (counterpart of the FREEWORD free-list sentinel above).
 * NOTE(review): the tagging code itself is not visible in this view —
 * confirm where this is written/checked. */
#define USEDWORD MAKE_ID('u', 's', 'e', 'd')
/* Smallest supported element size. An element must at least hold the
 * free-list bookkeeping (presumably a next-pointer plus the FREEWORD
 * sentinel — hence two pointer widths; confirm against BLI_freenode). */
#define MEMPOOL_ELEM_SIZE_MIN (sizeof(void *) * 2)
/* First element stored in `chunk`: the element data begins immediately after
 * the `BLI_mempool_chunk` header (`(chunk) + 1`). The `CHECK_TYPE_INLINE`
 * operand only enforces the argument's type at compile time; the comma
 * operator discards its result. */
#define CHUNK_DATA(chunk) \
  ((BLI_freenode *)(CHECK_TYPE_INLINE(chunk, BLI_mempool_chunk *), (void *)((chunk) + 1)))
/* Step a node pointer forward by one element. Relies on a local variable
 * `esize` (element stride in bytes) being in scope at the expansion site. */
#define NODE_STEP_NEXT(node) ((BLI_freenode *)((char *)(node) + esize))
/* Step a node pointer backward by one element; see NODE_STEP_NEXT for the
 * `esize` requirement. */
#define NODE_STEP_PREV(node) ((BLI_freenode *)((char *)(node)-esize))
/* Per-chunk bookkeeping cost in bytes: the underlying allocator's overhead
 * (MEM_SIZE_OVERHEAD) plus the chunk header itself. */
#define CHUNK_OVERHEAD uint(MEM_SIZE_OVERHEAD + sizeof(BLI_mempool_chunk))
166 while (index-- && head) {
180 return (elem_num <= pchunk) ? 1 : ((elem_num / pchunk) + 1);
215 mpchunk->
next =
nullptr;
219 pool->
free = curnode;
229#ifdef WITH_MEM_VALGRIND
236#ifdef WITH_MEM_VALGRIND
237 VALGRIND_MAKE_MEM_UNDEFINED(curnode, pool->
esize);
247#ifdef WITH_MEM_VALGRIND
252#ifdef WITH_MEM_VALGRIND
253 VALGRIND_MAKE_MEM_UNDEFINED(curnode, pool->
esize);
266#ifdef WITH_MEM_VALGRIND
268 VALGRIND_MAKE_MEM_UNDEFINED(curnode, pool->
esize);
275#ifdef WITH_MEM_VALGRIND
279 curnode->
next =
nullptr;
281#ifdef WITH_MEM_VALGRIND
282 VALGRIND_MAKE_MEM_UNDEFINED(curnode, pool->
esize);
288#ifdef WITH_MEM_VALGRIND
293#ifdef WITH_MEM_VALGRIND
294 VALGRIND_MAKE_MEM_UNDEFINED(last_tail, pool->
esize);
308#ifdef WITH_MEM_VALGRIND
318 for (; mpchunk; mpchunk = mpchunk_next) {
319 mpchunk_next = mpchunk->
next;
360 pool->
csize = esize * pchunk;
363#if defined(USE_CHUNK_POW2) && !defined(NDEBUG)
372 pool->
free =
nullptr;
378 for (
i = 0;
i < maxchunks;
i++) {
384#ifdef WITH_MEM_VALGRIND
401 free_pop = pool->
free;
404#ifdef WITH_MEM_VALGRIND
425#ifdef WITH_MEM_VALGRIND
429 return (
void *)free_pop;
449 for (chunk = pool->
chunks; chunk; chunk = chunk->
next) {
456 BLI_assert_msg(0,
"Attempt to free data which is not in pool.\n");
475 pool->
free = newhead;
481#ifdef WITH_MEM_VALGRIND
482 VALGRIND_MEMPOOL_FREE(pool, addr);
494 first->
next =
nullptr;
498#ifdef WITH_MEM_VALGRIND
503 pool->
free = curnode;
520 curnode->
next =
nullptr;
523#ifdef WITH_MEM_VALGRIND
524 VALGRIND_MEMPOOL_FREE(pool,
CHUNK_DATA(first));
542 if (index < pool->totused) {
564 char *p =
static_cast<char *
>(
data);
571 memcpy(p, elem,
size_t(esize));
579 char *
data =
static_cast<char *
>(
612 for (
size_t i = 1;
i < iter_num;
i++) {
615 ((*curchunk_threaded_shared) ? (*curchunk_threaded_shared)->next :
nullptr);
657 ret = bli_mempool_iternext(iter);
678# ifdef WITH_MEM_VALGRIND
690# ifdef WITH_MEM_VALGRIND
697# ifdef WITH_MEM_VALGRIND
727# ifdef WITH_MEM_VALGRIND
750# ifdef WITH_MEM_VALGRIND
765# ifdef WITH_MEM_VALGRIND
780# ifdef WITH_MEM_VALGRIND
804#ifdef WITH_MEM_VALGRIND
809 if (elem_num_reserve == -1) {
818 if (mpchunk && mpchunk->
next) {
820 mpchunk_next = mpchunk->
next;
821 mpchunk->
next =
nullptr;
822 mpchunk = mpchunk_next;
825 mpchunk_next = mpchunk->
next;
827 }
while ((mpchunk = mpchunk_next));
831 pool->
free =
nullptr;
833 chunks_temp = pool->
chunks;
837 while ((mpchunk = chunks_temp)) {
838 chunks_temp = mpchunk->
next;
852#ifdef WITH_MEM_VALGRIND
#define BLI_asan_unpoison(addr, size)
#define BLI_asan_poison(addr, size)
#define BLI_assert_msg(a, msg)
MINLINE unsigned int power_of_2_max_u(unsigned int x)
#define VALGRIND_MEMPOOL_ALLOC(pool, addr, size)
#define VALGRIND_CREATE_MEMPOOL(pool, rzB, is_zeroed)
#define VALGRIND_DESTROY_MEMPOOL(pool)
#define MEMPOOL_ELEM_SIZE_MIN
void BLI_mempool_clear_ex(BLI_mempool *pool, const int elem_num_reserve)
#define NODE_STEP_PREV(node)
static bool mempool_debug_memset
ParallelMempoolTaskData * mempool_iter_threadsafe_create(BLI_mempool *pool, const size_t iter_num)
void * BLI_mempool_findelem(BLI_mempool *pool, uint index)
#define CHUNK_DATA(chunk)
void BLI_mempool_as_array(BLI_mempool *pool, void *data)
static void mempool_asan_lock(BLI_mempool *pool)
void mempool_iter_threadsafe_destroy(ParallelMempoolTaskData *iter_arr)
void * BLI_mempool_as_arrayN(BLI_mempool *pool, const char *allocstr)
void * BLI_mempool_alloc(BLI_mempool *pool)
int BLI_mempool_len(const BLI_mempool *pool)
void BLI_mempool_free(BLI_mempool *pool, void *addr)
void BLI_mempool_clear(BLI_mempool *pool)
static BLI_mempool_chunk * mempool_chunk_alloc(const BLI_mempool *pool)
static void mempool_threadsafe_iternew(BLI_mempool *pool, BLI_mempool_threadsafe_iter *ts_iter)
void BLI_mempool_iternew(BLI_mempool *pool, BLI_mempool_iter *iter)
static void mempool_asan_unlock(BLI_mempool *pool)
static void mempool_chunk_free(BLI_mempool_chunk *mpchunk, BLI_mempool *pool)
void * mempool_iter_threadsafe_step(BLI_mempool_threadsafe_iter *ts_iter)
void * BLI_mempool_calloc(BLI_mempool *pool)
void BLI_mempool_destroy(BLI_mempool *pool)
void * BLI_mempool_iterstep(BLI_mempool_iter *iter)
void BLI_mempool_set_memory_debug()
#define POISON_REDZONE_SIZE
static BLI_freenode * mempool_chunk_add(BLI_mempool *pool, BLI_mempool_chunk *mpchunk, BLI_freenode *last_tail)
BLI_mempool * BLI_mempool_create(uint esize, uint elem_num, uint pchunk, uint flag)
static void mempool_chunk_free_all(BLI_mempool_chunk *mpchunk, BLI_mempool *pool)
#define NODE_STEP_NEXT(node)
BLI_INLINE BLI_mempool_chunk * mempool_chunk_find(BLI_mempool_chunk *head, uint index)
BLI_INLINE uint mempool_maxchunks(const uint elem_num, const uint pchunk)
void BLI_mutex_init(ThreadMutex *mutex)
void BLI_mutex_lock(ThreadMutex *mutex)
void BLI_mutex_unlock(ThreadMutex *mutex)
pthread_mutex_t ThreadMutex
#define ARRAY_HAS_ITEM(arr_item, arr_start, arr_len)
#define POINTER_OFFSET(v, ofs)
Read Guarded memory (de)allocation.
Provides wrappers around system-specific atomic primitives, and some extensions (faked-atomic operations).
ATOMIC_INLINE void * atomic_cas_ptr(void **v, void *old, void *_new)
BMesh const char void * data
#define MEM_SIZE_OVERHEAD
void * MEM_mallocN(size_t len, const char *str)
void * MEM_calloc_arrayN(size_t len, size_t size, const char *str)
void * MEM_callocN(size_t len, const char *str)
void * MEM_malloc_arrayN(size_t len, size_t size, const char *str)
void MEM_freeN(void *vmemh)
struct BLI_mempool_chunk * curchunk
struct BLI_mempool_chunk ** curchunk_threaded_shared
BLI_mempool_chunk * chunks
BLI_mempool_chunk * chunk_tail
BLI_mempool_threadsafe_iter ts_iter