33 #define CHUNK_SIZE_DEFAULT (1 << 16)
35 #define CHUNK_ELEM_MIN 32
55 return ((
char *)(
queue)->chunk_first->data) + ((
queue)->elem_size * (
queue)->chunk_first_index);
60 return ((
char *)(
queue)->chunk_last->data) + ((
queue)->elem_size * (
queue)->chunk_last_index);
71 BLI_assert((elem_size != 0) && (chunk_size != 0));
73 while (
UNLIKELY(chunk_size <= elem_size_min)) {
80 return chunk_size / elem_size;
88 queue->elem_size = elem_size;
90 queue->chunk_last_index =
queue->chunk_elem_max - 1;
124 queue->chunk_last_index++;
129 if (
queue->chunk_free) {
130 chunk =
queue->chunk_free;
140 queue->chunk_first = chunk;
143 queue->chunk_last->next = chunk;
146 queue->chunk_last = chunk;
147 queue->chunk_last_index = 0;
167 queue->chunk_first_index++;
173 queue->chunk_first =
queue->chunk_first->next;
174 queue->chunk_first_index = 0;
177 queue->chunk_last_index =
queue->chunk_elem_max - 1;
181 queue->chunk_free = chunk_free;
187 return queue->totelem;
Strict compiler flags for areas of code we want to ensure don't do conversions without us knowing about it.
Read: Guarded memory (de)allocation.
#define MEM_SIZE_OVERHEAD
static void * queue_get_last_elem(GSQueue *queue)
void BLI_gsqueue_free(GSQueue *queue)
static void * queue_get_first_elem(GSQueue *queue)
void BLI_gsqueue_push(GSQueue *queue, const void *item)
GSQueue * BLI_gsqueue_new(const size_t elem_size)
void BLI_gsqueue_pop(GSQueue *queue, void *r_item)
static void queue_free_chunk(struct QueueChunk *data)
bool BLI_gsqueue_is_empty(const GSQueue *queue)
static size_t queue_chunk_elem_max_calc(const size_t elem_size, size_t chunk_size)
#define CHUNK_SIZE_DEFAULT
size_t BLI_gsqueue_len(const GSQueue *queue)
void(* MEM_freeN)(void *vmemh)
void *(* MEM_callocN)(size_t len, const char *str)
void *(* MEM_mallocN)(size_t len, const char *str)
ThreadQueue * queue
All scheduled work for the CPU.
struct QueueChunk * chunk_free
struct QueueChunk * chunk_last
struct QueueChunk * chunk_first