Blender  V2.93
cache.c
Go to the documentation of this file.
1 /*
2  * This program is free software; you can redistribute it and/or
3  * modify it under the terms of the GNU General Public License
4  * as published by the Free Software Foundation; either version 2
5  * of the License, or (at your option) any later version.
6  *
7  * This program is distributed in the hope that it will be useful,
8  * but WITHOUT ANY WARRANTY; without even the implied warranty of
9  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10  * GNU General Public License for more details.
11  *
12  * You should have received a copy of the GNU General Public License
13  * along with this program; if not, write to the Free Software Foundation,
14  * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
15  */
16 
21 #include "MEM_guardedalloc.h"
22 
23 #include "BLI_ghash.h"
24 #include "BLI_listbase.h"
25 #include "BLI_memarena.h"
26 #include "BLI_threads.h"
27 #include "BLI_utildefines.h"
28 
29 #include "IMB_filetype.h"
30 #include "IMB_imbuf.h"
31 #include "IMB_imbuf_types.h"
32 
33 #include "imbuf.h"
34 
35 /* -------------------------------------------------------------------- */
39 /* We use a two level cache here. A per-thread cache with limited number of
40  * tiles. This can be accessed without locking and so is hoped to lead to most
41  * tile access being lock-free. The global cache is shared between all threads
42  * and requires slow locking to access, and contains all tiles.
43  *
44  * The per-thread cache should be big enough that one might hope to not fall
45  * back to the global cache every pixel, but not to big to keep too many tiles
46  * locked and using memory. */
47 
48 #define IB_THREAD_CACHE_SIZE 100
49 
50 typedef struct ImGlobalTile {
51  struct ImGlobalTile *next, *prev;
52 
54  int tx, ty;
55  int refcount;
56  volatile int loading;
58 
59 typedef struct ImThreadTile {
60  struct ImThreadTile *next, *prev;
61 
63  int tx, ty;
64 
67 
68 typedef struct ImThreadTileCache {
73 
74 typedef struct ImGlobalTileCache {
78 
81 
83  int totthread;
84 
86 
89 
91 
94 /* -------------------------------------------------------------------- */
98 static unsigned int imb_global_tile_hash(const void *gtile_p)
99 {
100  const ImGlobalTile *gtile = gtile_p;
101 
102  return ((unsigned int)(intptr_t)gtile->ibuf) * 769 + gtile->tx * 53 + gtile->ty * 97;
103 }
104 
105 static bool imb_global_tile_cmp(const void *a_p, const void *b_p)
106 {
107  const ImGlobalTile *a = a_p;
108  const ImGlobalTile *b = b_p;
109 
110  return ((a->ibuf != b->ibuf) || (a->tx != b->tx) || (a->ty != b->ty));
111 }
112 
113 static unsigned int imb_thread_tile_hash(const void *ttile_p)
114 {
115  const ImThreadTile *ttile = ttile_p;
116 
117  return ((unsigned int)(intptr_t)ttile->ibuf) * 769 + ttile->tx * 53 + ttile->ty * 97;
118 }
119 
120 static bool imb_thread_tile_cmp(const void *a_p, const void *b_p)
121 {
122  const ImThreadTile *a = a_p;
123  const ImThreadTile *b = b_p;
124 
125  return ((a->ibuf != b->ibuf) || (a->tx != b->tx) || (a->ty != b->ty));
126 }
127 
130 /* -------------------------------------------------------------------- */
135 {
136  ImBuf *ibuf = gtile->ibuf;
137  int toffs = ibuf->xtiles * gtile->ty + gtile->tx;
138  unsigned int *rect;
139 
140  rect = MEM_callocN(sizeof(unsigned int) * ibuf->tilex * ibuf->tiley, "imb_tile");
141  imb_loadtile(ibuf, gtile->tx, gtile->ty, rect);
142  ibuf->tiles[toffs] = rect;
143 }
144 
146 {
147  ImBuf *ibuf = gtile->ibuf;
148  int toffs = ibuf->xtiles * gtile->ty + gtile->tx;
149 
150  MEM_freeN(ibuf->tiles[toffs]);
151  ibuf->tiles[toffs] = NULL;
152 
153  GLOBAL_CACHE.totmem -= sizeof(unsigned int) * ibuf->tilex * ibuf->tiley;
154 }
155 
156 /* external free */
157 void imb_tile_cache_tile_free(ImBuf *ibuf, int tx, int ty)
158 {
159  ImGlobalTile *gtile, lookuptile;
160 
162 
163  lookuptile.ibuf = ibuf;
164  lookuptile.tx = tx;
165  lookuptile.ty = ty;
166  gtile = BLI_ghash_lookup(GLOBAL_CACHE.tilehash, &lookuptile);
167 
168  if (gtile) {
169  /* in case another thread is loading this */
170  while (gtile->loading) {
171  /* pass */
172  }
173 
175  BLI_remlink(&GLOBAL_CACHE.tiles, gtile);
177  }
178 
180 }
181 
184 /* -------------------------------------------------------------------- */
189 {
190  ImThreadTile *ttile;
191  int a;
192 
193  memset(cache, 0, sizeof(ImThreadTileCache));
194 
195  cache->tilehash = BLI_ghash_new(
196  imb_thread_tile_hash, imb_thread_tile_cmp, "imb_thread_cache_init gh");
197 
198  /* pre-allocate all thread local tiles in unused list */
199  for (a = 0; a < IB_THREAD_CACHE_SIZE; a++) {
201  BLI_addtail(&cache->unused, ttile);
202  }
203 }
204 
206 {
207  BLI_ghash_free(cache->tilehash, NULL, NULL);
208 }
209 
211 {
212  memset(&GLOBAL_CACHE, 0, sizeof(ImGlobalTileCache));
213 
215 
216  /* initialize for one thread, for places that access textures
217  * outside of rendering (displace modifier, painting, ..) */
218  IMB_tile_cache_params(0, 0);
219 
221 }
222 
224 {
225  ImGlobalTile *gtile;
226  int a;
227 
229  for (gtile = GLOBAL_CACHE.tiles.first; gtile; gtile = gtile->next) {
231  }
232 
233  for (a = 0; a < GLOBAL_CACHE.totthread; a++) {
235  }
236 
237  if (GLOBAL_CACHE.memarena) {
239  }
240 
241  if (GLOBAL_CACHE.tilehash) {
243  }
244 
246 
247  memset(&GLOBAL_CACHE, 0, sizeof(ImGlobalTileCache));
248  }
249 }
250 
251 /* presumed to be called when no threads are running */
252 void IMB_tile_cache_params(int totthread, int maxmem)
253 {
254  int a;
255 
256  /* always one cache for non-threaded access */
257  totthread++;
258 
259  /* lazy initialize cache */
260  if (GLOBAL_CACHE.totthread == totthread && GLOBAL_CACHE.maxmem == maxmem) {
261  return;
262  }
263 
265 
266  memset(&GLOBAL_CACHE, 0, sizeof(ImGlobalTileCache));
267 
269  imb_global_tile_hash, imb_global_tile_cmp, "tile_cache_params gh");
270 
273 
274  GLOBAL_CACHE.maxmem = maxmem * 1024 * 1024;
275 
276  GLOBAL_CACHE.totthread = totthread;
277  for (a = 0; a < totthread; a++) {
279  }
280 
282 }
283 
286 /* -------------------------------------------------------------------- */
291  int tx,
292  int ty,
293  ImGlobalTile *replacetile)
294 {
295  ImGlobalTile *gtile, lookuptile;
296 
298 
299  if (replacetile) {
300  replacetile->refcount--;
301  }
302 
303  /* find tile in global cache */
304  lookuptile.ibuf = ibuf;
305  lookuptile.tx = tx;
306  lookuptile.ty = ty;
307  gtile = BLI_ghash_lookup(GLOBAL_CACHE.tilehash, &lookuptile);
308 
309  if (gtile) {
310  /* found tile. however it may be in the process of being loaded
311  * by another thread, in that case we do stupid busy loop waiting
312  * for the other thread to load the tile */
313  gtile->refcount++;
314 
316 
317  while (gtile->loading) {
318  /* pass */
319  }
320  }
321  else {
322  /* not found, let's load it from disk */
323 
324  /* first check if we hit the memory limit */
326  /* find an existing tile to unload */
327  for (gtile = GLOBAL_CACHE.tiles.last; gtile; gtile = gtile->prev) {
328  if (gtile->refcount == 0 && gtile->loading == 0) {
329  break;
330  }
331  }
332  }
333 
334  if (gtile) {
335  /* found a tile to unload */
338  BLI_remlink(&GLOBAL_CACHE.tiles, gtile);
339  }
340  else {
341  /* allocate a new tile or reuse unused */
342  if (GLOBAL_CACHE.unused.first) {
343  gtile = GLOBAL_CACHE.unused.first;
345  }
346  else {
348  }
349  }
350 
351  /* setup new tile */
352  gtile->ibuf = ibuf;
353  gtile->tx = tx;
354  gtile->ty = ty;
355  gtile->refcount = 1;
356  gtile->loading = 1;
357 
358  BLI_ghash_insert(GLOBAL_CACHE.tilehash, gtile, gtile);
359  BLI_addhead(&GLOBAL_CACHE.tiles, gtile);
360 
361  /* mark as being loaded and unlock to allow other threads to load too */
362  GLOBAL_CACHE.totmem += sizeof(unsigned int) * ibuf->tilex * ibuf->tiley;
363 
365 
366  /* load from disk */
368 
369  /* mark as done loading */
370  gtile->loading = 0;
371  }
372 
373  return gtile;
374 }
375 
378 /* -------------------------------------------------------------------- */
/* Return the pixel rect of tile (tx, ty) of `ibuf` through this thread's
 * local cache. Fast path (head check, then hash lookup) needs no locking;
 * on a miss the least-recently-used local slot is recycled and the tile is
 * fetched from the global cache, which takes the global mutex. */
static unsigned int *imb_thread_cache_get_tile(ImThreadTileCache *cache,
                                               ImBuf *ibuf,
                                               int tx,
                                               int ty)
{
  ImThreadTile *ttile, lookuptile;
  ImGlobalTile *gtile, *replacetile;
  int toffs = ibuf->xtiles * ty + tx;

  /* test if it is already in our thread local cache */
  if ((ttile = cache->tiles.first)) {
    /* check last used tile before going to hash */
    if (ttile->ibuf == ibuf && ttile->tx == tx && ttile->ty == ty) {
      return ibuf->tiles[toffs];
    }

    /* find tile in hash */
    lookuptile.ibuf = ibuf;
    lookuptile.tx = tx;
    lookuptile.ty = ty;

    if ((ttile = BLI_ghash_lookup(cache->tilehash, &lookuptile))) {
      /* move to list head so the single-entry fast path above hits next time */
      BLI_remlink(&cache->tiles, ttile);
      BLI_addhead(&cache->tiles, ttile);

      return ibuf->tiles[toffs];
    }
  }

  /* not found, have to do slow lookup in global cache */
  if (BLI_listbase_is_empty(&cache->unused)) {
    /* no free slots: evict the LRU local tile; its global tile reference is
     * handed to imb_global_cache_get_tile below to be released under lock */
    ttile = cache->tiles.last;
    replacetile = ttile->global;
    BLI_remlink(&cache->tiles, ttile);
    BLI_ghash_remove(cache->tilehash, ttile, NULL, NULL);
  }
  else {
    ttile = cache->unused.first;
    replacetile = NULL;
    BLI_remlink(&cache->unused, ttile);
  }

  /* NOTE(review): ttile is inserted as its own hash key before its
   * ibuf/tx/ty fields are assigned below — the stored bucket is computed
   * from the old field values; verify against BLI_ghash semantics. */
  BLI_addhead(&cache->tiles, ttile);
  BLI_ghash_insert(cache->tilehash, ttile, ttile);

  gtile = imb_global_cache_get_tile(ibuf, tx, ty, replacetile);

  ttile->ibuf = gtile->ibuf;
  ttile->tx = gtile->tx;
  ttile->ty = gtile->ty;
  ttile->global = gtile;

  return ibuf->tiles[toffs];
}
436 
437 unsigned int *IMB_gettile(ImBuf *ibuf, int tx, int ty, int thread)
438 {
439  return imb_thread_cache_get_tile(&GLOBAL_CACHE.thread_cache[thread + 1], ibuf, tx, ty);
440 }
441 
443 {
444  ImBuf *mipbuf;
445  ImGlobalTile *gtile;
446  unsigned int *to, *from;
447  int a, tx, ty, y, w, h;
448 
449  for (a = 0; a < ibuf->miptot; a++) {
450  mipbuf = IMB_getmipmap(ibuf, a);
451 
452  /* don't call imb_addrectImBuf, it frees all mipmaps */
453  if (!mipbuf->rect) {
454  if ((mipbuf->rect = MEM_callocN(ibuf->x * ibuf->y * sizeof(unsigned int),
455  "imb_addrectImBuf"))) {
456  mipbuf->mall |= IB_rect;
457  mipbuf->flags |= IB_rect;
458  }
459  else {
460  break;
461  }
462  }
463 
464  for (ty = 0; ty < mipbuf->ytiles; ty++) {
465  for (tx = 0; tx < mipbuf->xtiles; tx++) {
466  /* acquire tile through cache, this assumes cache is initialized,
467  * which it is always now but it's a weak assumption ... */
468  gtile = imb_global_cache_get_tile(mipbuf, tx, ty, NULL);
469 
470  /* setup pointers */
471  from = mipbuf->tiles[mipbuf->xtiles * ty + tx];
472  to = mipbuf->rect + mipbuf->x * ty * mipbuf->tiley + tx * mipbuf->tilex;
473 
474  /* exception in tile width/height for tiles at end of image */
475  w = (tx == mipbuf->xtiles - 1) ? mipbuf->x - tx * mipbuf->tilex : mipbuf->tilex;
476  h = (ty == mipbuf->ytiles - 1) ? mipbuf->y - ty * mipbuf->tiley : mipbuf->tiley;
477 
478  for (y = 0; y < h; y++) {
479  memcpy(to, from, sizeof(unsigned int) * w);
480  from += mipbuf->tilex;
481  to += mipbuf->x;
482  }
483 
484  /* decrease refcount for tile again */
486  gtile->refcount--;
488  }
489  }
490  }
491 }
492 
GHash * BLI_ghash_new(GHashHashFP hashfp, GHashCmpFP cmpfp, const char *info) ATTR_MALLOC ATTR_WARN_UNUSED_RESULT
Definition: BLI_ghash.c:718
bool BLI_ghash_remove(GHash *gh, const void *key, GHashKeyFreeFP keyfreefp, GHashValFreeFP valfreefp)
Definition: BLI_ghash.c:900
void BLI_ghash_insert(GHash *gh, void *key, void *val)
Definition: BLI_ghash.c:756
void BLI_ghash_free(GHash *gh, GHashKeyFreeFP keyfreefp, GHashValFreeFP valfreefp)
Definition: BLI_ghash.c:1008
void * BLI_ghash_lookup(GHash *gh, const void *key) ATTR_WARN_UNUSED_RESULT
Definition: BLI_ghash.c:803
BLI_INLINE bool BLI_listbase_is_empty(const struct ListBase *lb)
Definition: BLI_listbase.h:124
void BLI_addhead(struct ListBase *listbase, void *vlink) ATTR_NONNULL(1)
Definition: listbase.c:87
void BLI_addtail(struct ListBase *listbase, void *vlink) ATTR_NONNULL(1)
Definition: listbase.c:110
void BLI_remlink(struct ListBase *listbase, void *vlink) ATTR_NONNULL(1)
Definition: listbase.c:133
void BLI_memarena_free(struct MemArena *ma) ATTR_NONNULL(1)
Definition: BLI_memarena.c:109
#define BLI_MEMARENA_STD_BUFSIZE
Definition: BLI_memarena.h:36
void BLI_memarena_use_calloc(struct MemArena *ma) ATTR_NONNULL(1)
Definition: BLI_memarena.c:91
void * BLI_memarena_alloc(struct MemArena *ma, size_t size) ATTR_WARN_UNUSED_RESULT ATTR_NONNULL(1) ATTR_MALLOC ATTR_ALLOC_SIZE(2)
Definition: BLI_memarena.c:131
struct MemArena * BLI_memarena_new(const size_t bufsize, const char *name) ATTR_WARN_UNUSED_RESULT ATTR_NONNULL(2) ATTR_MALLOC
Definition: BLI_memarena.c:79
void BLI_mutex_end(ThreadMutex *mutex)
Definition: threads.cc:416
void BLI_mutex_init(ThreadMutex *mutex)
Definition: threads.cc:396
void BLI_mutex_lock(ThreadMutex *mutex)
Definition: threads.cc:401
void BLI_mutex_unlock(ThreadMutex *mutex)
Definition: threads.cc:406
#define BLENDER_MAX_THREADS
Definition: BLI_threads.h:35
pthread_mutex_t ThreadMutex
Definition: BLI_threads.h:83
_GL_VOID GLfloat value _GL_VOID_RET _GL_VOID const GLuint GLboolean *residences _GL_BOOL_RET _GL_VOID GLsizei GLfloat GLfloat GLfloat GLfloat const GLubyte *bitmap _GL_VOID_RET _GL_VOID GLenum const void *lists _GL_VOID_RET _GL_VOID const GLdouble *equation _GL_VOID_RET _GL_VOID GLdouble GLdouble blue _GL_VOID_RET _GL_VOID GLfloat GLfloat blue _GL_VOID_RET _GL_VOID GLint GLint blue _GL_VOID_RET _GL_VOID GLshort GLshort blue _GL_VOID_RET _GL_VOID GLubyte GLubyte blue _GL_VOID_RET _GL_VOID GLuint GLuint blue _GL_VOID_RET _GL_VOID GLushort GLushort blue _GL_VOID_RET _GL_VOID GLbyte GLbyte GLbyte alpha _GL_VOID_RET _GL_VOID GLdouble GLdouble GLdouble alpha _GL_VOID_RET _GL_VOID GLfloat GLfloat GLfloat alpha _GL_VOID_RET _GL_VOID GLint GLint GLint alpha _GL_VOID_RET _GL_VOID GLshort GLshort GLshort alpha _GL_VOID_RET _GL_VOID GLubyte GLubyte GLubyte alpha _GL_VOID_RET _GL_VOID GLuint GLuint GLuint alpha _GL_VOID_RET _GL_VOID GLushort GLushort GLushort alpha _GL_VOID_RET _GL_VOID GLenum mode _GL_VOID_RET _GL_VOID GLint y
void imb_loadtile(struct ImBuf *ibuf, int tx, int ty, unsigned int *rect)
Definition: readimage.c:315
struct ImBuf * IMB_getmipmap(struct ImBuf *ibuf, int level)
Definition: filter.c:635
Contains defines and structs used throughout the imbuf module.
@ IB_rect
Read Guarded memory(de)allocation.
SIMD_FORCE_INLINE const btScalar & w() const
Return the w value.
Definition: btQuadWord.h:119
static unsigned int * imb_thread_cache_get_tile(ImThreadTileCache *cache, ImBuf *ibuf, int tx, int ty)
Definition: cache.c:382
struct ImGlobalTile ImGlobalTile
static ImGlobalTile * imb_global_cache_get_tile(ImBuf *ibuf, int tx, int ty, ImGlobalTile *replacetile)
Definition: cache.c:290
void imb_tile_cache_tile_free(ImBuf *ibuf, int tx, int ty)
Definition: cache.c:157
static bool imb_thread_tile_cmp(const void *a_p, const void *b_p)
Definition: cache.c:120
static unsigned int imb_thread_tile_hash(const void *ttile_p)
Definition: cache.c:113
unsigned int * IMB_gettile(ImBuf *ibuf, int tx, int ty, int thread)
Definition: cache.c:437
static unsigned int imb_global_tile_hash(const void *gtile_p)
Definition: cache.c:98
struct ImGlobalTileCache ImGlobalTileCache
struct ImThreadTile ImThreadTile
void IMB_tiles_to_rect(ImBuf *ibuf)
Definition: cache.c:442
void imb_tile_cache_exit(void)
Definition: cache.c:223
#define IB_THREAD_CACHE_SIZE
Definition: cache.c:48
struct ImThreadTileCache ImThreadTileCache
static void imb_thread_cache_init(ImThreadTileCache *cache)
Definition: cache.c:188
static void imb_global_cache_tile_unload(ImGlobalTile *gtile)
Definition: cache.c:145
void imb_tile_cache_init(void)
Definition: cache.c:210
static void imb_thread_cache_exit(ImThreadTileCache *cache)
Definition: cache.c:205
static ImGlobalTileCache GLOBAL_CACHE
Definition: cache.c:90
static void imb_global_cache_tile_load(ImGlobalTile *gtile)
Definition: cache.c:134
void IMB_tile_cache_params(int totthread, int maxmem)
Definition: cache.c:252
static bool imb_global_tile_cmp(const void *a_p, const void *b_p)
Definition: cache.c:105
StackEntry * from
void(* MEM_freeN)(void *vmemh)
Definition: mallocn.c:41
void *(* MEM_callocN)(size_t len, const char *str)
Definition: mallocn.c:45
static unsigned a[3]
Definition: RandGen.cpp:92
_W64 unsigned int uintptr_t
Definition: stdint.h:122
_W64 int intptr_t
Definition: stdint.h:121
unsigned int ** tiles
unsigned int * rect
ListBase tiles
Definition: cache.c:75
ImThreadTileCache thread_cache[BLENDER_MAX_THREADS+1]
Definition: cache.c:82
MemArena * memarena
Definition: cache.c:79
uintptr_t maxmem
Definition: cache.c:80
ThreadMutex mutex
Definition: cache.c:85
GHash * tilehash
Definition: cache.c:77
int initialized
Definition: cache.c:87
ListBase unused
Definition: cache.c:76
uintptr_t totmem
Definition: cache.c:80
int refcount
Definition: cache.c:55
int tx
Definition: cache.c:54
ImBuf * ibuf
Definition: cache.c:53
struct ImGlobalTile * prev
Definition: cache.c:51
volatile int loading
Definition: cache.c:56
int ty
Definition: cache.c:54
struct ImGlobalTile * next
Definition: cache.c:51
ListBase unused
Definition: cache.c:70
ListBase tiles
Definition: cache.c:69
GHash * tilehash
Definition: cache.c:71
struct ImThreadTile * prev
Definition: cache.c:60
int tx
Definition: cache.c:63
struct ImThreadTile * next
Definition: cache.c:60
ImGlobalTile * global
Definition: cache.c:65
ImBuf * ibuf
Definition: cache.c:62
int ty
Definition: cache.c:63
void * last
Definition: DNA_listBase.h:47
void * first
Definition: DNA_listBase.h:47