Blender  V3.3
cache.c
Go to the documentation of this file.
1 /* SPDX-License-Identifier: GPL-2.0-or-later */
2 
7 #include "MEM_guardedalloc.h"
8 
9 #include "BLI_ghash.h"
10 #include "BLI_listbase.h"
11 #include "BLI_memarena.h"
12 #include "BLI_threads.h"
13 #include "BLI_utildefines.h"
14 
15 #include "IMB_filetype.h"
16 #include "IMB_imbuf.h"
17 #include "IMB_imbuf_types.h"
18 
19 #include "imbuf.h"
20 
21 /* -------------------------------------------------------------------- */
25 /* We use a two level cache here. A per-thread cache with limited number of
26  * tiles. This can be accessed without locking and so is hoped to lead to most
27  * tile access being lock-free. The global cache is shared between all threads
28  * and requires slow locking to access, and contains all tiles.
29  *
30  * The per-thread cache should be big enough that one might hope to not fall
31  * back to the global cache every pixel, but not too big, to avoid keeping too many tiles
32  * locked and using memory. */
33 
34 #define IB_THREAD_CACHE_SIZE 100
35 
36 typedef struct ImGlobalTile {
37  struct ImGlobalTile *next, *prev;
38 
40  int tx, ty;
41  int refcount;
42  volatile int loading;
44 
45 typedef struct ImThreadTile {
46  struct ImThreadTile *next, *prev;
47 
49  int tx, ty;
50 
53 
54 typedef struct ImThreadTileCache {
59 
60 typedef struct ImGlobalTileCache {
64 
67 
69  int totthread;
70 
72 
75 
77 
80 /* -------------------------------------------------------------------- */
84 static unsigned int imb_global_tile_hash(const void *gtile_p)
85 {
86  const ImGlobalTile *gtile = gtile_p;
87 
88  return ((unsigned int)(intptr_t)gtile->ibuf) * 769 + gtile->tx * 53 + gtile->ty * 97;
89 }
90 
91 static bool imb_global_tile_cmp(const void *a_p, const void *b_p)
92 {
93  const ImGlobalTile *a = a_p;
94  const ImGlobalTile *b = b_p;
95 
96  return ((a->ibuf != b->ibuf) || (a->tx != b->tx) || (a->ty != b->ty));
97 }
98 
99 static unsigned int imb_thread_tile_hash(const void *ttile_p)
100 {
101  const ImThreadTile *ttile = ttile_p;
102 
103  return ((unsigned int)(intptr_t)ttile->ibuf) * 769 + ttile->tx * 53 + ttile->ty * 97;
104 }
105 
106 static bool imb_thread_tile_cmp(const void *a_p, const void *b_p)
107 {
108  const ImThreadTile *a = a_p;
109  const ImThreadTile *b = b_p;
110 
111  return ((a->ibuf != b->ibuf) || (a->tx != b->tx) || (a->ty != b->ty));
112 }
113 
116 /* -------------------------------------------------------------------- */
121 {
122  ImBuf *ibuf = gtile->ibuf;
123  int toffs = ibuf->xtiles * gtile->ty + gtile->tx;
124  unsigned int *rect;
125 
126  rect = MEM_callocN(sizeof(unsigned int) * ibuf->tilex * ibuf->tiley, "imb_tile");
127  imb_loadtile(ibuf, gtile->tx, gtile->ty, rect);
128  ibuf->tiles[toffs] = rect;
129 }
130 
132 {
133  ImBuf *ibuf = gtile->ibuf;
134  int toffs = ibuf->xtiles * gtile->ty + gtile->tx;
135 
136  MEM_freeN(ibuf->tiles[toffs]);
137  ibuf->tiles[toffs] = NULL;
138 
139  GLOBAL_CACHE.totmem -= sizeof(unsigned int) * ibuf->tilex * ibuf->tiley;
140 }
141 
142 void imb_tile_cache_tile_free(ImBuf *ibuf, int tx, int ty)
143 {
144  ImGlobalTile *gtile, lookuptile;
145 
147 
148  lookuptile.ibuf = ibuf;
149  lookuptile.tx = tx;
150  lookuptile.ty = ty;
151  gtile = BLI_ghash_lookup(GLOBAL_CACHE.tilehash, &lookuptile);
152 
153  if (gtile) {
154  /* in case another thread is loading this */
155  while (gtile->loading) {
156  /* pass */
157  }
158 
160  BLI_remlink(&GLOBAL_CACHE.tiles, gtile);
162  }
163 
165 }
166 
169 /* -------------------------------------------------------------------- */
174 {
175  ImThreadTile *ttile;
176  int a;
177 
178  memset(cache, 0, sizeof(ImThreadTileCache));
179 
180  cache->tilehash = BLI_ghash_new(
181  imb_thread_tile_hash, imb_thread_tile_cmp, "imb_thread_cache_init gh");
182 
183  /* pre-allocate all thread local tiles in unused list */
184  for (a = 0; a < IB_THREAD_CACHE_SIZE; a++) {
186  BLI_addtail(&cache->unused, ttile);
187  }
188 }
189 
191 {
192  BLI_ghash_free(cache->tilehash, NULL, NULL);
193 }
194 
196 {
197  memset(&GLOBAL_CACHE, 0, sizeof(ImGlobalTileCache));
198 
200 
201  /* initialize for one thread, for places that access textures
202  * outside of rendering (displace modifier, painting, ..) */
203  IMB_tile_cache_params(0, 0);
204 
206 }
207 
209 {
210  ImGlobalTile *gtile;
211  int a;
212 
214  for (gtile = GLOBAL_CACHE.tiles.first; gtile; gtile = gtile->next) {
216  }
217 
218  for (a = 0; a < GLOBAL_CACHE.totthread; a++) {
220  }
221 
222  if (GLOBAL_CACHE.memarena) {
224  }
225 
226  if (GLOBAL_CACHE.tilehash) {
228  }
229 
231 
232  memset(&GLOBAL_CACHE, 0, sizeof(ImGlobalTileCache));
233  }
234 }
235 
236 void IMB_tile_cache_params(int totthread, int maxmem)
237 {
238  int a;
239 
240  /* always one cache for non-threaded access */
241  totthread++;
242 
243  /* lazy initialize cache */
244  if (GLOBAL_CACHE.totthread == totthread && GLOBAL_CACHE.maxmem == maxmem) {
245  return;
246  }
247 
249 
250  memset(&GLOBAL_CACHE, 0, sizeof(ImGlobalTileCache));
251 
253  imb_global_tile_hash, imb_global_tile_cmp, "tile_cache_params gh");
254 
257 
258  GLOBAL_CACHE.maxmem = maxmem * 1024 * 1024;
259 
260  GLOBAL_CACHE.totthread = totthread;
261  for (a = 0; a < totthread; a++) {
263  }
264 
266 }
267 
270 /* -------------------------------------------------------------------- */
275  int tx,
276  int ty,
277  ImGlobalTile *replacetile)
278 {
279  ImGlobalTile *gtile, lookuptile;
280 
282 
283  if (replacetile) {
284  replacetile->refcount--;
285  }
286 
287  /* find tile in global cache */
288  lookuptile.ibuf = ibuf;
289  lookuptile.tx = tx;
290  lookuptile.ty = ty;
291  gtile = BLI_ghash_lookup(GLOBAL_CACHE.tilehash, &lookuptile);
292 
293  if (gtile) {
294  /* found tile. however it may be in the process of being loaded
295  * by another thread, in that case we do stupid busy loop waiting
296  * for the other thread to load the tile */
297  gtile->refcount++;
298 
300 
301  while (gtile->loading) {
302  /* pass */
303  }
304  }
305  else {
306  /* not found, let's load it from disk */
307 
308  /* first check if we hit the memory limit */
310  /* find an existing tile to unload */
311  for (gtile = GLOBAL_CACHE.tiles.last; gtile; gtile = gtile->prev) {
312  if (gtile->refcount == 0 && gtile->loading == 0) {
313  break;
314  }
315  }
316  }
317 
318  if (gtile) {
319  /* found a tile to unload */
322  BLI_remlink(&GLOBAL_CACHE.tiles, gtile);
323  }
324  else {
325  /* allocate a new tile or reuse unused */
326  if (GLOBAL_CACHE.unused.first) {
327  gtile = GLOBAL_CACHE.unused.first;
329  }
330  else {
332  }
333  }
334 
335  /* setup new tile */
336  gtile->ibuf = ibuf;
337  gtile->tx = tx;
338  gtile->ty = ty;
339  gtile->refcount = 1;
340  gtile->loading = 1;
341 
342  BLI_ghash_insert(GLOBAL_CACHE.tilehash, gtile, gtile);
343  BLI_addhead(&GLOBAL_CACHE.tiles, gtile);
344 
345  /* mark as being loaded and unlock to allow other threads to load too */
346  GLOBAL_CACHE.totmem += sizeof(unsigned int) * ibuf->tilex * ibuf->tiley;
347 
349 
350  /* load from disk */
352 
353  /* mark as done loading */
354  gtile->loading = 0;
355  }
356 
357  return gtile;
358 }
359 
362 /* -------------------------------------------------------------------- */
/* Return the pixel rect for tile (tx, ty) of ibuf, going through the
 * per-thread cache first and falling back to the (locked) global cache.
 * The thread cache keeps an MRU list: the most recently used tile sits at
 * the head of cache->tiles, so the hot-path check below is a single
 * pointer comparison. */
static unsigned int *imb_thread_cache_get_tile(ImThreadTileCache *cache,
                                               ImBuf *ibuf,
                                               int tx,
                                               int ty)
{
  ImThreadTile *ttile, lookuptile;
  ImGlobalTile *gtile, *replacetile;
  int toffs = ibuf->xtiles * ty + tx; /* row-major index into ibuf->tiles */

  /* test if it is already in our thread local cache */
  if ((ttile = cache->tiles.first)) {
    /* check last used tile before going to hash */
    if (ttile->ibuf == ibuf && ttile->tx == tx && ttile->ty == ty) {
      return ibuf->tiles[toffs];
    }

    /* find tile in hash */
    lookuptile.ibuf = ibuf;
    lookuptile.tx = tx;
    lookuptile.ty = ty;

    if ((ttile = BLI_ghash_lookup(cache->tilehash, &lookuptile))) {
      /* move hit to MRU position so the fast check above finds it next time */
      BLI_remlink(&cache->tiles, ttile);
      BLI_addhead(&cache->tiles, ttile);

      return ibuf->tiles[toffs];
    }
  }

  /* not found, have to do slow lookup in global cache */
  if (BLI_listbase_is_empty(&cache->unused)) {
    /* thread cache is full: evict the LRU tile (list tail) and hand its
     * global-cache entry to imb_global_cache_get_tile so the global
     * refcount it holds can be released there */
    ttile = cache->tiles.last;
    replacetile = ttile->global;
    BLI_remlink(&cache->tiles, ttile);
    BLI_ghash_remove(cache->tilehash, ttile, NULL, NULL);
  }
  else {
    /* still have pre-allocated spare slots, no eviction needed */
    ttile = cache->unused.first;
    replacetile = NULL;
    BLI_remlink(&cache->unused, ttile);
  }

  BLI_addhead(&cache->tiles, ttile);
  BLI_ghash_insert(cache->tilehash, ttile, ttile);

  /* takes the global lock; also drops the refcount held via replacetile */
  gtile = imb_global_cache_get_tile(ibuf, tx, ty, replacetile);

  /* note: key fields are written after the hash insert above; NOTE(review):
   * this relies on no concurrent lookup of this thread-local hash, which
   * holds because the cache is per-thread */
  ttile->ibuf = gtile->ibuf;
  ttile->tx = gtile->tx;
  ttile->ty = gtile->ty;
  ttile->global = gtile;

  return ibuf->tiles[toffs];
}
420 
421 unsigned int *IMB_gettile(ImBuf *ibuf, int tx, int ty, int thread)
422 {
423  return imb_thread_cache_get_tile(&GLOBAL_CACHE.thread_cache[thread + 1], ibuf, tx, ty);
424 }
425 
427 {
428  ImBuf *mipbuf;
429  ImGlobalTile *gtile;
430  unsigned int *to, *from;
431  int a, tx, ty, y, w, h;
432 
433  for (a = 0; a < ibuf->miptot; a++) {
434  mipbuf = IMB_getmipmap(ibuf, a);
435 
436  /* don't call imb_addrectImBuf, it frees all mipmaps */
437  if (!mipbuf->rect) {
438  if ((mipbuf->rect = MEM_callocN(ibuf->x * ibuf->y * sizeof(unsigned int),
439  "imb_addrectImBuf"))) {
440  mipbuf->mall |= IB_rect;
441  mipbuf->flags |= IB_rect;
442  }
443  else {
444  break;
445  }
446  }
447 
448  for (ty = 0; ty < mipbuf->ytiles; ty++) {
449  for (tx = 0; tx < mipbuf->xtiles; tx++) {
450  /* acquire tile through cache, this assumes cache is initialized,
451  * which it is always now but it's a weak assumption ... */
452  gtile = imb_global_cache_get_tile(mipbuf, tx, ty, NULL);
453 
454  /* setup pointers */
455  from = mipbuf->tiles[mipbuf->xtiles * ty + tx];
456  to = mipbuf->rect + mipbuf->x * ty * mipbuf->tiley + tx * mipbuf->tilex;
457 
458  /* exception in tile width/height for tiles at end of image */
459  w = (tx == mipbuf->xtiles - 1) ? mipbuf->x - tx * mipbuf->tilex : mipbuf->tilex;
460  h = (ty == mipbuf->ytiles - 1) ? mipbuf->y - ty * mipbuf->tiley : mipbuf->tiley;
461 
462  for (y = 0; y < h; y++) {
463  memcpy(to, from, sizeof(unsigned int) * w);
464  from += mipbuf->tilex;
465  to += mipbuf->x;
466  }
467 
468  /* decrease refcount for tile again */
470  gtile->refcount--;
472  }
473  }
474  }
475 }
476 
GHash * BLI_ghash_new(GHashHashFP hashfp, GHashCmpFP cmpfp, const char *info) ATTR_MALLOC ATTR_WARN_UNUSED_RESULT
Definition: BLI_ghash.c:689
void * BLI_ghash_lookup(const GHash *gh, const void *key) ATTR_WARN_UNUSED_RESULT
Definition: BLI_ghash.c:734
bool BLI_ghash_remove(GHash *gh, const void *key, GHashKeyFreeFP keyfreefp, GHashValFreeFP valfreefp)
Definition: BLI_ghash.c:790
void BLI_ghash_insert(GHash *gh, void *key, void *val)
Definition: BLI_ghash.c:710
void BLI_ghash_free(GHash *gh, GHashKeyFreeFP keyfreefp, GHashValFreeFP valfreefp)
Definition: BLI_ghash.c:863
BLI_INLINE bool BLI_listbase_is_empty(const struct ListBase *lb)
Definition: BLI_listbase.h:269
void BLI_addhead(struct ListBase *listbase, void *vlink) ATTR_NONNULL(1)
Definition: listbase.c:60
void BLI_addtail(struct ListBase *listbase, void *vlink) ATTR_NONNULL(1)
Definition: listbase.c:80
void BLI_remlink(struct ListBase *listbase, void *vlink) ATTR_NONNULL(1)
Definition: listbase.c:100
void BLI_memarena_free(struct MemArena *ma) ATTR_NONNULL(1)
Definition: BLI_memarena.c:94
struct MemArena * BLI_memarena_new(size_t bufsize, const char *name) ATTR_WARN_UNUSED_RESULT ATTR_RETURNS_NONNULL ATTR_NONNULL(2) ATTR_MALLOC
Definition: BLI_memarena.c:64
#define BLI_MEMARENA_STD_BUFSIZE
Definition: BLI_memarena.h:20
void BLI_memarena_use_calloc(struct MemArena *ma) ATTR_NONNULL(1)
Definition: BLI_memarena.c:76
void * BLI_memarena_alloc(struct MemArena *ma, size_t size) ATTR_WARN_UNUSED_RESULT ATTR_NONNULL(1) ATTR_MALLOC ATTR_ALLOC_SIZE(2)
Definition: BLI_memarena.c:116
void BLI_mutex_end(ThreadMutex *mutex)
Definition: threads.cc:388
void BLI_mutex_init(ThreadMutex *mutex)
Definition: threads.cc:368
void BLI_mutex_lock(ThreadMutex *mutex)
Definition: threads.cc:373
void BLI_mutex_unlock(ThreadMutex *mutex)
Definition: threads.cc:378
#define BLENDER_MAX_THREADS
Definition: BLI_threads.h:19
pthread_mutex_t ThreadMutex
Definition: BLI_threads.h:82
_GL_VOID GLfloat value _GL_VOID_RET _GL_VOID const GLuint GLboolean *residences _GL_BOOL_RET _GL_VOID GLsizei GLfloat GLfloat GLfloat GLfloat const GLubyte *bitmap _GL_VOID_RET _GL_VOID GLenum const void *lists _GL_VOID_RET _GL_VOID const GLdouble *equation _GL_VOID_RET _GL_VOID GLdouble GLdouble blue _GL_VOID_RET _GL_VOID GLfloat GLfloat blue _GL_VOID_RET _GL_VOID GLint GLint blue _GL_VOID_RET _GL_VOID GLshort GLshort blue _GL_VOID_RET _GL_VOID GLubyte GLubyte blue _GL_VOID_RET _GL_VOID GLuint GLuint blue _GL_VOID_RET _GL_VOID GLushort GLushort blue _GL_VOID_RET _GL_VOID GLbyte GLbyte GLbyte alpha _GL_VOID_RET _GL_VOID GLdouble GLdouble GLdouble alpha _GL_VOID_RET _GL_VOID GLfloat GLfloat GLfloat alpha _GL_VOID_RET _GL_VOID GLint GLint GLint alpha _GL_VOID_RET _GL_VOID GLshort GLshort GLshort alpha _GL_VOID_RET _GL_VOID GLubyte GLubyte GLubyte alpha _GL_VOID_RET _GL_VOID GLuint GLuint GLuint alpha _GL_VOID_RET _GL_VOID GLushort GLushort GLushort alpha _GL_VOID_RET _GL_VOID GLenum mode _GL_VOID_RET _GL_VOID GLint y
void imb_loadtile(struct ImBuf *ibuf, int tx, int ty, unsigned int *rect)
Definition: readimage.c:355
struct ImBuf * IMB_getmipmap(struct ImBuf *ibuf, int level)
Definition: filter.c:610
Contains defines and structs used throughout the imbuf module.
@ IB_rect
Read Guarded memory(de)allocation.
SIMD_FORCE_INLINE const btScalar & w() const
Return the w value.
Definition: btQuadWord.h:119
static unsigned int * imb_thread_cache_get_tile(ImThreadTileCache *cache, ImBuf *ibuf, int tx, int ty)
Definition: cache.c:366
struct ImGlobalTile ImGlobalTile
static ImGlobalTile * imb_global_cache_get_tile(ImBuf *ibuf, int tx, int ty, ImGlobalTile *replacetile)
Definition: cache.c:274
void imb_tile_cache_tile_free(ImBuf *ibuf, int tx, int ty)
Definition: cache.c:142
static bool imb_thread_tile_cmp(const void *a_p, const void *b_p)
Definition: cache.c:106
static unsigned int imb_thread_tile_hash(const void *ttile_p)
Definition: cache.c:99
unsigned int * IMB_gettile(ImBuf *ibuf, int tx, int ty, int thread)
Definition: cache.c:421
static unsigned int imb_global_tile_hash(const void *gtile_p)
Definition: cache.c:84
struct ImGlobalTileCache ImGlobalTileCache
struct ImThreadTile ImThreadTile
void IMB_tiles_to_rect(ImBuf *ibuf)
Definition: cache.c:426
void imb_tile_cache_exit(void)
Definition: cache.c:208
#define IB_THREAD_CACHE_SIZE
Definition: cache.c:34
struct ImThreadTileCache ImThreadTileCache
static void imb_thread_cache_init(ImThreadTileCache *cache)
Definition: cache.c:173
static void imb_global_cache_tile_unload(ImGlobalTile *gtile)
Definition: cache.c:131
void imb_tile_cache_init(void)
Definition: cache.c:195
static void imb_thread_cache_exit(ImThreadTileCache *cache)
Definition: cache.c:190
static ImGlobalTileCache GLOBAL_CACHE
Definition: cache.c:76
static void imb_global_cache_tile_load(ImGlobalTile *gtile)
Definition: cache.c:120
void IMB_tile_cache_params(int totthread, int maxmem)
Definition: cache.c:236
static bool imb_global_tile_cmp(const void *a_p, const void *b_p)
Definition: cache.c:91
Definition: thread.h:34
StackEntry * from
void(* MEM_freeN)(void *vmemh)
Definition: mallocn.c:27
void *(* MEM_callocN)(size_t len, const char *str)
Definition: mallocn.c:31
static unsigned a[3]
Definition: RandGen.cpp:78
static const pxr::TfToken b("b", pxr::TfToken::Immortal)
_W64 unsigned int uintptr_t
Definition: stdint.h:119
_W64 int intptr_t
Definition: stdint.h:118
unsigned int ** tiles
unsigned int * rect
ListBase tiles
Definition: cache.c:61
ImThreadTileCache thread_cache[BLENDER_MAX_THREADS+1]
Definition: cache.c:68
MemArena * memarena
Definition: cache.c:65
uintptr_t maxmem
Definition: cache.c:66
ThreadMutex mutex
Definition: cache.c:71
GHash * tilehash
Definition: cache.c:63
int initialized
Definition: cache.c:73
ListBase unused
Definition: cache.c:62
uintptr_t totmem
Definition: cache.c:66
int refcount
Definition: cache.c:41
int tx
Definition: cache.c:40
ImBuf * ibuf
Definition: cache.c:39
struct ImGlobalTile * prev
Definition: cache.c:37
volatile int loading
Definition: cache.c:42
int ty
Definition: cache.c:40
struct ImGlobalTile * next
Definition: cache.c:37
ListBase unused
Definition: cache.c:56
ListBase tiles
Definition: cache.c:55
GHash * tilehash
Definition: cache.c:57
struct ImThreadTile * prev
Definition: cache.c:46
int tx
Definition: cache.c:49
struct ImThreadTile * next
Definition: cache.c:46
ImGlobalTile * global
Definition: cache.c:51
ImBuf * ibuf
Definition: cache.c:48
int ty
Definition: cache.c:49
void * last
Definition: DNA_listBase.h:31
void * first
Definition: DNA_listBase.h:31