/* Blender v2.93 — MEM_CacheLimiter.h
 * (page header retained from the generated documentation view). */
/*
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 */

#ifndef __MEM_CACHELIMITER_H__
#define __MEM_CACHELIMITER_H__

#include "MEM_Allocator.h"
#include <list>
#include <queue>
#include <vector>

61 template<class T> class MEM_CacheLimiter;
62 
63 #ifndef __MEM_CACHELIMITERC_API_H__
64 extern "C" {
65 void MEM_CacheLimiter_set_maximum(size_t m);
67 void MEM_CacheLimiter_set_disabled(bool disabled);
69 };
70 #endif
71 
72 template<class T> class MEM_CacheLimiterHandle {
73  public:
75  : data(data_), refcount(0), parent(parent_)
76  {
77  }
78 
79  void ref()
80  {
81  refcount++;
82  }
83 
84  void unref()
85  {
86  refcount--;
87  }
88 
89  T *get()
90  {
91  return data;
92  }
93 
94  const T *get() const
95  {
96  return data;
97  }
98 
99  int get_refcount() const
100  {
101  return refcount;
102  }
103 
104  bool can_destroy() const
105  {
106  return !data || !refcount;
107  }
108 
110  {
111  if (can_destroy()) {
112  delete data;
113  data = NULL;
114  unmanage();
115  return true;
116  }
117  return false;
118  }
119 
120  void unmanage()
121  {
122  parent->unmanage(this);
123  }
124 
125  void touch()
126  {
127  parent->touch(this);
128  }
129 
130  private:
131  friend class MEM_CacheLimiter<T>;
132 
133  T *data;
134  int refcount;
135  int pos;
136  MEM_CacheLimiter<T> *parent;
137 };
138 
139 template<class T> class MEM_CacheLimiter {
140  public:
141  typedef size_t (*MEM_CacheLimiter_DataSize_Func)(void *data);
142  typedef int (*MEM_CacheLimiter_ItemPriority_Func)(void *item, int default_priority);
143  typedef bool (*MEM_CacheLimiter_ItemDestroyable_Func)(void *item);
144 
145  MEM_CacheLimiter(MEM_CacheLimiter_DataSize_Func data_size_func) : data_size_func(data_size_func)
146  {
147  }
148 
150  {
151  int i;
152  for (i = 0; i < queue.size(); i++) {
153  delete queue[i];
154  }
155  }
156 
158  {
159  queue.push_back(new MEM_CacheLimiterHandle<T>(elem, this));
160  queue.back()->pos = queue.size() - 1;
161  return queue.back();
162  }
163 
165  {
166  int pos = handle->pos;
167  queue[pos] = queue.back();
168  queue[pos]->pos = pos;
169  queue.pop_back();
170  delete handle;
171  }
172 
174  {
175  size_t size = 0;
176  if (data_size_func) {
177  int i;
178  for (i = 0; i < queue.size(); i++) {
179  size += data_size_func(queue[i]->get()->get_data());
180  }
181  }
182  else {
184  }
185  return size;
186  }
187 
189  {
192  size_t mem_in_use, cur_size;
193 
194  if (is_disabled) {
195  return;
196  }
197 
198  if (max == 0) {
199  return;
200  }
201 
203 
204  if (mem_in_use <= max) {
205  return;
206  }
207 
208  while (!queue.empty() && mem_in_use > max) {
209  MEM_CacheElementPtr elem = get_least_priority_destroyable_element();
210 
211  if (!elem)
212  break;
213 
214  if (data_size_func) {
215  cur_size = data_size_func(elem->get()->get_data());
216  }
217  else {
218  cur_size = mem_in_use;
219  }
220 
221  if (elem->destroy_if_possible()) {
222  if (data_size_func) {
223  mem_in_use -= cur_size;
224  }
225  else {
226  mem_in_use -= cur_size - MEM_get_memory_in_use();
227  }
228  }
229  }
230  }
231 
233  {
234  /* If we're using custom priority callback re-arranging the queue
235  * doesn't make much sense because we'll iterate it all to get
236  * least priority element anyway.
237  */
238  if (item_priority_func == NULL) {
239  queue[handle->pos] = queue.back();
240  queue[handle->pos]->pos = handle->pos;
241  queue.pop_back();
242  queue.push_back(handle);
243  handle->pos = queue.size() - 1;
244  }
245  }
246 
248  {
249  this->item_priority_func = item_priority_func;
250  }
251 
253  {
254  this->item_destroyable_func = item_destroyable_func;
255  }
256 
257  private:
258  typedef MEM_CacheLimiterHandle<T> *MEM_CacheElementPtr;
259  typedef std::vector<MEM_CacheElementPtr, MEM_Allocator<MEM_CacheElementPtr>> MEM_CacheQueue;
260  typedef typename MEM_CacheQueue::iterator iterator;
261 
262  /* Check whether element can be destroyed when enforcing cache limits */
263  bool can_destroy_element(MEM_CacheElementPtr &elem)
264  {
265  if (!elem->can_destroy()) {
266  /* Element is referenced */
267  return false;
268  }
269  if (item_destroyable_func) {
270  if (!item_destroyable_func(elem->get()->get_data()))
271  return false;
272  }
273  return true;
274  }
275 
276  MEM_CacheElementPtr get_least_priority_destroyable_element(void)
277  {
278  if (queue.empty())
279  return NULL;
280 
281  MEM_CacheElementPtr best_match_elem = NULL;
282 
283  if (!item_priority_func) {
284  for (iterator it = queue.begin(); it != queue.end(); it++) {
285  MEM_CacheElementPtr elem = *it;
286  if (!can_destroy_element(elem))
287  continue;
288  best_match_elem = elem;
289  break;
290  }
291  }
292  else {
293  int best_match_priority = 0;
294  int i;
295 
296  for (i = 0; i < queue.size(); i++) {
297  MEM_CacheElementPtr elem = queue[i];
298 
299  if (!can_destroy_element(elem))
300  continue;
301 
302  /* by default 0 means highest priority element */
303  /* casting a size type to int is questionable,
304  but unlikely to cause problems */
305  int priority = -((int)(queue.size()) - i - 1);
306  priority = item_priority_func(elem->get()->get_data(), priority);
307 
308  if (priority < best_match_priority || best_match_elem == NULL) {
309  best_match_priority = priority;
310  best_match_elem = elem;
311  }
312  }
313  }
314 
315  return best_match_elem;
316  }
317 
318  MEM_CacheQueue queue;
319  MEM_CacheLimiter_DataSize_Func data_size_func;
320  MEM_CacheLimiter_ItemPriority_Func item_priority_func;
321  MEM_CacheLimiter_ItemDestroyable_Func item_destroyable_func;
322 };
#endif  // __MEM_CACHELIMITER_H__
/* NOTE(review): cross-reference tooltip residue from the generated
 * documentation page removed here (stray declarations such as
 * "static bool is_disabled", "#define T", "float max" — they belong to
 * other translation units, not to this header). */