Blender  V2.93
draw_cache_impl_lattice.c
Go to the documentation of this file.
1 /*
2  * This program is free software; you can redistribute it and/or
3  * modify it under the terms of the GNU General Public License
4  * as published by the Free Software Foundation; either version 2
5  * of the License, or (at your option) any later version.
6  *
7  * This program is distributed in the hope that it will be useful,
8  * but WITHOUT ANY WARRANTY; without even the implied warranty of
9  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10  * GNU General Public License for more details.
11  *
12  * You should have received a copy of the GNU General Public License
13  * along with this program; if not, write to the Free Software Foundation,
14  * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
15  *
16  * The Original Code is Copyright (C) 2017 by Blender Foundation.
17  * All rights reserved.
18  */
19 
26 #include "MEM_guardedalloc.h"
27 
28 #include "BLI_math_vector.h"
29 #include "BLI_utildefines.h"
30 
31 #include "DNA_curve_types.h"
32 #include "DNA_lattice_types.h"
33 #include "DNA_meshdata_types.h"
34 #include "DNA_userdef_types.h"
35 
36 #include "BKE_colorband.h"
37 #include "BKE_deform.h"
38 #include "BKE_lattice.h"
39 
40 #include "GPU_batch.h"
41 
42 #include "draw_cache_impl.h" /* own include */
43 
44 #define SELECT 1
45 
52 static void lattice_batch_cache_clear(Lattice *lt);
53 
54 /* ---------------------------------------------------------------------- */
55 /* Lattice Interface, direct access to basic data. */
56 
/**
 * Number of control points in a u*v*w lattice grid.
 * Any non-positive dimension means an empty lattice: return 0.
 */
static int vert_len_calc(int u, int v, int w)
{
  const bool is_empty = (u < 1) || (v < 1) || (w < 1);
  if (is_empty) {
    return 0;
  }
  return u * (v * w);
}
64 
/**
 * Number of edges in a u*v*w lattice grid.
 *
 * Per w-slice there are (u-1)*v edges along U and (v-1)*u edges along V;
 * consecutive slices are linked by (w-1)*u*v edges along W.
 * Any non-positive dimension means an empty lattice: return 0.
 */
static int edge_len_calc(int u, int v, int w)
{
  if (u < 1 || v < 1 || w < 1) {
    return 0;
  }
  const int edges_u_per_slice = (u - 1) * v;
  const int edges_v_per_slice = (v - 1) * u;
  const int edges_along_w = (w - 1) * u * v;
  return ((edges_u_per_slice + edges_v_per_slice) * w) + edges_along_w;
}
72 
/* Vertex count used when rendering this lattice; edit-mode reads from the
 * edit-lattice copy.
 * NOTE(review): the signature line was lost in this extraction; the file's
 * symbol index names it `static int lattice_render_verts_len_get(Lattice *lt)`
 * — confirm against the original source. */
74 {
75  if (lt->editlatt) {
76  lt = lt->editlatt->latt;
77  }
78 
79  const int u = lt->pntsu;
80  const int v = lt->pntsv;
81  const int w = lt->pntsw;
82 
83  if ((lt->flag & LT_OUTSIDE) == 0) {
84  return vert_len_calc(u, v, w);
85  }
86 
/* Both branches currently return the full count; the LT_OUTSIDE case is
 * meant to eventually skip interior points (see TODO). */
87  /* TODO remove internal coords */
88  return vert_len_calc(u, v, w);
89 }
90 
/* Edge count used when rendering this lattice; edit-mode reads from the
 * edit-lattice copy.
 * NOTE(review): the signature line was lost in this extraction; the file's
 * symbol index names it `static int lattice_render_edges_len_get(Lattice *lt)`
 * — confirm against the original source. */
92 {
93  if (lt->editlatt) {
94  lt = lt->editlatt->latt;
95  }
96 
97  const int u = lt->pntsu;
98  const int v = lt->pntsv;
99  const int w = lt->pntsw;
100 
101  if ((lt->flag & LT_OUTSIDE) == 0) {
102  return edge_len_calc(u, v, w);
103  }
104 
/* Both branches currently return the full count; the LT_OUTSIDE case is
 * meant to eventually skip interior edges (see TODO). */
105  /* TODO remove internal coords */
106  return edge_len_calc(u, v, w);
107 }
108 
109 /* ---------------------------------------------------------------------- */
110 /* Lattice Interface, indirect, partially cached access to complex data. */
111 
/* Per-request snapshot of lattice data needed to build GPU buffers.
 * NOTE(review): several member lines (the `dims` u/v/w fields, `edit_latt`,
 * `bp`, `show_only_outside`, and the LR_DATATYPE_* enum constants) were lost
 * in this extraction — the code below references them, so restore from the
 * original source before editing. */
112 typedef struct LatticeRenderData {
/* Bitmask of LR_DATATYPE_* values this rdata was created for. */
113  int types;
114 
115  int vert_len;
116  int edge_len;
117 
118  struct {
120  } dims;
122 
125 
/* Index of the active BPoint (used for overlay highlighting). */
126  int actbp;
127 
/* Deform-vertex array, NULL outside edit-mode (see create function). */
128  const struct MDeformVert *dvert;
130 
/* LR_DATATYPE_VERT / LR_DATATYPE_EDGE / LR_DATATYPE_OVERLAY per the
 * symbol index; the constant lines themselves are missing here. */
131 enum {
135 };
136 
/* Allocate and fill a LatticeRenderData for the requested `types` mask.
 * Caller frees with lattice_render_data_free().
 * NOTE(review): the signature line is missing in this extraction; the symbol
 * index names it
 * `static LatticeRenderData *lattice_render_data_create(Lattice *lt, const int types)`.
 * Several assignment lines inside the type checks (presumably the
 * vert_len/edge_len setup) were also dropped — confirm against the original. */
138 {
139  LatticeRenderData *rdata = MEM_callocN(sizeof(*rdata), __func__);
140  rdata->types = types;
141 
/* Edit-mode: work on the edit-lattice copy and remember it. */
142  if (lt->editlatt) {
143  EditLatt *editlatt = lt->editlatt;
144  lt = editlatt->latt;
145 
146  rdata->edit_latt = editlatt;
147 
148  rdata->dvert = lt->dvert;
149 
150  if (types & (LR_DATATYPE_VERT)) {
152  }
153  if (types & (LR_DATATYPE_EDGE)) {
155  }
156  if (types & LR_DATATYPE_OVERLAY) {
157  rdata->actbp = lt->actbp;
158  }
159  }
160  else {
/* Object-mode: no deform weights are exposed. */
161  rdata->dvert = NULL;
162 
163  if (types & (LR_DATATYPE_VERT)) {
165  }
166  if (types & (LR_DATATYPE_EDGE)) {
168  /*no edge data */
169  }
170  }
171 
172  rdata->bp = lt->def;
173 
174  rdata->dims.u_len = lt->pntsu;
175  rdata->dims.v_len = lt->pntsv;
176  rdata->dims.w_len = lt->pntsw;
177 
178  rdata->show_only_outside = (lt->flag & LT_OUTSIDE) != 0;
179  rdata->actbp = lt->actbp;
180 
181  return rdata;
182 }
183 
/* Free a LatticeRenderData created by lattice_render_data_create().
 * NOTE(review): signature line missing in this extraction; the symbol index
 * names it `static void lattice_render_data_free(LatticeRenderData *rdata)`. */
185 {
/* Disabled: rdata owns no loose-verts array in the current layout. */
186 #if 0
187  if (rdata->loose_verts) {
188  MEM_freeN(rdata->loose_verts);
189  }
190 #endif
191  MEM_freeN(rdata);
192 }
193 
/* Simple rdata accessors. NOTE(review): the signature lines are missing in
 * this extraction; per the symbol index these are
 * `lattice_render_data_verts_len_get`, `lattice_render_data_edges_len_get`
 * and `lattice_render_data_vert_bpoint` — confirm against the original. */

/* Cached vertex count for this render-data snapshot. */
195 {
197  return rdata->vert_len;
198 }
199 
/* Cached edge count for this render-data snapshot. */
201 {
203  return rdata->edge_len;
204 }
205 
/* Control point at flat index `vert_idx` (no bounds checking). */
207  const int vert_idx)
208 {
210  return &rdata->bp[vert_idx];
211 }
212 
213 /* ---------------------------------------------------------------------- */
214 /* Lattice GPUBatch Cache */
215 
/* Persistent per-lattice GPU batch cache stored in Lattice.batch_cache.
 * NOTE(review): most member lines (pos/edges buffers, all_verts/all_edges/
 * overlay_verts batches, is_editmode, show_only_outside, the dims u/v/w
 * fields, and the closing `} LatticeBatchCache;`) were lost in this
 * extraction — the functions below reference them, so restore from the
 * original source before editing. */
216 typedef struct LatticeBatchCache {
219 
222 
224 
225  /* settings to determine if cache is invalid */
226  bool is_dirty;
227 
228  struct {
230  } dims;
232 
236 /* GPUBatch cache management. */
237 
/* True when the existing cache still matches the lattice state (mode, dirty
 * flag, grid dimensions and outside-only setting).
 * NOTE(review): signature line missing in this extraction; the symbol index
 * names it `static bool lattice_batch_cache_valid(Lattice *lt)`. */
239 {
240  LatticeBatchCache *cache = lt->batch_cache;
241 
242  if (cache == NULL) {
243  return false;
244  }
245 
/* A mode switch invalidates everything built for the other mode. */
246  if (cache->is_editmode != (lt->editlatt != NULL)) {
247  return false;
248  }
249 
250  if (cache->is_dirty) {
251  return false;
252  }
253 
/* Any change in resolution or the outside-only display flag changes both
 * vertex and edge topology. */
254  if ((cache->dims.u_len != lt->pntsu) || (cache->dims.v_len != lt->pntsv) ||
255  (cache->dims.w_len != lt->pntsw) ||
256  ((cache->show_only_outside != ((lt->flag & LT_OUTSIDE) != 0)))) {
257  return false;
258  }
259 
260  return true;
261 }
262 
/* (Re)initialize the batch cache to an empty state matching the current
 * lattice settings. Allocates on first use, otherwise zeroes in place.
 * NOTE(review): signature line missing in this extraction; the symbol index
 * names it `static void lattice_batch_cache_init(Lattice *lt)`. */
264 {
265  LatticeBatchCache *cache = lt->batch_cache;
266 
267  if (!cache) {
268  cache = lt->batch_cache = MEM_callocN(sizeof(*cache), __func__);
269  }
270  else {
/* Reuse the allocation; callers must have freed GPU resources first. */
271  memset(cache, 0, sizeof(*cache));
272  }
273 
/* Record the settings lattice_batch_cache_valid() compares against. */
274  cache->dims.u_len = lt->pntsu;
275  cache->dims.v_len = lt->pntsv;
276  cache->dims.w_len = lt->pntsw;
277  cache->show_only_outside = (lt->flag & LT_OUTSIDE) != 0;
278 
279  cache->is_editmode = lt->editlatt != NULL;
280 
281  cache->is_dirty = false;
282 }
283 
/* Ensure the cache matches the lattice; rebuild when stale.
 * NOTE(review): the signature and the clear/init calls inside the `if` were
 * lost in this extraction; the symbol index names this
 * `void DRW_lattice_batch_cache_validate(Lattice *lt)`. */
285 {
286  if (!lattice_batch_cache_valid(lt)) {
289  }
290 }
291 
/* Fetch the (already validated) cache pointer.
 * NOTE(review): signature line missing; symbol index:
 * `static LatticeBatchCache *lattice_batch_cache_get(Lattice *lt)`. */
293 {
294  return lt->batch_cache;
295 }
296 
/* Tag the cache for rebuild depending on what changed.
 * NOTE(review): the signature and the `case` label lines were lost in this
 * extraction; per the symbol index this is
 * `void DRW_lattice_batch_cache_dirty_tag(Lattice *lt, int mode)` and the
 * cases are BKE_LATTICE_BATCH_DIRTY_ALL / BKE_LATTICE_BATCH_DIRTY_SELECT —
 * confirm against the original source. */
298 {
299  LatticeBatchCache *cache = lt->batch_cache;
300  if (cache == NULL) {
/* Nothing cached yet, nothing to invalidate. */
301  return;
302  }
303  switch (mode) {
305  cache->is_dirty = true;
306  break;
308  /* TODO Separate Flag vbo */
310  break;
311  default:
/* Unknown dirty mode is a programming error. */
312  BLI_assert(0);
313  }
314 }
315 
/* Release the GPU resources held by the cache.
 * NOTE(review): the signature and the GPU_*_DISCARD_SAFE lines were lost in
 * this extraction; the symbol index names this
 * `static void lattice_batch_cache_clear(Lattice *lt)` — confirm against the
 * original source. */
317 {
318  LatticeBatchCache *cache = lt->batch_cache;
319  if (!cache) {
320  return;
321  }
322 
326 
329 }
330 
/* Public free entry point; per the symbol index this is
 * `void DRW_lattice_batch_cache_free(Lattice *lt)` — body lines (clearing
 * plus MEM_SAFE_FREE of the cache struct, presumably) are missing here. */
332 {
335 }
336 
337 /* GPUBatch cache usage. */
/* Build (lazily) the position vertex buffer, optionally with a per-vertex
 * weight attribute for weight-paint display.
 * NOTE(review): the first signature line and the GPU_vertformat_attr_add /
 * GPU_vertbuf_create_with_format lines were lost in this extraction; the
 * symbol index names this
 * `static GPUVertBuf *lattice_batch_cache_get_pos(LatticeRenderData *rdata,
 *  LatticeBatchCache *cache, bool use_weight, const int actdef)`. */
339  LatticeBatchCache *cache,
340  bool use_weight,
341  const int actdef)
342 {
344 
345  if (cache->pos == NULL) {
346  GPUVertFormat format = {0};
347  struct {
348  uint pos, col;
349  } attr_id;
350 
352  if (use_weight) {
354  }
355 
356  const int vert_len = lattice_render_data_verts_len_get(rdata);
357 
359  GPU_vertbuf_data_alloc(cache->pos, vert_len);
360  for (int i = 0; i < vert_len; i++) {
361  const BPoint *bp = lattice_render_data_vert_bpoint(rdata, i);
362  GPU_vertbuf_attr_set(cache->pos, attr_id.pos, i, bp->vec);
363 
364  if (use_weight) {
/* Sentinel weight used by the shader when no vertex group is active —
 * presumably drawn with a distinct "no weight" color; confirm in the
 * weight-paint shader. */
365  const float no_active_weight = 666.0f;
366  float weight = (actdef > -1) ? BKE_defvert_find_weight(rdata->dvert + i, actdef) :
367  no_active_weight;
368  GPU_vertbuf_attr_set(cache->pos, attr_id.col, i, &weight);
369  }
370  }
371  }
372 
373  return cache->pos;
374 }
375 
/* Build (lazily) the line index buffer connecting neighboring lattice
 * points along U, V and W. With show_only_outside, interior-to-interior
 * edges are skipped.
 * NOTE(review): the first signature line is missing in this extraction; the
 * symbol index names this
 * `static GPUIndexBuf *lattice_batch_cache_get_edges(LatticeRenderData *rdata,
 *  LatticeBatchCache *cache)`. */
377  LatticeBatchCache *cache)
378 {
380 
381  if (cache->edges == NULL) {
382  const int vert_len = lattice_render_data_verts_len_get(rdata);
383  const int edge_len = lattice_render_data_edges_len_get(rdata);
/* Tracks emitted edges to assert against the precomputed total. */
384  int edge_len_real = 0;
385 
386  GPUIndexBufBuilder elb;
387  GPU_indexbuf_init(&elb, GPU_PRIM_LINES, edge_len, vert_len);
388 
/* Flat index of grid point (u, v, w): w-major, then v, then u. */
389 #define LATT_INDEX(u, v, w) ((((w)*rdata->dims.v_len + (v)) * rdata->dims.u_len) + (u))
390 
391  for (int w = 0; w < rdata->dims.w_len; w++) {
/* uxt/vxt/wxt: point lies on the boundary along that axis. */
392  int wxt = (ELEM(w, 0, rdata->dims.w_len - 1));
393  for (int v = 0; v < rdata->dims.v_len; v++) {
394  int vxt = (ELEM(v, 0, rdata->dims.v_len - 1));
395  for (int u = 0; u < rdata->dims.u_len; u++) {
396  int uxt = (ELEM(u, 0, rdata->dims.u_len - 1));
397 
/* Edge to the previous point along W (kept if on the outside shell). */
398  if (w && ((uxt || vxt) || !rdata->show_only_outside)) {
399  GPU_indexbuf_add_line_verts(&elb, LATT_INDEX(u, v, w - 1), LATT_INDEX(u, v, w));
400  BLI_assert(edge_len_real <= edge_len);
401  edge_len_real++;
402  }
/* Edge to the previous point along V. */
403  if (v && ((uxt || wxt) || !rdata->show_only_outside)) {
404  GPU_indexbuf_add_line_verts(&elb, LATT_INDEX(u, v - 1, w), LATT_INDEX(u, v, w));
405  BLI_assert(edge_len_real <= edge_len);
406  edge_len_real++;
407  }
/* Edge to the previous point along U. */
408  if (u && ((vxt || wxt) || !rdata->show_only_outside)) {
409  GPU_indexbuf_add_line_verts(&elb, LATT_INDEX(u - 1, v, w), LATT_INDEX(u, v, w));
410  BLI_assert(edge_len_real <= edge_len);
411  edge_len_real++;
412  }
413  }
414  }
415  }
416 
417 #undef LATT_INDEX
418 
/* Outside-only mode emits a subset; otherwise the count must match. */
419  if (rdata->show_only_outside) {
420  BLI_assert(edge_len_real <= edge_len);
421  }
422  else {
423  BLI_assert(edge_len_real == edge_len);
424  }
425 
426  cache->edges = GPU_indexbuf_build(&elb);
427  }
428 
429  return cache->edges;
430 }
431 
/* Build the edit-mode overlay vertex buffer, flagging selected and active
 * control points for the overlay shader.
 * NOTE(review): the signature, rdata/cache setup lines, the vertex-format
 * attr-add lines and the batch-creation tail were lost in this extraction;
 * the symbol index names this
 * `static void lattice_batch_cache_create_overlay_batches(Lattice *lt)` —
 * confirm against the original source. */
433 {
434  /* Since LR_DATATYPE_OVERLAY is slow to generate, generate them all at once */
436 
439 
440  if (cache->overlay_verts == NULL) {
/* Static: the vertex format is shared across all lattices. */
441  static GPUVertFormat format = {0};
442  static struct {
443  uint pos, data;
444  } attr_id;
445  if (format.attr_len == 0) {
446  /* initialize vertex format */
449  }
450 
451  const int vert_len = lattice_render_data_verts_len_get(rdata);
452 
454  GPU_vertbuf_data_alloc(vbo, vert_len);
455  for (int i = 0; i < vert_len; i++) {
456  const BPoint *bp = lattice_render_data_vert_bpoint(rdata, i);
457 
/* Encode selection state; the active point gets its own flag. */
458  char vflag = 0;
459  if (bp->f1 & SELECT) {
460  if (i == rdata->actbp) {
461  vflag |= VFLAG_VERT_ACTIVE;
462  }
463  else {
464  vflag |= VFLAG_VERT_SELECTED;
465  }
466  }
467 
468  GPU_vertbuf_attr_set(vbo, attr_id.pos, i, bp->vec);
469  GPU_vertbuf_attr_set(vbo, attr_id.data, i, &vflag);
470  }
471 
473  }
474 
476 }
477 
/* Public entry: batch drawing all lattice edges, optionally colored by the
 * active deform group weight (`use_weight` / `actdef`).
 * NOTE(review): the cache/rdata acquisition lines, the primitive-type
 * argument of GPU_batch_create and the rdata free call were lost in this
 * extraction — confirm against the original source. */
478 GPUBatch *DRW_lattice_batch_cache_get_all_edges(Lattice *lt, bool use_weight, const int actdef)
479 {
481 
482  if (cache->all_edges == NULL) {
483  /* create batch from Lattice */
485 
486  cache->all_edges = GPU_batch_create(
488  lattice_batch_cache_get_pos(rdata, cache, use_weight, actdef),
489  lattice_batch_cache_get_edges(rdata, cache));
490 
492  }
493 
494  return cache->all_edges;
495 }
496 
/* Public entry: point-cloud batch of all lattice control points.
 * NOTE(review): the signature and cache/rdata acquisition lines were lost in
 * this extraction; the symbol index names this
 * `GPUBatch *DRW_lattice_batch_cache_get_all_verts(Lattice *lt)`. */
498 {
500 
501  if (cache->all_verts == NULL) {
503 
/* Position-only buffer: no weight attribute, no active group. */
504  cache->all_verts = GPU_batch_create(
505  GPU_PRIM_POINTS, lattice_batch_cache_get_pos(rdata, cache, false, -1), NULL);
506 
508  }
509 
510  return cache->all_verts;
511 }
512 
/* Public entry: edit-mode overlay vertex batch, built on demand.
 * NOTE(review): the signature and the create_overlay_batches call inside the
 * `if` were lost in this extraction; the symbol index names this
 * `GPUBatch *DRW_lattice_batch_cache_get_edit_verts(Lattice *lt)`. */
514 {
516 
517  if (cache->overlay_verts == NULL) {
519  }
520 
521  return cache->overlay_verts;
522 }
support for deformation groups and hooks.
float BKE_defvert_find_weight(const struct MDeformVert *dvert, const int defgroup)
Definition: deform.c:632
@ BKE_LATTICE_BATCH_DIRTY_SELECT
Definition: BKE_lattice.h:91
@ BKE_LATTICE_BATCH_DIRTY_ALL
Definition: BKE_lattice.h:90
#define BLI_assert(a)
Definition: BLI_assert.h:58
unsigned int uint
Definition: BLI_sys_types.h:83
#define ELEM(...)
#define LT_OUTSIDE
GPUBatch
Definition: GPU_batch.h:93
#define GPU_batch_create(prim, verts, elem)
Definition: GPU_batch.h:107
#define GPU_BATCH_DISCARD_SAFE(batch)
Definition: GPU_batch.h:199
GPUBatch * GPU_batch_create_ex(GPUPrimType prim, GPUVertBuf *vert, GPUIndexBuf *elem, eGPUBatchFlag owns_flag)
Definition: gpu_batch.cc:60
@ GPU_BATCH_OWNS_VBO
Definition: GPU_batch.h:45
struct GPUIndexBuf GPUIndexBuf
#define GPU_INDEXBUF_DISCARD_SAFE(elem)
void GPU_indexbuf_init(GPUIndexBufBuilder *, GPUPrimType, uint prim_len, uint vertex_len)
GPUIndexBuf * GPU_indexbuf_build(GPUIndexBufBuilder *)
void GPU_indexbuf_add_line_verts(GPUIndexBufBuilder *, uint v1, uint v2)
@ GPU_PRIM_LINES
Definition: GPU_primitive.h:36
@ GPU_PRIM_POINTS
Definition: GPU_primitive.h:35
#define GPU_vertbuf_create_with_format(format)
struct GPUVertBuf GPUVertBuf
void GPU_vertbuf_data_alloc(GPUVertBuf *, uint v_len)
#define GPU_VERTBUF_DISCARD_SAFE(verts)
void GPU_vertbuf_attr_set(GPUVertBuf *, uint a_idx, uint v_idx, const void *data)
@ GPU_FETCH_FLOAT
@ GPU_FETCH_INT
uint GPU_vertformat_attr_add(GPUVertFormat *, const char *name, GPUVertCompType, uint comp_len, GPUVertFetchMode)
@ GPU_COMP_F32
@ GPU_COMP_U8
Read Guarded memory(de)allocation.
#define MEM_SAFE_FREE(v)
ATTR_WARN_UNUSED_RESULT const BMVert * v
SIMD_FORCE_INLINE const btScalar & w() const
Return the w value.
Definition: btQuadWord.h:119
CCL_NAMESPACE_BEGIN struct Options options
@ VFLAG_VERT_SELECTED
@ VFLAG_VERT_ACTIVE
@ LR_DATATYPE_EDGE
@ LR_DATATYPE_OVERLAY
@ LR_DATATYPE_VERT
void DRW_lattice_batch_cache_free(Lattice *lt)
static void lattice_render_data_free(LatticeRenderData *rdata)
static int edge_len_calc(int u, int v, int w)
GPUBatch * DRW_lattice_batch_cache_get_edit_verts(Lattice *lt)
static GPUIndexBuf * lattice_batch_cache_get_edges(LatticeRenderData *rdata, LatticeBatchCache *cache)
#define SELECT
static int lattice_render_verts_len_get(Lattice *lt)
static const BPoint * lattice_render_data_vert_bpoint(const LatticeRenderData *rdata, const int vert_idx)
static int lattice_render_edges_len_get(Lattice *lt)
#define LATT_INDEX(u, v, w)
static int vert_len_calc(int u, int v, int w)
GPUBatch * DRW_lattice_batch_cache_get_all_verts(Lattice *lt)
GPUBatch * DRW_lattice_batch_cache_get_all_edges(Lattice *lt, bool use_weight, const int actdef)
static void lattice_batch_cache_init(Lattice *lt)
static int lattice_render_data_edges_len_get(const LatticeRenderData *rdata)
void DRW_lattice_batch_cache_dirty_tag(Lattice *lt, int mode)
static int lattice_render_data_verts_len_get(const LatticeRenderData *rdata)
static GPUVertBuf * lattice_batch_cache_get_pos(LatticeRenderData *rdata, LatticeBatchCache *cache, bool use_weight, const int actdef)
static LatticeRenderData * lattice_render_data_create(Lattice *lt, const int types)
struct LatticeRenderData LatticeRenderData
static LatticeBatchCache * lattice_batch_cache_get(Lattice *lt)
static void lattice_batch_cache_create_overlay_batches(Lattice *lt)
void DRW_lattice_batch_cache_validate(Lattice *lt)
struct LatticeBatchCache LatticeBatchCache
static bool lattice_batch_cache_valid(Lattice *lt)
static void lattice_batch_cache_clear(Lattice *lt)
uint pos
struct @612::@615 attr_id
uint col
format
Definition: logImageCore.h:47
static char ** types
Definition: makesdna.c:164
void(* MEM_freeN)(void *vmemh)
Definition: mallocn.c:41
void *(* MEM_callocN)(size_t len, const char *str)
Definition: mallocn.c:45
uint8_t f1
float vec[4]
struct Lattice * latt
struct LatticeBatchCache::@277 dims
const struct MDeformVert * dvert
struct EditLatt * edit_latt
struct LatticeRenderData::@276 dims
void * batch_cache
struct MDeformVert * dvert
struct EditLatt * editlatt
struct BPoint * def