/* Blender v2.93 — nodes/shader/node_shader_common.c */
1 /*
2  * This program is free software; you can redistribute it and/or
3  * modify it under the terms of the GNU General Public License
4  * as published by the Free Software Foundation; either version 2
5  * of the License, or (at your option) any later version.
6  *
7  * This program is distributed in the hope that it will be useful,
8  * but WITHOUT ANY WARRANTY; without even the implied warranty of
9  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10  * GNU General Public License for more details.
11  *
12  * You should have received a copy of the GNU General Public License
13  * along with this program; if not, write to the Free Software Foundation,
14  * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
15  *
16  * The Original Code is Copyright (C) 2006 Blender Foundation.
17  * All rights reserved.
18  * Juho Vepsäläinen
19  */
20 
25 #include "DNA_node_types.h"
26 
27 #include "BLI_utildefines.h"
28 
29 #include "BKE_node.h"
30 
31 #include "NOD_common.h"
32 #include "node_common.h"
33 #include "node_exec.h"
34 #include "node_shader_util.h"
35 
36 #include "RNA_access.h"
37 
39 {
40  if (to != from) {
41  copy_v4_v4(to->vec, from->vec);
42  to->data = from->data;
43  to->datatype = from->datatype;
44 
45  /* tag as copy to prevent freeing */
46  to->is_copy = 1;
47  }
48 }
49 
51 {
52  if (to != from) {
53  copy_v4_v4(to->vec, from->vec);
54  to->data = from->data;
55  to->datatype = from->datatype;
56  to->is_copy = from->is_copy;
57 
58  from->data = NULL;
59  from->is_copy = 0;
60  }
61 }
62 
63 /**** GROUP ****/
64 
66 {
67  bNodeTree *ngroup = (bNodeTree *)node->id;
69 
70  if (!ngroup) {
71  return NULL;
72  }
73 
74  /* initialize the internal node tree execution */
76 
77  return exec;
78 }
79 
80 static void group_freeexec(void *nodedata)
81 {
82  bNodeTreeExec *gexec = (bNodeTreeExec *)nodedata;
83 
84  if (gexec) {
86  }
87 }
88 
89 /* Copy inputs to the internal stack.
90  */
91 static void group_copy_inputs(bNode *gnode, bNodeStack **in, bNodeStack *gstack)
92 {
93  bNodeTree *ngroup = (bNodeTree *)gnode->id;
94  bNode *node;
95  bNodeSocket *sock;
96  bNodeStack *ns;
97  int a;
98 
99  for (node = ngroup->nodes.first; node; node = node->next) {
100  if (node->type == NODE_GROUP_INPUT) {
101  for (sock = node->outputs.first, a = 0; sock; sock = sock->next, a++) {
102  ns = node_get_socket_stack(gstack, sock);
103  if (ns) {
104  copy_stack(ns, in[a]);
105  }
106  }
107  }
108  }
109 }
110 
111 /* Copy internal results to the external outputs.
112  */
113 static void group_move_outputs(bNode *gnode, bNodeStack **out, bNodeStack *gstack)
114 {
115  bNodeTree *ngroup = (bNodeTree *)gnode->id;
116  bNode *node;
117  bNodeSocket *sock;
118  bNodeStack *ns;
119  int a;
120 
121  for (node = ngroup->nodes.first; node; node = node->next) {
122  if (node->type == NODE_GROUP_OUTPUT && (node->flag & NODE_DO_OUTPUT)) {
123  for (sock = node->inputs.first, a = 0; sock; sock = sock->next, a++) {
124  ns = node_get_socket_stack(gstack, sock);
125  if (ns) {
126  move_stack(out[a], ns);
127  }
128  }
129  break; /* only one active output node */
130  }
131  }
132 }
133 
134 static void group_execute(void *data,
135  int thread,
136  struct bNode *node,
137  bNodeExecData *execdata,
138  struct bNodeStack **in,
139  struct bNodeStack **out)
140 {
141  bNodeTreeExec *exec = execdata->data;
142  bNodeThreadStack *nts;
143 
144  if (!exec) {
145  return;
146  }
147 
148  /* XXX same behavior as trunk: all nodes inside group are executed.
149  * it's stupid, but just makes it work. compo redesign will do this better.
150  */
151  {
152  bNode *inode;
153  for (inode = exec->nodetree->nodes.first; inode; inode = inode->next) {
154  inode->need_exec = 1;
155  }
156  }
157 
159 
160  group_copy_inputs(node, in, nts->stack);
162  group_move_outputs(node, out, nts->stack);
163 
165 }
166 
167 static void group_gpu_copy_inputs(bNode *gnode, GPUNodeStack *in, bNodeStack *gstack)
168 {
169  bNodeTree *ngroup = (bNodeTree *)gnode->id;
170  bNode *node;
171  bNodeSocket *sock;
172  bNodeStack *ns;
173  int a;
174 
175  for (node = ngroup->nodes.first; node; node = node->next) {
176  if (node->type == NODE_GROUP_INPUT) {
177  for (sock = node->outputs.first, a = 0; sock; sock = sock->next, a++) {
178  ns = node_get_socket_stack(gstack, sock);
179  if (ns) {
180  /* convert the external gpu stack back to internal node stack data */
181  node_data_from_gpu_stack(ns, &in[a]);
182  }
183  }
184  }
185  }
186 }
187 
188 /* Copy internal results to the external outputs.
189  */
190 static void group_gpu_move_outputs(bNode *gnode, GPUNodeStack *out, bNodeStack *gstack)
191 {
192  bNodeTree *ngroup = (bNodeTree *)gnode->id;
193  bNode *node;
194  bNodeSocket *sock;
195  bNodeStack *ns;
196  int a;
197 
198  for (node = ngroup->nodes.first; node; node = node->next) {
199  if (node->type == NODE_GROUP_OUTPUT && (node->flag & NODE_DO_OUTPUT)) {
200  for (sock = node->inputs.first, a = 0; sock; sock = sock->next, a++) {
201  ns = node_get_socket_stack(gstack, sock);
202  if (ns) {
203  /* convert the node stack data result back to gpu stack */
204  node_gpu_stack_from_data(&out[a], sock->type, ns);
205  }
206  }
207  break; /* only one active output node */
208  }
209  }
210 }
211 
212 static int gpu_group_execute(
213  GPUMaterial *mat, bNode *node, bNodeExecData *execdata, GPUNodeStack *in, GPUNodeStack *out)
214 {
215  bNodeTreeExec *exec = execdata->data;
216 
217  if (!node->id) {
218  return 0;
219  }
220 
221  group_gpu_copy_inputs(node, in, exec->stack);
222  ntreeExecGPUNodes(exec, mat, NULL);
223  group_gpu_move_outputs(node, out, exec->stack);
224 
225  return 1;
226 }
227 
229 {
230  static bNodeType ntype;
231 
232  /* NB: cannot use sh_node_type_base for node group, because it would map the node type
233  * to the shared NODE_GROUP integer type id.
234  */
235  node_type_base_custom(&ntype, "ShaderNodeGroup", "Group", NODE_CLASS_GROUP, NODE_CONST_OUTPUT);
236  ntype.type = NODE_GROUP;
237  ntype.poll = sh_node_poll_default;
241  ntype.rna_ext.srna = RNA_struct_find("ShaderNodeGroup");
242  BLI_assert(ntype.rna_ext.srna != NULL);
244 
246  node_type_size(&ntype, 140, 60, 400);
251 
252  nodeRegisterType(&ntype);
253 }
254 
256 {
257  /* These methods can be overridden but need a default implementation otherwise. */
258  if (ntype->poll == NULL) {
259  ntype->poll = sh_node_poll_default;
260  }
261  if (ntype->insert_link == NULL) {
263  }
264  if (ntype->update_internal_links == NULL) {
266  }
267 
270 }
void node_type_gpu(struct bNodeType *ntype, NodeGPUExecFunction gpu_fn)
Definition: node.cc:4645
void node_type_size(struct bNodeType *ntype, int width, int minwidth, int maxwidth)
Definition: node.cc:4565
void node_type_socket_templates(struct bNodeType *ntype, struct bNodeSocketTemplate *inputs, struct bNodeSocketTemplate *outputs)
Definition: node.cc:4527
void node_type_base_custom(struct bNodeType *ntype, const char *idname, const char *name, short nclass, short flag)
Definition: node.cc:4478
void node_type_group_update(struct bNodeType *ntype, void(*group_update_func)(struct bNodeTree *ntree, struct bNode *node))
Definition: node.cc:4629
#define NODE_CLASS_GROUP
Definition: BKE_node.h:339
#define NODE_GROUP_INPUT
Definition: BKE_node.h:874
void node_type_exec(struct bNodeType *ntype, NodeInitExecFunction init_exec_fn, NodeFreeExecFunction free_exec_fn, NodeExecFunction exec_fn)
Definition: node.cc:4635
void nodeRegisterType(struct bNodeType *ntype)
Definition: node.cc:1298
void node_type_label(struct bNodeType *ntype, void(*labelfunc)(struct bNodeTree *ntree, struct bNode *, char *label, int maxlen))
#define BLI_assert(a)
Definition: BLI_assert.h:58
MINLINE void copy_v4_v4(float r[4], const float a[4])
#define NODE_DO_OUTPUT
#define NODE_CONST_OUTPUT
NODE_GROUP_OUTPUT
NODE_GROUP
OperationNode * node
StackEntry * from
static unsigned a[3]
Definition: RandGen.cpp:92
void node_group_update(struct bNodeTree *ntree, struct bNode *node)
Definition: node_common.c:187
bool node_group_poll_instance(bNode *node, bNodeTree *nodetree, const char **disabled_hint)
Definition: node_common.c:82
void node_group_label(bNodeTree *UNUSED(ntree), bNode *node, char *label, int maxlen)
Definition: node_common.c:77
bNodeThreadStack * ntreeGetThreadStack(bNodeTreeExec *exec, int thread)
Definition: node_exec.cc:283
bool ntreeExecThreadNodes(bNodeTreeExec *exec, bNodeThreadStack *nts, void *callerdata, int thread)
Definition: node_exec.cc:310
void ntreeReleaseThreadStack(bNodeThreadStack *nts)
Definition: node_exec.cc:305
bNodeStack * node_get_socket_stack(bNodeStack *stack, bNodeSocket *sock)
Definition: node_exec.cc:45
struct bNodeTreeExec * ntreeShaderBeginExecTree_internal(struct bNodeExecContext *context, struct bNodeTree *ntree, bNodeInstanceKey parent_key)
void ntreeShaderEndExecTree_internal(struct bNodeTreeExec *exec)
static int gpu_group_execute(GPUMaterial *mat, bNode *node, bNodeExecData *execdata, GPUNodeStack *in, GPUNodeStack *out)
void register_node_type_sh_group(void)
static void group_execute(void *data, int thread, struct bNode *node, bNodeExecData *execdata, struct bNodeStack **in, struct bNodeStack **out)
static void * group_initexec(bNodeExecContext *context, bNode *node, bNodeInstanceKey key)
static void move_stack(bNodeStack *to, bNodeStack *from)
static void group_move_outputs(bNode *gnode, bNodeStack **out, bNodeStack *gstack)
static void group_copy_inputs(bNode *gnode, bNodeStack **in, bNodeStack *gstack)
static void group_freeexec(void *nodedata)
static void copy_stack(bNodeStack *to, bNodeStack *from)
static void group_gpu_copy_inputs(bNode *gnode, GPUNodeStack *in, bNodeStack *gstack)
static void group_gpu_move_outputs(bNode *gnode, GPUNodeStack *out, bNodeStack *gstack)
void register_node_type_sh_custom_group(bNodeType *ntype)
bool sh_node_poll_default(bNodeType *UNUSED(ntype), bNodeTree *ntree, const char **r_disabled_hint)
void node_data_from_gpu_stack(bNodeStack *ns, GPUNodeStack *gs)
void ntreeExecGPUNodes(bNodeTreeExec *exec, GPUMaterial *mat, bNode *output_node)
void node_gpu_stack_from_data(struct GPUNodeStack *gs, int type, bNodeStack *ns)
static void exec(void *data, int UNUSED(thread), bNode *node, bNodeExecData *execdata, bNodeStack **in, bNodeStack **out)
void node_insert_link_default(bNodeTree *ntree, bNode *node, bNodeLink *link)
Definition: node_util.c:305
void node_update_internal_links_default(bNodeTree *ntree, bNode *node)
Definition: node_util.c:503
void RNA_struct_blender_type_set(StructRNA *srna, void *blender_type)
Definition: rna_access.c:1044
StructRNA * RNA_struct_find(const char *identifier)
Definition: rna_access.c:718
struct SELECTID_Context context
Definition: select_engine.c:47
StructRNA * srna
Definition: RNA_types.h:681
eGPUType type
Definition: GPU_material.h:117
void * first
Definition: DNA_listBase.h:47
void * data
Definition: node_util.h:53
float vec[4]
short datatype
struct bNodeStack * stack
Definition: node_exec.h:70
ListBase nodes
Defines a node type.
Definition: BKE_node.h:221
bool(* poll)(struct bNodeType *ntype, struct bNodeTree *nodetree, const char **r_disabled_hint)
Definition: BKE_node.h:301
void(* update_internal_links)(struct bNodeTree *, struct bNode *node)
Definition: BKE_node.h:312
ExtensionRNA rna_ext
Definition: BKE_node.h:330
int type
Definition: BKE_node.h:225
void(* insert_link)(struct bNodeTree *ntree, struct bNode *node, struct bNodeLink *link)
Definition: BKE_node.h:310
bool(* poll_instance)(struct bNode *node, struct bNodeTree *nodetree, const char **r_disabled_hint)
Definition: BKE_node.h:305
struct ID * id
struct bNode * next
uint8_t need_exec