// NOTE(review): This chunk is a line-shredded listing: original file line
// numbers are fused into the text (e.g. "307") and the numbering jumps,
// so many intermediate lines are missing from this view. Code is kept
// byte-identical; only review comments are added. Recover the unabridged
// source before changing any code here.
//
// Fragments below appear to belong to a node-tree updater class:
// - a member flag that records whether depsgraph relations must be rebuilt,
// - a constructor initializer list (bmain_, params_, relations_),
// - the main update loop: it collects a per-tree update result, takes a
//   fast path when exactly one root tree changed and neither its interface
//   nor its output changed, otherwise updates all affected trees in
//   dependency order, then fires the tree_changed_fn /
//   tree_output_changed_fn callbacks and requests a relations update.
307 bool needs_relations_update_ =
false;
// Constructor initializer list fragment — stores the main database,
// caller-supplied params, and builds the tree-relations helper.
311 : bmain_(bmain), params_(
params), relations_(bmain)
320 changed_ntrees.
append(ntree);
// Single-tree fast path: when only one root tree changed and the change is
// purely internal (no interface/output change), the full propagation pass
// over dependent trees can be skipped.
333 bool is_single_tree_update =
false;
335 if (root_ntrees.
size() == 1) {
341 update_result_by_tree_.add_new(ntree,
result);
342 if (!
result.interface_changed && !
result.output_changed) {
343 is_single_tree_update =
true;
// Slow path: update every affected tree in topological order, reusing
// results already computed for trees handled above.
347 if (!is_single_tree_update) {
349 for (
bNodeTree *ntree : ntrees_in_order) {
353 if (!update_result_by_tree_.contains(ntree)) {
355 update_result_by_tree_.add_new(ntree,
result);
359 if (
result.output_changed) {
364 if (
result.interface_changed) {
// Post-pass over every (tree, result) pair: clear per-tree changed flags,
// notify modifier users on interface changes, and reset cached
// geometry-nodes lazy-function graph info on output changes.
372 for (
const auto item : update_result_by_tree_.items()) {
376 this->reset_changed_flags(*ntree);
378 if (
result.interface_changed) {
380 relations_.ensure_modifier_users();
382 Object *
object = pair.first;
392 if (
result.output_changed) {
393 ntree->
runtime->geometry_nodes_lazy_function_graph_info.reset();
// Callbacks receive the owning ID when the tree is embedded, else the
// tree's own ID.
397 ID &owner_or_self_id = owner_id ? *owner_id : ntree->
id;
398 if (params_.tree_changed_fn) {
399 params_.tree_changed_fn(*ntree, owner_or_self_id);
401 if (params_.tree_output_changed_fn &&
result.output_changed) {
402 params_.tree_output_changed_fn(*ntree, owner_or_self_id);
406 if (needs_relations_update_) {
// NOTE(review): fragmented listing — lines are missing between fragments;
// code kept byte-identical, comments only.
//
// Topological ordering of the trees to update. Uses the classic DFS
// three-mark scheme (None / Temporary / Permanent): Temporary marks nodes
// on the current DFS path, so revisiting a Temporary node means a cycle.
414 enum class ToposortMark {
434 ToposortMarkMap marks;
435 for (
bNodeTree *ntree : trees_to_update) {
436 marks.add_new(ntree, ToposortMark::None);
438 for (
bNodeTree *ntree : trees_to_update) {
439 if (marks.lookup(ntree) == ToposortMark::None) {
// visit_recursive presumably returns false when a cycle is found —
// the negation turns that into cycle_detected. TODO(review): confirm
// against the missing lines.
440 const bool cycle_detected = !this->get_tree_update_order__visit_recursive(
441 ntree, marks, sorted_ntrees);
// Post-order DFS appends dependencies first; reversing yields the
// processing order users-before-used (or vice versa — confirm).
448 std::reverse(sorted_ntrees.
begin(), sorted_ntrees.
end());
450 return sorted_ntrees;
// Recursive DFS visit. Edges come from relations_.get_group_node_users():
// a tree is followed by the trees that contain group nodes using it.
453 bool get_tree_update_order__visit_recursive(
bNodeTree *ntree,
454 ToposortMarkMap &marks,
455 Vector<bNodeTree *> &sorted_ntrees)
457 ToposortMark &mark = marks.lookup(ntree);
458 if (mark == ToposortMark::Permanent) {
// Temporary mark here means this tree is already on the DFS stack:
// a dependency cycle. The missing lines presumably report/abort.
461 if (mark == ToposortMark::Temporary) {
466 mark = ToposortMark::Temporary;
468 for (
const TreeNodePair &pair : relations_.get_group_node_users(ntree)) {
469 this->get_tree_update_order__visit_recursive(pair.first, marks, sorted_ntrees);
471 sorted_ntrees.
append(ntree);
473 mark = ToposortMark::Permanent;
// NOTE(review): fragmented listing — code kept byte-identical, comments only.
//
// Computes the transitive closure of trees affected by the given roots:
// a worklist (VectorSet used as a dedup queue) walks "group node user"
// edges so every tree that directly or indirectly embeds a changed tree
// is included.
477 Set<bNodeTree *> get_trees_to_update(Span<bNodeTree *> root_ntrees)
479 relations_.ensure_group_node_users();
481 Set<bNodeTree *> reachable_trees;
482 VectorSet<bNodeTree *> trees_to_check = root_ntrees;
484 while (!trees_to_check.
is_empty()) {
// Set::add returns true only on first insertion, so each tree's users
// are expanded exactly once.
486 if (reachable_trees.
add(ntree)) {
487 for (
const TreeNodePair &pair : relations_.get_group_node_users(ntree)) {
488 trees_to_check.
add(pair.first);
493 return reachable_trees;
// NOTE(review): fragmented listing — code kept byte-identical, comments only.
//
// Per-tree update pipeline. Clears cached link errors / invalid zone
// outputs, then runs the individual update passes in a fixed order.
// Several passes set result.interface_changed when the tree's external
// interface may have changed; output_changed is derived from a topology
// hash comparison in check_if_output_changed(). Statement ORDER matters
// here (e.g. socket links are refreshed again after enum propagation and
// before link validation) — do not reorder.
496 TreeUpdateResult update_tree(
bNodeTree &ntree)
500 ntree.
runtime->link_errors.clear();
501 ntree.
runtime->invalid_zone_output_node_ids.clear();
503 if (this->update_panel_toggle_names(ntree)) {
504 result.interface_changed =
true;
507 this->update_socket_link_and_use(ntree);
508 this->update_individual_nodes(ntree);
509 this->update_internal_links(ntree);
510 this->update_generic_callback(ntree);
511 this->remove_unused_previews_when_necessary(ntree);
512 this->make_node_previews_dirty(ntree);
514 this->propagate_runtime_flags(ntree);
516 if (this->propagate_enum_definitions(ntree)) {
517 result.interface_changed =
true;
// The following interface_changed assignments sit under conditions that
// are missing from this view — TODO(review): recover them before editing.
520 result.interface_changed =
true;
523 result.interface_changed =
true;
525 this->update_from_field_inference(ntree);
527 result.interface_changed =
true;
530 result.interface_changed =
true;
532 this->update_socket_shapes(ntree);
533 this->update_eval_dependencies(ntree);
536 result.output_changed = this->check_if_output_changed(ntree);
538 this->update_socket_link_and_use(ntree);
539 this->update_link_validation(ntree);
541 if (this->update_nested_node_refs(ntree)) {
542 result.interface_changed =
true;
550 result.interface_changed =
true;
// NOTE(review): fragmented listing — code kept byte-identical, comments only.
//
// Two separate fragments appear here: (1) a pass that collects node
// identifiers into a set (presumably to validate uniqueness — confirm),
// and (2) socket-link maintenance that caches the first directly linked
// link on each socket (or nullptr when unlinked) and refreshes per-socket
// "used" tags.
555 Set<int32_t> node_identifiers;
556 const Span<const bNode *> nodes = ntree.all_nodes();
558 const bNode &node = *nodes[
i];
570 tree.ensure_topology_cache();
// Cache convenience pointer: socket->link mirrors the first incoming link.
572 if (socket->directly_linked_links().is_empty()) {
573 socket->link =
nullptr;
576 socket->link = socket->directly_linked_links()[0];
580 this->update_socket_used_tags(
tree);
585 tree.ensure_topology_cache();
587 const bool socket_is_linked = !socket->directly_linked_links().is_empty();
// NOTE(review): fragmented listing — code kept byte-identical, comments only.
//
// Per-node update: nodes flagged as changed get their type-specific update
// run; undefined nodes get their (owning raw pointer) declaration deleted
// and all socket declaration pointers nulled so nothing dangles.
592 void update_individual_nodes(
bNodeTree &ntree)
594 for (
bNode *node : ntree.all_nodes()) {
596 if (this->should_update_individual_node(ntree, *node)) {
597 bke::bNodeType &ntype = *node->typeinfo;
605 else if (node->is_undefined()) {
// Raw delete of the declaration — the sockets' declaration pointers
// below reference into it, hence they must be cleared too.
608 delete node->runtime->declaration;
609 node->runtime->declaration =
nullptr;
611 socket->runtime->declaration =
nullptr;
614 socket->runtime->declaration =
nullptr;
// Decides whether a node needs its individual update; group input/output
// nodes and zone nodes (via the corresponding output node) have special
// handling in the missing lines.
624 bool should_update_individual_node(
const bNodeTree &ntree,
const bNode &node)
637 if (node.is_group_input() || node.is_group_output()) {
644 if (
const bNode *output_node = zone_type.get_corresponding_output(ntree, node)) {
// Value type describing an expected internal (input->output pass-through)
// link inside a muted/bypassed node.
653 struct InternalLink {
656 int multi_input_sort_id = 0;
// Helper fragment: scans candidate links and skips those coming from
// dangling reroutes.
662 const Span<const bNodeLink *> links)
const
665 if (!link->fromnode->is_dangling_reroute()) {
// NOTE(review): fragmented listing — code kept byte-identical, comments only.
//
// Rebuilds each changed node's internal links (the pass-through links used
// when a node is muted): for every available output it finds the input
// that should feed it, builds the expected link list, and only rewrites
// node->runtime->internal_links when the existing set differs (size check
// first as a cheap early-out, then an element-wise containment check).
672 void update_internal_links(
bNodeTree &ntree)
674 bke::node_tree_runtime::AllowUsingOutdatedInfo allow_outdated_info{ntree};
675 ntree.ensure_topology_cache();
676 for (
bNode *node : ntree.all_nodes()) {
677 if (!this->should_update_individual_node(ntree, *node)) {
682 for (
const bNodeSocket *output_socket : node->output_sockets()) {
683 if (!output_socket->is_available()) {
689 const bNodeSocket *input_socket = this->find_internally_linked_input(ntree, output_socket);
690 if (input_socket ==
nullptr) {
694 const Span<const bNodeLink *> connected_links = input_socket->directly_linked_links();
695 const bNodeLink *connected_link = first_non_dangling_link(ntree, connected_links);
// multi_input_sort_id fragment — clamped to >= 0.
698 std::max<int>(0, connected_links.
size() - 1);
// const_cast here strips const from the topology-cache pointer; the
// InternalLink needs a mutable socket. Pre-existing pattern — keep.
699 expected_internal_links.
append(InternalLink{
const_cast<bNodeSocket *
>(input_socket),
// Cheap early-out: different count means the set changed for sure.
705 if (node->
runtime->internal_links.size() != expected_internal_links.
size()) {
706 this->update_internal_links_in_node(ntree, *node, expected_internal_links);
// Same count: verify every existing internal link is still expected.
710 const bool all_expected_internal_links_exist = std::all_of(
711 node->
runtime->internal_links.begin(),
712 node->
runtime->internal_links.end(),
714 const InternalLink internal_link{link.fromsock, link.tosock, link.multi_input_sort_id};
715 return expected_internal_links.
as_span().contains(internal_link);
718 if (all_expected_internal_links_exist) {
722 this->update_internal_links_in_node(ntree, *node, expected_internal_links);
// NOTE(review): fragmented listing — code kept byte-identical, comments only.
//
// Chooses which input socket internally feeds a given output when the node
// is muted. A node type may override the choice via its
// internally_linked_input callback; otherwise a priority scan over the
// available inputs runs, preferring higher priority and directly linked
// sockets.
729 const bNode &node = output_socket->owner_node();
730 if (node.
typeinfo->internally_linked_input) {
731 return node.
typeinfo->internally_linked_input(ntree, node, *output_socket);
735 int selected_priority = -1;
736 bool selected_is_linked =
false;
737 for (
const bNodeSocket *input_socket : node.input_sockets()) {
738 if (!input_socket->is_available()) {
749 const bool is_linked = input_socket->is_directly_linked();
// Preference rule: strictly higher priority wins; at equal priority a
// linked socket beats an unlinked one. (Condition text as in original.)
750 const bool is_preferred = priority > selected_priority || (is_linked && !selected_is_linked);
754 selected_socket = input_socket;
755 selected_priority = priority;
756 selected_is_linked = is_linked;
758 return selected_socket;
// Replaces the node's internal-link list wholesale with the expected set
// computed by update_internal_links(); reserve() avoids reallocation in
// the append loop.
761 void update_internal_links_in_node(
bNodeTree &ntree,
763 Span<InternalLink> internal_links)
765 node.
runtime->internal_links.clear();
766 node.
runtime->internal_links.reserve(internal_links.
size());
767 for (
const InternalLink &internal_link : internal_links) {
772 link.
tosock = internal_link.to;
775 node.
runtime->internal_links.append(link);
// NOTE(review): fragmented listing — code kept byte-identical, comments only.
//
// Three small passes:
// - update_generic_callback: invokes the tree type's optional update hook
//   (skipped when the type has none).
// - remove_unused_previews_when_necessary: early-outs when only flags in
//   an allowed mask changed (i.e. nothing that invalidates previews).
// - make_node_previews_dirty: bumps the preview refresh counter and
//   recurses into group nodes' nested trees.
780 void update_generic_callback(
bNodeTree &ntree)
782 if (ntree.
typeinfo->update ==
nullptr) {
788 void remove_unused_previews_when_necessary(
bNodeTree &ntree)
// Mask test: true when changed_flag contains only allowed bits.
793 if ((ntree.
runtime->changed_flag & allowed_flags) == ntree.
runtime->changed_flag) {
799 void make_node_previews_dirty(
bNodeTree &ntree)
801 ntree.
runtime->previews_refresh_state++;
802 for (
bNode *node : ntree.all_nodes()) {
803 if (!node->is_group()) {
// Recursion into nested group trees so their previews refresh as well.
807 this->make_node_previews_dirty(*nested_tree);
// NOTE(review): fragmented listing — code kept byte-identical, comments only.
//
// Recomputes ntree.runtime->runtime_flag from scratch: flags are inherited
// from used group trees, then set based on presence of specific node
// idnames (image/environment textures, shader output nodes, simulation
// output). Mutates only runtime data despite the const bNodeTree& —
// presumably runtime is mutable or a pointer member; confirm.
812 void propagate_runtime_flags(
const bNodeTree &ntree)
814 ntree.ensure_topology_cache();
816 ntree.
runtime->runtime_flag = 0;
// Inherit flags from nested group trees.
818 for (
const bNode *group_node : ntree.group_nodes()) {
820 if (group !=
nullptr) {
// Texture nodes imply texture-related runtime flags.
827 for (
const StringRefNull idname : {
"ShaderNodeTexImage",
"ShaderNodeTexEnvironment"}) {
828 for (
const bNode *node : ntree.nodes_by_type(idname)) {
// Shader output node detection (material/light/world/AOV).
837 for (
const StringRefNull idname : {
"ShaderNodeOutputMaterial",
838 "ShaderNodeOutputLight",
839 "ShaderNodeOutputWorld",
840 "ShaderNodeOutputAOV"})
842 const Span<const bNode *> nodes = ntree.nodes_by_type(idname);
851 if (!ntree.nodes_by_type(
"GeometryNodeSimulationOutput").is_empty()) {
// Pushes inferred field states back onto nodes; the visible fragment
// iterates Bake node storage items.
857 void update_from_field_inference(
bNodeTree &ntree)
862 const Span<bke::FieldSocketState> field_states = ntree.
runtime->field_states;
863 for (
bNode *node : ntree.nodes_by_type(
"GeometryNodeBake")) {
865 for (
const int i : IndexRange(storage.
items_num)) {
// NOTE(review): fragmented listing — code kept byte-identical, comments only.
//
// Socket display-shape computation. Two static helpers map a socket's
// declaration + structure type (and, for outputs, the inferred field
// state) to a display shape enum; update_socket_shapes() applies them
// across the tree. Group input nodes use the OUTPUT shape helper on their
// output sockets (and group output nodes on their inputs) because those
// sockets mirror the tree interface.
875 static bool socket_type_always_single(
const SocketDeclaration &decl)
890 static int get_input_socket_shape(
const SocketDeclaration &decl,
891 const StructureType structure_type)
// Always-single socket types short-circuit the structure switch.
896 if (socket_type_always_single(decl)) {
899 switch (structure_type) {
900 case StructureType::Single:
902 case StructureType::Dynamic:
904 case StructureType::Field:
906 case StructureType::Grid:
913 static int get_output_socket_shape(
const SocketDeclaration &decl,
915 const StructureType structure_type)
920 if (socket_type_always_single(decl)) {
923 switch (structure_type) {
924 case StructureType::Single: {
927 case StructureType::Dynamic: {
// Field outputs further dispatch on the inferred field state.
930 case StructureType::Field: {
931 switch (field_state) {
941 case StructureType::Grid: {
949 void update_socket_shapes(
bNodeTree &ntree)
951 ntree.ensure_topology_cache();
// New structure-type path is gated behind an experimental user preference.
952 if (
U.experimental.use_socket_structure_type) {
954 *ntree.
runtime->structure_type_interface;
955 const Span<bke::FieldSocketState> field_states = ntree.
runtime->field_states;
956 for (
bNode *node : ntree.all_nodes()) {
957 if (node->is_undefined()) {
960 if (node->is_group_input()) {
961 const Span<bNodeSocket *> sockets = node->output_sockets();
963 sockets[
i]->display_shape = get_output_socket_shape(
964 *sockets[
i]->runtime->declaration,
965 field_states[sockets[
i]->index_in_tree()],
970 if (node->is_group_output()) {
971 const Span<bNodeSocket *> sockets = node->input_sockets();
973 sockets[
i]->display_shape = get_output_socket_shape(
974 *sockets[
i]->runtime->declaration,
975 field_states[sockets[
i]->index_in_tree()],
// Regular nodes: inputs use the declaration's structure type directly;
// outputs also consult the per-socket field state.
980 for (
bNodeSocket *socket : node->input_sockets()) {
981 if (
const SocketDeclaration *declaration = socket->runtime->declaration) {
982 socket->display_shape = get_input_socket_shape(*declaration,
983 declaration->structure_type);
986 for (
bNodeSocket *socket : node->output_sockets()) {
987 if (
const SocketDeclaration *declaration = socket->runtime->declaration) {
988 socket->display_shape = get_output_socket_shape(
989 *declaration, field_states[socket->index_in_tree()], declaration->structure_type);
// Legacy (non-experimental) path: shape from field state alone.
995 const Span<bke::FieldSocketState> field_states = ntree.
runtime->field_states;
997 switch (field_states[socket->index_in_tree()]) {
// NOTE(review): fragmented listing — code kept byte-identical, comments only.
//
// Recomputes the tree's geometry-nodes evaluation dependencies and stores
// them only when they changed (or were never computed), flagging that the
// depsgraph relations must be rebuilt. The cached value is held in a
// unique_ptr, replaced by moving the freshly computed set.
1012 void update_eval_dependencies(
bNodeTree &ntree)
1014 ntree.ensure_topology_cache();
1015 nodes::GeometryNodesEvalDependencies new_deps =
// Compare against the cached dependencies; absent cache counts as changed.
1019 if (!ntree.
runtime->geometry_nodes_eval_dependencies ||
1020 new_deps != *ntree.
runtime->geometry_nodes_eval_dependencies)
1022 needs_relations_update_ =
true;
1023 ntree.
runtime->geometry_nodes_eval_dependencies =
1024 std::make_unique<nodes::GeometryNodesEvalDependencies>(std::move(new_deps));
// NOTE(review): fragmented listing — many lines are missing between the
// fragments below; code kept byte-identical, comments only. The enum-items
// objects are reference counted (add_user /
// remove_user_and_delete_if_last) — every ownership transfer below must
// stay balanced, so treat this region as especially fragile.
//
// Propagates menu (enum) definitions right-to-left through the tree:
// menu-switch nodes define enums locally; group nodes, menu switches and
// for-each-element nodes forward definitions onto their sockets; group
// input sockets are reconciled across all group-input nodes, detecting
// conflicts; finally the tree interface inputs are updated and `changed`
// reports whether the interface was affected.
1028 bool propagate_enum_definitions(
bNodeTree &ntree)
1030 ntree.ensure_interface_cache();
// Right-to-left toposort so consumers see producer definitions first.
1034 for (
bNode *node : ntree.toposort_right_to_left()) {
1035 const bool node_updated = this->should_update_individual_node(ntree, *node);
1038 if (node->is_type(
"GeometryNodeMenuSwitch")) {
// Release the stale enum reference before installing the local one.
1049 enum_items->remove_user_and_delete_if_last();
1051 locally_defined_enums.
append(&enum_input);
// Clear enum references on all menu sockets that are not locally defined,
// so stale definitions never survive a node update.
1055 for (
bNodeSocket *socket : node->input_sockets()) {
1056 if (socket->is_available() && socket->type ==
SOCK_MENU &&
1057 !locally_defined_enums.
contains(socket))
1059 clear_enum_reference(*socket);
1062 for (
bNodeSocket *socket : node->output_sockets()) {
1063 if (socket->is_available() && socket->type ==
SOCK_MENU) {
1064 clear_enum_reference(*socket);
// Definition sources: group nodes copy from the group tree's interface...
1082 if (node->is_group()) {
1084 if (node->
id ==
nullptr) {
1088 group_tree->ensure_interface_cache();
1090 for (
const int socket_i : group_tree->interface_inputs().index_range()) {
1096 this->update_socket_enum_definition(
// ...menu switches from their own definition...
1102 else if (node->is_type(
"GeometryNodeMenuSwitch")) {
1107 this->update_socket_enum_definition(
// ...and for-each-element input nodes mirror input->output socket pairs
// (first two sockets skipped via drop_front(2)).
1113 else if (node->is_type(
"GeometryNodeForeachGeometryElementInput")) {
1115 BLI_assert(node->input_sockets().size() == node->output_sockets().size());
1117 const IndexRange sockets = node->input_sockets().index_range().drop_front(2);
1118 for (
const int socket_i : sockets) {
1124 this->update_socket_enum_definition(
1139 this->update_socket_enum_definition(
// Reconcile each menu interface input across ALL group input nodes:
// a conflict exists when two nodes resolved to different enum-items sets.
1150 const Span<bNode *> group_input_nodes = ntree.group_input_nodes();
1151 for (
const int interface_input_i : ntree.interface_inputs().index_range()) {
1153 *ntree.interface_inputs()[interface_input_i];
1154 if (interface_socket.
socket_type != StringRef(
"NodeSocketMenu")) {
1158 bool found_conflict =
false;
1159 for (
bNode *input_node : group_input_nodes) {
1160 const bNodeSocket &socket = input_node->output_socket(interface_input_i);
1162 if (socket_value.has_conflict()) {
1163 found_conflict =
true;
1166 if (found_enum_items ==
nullptr) {
1167 found_enum_items = socket_value.enum_items;
1169 else if (socket_value.enum_items !=
nullptr) {
1170 if (found_enum_items != socket_value.enum_items) {
1171 found_conflict =
true;
// On conflict: drop every node's enum reference (user count balanced);
// otherwise share the single found definition with nodes lacking one.
1176 if (found_conflict) {
1178 for (
bNode *input_node : group_input_nodes) {
1179 bNodeSocket &socket = input_node->output_socket(interface_input_i);
1181 if (socket_value.enum_items) {
1182 socket_value.enum_items->remove_user_and_delete_if_last();
1183 socket_value.enum_items =
nullptr;
1188 else if (found_enum_items !=
nullptr) {
1190 for (
bNode *input_node : group_input_nodes) {
1191 bNodeSocket &socket = input_node->output_socket(interface_input_i);
1193 if (socket_value.enum_items ==
nullptr) {
1194 found_enum_items->add_user();
1195 socket_value.enum_items = found_enum_items;
// Push the reconciled definitions up into the tree interface inputs.
1203 for (
const bNode *group_input_node : ntree.group_input_nodes()) {
1204 for (
const int socket_i : ntree.interface_inputs().index_range()) {
1208 this->update_socket_enum_definition(interface_enum_items[socket_i],
1215 bool changed =
false;
1216 for (
const int socket_i : ntree.interface_inputs().index_range()) {
// Refcount releases on overwrite — keep paired with the add_user below.
1224 dst.
enum_items->remove_user_and_delete_if_last();
1233 src.
enum_items->remove_user_and_delete_if_last();
// Copying a menu definition: reinitialize item storage, then copy items;
// missing names fall back to "".
1249 enum_items->items.reinitialize(enum_def.
items_num);
1250 for (
const int i : enum_def.items().index_range()) {
1255 dst.name = src.
name ? src.
name :
"";
1265 this->reset_enum_ptr(default_value);
// update_socket_enum_definition conflict rules: an existing conflict in
// dst sticks; a conflicting src resets dst; otherwise definitions merge.
1271 if (dst.has_conflict()) {
1277 if (src.has_conflict()) {
1279 this->reset_enum_ptr(dst);
1288 this->reset_enum_ptr(dst);
1296 dst.
enum_items->remove_user_and_delete_if_last();
1304 dst.
enum_items->remove_user_and_delete_if_last();
1308 enum_items->add_user();
// NOTE(review): fragmented listing — code kept byte-identical, comments only.
//
// Marks links invalid (clearing NODE_LINK_VALID) and records a link error
// for each failure mode, in order: unavailable endpoint sockets, invalid
// enum references, field-state mismatches, backward links (checked via
// left-to-right toposort indices, which also catches cycles), the tree
// type's own validate_link callback (conversion errors), and zone
// boundary violations (falling back to the last valid zones when current
// zone detection failed).
1313 void update_link_validation(
bNodeTree &ntree)
1316 const auto is_invalid_enum_ref = [](
const bNodeSocket &socket) ->
bool {
// Fall back to the last known-valid zone layout when zones are broken.
1325 fallback_zones = ntree.
runtime->last_valid_zones.get();
1330 if (!link->fromsock->is_available() || !link->tosock->is_available()) {
1331 link->flag &= ~NODE_LINK_VALID;
1334 if (is_invalid_enum_ref(*link->fromsock) || is_invalid_enum_ref(*link->tosock)) {
1335 link->flag &= ~NODE_LINK_VALID;
1336 ntree.
runtime->link_errors.add(
1342 const Span<FieldSocketState> field_states = ntree.
runtime->field_states;
1346 link->flag &= ~NODE_LINK_VALID;
1347 ntree.
runtime->link_errors.add(
// A link whose source sorts after its target is backwards / cyclic.
1352 const bNode &from_node = *link->fromnode;
1353 const bNode &to_node = *link->tonode;
1354 if (from_node.
runtime->toposort_left_to_right_index >
1355 to_node.
runtime->toposort_left_to_right_index)
1357 link->flag &= ~NODE_LINK_VALID;
1358 ntree.
runtime->link_errors.add(
1363 if (ntree.
typeinfo->validate_link) {
1367 link->flag &= ~NODE_LINK_VALID;
1368 ntree.
runtime->link_errors.add(
// User-facing strings below are translatable via TIP_() — do not edit.
1372 TIP_(
"Conversion is not supported"),
1373 TIP_(link->fromsock->typeinfo->label),
1374 TIP_(link->tosock->typeinfo->label))});
1378 if (fallback_zones) {
1382 ntree.
runtime->invalid_zone_output_node_ids.add(*from_zone->output_node_id);
1385 link->flag &= ~NODE_LINK_VALID;
1386 ntree.
runtime->link_errors.add(
// NOTE(review): fragmented listing — code kept byte-identical, comments only.
//
// Output-change detection: computes a combined topology hash over the
// tree's output sockets and compares it with the stored hash (updating
// the stored value as a side effect). When the hash is unchanged, a
// flag-based traversal (check_if_socket_outputs_changed_based_on_flags)
// decides. A driver-expression fragment appears at 1418 — presumably
// expressions referencing the tree force conservative behavior; confirm.
1397 tree.ensure_topology_cache();
1403 const uint32_t old_topology_hash =
tree.runtime->output_topology_hash;
1404 const uint32_t new_topology_hash = this->get_combined_socket_topology_hash(
1405 tree, tree_output_sockets);
// Store the new hash BEFORE the early returns below so it is always fresh.
1406 tree.runtime->output_topology_hash = new_topology_hash;
1418 const StringRef expression = driver->
expression;
1434 if (old_topology_hash != new_topology_hash) {
1440 if (
tree.runtime->changed_flag ==
1443 if (old_topology_hash == new_topology_hash) {
1448 if (!this->check_if_socket_outputs_changed_based_on_flags(
tree, tree_output_sockets)) {
// Collects output sockets: every node classified as an output node
// contributes its non-virtual input sockets.
1458 for (
const bNode *node :
tree.all_nodes()) {
1459 if (!this->is_output_node(*node)) {
1462 for (
const bNodeSocket *socket : node->input_sockets()) {
1463 if (!
STREQ(socket->
idname,
"NodeSocketVirtual")) {
// Output-node classification: group outputs, warning nodes, and group
// nodes whose group qualifies (condition continues in missing lines).
1471 bool is_output_node(
const bNode &node)
const
1476 if (node.is_group_output()) {
1479 if (node.is_type(
"GeometryNodeWarning")) {
1486 if (node.is_group()) {
1488 if (node_group !=
nullptr &&
// Combines the per-socket hashes into one value; trees containing a link
// cycle are special-cased (topology hashing needs acyclicity).
1502 Span<const bNodeSocket *> sockets)
1504 if (
tree.has_available_link_cycle()) {
1511 for (uint32_t
hash : hashes) {
1514 return combined_hash;
// NOTE(review): fragmented listing — code kept byte-identical, comments only.
//
// Iterative (explicit-stack) post-order hash computation over the socket
// graph. A socket stays on the stack until all sockets it depends on have
// hashes; hash_by_socket_id holds optional<uint32_t> per socket. The
// statement ORDER and the push/pop protocol are load-bearing — do not
// restructure.
1518 const Span<const bNodeSocket *> sockets)
1522 Stack<const bNodeSocket *> sockets_to_check = sockets;
// Fallback hash derived from the socket pointer/identity (confirm).
1524 auto get_socket_ptr_hash = [&](
const bNodeSocket &socket) {
1530 while (!sockets_to_check.
is_empty()) {
1532 const bNode &node = socket.owner_node();
// Already computed (e.g. pushed twice): just discard.
1534 if (hash_by_socket_id[socket.index_in_tree()].has_value()) {
1535 sockets_to_check.
pop();
// INPUT sockets hash from their origins (linked outputs; for zone output
// nodes also the matching zone input node's inputs).
1541 if (socket.is_input()) {
1543 bool all_origins_computed =
true;
1544 bool get_value_from_origin =
false;
1546 for (
const bNodeLink *link : socket.directly_linked_links()) {
1547 if (link->is_muted()) {
1550 if (!link->is_available()) {
1553 origin_sockets.
append(link->fromsock);
1557 if (zone->output_node_id == node.
identifier) {
1558 if (
const bNode *input_node = zone->input_node()) {
1559 origin_sockets.
extend(input_node->input_sockets());
1564 for (
const bNodeSocket *origin_socket : origin_sockets) {
1565 const std::optional<uint32_t> origin_hash =
1566 hash_by_socket_id[origin_socket->index_in_tree()];
1567 if (origin_hash.has_value()) {
// First matching-type origin is taken verbatim; additional origins or
// type mismatches get mixed in via noise::hash.
1568 if (get_value_from_origin || socket.
type != origin_socket->type) {
1569 socket_hash =
noise::hash(socket_hash, *origin_hash);
1573 socket_hash = *origin_hash;
1575 get_value_from_origin =
true;
// Origin not ready: push it and revisit this socket later.
1578 sockets_to_check.
push(origin_socket);
1579 all_origins_computed =
false;
1582 if (!all_origins_computed) {
// Unlinked input: hash from the socket itself.
1586 if (!get_value_from_origin) {
1587 socket_hash = get_socket_ptr_hash(socket);
// OUTPUT sockets: wait until all available inputs of the node are hashed.
1591 bool all_available_inputs_computed =
true;
1592 for (
const bNodeSocket *input_socket : node.input_sockets()) {
1593 if (input_socket->is_available()) {
1594 if (!hash_by_socket_id[input_socket->index_in_tree()].has_value()) {
1595 sockets_to_check.
push(input_socket);
1596 all_available_inputs_computed =
false;
1600 if (!all_available_inputs_computed) {
// Reroutes pass through input 0; muted nodes pass through their internal
// link when the types match, otherwise fall back to the pointer hash.
1603 if (node.is_reroute()) {
1604 socket_hash = *hash_by_socket_id[node.input_socket(0).index_in_tree()];
1606 else if (node.is_muted()) {
1607 const bNodeSocket *internal_input = socket.internal_link_input();
1608 if (internal_input ==
nullptr) {
1609 socket_hash = get_socket_ptr_hash(socket);
1612 if (internal_input->
type == socket.
type) {
1613 socket_hash = *hash_by_socket_id[internal_input->index_in_tree()];
1616 socket_hash = get_socket_ptr_hash(socket);
// Regular node output: socket identity mixed with all input hashes.
1621 socket_hash = get_socket_ptr_hash(socket);
1622 for (
const bNodeSocket *input_socket : node.input_sockets()) {
1623 if (input_socket->is_available()) {
1624 const uint32_t input_socket_hash = *hash_by_socket_id[input_socket->index_in_tree()];
1625 socket_hash =
noise::hash(socket_hash, input_socket_hash);
// Special case: an image texture's color output (index 0) also depends on
// whether the alpha output (index 1) is linked — premultiplication
// behavior differs; hence the extra mix-in (continuation lines missing).
1631 if (node.is_type(
"ShaderNodeTexImage") && socket.index() == 0) {
1633 const bNodeSocket &alpha_socket = node.output_socket(1);
1635 if (alpha_socket.is_directly_linked()) {
1641 hash_by_socket_id[socket.index_in_tree()] = socket_hash;
1644 sockets_to_check.
pop();
// Emit results in the caller's socket order.
1650 hashes[
i] = *hash_by_socket_id[sockets[
i]->index_in_tree()];
// NOTE(review): fragmented listing — code kept byte-identical, comments only.
//
// Backward traversal from the output sockets, following links upstream,
// looking for any change flag that can affect the outputs. A per-socket
// "pushed" bitmap prevents re-queueing. Benign changes (an unused internal
// link on a non-muted node, or only parent/frame changes) are ignored;
// anything else returns "changed" (in the missing lines after the
// change_affects_output check).
1659 bool check_if_socket_outputs_changed_based_on_flags(
const bNodeTree &
tree,
1660 Span<const bNodeSocket *> sockets)
1664 Stack<const bNodeSocket *> sockets_to_check = sockets;
// Seed: mark all initial output sockets as already pushed.
1667 pushed_by_socket_id[socket->index_in_tree()] =
true;
1670 while (!sockets_to_check.
is_empty()) {
1672 const bNode &node = socket.owner_node();
1677 const bool only_unused_internal_link_changed = !node.is_muted() &&
1681 const bool change_affects_output = !(only_unused_internal_link_changed ||
1682 only_parent_changed);
1683 if (change_affects_output) {
// Inputs walk to their directly linked upstream sockets...
1687 if (socket.is_input()) {
1688 for (
const bNodeSocket *origin_socket : socket.directly_linked_sockets()) {
1689 bool &pushed = pushed_by_socket_id[origin_socket->index_in_tree()];
1691 sockets_to_check.
push(origin_socket);
// ...outputs walk to the node's available inputs.
1697 for (
const bNodeSocket *input_socket : node.input_sockets()) {
1698 if (input_socket->is_available()) {
1699 bool &pushed = pushed_by_socket_id[input_socket->index_in_tree()];
1701 sockets_to_check.
push(input_socket);
// Second input-push fragment (context lines missing — presumably a
// muted/zone special case; confirm against full source).
1721 if (input_socket->is_available()) {
1722 bool &pushed = pushed_by_socket_id[input_socket->index_in_tree()];
1724 sockets_to_check.
push(input_socket);
// Legacy Normal nodes: the "Dot" output (index 1) also depends on the
// Normal output (index 0), which the link graph does not express, so that
// output is pushed explicitly.
1734 if ((node.is_type(
"ShaderNodeNormal") || node.is_type(
"CompositorNodeNormal")) &&
1735 socket.index() == 1)
1738 const bNodeSocket &normal_output = node.output_socket(0);
1740 bool &pushed = pushed_by_socket_id[normal_output.index_in_tree()];
1742 sockets_to_check.
push(&normal_output);
// NOTE(review): fragmented listing — code kept byte-identical, comments only.
//
// Rebuilds the tree's nested-node references (paths into nested groups and
// to simulation/bake nodes that need stable IDs). Existing path->id
// mappings are preserved; new paths get fresh ids; the tree's ref array is
// rewritten only when nested_node_refs_changed() reports a difference.
1755 bool update_nested_node_refs(
bNodeTree &ntree)
1757 ntree.ensure_topology_cache();
// Snapshot the old mapping so stable IDs can be reassigned to the same
// paths after recomputation.
1760 Map<bNestedNodePath, int32_t> old_id_by_path;
1761 Set<int32_t> old_ids;
1762 for (
const bNestedNodeRef &ref : ntree.nested_node_refs_span()) {
1763 old_id_by_path.
add(ref.path, ref.id);
1764 old_ids.
add(ref.id);
// Keep only paths whose node still exists and is a group with an ID.
1771 const bNode *node = ntree.node_by_id(path.node_id);
1772 if (node && node->is_group() && node->
id) {
1774 nested_node_paths.
append(path);
// Direct refs: simulation-output and bake nodes in this tree.
1781 for (StringRefNull idname : {
"GeometryNodeSimulationOutput",
"GeometryNodeBake"}) {
1782 for (
const bNode *node : ntree.nodes_by_type(idname)) {
// Indirect refs: every nested ref of each used group, extended by one
// path segment through the group node.
1788 for (
const bNode *node : ntree.group_nodes()) {
1790 if (group ==
nullptr) {
1793 for (
const int i : group->nested_node_refs_span().index_range()) {
// Reassign old ids where the path survived; mint new ids otherwise.
1802 Map<int32_t, bNestedNodePath> new_path_by_id;
1807 new_path_by_id.
add(old_id, path);
1818 new_path_by_id.
add(new_id, path);
1822 if (!this->nested_node_refs_changed(ntree, new_path_by_id)) {
1836 for (
const auto item : new_path_by_id.
items()) {
1839 ref.
path = item.value;
// Change test: any old ref id missing from the new mapping means changed
// (equal-size + per-path checks presumably live in the missing lines).
1849 bool nested_node_refs_changed(
const bNodeTree &ntree,
1850 const Map<int32_t, bNestedNodePath> &new_path_by_id)
1855 for (
const bNestedNodeRef &ref : ntree.nested_node_refs_span()) {
1856 if (!new_path_by_id.
contains(ref.id)) {
// Clears per-node (and presumably per-tree/per-socket) changed flags
// after an update pass completes.
1863 void reset_changed_flags(
bNodeTree &ntree)
1866 for (
bNode *node : ntree.all_nodes()) {
// NOTE(review): fragmented listing — code kept byte-identical, comments only.
//
// Keeps panel header toggle socket names in sync with their panel's name;
// returns whether anything changed (the caller treats that as an
// interface change). The rename itself is in the missing lines after the
// STREQ mismatch check.
1883 bool update_panel_toggle_names(
bNodeTree &ntree)
1885 bool changed =
false;
1886 ntree.ensure_interface_cache();
1893 if (!
STREQ(panel->
name, toggle_socket->name)) {